Unverified Commit fb6ff5af authored by Adam Tauber's avatar Adam Tauber Committed by GitHub

Merge pull request #1767 from return42/docs

move reST source from gh-pages to master
parents 3f93fe04 02d5173f
@@ -19,5 +19,8 @@ node_modules/
.tx/
build/
dist/
local/
gh-pages/
searx.egg-info/
# -*- coding: utf-8; mode: makefile-gmake -*-
export GIT_URL=https://github.com/asciimoo/searx
export SEARX_URL=https://searx.me
export DOCS_URL=https://asciimoo.github.io/searx
PYOBJECTS = searx
DOC = docs
PY_SETUP_EXTRAS ?= \[test\]
include utils/makefile.include
include utils/makefile.python
include utils/makefile.sphinx
all: clean install
PHONY += help
help:
@echo ' test - run developer tests'
@echo ' docs - build documentation'
@echo ' docs-live - autobuild HTML documentation while editing'
@echo ' run - run developer instance'
@echo ' install - developer install (./local)'
@echo ' uninstall - uninstall (./local)'
@echo ' gh-pages - build docs & deploy on gh-pages branch'
@echo ''
@$(MAKE) -s -f utils/makefile.include make-help
@echo ''
@@ -40,6 +49,20 @@ run: pyenvinstall
) &
$(PY_ENV)/bin/python ./searx/webapp.py
# docs
# ----
PHONY += docs
docs: pyenvinstall sphinx-doc
$(call cmd,sphinx,html,docs,docs)
PHONY += docs-live
docs-live: pyenvinstall sphinx-live
$(call cmd,sphinx_autobuild,html,docs,docs)
$(GH_PAGES)::
@echo "doc available at --> $(DOCS_URL)"
# test
# ----
@import url("pocoo.css");
a, a.reference, a.footnote-reference {
color: #004b6b;
border-color: #004b6b;
}
a:hover {
color: #6d4100;
border-color: #6d4100;
}
p.version-warning {
background-color: #004b6b;
}
div.sidebar {
background-color: whitesmoke;
border-color: lightsteelblue;
border-radius: 3pt;
}
p.sidebar-title, .sidebar p {
margin: 6pt;
}
.sidebar li {
list-style-type: disclosure-closed;
}
[theme]
inherit = pocoo
stylesheet = searx.css
[options]
touch_icon =
.. _adminapi:
==================
Administration API
==================
Get configuration data
======================
.. code:: http
GET /config HTTP/1.1
Sample response
---------------
.. code:: json
{
"autocomplete": "",
"categories": [
"map",
"it",
"images",
],
"default_locale": "",
"default_theme": "oscar",
"engines": [
{
"categories": [
"map"
],
"enabled": true,
"name": "openstreetmap",
"shortcut": "osm"
},
{
"categories": [
"it"
],
"enabled": true,
"name": "arch linux wiki",
"shortcut": "al"
},
{
"categories": [
"images"
],
"enabled": true,
"name": "google images",
"shortcut": "goi"
},
{
"categories": [
"it"
],
"enabled": false,
"name": "bitbucket",
"shortcut": "bb"
}
],
"instance_name": "searx",
"locales": {
"de": "Deutsch (German)",
"en": "English",
"eo": "Esperanto (Esperanto)",
},
"plugins": [
{
"enabled": true,
"name": "HTTPS rewrite"
},
{
"enabled": false,
"name": "Vim-like hotkeys"
}
],
"safe_search": 0
}
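The configuration can also be fetched programmatically. Below is a minimal
sketch using Python and the ``requests`` library; the instance URL is only an
example, replace it with the instance you want to query.
.. code:: python
import requests
# /config returns the JSON document shown above
resp = requests.get('https://searx.me/config', timeout=10)
resp.raise_for_status()
config = resp.json()
print(config['instance_name'])
# list the enabled engines and their shortcuts
for engine in config['engines']:
    if engine['enabled']:
        print(engine['shortcut'], engine['name'])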
Embed search bar
================
The search bar can be embedded into websites. Just paste the example below into
the HTML of the site. The URL of the searx instance and the values are
customizable.
.. code:: html
<form method="post" action="https://searx.me/">
<!-- search --> <input type="text" name="q" />
<!-- categories --> <input type="hidden" name="categories" value="general,social media" />
<!-- language --> <input type="hidden" name="lang" value="all" />
<!-- locale --> <input type="hidden" name="locale" value="en" />
<!-- date filter --> <input type="hidden" name="time_range" value="month" />
</form>
==========================
How to protect an instance
==========================
Searx depends on external search services. To avoid the abuse of these services
it is advised to limit the number of requests processed by searx.
An application firewall, ``filtron``, solves exactly this problem. Information
on how to install it can be found at the `project page of filtron
<https://github.com/asciimoo/filtron>`__.
Sample configuration of filtron
===============================
An example configuration can be found below. This configuration limits the access
of:
- scripts or applications (roboagent limit)
- webcrawlers (botlimit)
- IPs which send too many requests (IP limit)
- too many json, csv, etc. requests (rss/json limit)
- too many requests from the same User-Agent (useragent limit)
.. code:: json
[{
"name":"search request",
"filters":[
"Param:q",
"Path=^(/|/search)$"
],
"interval":"<time-interval-in-sec (int)>",
"limit":"<max-request-number-in-interval (int)>",
"subrules":[
{
"name":"roboagent limit",
"interval":"<time-interval-in-sec (int)>",
"limit":"<max-request-number-in-interval (int)>",
"filters":[
"Header:User-Agent=(curl|cURL|Wget|python-requests|Scrapy|FeedFetcher|Go-http-client)"
],
"actions":[
{
"name":"block",
"params":{
"message":"Rate limit exceeded"
}
}
]
},
{
"name":"botlimit",
"limit":0,
"stop":true,
"filters":[
"Header:User-Agent=(Googlebot|bingbot|Baiduspider|yacybot|YandexMobileBot|YandexBot|Yahoo! Slurp|MJ12bot|AhrefsBot|archive.org_bot|msnbot|MJ12bot|SeznamBot|linkdexbot|Netvibes|SMTBot|zgrab|James BOT)"
],
"actions":[
{
"name":"block",
"params":{
"message":"Rate limit exceeded"
}
}
]
},
{
"name":"IP limit",
"interval":"<time-interval-in-sec (int)>",
"limit":"<max-request-number-in-interval (int)>",
"stop":true,
"aggregations":[
"Header:X-Forwarded-For"
],
"actions":[
{
"name":"block",
"params":{
"message":"Rate limit exceeded"
}
}
]
},
{
"name":"rss/json limit",
"interval":"<time-interval-in-sec (int)>",
"limit":"<max-request-number-in-interval (int)>",
"stop":true,
"filters":[
"Param:format=(csv|json|rss)"
],
"actions":[
{
"name":"block",
"params":{
"message":"Rate limit exceeded"
}
}
]
},
{
"name":"useragent limit",
"interval":"<time-interval-in-sec (int)>",
"limit":"<max-request-number-in-interval (int)>",
"aggregations":[
"Header:User-Agent"
],
"actions":[
{
"name":"block",
"params":{
"message":"Rate limit exceeded"
}
}
]
}
]
}]
Route request through filtron
=============================
Filtron can be started using the following command:
.. code:: sh
$ filtron -rules rules.json
It listens on ``127.0.0.1:4004`` and forwards filtered requests to
``127.0.0.1:8888`` by default.
Use it along with ``nginx`` with the following example configuration.
.. code:: nginx
location / {
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Scheme $scheme;
proxy_pass http://127.0.0.1:4004/;
}
Requests are passed to filtron on port 4004 and, after filtering, forwarded to
port 8888 where searx is running.
===========================
Administrator documentation
===========================
.. toctree::
:maxdepth: 1
installation
api
filtron
morty
.. _installation:
============
Installation
============
.. contents::
:depth: 3
Basic installation
==================
Step by step installation for Debian/Ubuntu with virtualenv. For Ubuntu, be sure
to have the universe repository enabled.
Install packages:
.. code:: sh
$ sudo -H apt-get install \
git build-essential libxslt-dev \
python-dev python-virtualenv python-babel \
zlib1g-dev libffi-dev libssl-dev
Install searx:
.. code:: sh
cd /usr/local
sudo -H git clone https://github.com/asciimoo/searx.git
sudo -H useradd searx -d /usr/local/searx
sudo -H chown searx:searx -R /usr/local/searx
Install dependencies in a virtualenv:
.. code:: sh
cd /usr/local/searx
sudo -H -u searx -i
.. code:: sh
(searx)$ virtualenv searx-ve
(searx)$ . ./searx-ve/bin/activate
(searx)$ ./manage.sh update_packages
Configuration
==============
.. code:: sh
sed -i -e "s/ultrasecretkey/`openssl rand -hex 16`/g" searx/settings.yml
Edit searx/settings.yml if necessary.
Check
=====
Start searx:
.. code:: sh
python searx/webapp.py
Go to http://localhost:8888
If everything works fine, disable the debug option in settings.yml:
.. code:: sh
sed -i -e "s/debug : True/debug : False/g" searx/settings.yml
At this point searx is not daemonized; uwsgi allows this.
You can exit the virtualenv and the searx user bash (enter the exit command
twice).
uwsgi
=====
Install packages:
.. code:: sh
sudo -H apt-get install \
uwsgi uwsgi-plugin-python
Create the configuration file ``/etc/uwsgi/apps-available/searx.ini`` with this
content:
.. code:: ini
[uwsgi]
# Who will run the code
uid = searx
gid = searx
# disable logging for privacy
disable-logging = true
# Number of workers (usually CPU count)
workers = 4
# The right granted on the created socket
chmod-socket = 666
# Plugin to use and interpreter config
single-interpreter = true
master = true
plugin = python
lazy-apps = true
enable-threads = true
# Module to import
module = searx.webapp
# Virtualenv and python path
virtualenv = /usr/local/searx/searx-ve/
pythonpath = /usr/local/searx/
chdir = /usr/local/searx/searx/
Activate the uwsgi application and restart:
.. code:: sh
cd /etc/uwsgi/apps-enabled
ln -s ../apps-available/searx.ini
/etc/init.d/uwsgi restart
Web server
==========
with nginx
----------
If nginx is not installed yet, install it (uwsgi will not work with the package
nginx-light):
.. code:: sh
sudo -H apt-get install nginx
Hosted at /
~~~~~~~~~~~
Create the configuration file ``/etc/nginx/sites-available/searx`` with this
content:
.. code:: nginx
server {
listen 80;
server_name searx.example.com;
root /usr/local/searx;
location / {
include uwsgi_params;
uwsgi_pass unix:/run/uwsgi/app/searx/socket;
}
}
Create a symlink to sites-enabled:
.. code:: sh
sudo -H ln -s /etc/nginx/sites-available/searx /etc/nginx/sites-enabled/searx
Restart service:
.. code:: sh
sudo -H service nginx restart
sudo -H service uwsgi restart
from subdirectory URL (/searx)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Add this configuration in the server config file
``/etc/nginx/sites-enabled/default``:
.. code:: nginx
location = /searx { rewrite ^ /searx/; }
location /searx {
try_files $uri @searx;
}
location @searx {
uwsgi_param SCRIPT_NAME /searx;
include uwsgi_params;
uwsgi_modifier1 30;
uwsgi_pass unix:/run/uwsgi/app/searx/socket;
}
**OR** use a reverse proxy (please note that a reverse proxy is only advised for
single-user or low-traffic instances):
.. code:: nginx
location /searx {
proxy_pass http://127.0.0.1:8888;
proxy_set_header Host $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Scheme $scheme;
proxy_set_header X-Script-Name /searx;
proxy_buffering off;
}
Enable ``base_url`` in ``searx/settings.yml``
.. code:: yaml
base_url : http://your.domain.tld/searx/
Restart service:
.. code:: sh
sudo -H service nginx restart
sudo -H service uwsgi restart
disable logs
^^^^^^^^^^^^
For better privacy you can disable nginx logs about searx.
How to proceed: below ``uwsgi_pass`` in ``/etc/nginx/sites-available/default``
add:
.. code:: nginx
access_log /dev/null;
error_log /dev/null;
Restart service:
.. code:: sh
sudo -H service nginx restart
with apache
-----------
Add wsgi mod:
.. code:: sh
sudo -H apt-get install libapache2-mod-uwsgi
sudo -H a2enmod uwsgi
Add this configuration in the file ``/etc/apache2/apache2.conf``:
.. code:: apache
<Location />
Options FollowSymLinks Indexes
SetHandler uwsgi-handler
uWSGISocket /run/uwsgi/app/searx/socket
</Location>
Note that if your instance of searx is not at the root, you should replace
``<Location />`` with the location of your instance, e.g. ``<Location /searx>``.
Restart Apache:
.. code:: sh
sudo -H /etc/init.d/apache2 restart
disable logs
~~~~~~~~~~~~
For better privacy you can disable Apache logs.
.. warning::
You can only disable logs for the whole (virtual) server not for a specific
path.
Go back to ``/etc/apache2/apache2.conf`` and above ``<Location />`` add:
.. code:: apache
CustomLog /dev/null combined
Restart Apache:
.. code:: sh
sudo -H /etc/init.d/apache2 restart
How to update
=============
.. code:: sh
cd /usr/local/searx
sudo -H -u searx -i
.. code:: sh
(searx)$ . ./searx-ve/bin/activate
(searx)$ git stash
(searx)$ git pull origin master
(searx)$ git stash apply
(searx)$ ./manage.sh update_packages
.. code:: sh
sudo -H service uwsgi restart
Docker
======
Make sure you have installed Docker. For instance, you can deploy searx like this:
.. code:: sh
docker pull wonderfall/searx
docker run -d --name searx -p $PORT:8888 wonderfall/searx
Go to ``http://localhost:$PORT``.
See https://hub.docker.com/r/wonderfall/searx/ for more information. It's also
possible to build searx from the embedded Dockerfile:
.. code:: sh
git clone https://github.com/asciimoo/searx.git
cd searx
docker build -t whatever/searx .
References
==========
* https://about.okhin.fr/posts/Searx/ with some additions
* How to: `Setup searx in a couple of hours with a free SSL certificate
<https://www.reddit.com/r/privacytoolsIO/comments/366kvn/how_to_setup_your_own_privacy_respecting_search/>`__
=========================
How to setup result proxy
=========================
.. _morty: https://github.com/asciimoo/morty
.. _morty's README: https://github.com/asciimoo/morty
By default searx can only act as an image proxy for result images, but it is
possible to proxify all the result URLs with an external service, morty_.
To use this feature, morty has to be installed and activated in searx's
``settings.yml``.
Add the following snippet to your ``settings.yml`` and restart searx:
.. code:: yaml
result_proxy:
url : http://127.0.0.1:3000/
key : your_morty_proxy_key
``url``
Is the address of the running morty service.
``key``
Is an optional argument, see `morty's README`_ for more information.
=============================================================
Searx admin interface
=============================================================
.. _searx-admin: https://github.com/kvch/searx-admin#searx-admin
.. _NLnet Foundation: https://nlnet.nl/
manage your instance from your browser
.. sidebar:: Installation
Installation guide can be found in the repository of searx-admin_.
One of the biggest advantages of searx is that it is extremely customizable, but
at first this can be daunting to newcomers. A barrier to taking advantage of
this feature is our unwieldy settings file, which is sometimes hard to
understand and edit.
To make self-hosting searx more accessible a new tool is introduced, called
``searx-admin``. It is a web application which is capable of managing your
instance and manipulating its settings via a web UI. It aims to replace editing
of ``settings.yml`` for less experienced administrators or people who prefer
graphical admin interfaces.
.. figure:: searx-admin-engines.png
:alt: Screenshot of engine list
Configuration page of engines
Since ``searx-admin`` acts as a supervisor for searx, we have decided to
implement it as a standalone tool instead of part of searx. Another reason for
making it a standalone tool is that the codebase and dependencies of searx
should not grow because of a fully optional feature, which does not affect
existing instances.
Acknowledgements
================
This development was sponsored by `NLnet Foundation`_.
| Happy hacking.
| kvch // 2017.08.22 21:25
====
Blog
====
.. toctree::
:maxdepth: 1
python3
admin
intro-offline
===============================
Preparation for offline engines
===============================
Offline engines
===============
To extend the functionality of searx, offline engines are going to be
introduced. An offline engine is an engine which does not need an Internet
connection to perform a search and does not use HTTP to communicate.
Offline engines can be configured just like online engines, by adding them to
the `engines` list of :origin:`settings.yml <searx/settings.yml>`. Searx then
finds the engine file and imports it.
Example skeleton for the new engines:
.. code:: python
from subprocess import PIPE, Popen

categories = ['general']
offline = True

def init(settings):
    pass

def parse_line(line):
    # illustrative helper, not part of the original skeleton: turn one
    # line of process output into a result dict
    return {'line': line.decode('utf-8').rstrip()}

def search(query, params):
    process = Popen(['ls', query], stdout=PIPE)
    return_code = process.wait()
    if return_code != 0:
        raise RuntimeError('non-zero return code', return_code)

    results = []
    line = process.stdout.readline()
    while line:
        result = parse_line(line)
        results.append(result)  # append the single result, not the whole list
        line = process.stdout.readline()

    return results
Development progress
====================
First, a proposal was created as a GitHub issue. Then it was moved to the wiki
as a design document. You can read it here: :wiki:`Offline-engines`.
In this development step, the searx core was prepared to accept and perform
offline searches. Offline search requests are scheduled together with regular
search requests.
As offline searches can return arbitrary results depending on the engine, the
current result templates were insufficient to present such results. Thus, a new
template was introduced which is capable of presenting arbitrary key-value pairs
as a table. For more details, check out the pull request :pull:`1700`.
Next steps
==========
Today, it is possible to create and run an offline engine. However, it is publicly available to everyone who knows the searx instance. So the next step is to introduce token-based access for engines. This way administrators are able to limit access to private engines.
Acknowledgement
===============
This development was sponsored by `Search and Discovery Fund`_ of `NLnet Foundation`_.
.. _Search and Discovery Fund: https://nlnet.nl/discovery
.. _NLnet Foundation: https://nlnet.nl/
| Happy hacking.
| kvch // 2019.10.21 17:03
============================
Introducing Python 3 support
============================
.. _Python 2.7 clock: https://pythonclock.org/
.. sidebar:: Python 2.7 to 3 upgrade
This chapter exists for historical reasons. The Python 2.7 release schedule ends
(`Python 2.7 clock`_) more than 11 years after Python 3 was released.
Most operating systems come with Python 3 installed by default, so it is time
for searx to support Python 3. But don't worry, support for Python 2.7 won't be
dropped.
.. image:: searxpy3.png
:scale: 50 %
:alt: hurray
:align: center
How to run searx using Python 3
===============================
Please make sure that you run at least Python 3.5.
To run searx, first a Python3 virtualenv should be created. After entering the
virtualenv, dependencies must be installed. Then run searx with python3 instead
of the usual python command.
.. code:: sh
virtualenv -p python3 venv3
source venv3/bin/activate
pip3 install -r requirements.txt
python3 searx/webapp.py
If you want to run searx using Python 2.7, you don't have to do anything
differently than before.
Fun facts
=========
- 115 files were changed when implementing the support for both Python versions.
- All of the dependencies were compatible except for robotframework, which was
used for browser tests. Thus, these tests were migrated to splinter. So from now
on both versions are being tested on Travis and can be tested locally.
If you found bugs
=================
Please open an issue on `GitHub`_. Make sure that you mention your Python
version in your issue, so we can investigate it properly.
.. _GitHub: https://github.com/asciimoo/searx/issues
Acknowledgment
==============
This development was sponsored by `NLnet Foundation`_.
.. _NLnet Foundation: https://nlnet.nl/
| Happy hacking.
| kvch // 2017.05.13 22:57
# -*- coding: utf-8 -*-
import sys, os
from searx.version import VERSION_STRING
from pallets_sphinx_themes import ProjectLink
GIT_URL = os.environ.get("GIT_URL", "https://github.com/asciimoo/searx")
SEARX_URL = os.environ.get("SEARX_URL", "https://searx.me")
DOCS_URL = os.environ.get("DOCS_URL", "https://asciimoo.github.io/searx/")
# Project --------------------------------------------------------------
project = u'searx'
copyright = u'2015-2019, Adam Tauber, Noémi Ványi'
author = u'Adam Tauber'
release, version = VERSION_STRING, VERSION_STRING
# General --------------------------------------------------------------
master_doc = "index"
source_suffix = '.rst'
# usage:: lorem :patch:`f373169` ipsum
extlinks = {}
# upstream links
extlinks['wiki'] = ('https://github.com/asciimoo/searx/wiki/%s', ' ')
extlinks['pull'] = ('https://github.com/asciimoo/searx/pull/%s', 'PR ')
# links to custom brand
extlinks['origin'] = (GIT_URL + '/blob/master/%s', 'git://')
extlinks['patch'] = (GIT_URL + '/commit/%s', '#')
extlinks['search'] = (SEARX_URL + '/%s', '#')
extlinks['docs'] = (DOCS_URL + '/%s', 'docs: ')
extensions = [
'sphinx.ext.extlinks',
'sphinx.ext.viewcode',
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx",
"pallets_sphinx_themes",
"sphinx_issues", # https://github.com/sloria/sphinx-issues/blob/master/README.rst
]
intersphinx_mapping = {
"python": ("https://docs.python.org/3/", None),
# "flask": ("https://flask.palletsprojects.com/", None),
# "werkzeug": ("https://werkzeug.palletsprojects.com/", None),
# "jinja": ("https://jinja.palletsprojects.com/", None),
}
issues_github_path = "asciimoo/searx"
# HTML -----------------------------------------------------------------
sys.path.append(os.path.abspath('_themes'))
html_theme_path = ['_themes']
html_theme = "searx"
html_theme_options = {"index_sidebar_logo": True}
html_context = {
"project_links": [
ProjectLink("Source", GIT_URL),
ProjectLink("Wiki", "https://github.com/asciimoo/searx/wiki"),
ProjectLink("Public instances", "https://github.com/asciimoo/searx/wiki/Searx-instances"),
ProjectLink("Twitter", "https://twitter.com/Searx_engine"),
]
}
html_sidebars = {
"**": ["project.html", "relations.html", "searchbox.html"],
}
singlehtml_sidebars = {"index": ["project.html", "localtoc.html"]}
html_static_path = ["static"]
html_logo = "static/img/searx_logo_small.png"
html_title = "Searx Documentation ({})".format("Searx-{}.tex".format(VERSION_STRING))
html_show_sourcelink = False
# LaTeX ----------------------------------------------------------------
latex_documents = [
(master_doc, "searx-{}.tex".format(VERSION_STRING), html_title, author, "manual")
]
=================
How to contribute
=================
Prime directives: Privacy, Hackability
======================================
Searx has two prime directives, **privacy-by-design and hackability**. The
hackability comes in three levels:
- support of search engines
- plugins to alter search behaviour
- hacking searx itself
Note the lack of "world domination" among the directives. Searx has no
intention of wide mass-adoption, rounded corners, etc. The prime directive
"privacy" deserves a separate chapter, as it's quite uncommon unfortunately.
Privacy-by-design
-----------------
Searx was born out of the need for a **privacy-respecting** search tool which
can be extended easily to maximize both its search and its privacy protecting
capabilities.
A few widely used features work differently, are turned off by default or are
not implemented at all **as a consequence of privacy-by-design**.
If a feature reduces the privacy preserving aspects of searx, it should be
switched off by default or should not be implemented at all. There are plenty of
search engines already providing such features. If a feature reduces the
protection of searx, users must be informed about the effect of choosing to
enable it. Features that protect privacy but differ from the expectations of
the user should also be explained.
Also, if you think that something about searx works oddly, it might be because
the tool you use is designed in a way that interferes with privacy. Submitting a
bug report to the vendor of the misbehaving tool might be good feedback and
prompt it to reconsider how it treats its customers (e.g. ``GET`` vs ``POST``
requests in various browsers).
Remember, the other prime directive of searx is to be hackable, so if the above
privacy concerns do not suit you, simply fork it.
*Happy hacking.*
Code
====
.. _PEP8: https://www.python.org/dev/peps/pep-0008/
In order to submit a patch, please follow the steps below:
- Follow coding conventions.
- PEP8_ standards apply, except for the convention of line length
- Maximum line length is 120 characters
- Check if your code breaks existing tests. If so, update the tests or fix your
code.
- If your code can be unit-tested, add unit tests.
- Add yourself to the :origin:`AUTHORS.rst` file.
- Create a pull request.
For more help on getting started with searx development, see :ref:`devquickstart`.
Translation
===========
Translation currently takes place on :ref:`transifex <translation>`.
.. caution::
Please, do not update translation files in the repo.
Documentation
=============
.. _Sphinx: http://www.sphinx-doc.org
.. _reST: http://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html
.. sidebar:: The reST sources
have been moved from the ``gh-pages`` branch into ``master`` (:origin:`docs`).
The documentation is built using Sphinx_. So in order to be able to generate
the required files, you have to install it on your system. It is much easier to
use our Makefile targets.
Here is an example which makes a complete rebuild:
.. code:: sh
$ make docs-clean docs
...
The HTML pages are in dist/docs.
live build
----------
.. sidebar:: docs-clean
It is recommended to force a complete rebuild before deploying (use
``docs-clean``).
Live build is like WYSIWYG; it is the recommended way to work if you want to
edit the documentation. The Makefile target ``docs-live`` builds the docs, opens
the URL in your favorite browser and rebuilds every time a reST file has been
changed.
.. code:: sh
$ make docs-live
...
The HTML pages are in dist/docs.
... Serving on http://0.0.0.0:8080
... Start watching changes
deploy on github.io
-------------------
To deploy the documentation at :docs:`github.io <.>` use the Makefile target
``gh-pages``, which builds the documentation, clones searx into a subfolder
``gh-pages``, cleans it, copies the doc build into it and runs all the needed
git add, commit and push commands:
.. code:: sh
$ make docs-clean gh-pages
...
SPHINX docs --> file://<...>/dist/docs
The HTML pages are in dist/docs.
...
Cloning into 'gh-pages' ...
...
cd gh-pages; git checkout gh-pages >/dev/null
Switched to a new branch 'gh-pages'
...
doc available at --> https://asciimoo.github.io/searx
=======================
Developer documentation
=======================
.. toctree::
:maxdepth: 1
quickstart
contribution_guide
engine_overview
search_api
plugins
translation
=======
Plugins
=======
Plugins can extend or replace functionality of various components of searx.
Example plugin
==============
.. code:: python
name = 'Example plugin'
description = 'This plugin extends the suggestions with the word "example"'
default_on = False # disabled by default
js_dependencies = tuple() # optional, list of static js files
css_dependencies = tuple() # optional, list of static css files
# attach callback to the post search hook
# request: flask request object
# ctx: the whole local context of the post search hook
def post_search(request, ctx):
ctx['search'].suggestions.add('example')
return True
Plugin entry points
===================
Entry points (hooks) define when a plugin runs. Right now only three hooks are
implemented, so feel free to implement a new hook if none of them fits the
behaviour of your plugin.
Pre search hook
---------------
Runs BEFORE the search request. Function to implement: ``pre_search``
Post search hook
----------------
Runs AFTER the search request. Function to implement: ``post_search``
Result hook
-----------
Runs when a new result is added to the result list. Function to implement:
``on_result``
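Below is a minimal sketch of a plugin using the result hook. It follows the
layout of the example plugin above; the exact parameters passed to ``on_result``
are an assumption here, check the plugins shipped in :origin:`searx/plugins` for
the signature used by your searx version.
.. code:: python
name = 'Example result plugin'
description = 'Marks results that are served over plain HTTP'
default_on = False  # disabled by default
js_dependencies = tuple()   # optional, list of static js files
css_dependencies = tuple()  # optional, list of static css files
# attach callback to the result hook
# request: flask request object
# result: assumed to be the result dict that is about to be added
def on_result(request, result):
    if result.get('url', '').startswith('http://'):
        result['title'] = '[http] ' + result.get('title', '')
    # returning True keeps the result (dropping on False is an assumption)
    return True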
.. _devquickstart:
======================
Development Quickstart
======================
This quickstart guide gets your environment set up with searx. Furthermore, it
gives a short introduction to the ``manage.sh`` script.
How to setup your development environment
=========================================
First, clone the source code of searx to the desired folder. In this case the
source is cloned to ``~/myprojects/searx``. Then create and activate the
searx-ve virtualenv and install the required packages using manage.sh.
.. code:: sh
cd ~/myprojects
git clone https://github.com/asciimoo/searx.git
cd searx
virtualenv searx-ve
. ./searx-ve/bin/activate
./manage.sh update_dev_packages
How to run tests
================
Tests can be run using the ``manage.sh`` script. The following tests and checks
are available:
- Unit tests
- Selenium tests
- PEP8 validation
- Unit test coverage check
For example, unit tests are run with the command below:
.. code:: sh
./manage.sh unit_tests
For further test options, please consult the help of the ``manage.sh`` script.
How to compile styles and javascript
====================================
.. _less: http://lesscss.org/
.. _NodeJS: https://nodejs.org
How to build styles
-------------------
Less_ is required to build the styles of searx. Less_ can be installed using
either NodeJS_ or Apt.
.. code:: sh
sudo -H apt-get install nodejs
sudo -H npm install -g less
OR
.. code:: sh
sudo -H apt-get install node-less
After satisfying the requirements, styles can be built using ``manage.sh``:
.. code:: sh
./manage.sh styles
How to build the source of the oscar theme
==========================================
.. _grunt: https://gruntjs.com/
Grunt_ must be installed in order to build the javascript sources. It depends on
NodeJS, so first Node has to be installed.
.. code:: sh
sudo -H apt-get install nodejs
sudo -H npm install -g grunt-cli
After installing grunt, the files can be built using the following command:
.. code:: sh
./manage.sh grunt_build
Tips for debugging/development
==============================
Turn on debug logging
Whether you are working on a new engine or trying to eliminate a bug, it is
always a good idea to turn on debug logging. When debug logging is enabled a
stack trace appears, instead of the cryptic ``Internal Server Error``
message. It can be turned on by changing ``debug: False`` to ``debug: True`` in
:origin:`settings.yml <searx/settings.yml>`.
Run ``./manage.sh tests`` before creating a PR.
A failing build on Travis is common because of PEP8 checks, and a new commit
must then be created containing these format fixes. This phase can be skipped if
``./manage.sh tests`` is run locally before creating a PR.
==========
Search API
==========
The search supports both ``GET`` and ``POST``.
Furthermore, two endpoints ``/`` and ``/search`` are available for querying.
``GET /``
``GET /search``
Parameters
==========
``q`` : required
The search query. This string is passed to external search services. Thus,
searx supports the syntax of each search service. For example,
``site:github.com searx`` is a valid query for Google. However, if the query
above is simply passed to a search engine which does not filter its results
based on this syntax, you might not get the results you wanted.
See more at :ref:`search-syntax`
``categories`` : optional
Comma separated list, specifies the active search categories
``engines``: optional
Comma separated list, specifies the active search engines.
``lang``: default ``all``
Code of the language.
``pageno``: default ``1``
Search page number.
``time_range``: optional
[ ``day``, ``month``, ``year`` ]
Time range of search for engines which support it. See if an engine supports
time range search in the preferences page of an instance.
``format``: optional
[ ``json``, ``csv``, ``rss`` ]
Output format of results.
``results_on_new_tab``: default ``0``
[ ``0``, ``1`` ]
Open search results on new tab.
``image_proxy``: default ``False``
[ ``True``, ``False`` ]
Proxy image results through searx.
``autocomplete``: default *empty*
[ ``google``, ``dbpedia``, ``duckduckgo``, ``startpage``, ``wikipedia`` ]
Service which completes words as you type.
``safesearch``: default ``None``
[ ``0``, ``1``, ``None`` ]
Filter search results of engines which support safe search. See if an engine
supports safe search in the preferences page of an instance.
``theme``: default ``oscar``
[ ``oscar``, ``simple``, ``legacy``, ``pix-art``, ``courgette`` ]
Theme of instance.
Please note, available themes depend on an instance. It is possible that an
instance administrator deleted, created or renamed themes on his/her instance.
See the available options in the preferences page of the instance.
``oscar-style``: default ``logicodev``
[ ``pointhi``, ``logicodev`` ]
Style of Oscar theme. It is only parsed if the theme of an instance is
``oscar``.
Please note, available styles depend on an instance. It is possible that an
instance administrator deleted, created or renamed styles on his/her
instance. See the available options in the preferences page of the instance.
``enabled_plugins``: optional
List of enabled plugins.
:default: ``HTTPS_rewrite``, ``Self_Informations``,
``Search_on_category_select``, ``Tracker_URL_remover``
:values: [ ``DOAI_rewrite``, ``HTTPS_rewrite``, ``Infinite_scroll``,
``Vim-like_hotkeys``, ``Self_Informations``, ``Tracker_URL_remover``,
``Search_on_category_select`` ]
``disabled_plugins``: optional
List of disabled plugins.
:default: ``DOAI_rewrite``, ``Infinite_scroll``, ``Vim-like_hotkeys``
:values: ``DOAI_rewrite``, ``HTTPS_rewrite``, ``Infinite_scroll``,
``Vim-like_hotkeys``, ``Self_Informations``, ``Tracker_URL_remover``,
``Search_on_category_select``
``enabled_engines``: optional : *all* :origin:`engines <searx/engines>`
List of enabled engines.
``disabled_engines``: optional : *all* :origin:`engines <searx/engines>`
List of disabled engines.
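For illustration, below is a minimal sketch of querying the JSON API from
Python with the ``requests`` library. The instance URL is only an example and
the parameter values are taken from the list above.
.. code:: python
import requests
params = {
    'q': 'searx site:github.com',  # required
    'categories': 'it',            # optional, comma separated list
    'lang': 'en',                  # optional
    'pageno': 1,                   # optional
    'format': 'json',              # machine readable output
}
# replace the URL with the instance you want to query
resp = requests.get('https://searx.me/search', params=params, timeout=10)
resp.raise_for_status()
for result in resp.json().get('results', []):
    print(result['title'], result['url'])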
.. _translation:
===========
Translation
===========
.. _searx@transifex: https://www.transifex.com/asciimoo/searx/
Translation currently takes place on `searx@transifex`_.
Requirements
============
* A Transifex account
* The Transifex CLI tool installed
Init Transifex project
======================
After installing ``transifex`` using pip, run the following command to
initialize the project.
.. code:: sh
tx init # Transifex instance: https://www.transifex.com/asciimoo/searx/
After ``$HOME/.transifexrc`` is created, get a Transifex API key and insert it
into the configuration file.
Create a configuration file for ``tx`` named ``$HOME/.tx/config``.
.. code:: ini
[main]
host = https://www.transifex.com
[searx.messagespo]
file_filter = searx/translations/<lang>/LC_MESSAGES/messages.po
source_file = messages.pot
source_lang = en
type = PO
Then run ``tx set``:
.. code:: shell
tx set --auto-local -r searx.messagespo 'searx/translations/<lang>/LC_MESSAGES/messages.po' \
--source-lang en --type PO --source-file messages.pot --execute
Update translations
===================
To retrieve the latest translations, pull them from Transifex.
.. code:: sh
tx pull -a
Then check the new languages. If a language does not have enough translated
strings, delete its folder, because it should not be compiled. Call the command
below to compile the ``.po`` files.
.. code:: shell
pybabel compile -d searx/translations
After the compilation is finished, commit the ``.po`` and ``.mo`` files and
create a PR.
================
Welcome to searx
================
Search without being tracked.
.. sidebar:: Features
- Self hosted
- No user tracking
- No user profiling
- About 70 supported search engines
- Easy integration with any search engine
- Cookies are not used by default
- Secure, encrypted connections (HTTPS/SSL)
- Hosted by organizations, such as *La Quadrature du Net*, which promote
digital rights
Searx is a free internet metasearch engine which aggregates results from more
than 70 search services. Users are neither tracked nor profiled. Additionally,
searx can be used over Tor for online anonymity.
Get started with searx by using one of the :wiki:`Searx-instances`. If you
don't trust anyone, you can set up your own, see :ref:`installation`.
.. toctree::
:maxdepth: 2
user/index
admin/index
dev/index
blog/index
==================
User documentation
==================
.. toctree::
:maxdepth: 1
search_syntax
own-instance
===========================
Why use a private instance?
===========================
"Is it worth to run my own instance?" is a common question among searx users.
Before answering this question, see what options a searx user has.
Public instances are open to everyone who has access to its URL. Usually, these
are operated by unknown parties (from the users' point of view). Private
instances can be used by a select group of people. It is for example a searx of
group of friends or a company which can be accessed through VPN. Also it can be
single user one which runs on the user's laptop.
To gain more insight into how these instances work, let's dive into how searx
protects its users.
How does searx protect privacy?
===============================
Searx protects the privacy of its users in multiple ways regardless of the type
of the instance (private, public). Removal of private data from search requests
comes in three forms:
1. removal of private data from requests going to search services
2. not forwarding anything from third party services through search services
(e.g. advertisement)
3. removal of private data from requests going to the result pages
Removing private data means not sending cookies to external search engines and
generating a random browser profile for every request. Thus, it does not matter
if a public or private instance handles the request, because it is anonymized in
both cases. The IP address seen by the search service will be the IP of the
instance, but searx can be configured to use a proxy or Tor. `Result proxy
<https://github.com/asciimoo/morty>`__ is supported, too.
Unlike most search services, searx does not serve ads or tracking content, so
private data is not forwarded to third parties who might monetize it. Besides
protecting users from search services, both the referring page and the search
query are hidden from the visited result pages.
What are the consequences of using public instances?
----------------------------------------------------
If someone uses a public instance, he/she has to trust the administrator of that
instance. This means that the user of the public instance does not know whether
his/her requests are logged, aggregated and sent or sold to a third party.
Also, public instances without proper protection are more vulnerable to abuse of
the search service. In this case the external service responds with CAPTCHAs or
bans the IP of the instance. Thus, search requests return fewer results.
I see. What about private instances?
------------------------------------
If users run their own instances, everything is in their control: the source
code, logging settings and private data. Unknown instance administrators do not
have to be trusted.
Furthermore, as the default settings of their instance are editable, there is no
need to use cookies to tailor searx to their needs, so preferences will not be
reset to defaults when clearing browser cookies. As the settings are stored on
their computer, they will not be accessible to others as long as the computer is
not compromised.
Conclusion
==========
Always use an instance which is operated by people you trust. The privacy
features of searx are available to users no matter what kind of instance they
use.
If someone is on the go or just wants to try searx for the first time, public
instances are the best choice. Additionally, public instances make the world a
better place, because those who cannot or do not want to run an instance still
have access to a privacy-respecting search service.
.. _search-syntax:
=============
Search syntax
=============
Searx allows you to modify the default categories, engines and search language
via the search query.
Prefix: ``!``
to set category/engine
Prefix: ``:``
to set language
Prefix: ``?``
to add engines and categories to the currently selected categories
Abbreviations of the engines and languages are also accepted. Engine/category
modifiers are chainable and inclusive (e.g. with :search:`!it !ddg !wp qwer
<?q=%21it%20%21ddg%20%21wp%20qwer>` search in IT category **and** duckduckgo
**and** wikipedia for ``qwer``).
See the :search:`/preferences page <preferences>` for the list of engines,
categories and languages.
Examples
========
Search in wikipedia for ``qwer``:
- :search:`!wp qwer <?q=%21wp%20qwer>` or
- :search:`!wikipedia qwer <?q=%21wikipedia%20qwer>`
Image search:
- :search:`!images Cthulhu <?q=%21images%20Cthulhu>`
Custom language in wikipedia:
- :search:`:hu !wp hackerspace <?q=%3Ahu%20%21wp%20hackerspace>`
pallets-sphinx-themes
Sphinx
sphinx-issues
mock==2.0.0
nose2[coverage_plugin]
cov-core==1.15.0
@@ -155,7 +155,7 @@ quiet_cmd_virtualenv = PYENV usage: $ source ./$@/bin/activate
if [ ! -d "./$(PY_ENV)" ];then \
$(VIRTUALENV) $(VIRTUALENV_VERBOSE) $(VTENV_OPTS) $2; \
else \
echo " PYENV using virtualenv from $2"; \
echo "PYENV using virtualenv from $2"; \
fi
# $2 path to lint
@@ -263,7 +263,7 @@ pydebug: $(PY_ENV)
# install / uninstall python objects into virtualenv (PYENV)
pyenv-install: $(PY_ENV)
@$(PY_ENV_BIN)/pip $(PIP_VERBOSE) install -e .
@echo " ACTIVATE $(call normpath,$(PY_ENV_ACT)) "
@echo "ACTIVATE $(call normpath,$(PY_ENV_ACT)) "
pyenv-uninstall: $(PY_ENV)
@$(PY_ENV_BIN)/pip $(PIP_VERBOSE) uninstall --yes .
# -*- coding: utf-8; mode: makefile-gmake -*-
# You can set these variables from the command line.
SPHINXOPTS ?=
SPHINXBUILD ?= $(PY_ENV_BIN)/sphinx-build
SPHINX_CONF ?= conf.py
DOCS_FOLDER ?= docs
DOCS_BUILD ?= build/docs
DOCS_DIST ?= dist/docs
GH_PAGES ?= gh-pages
BOOKS_FOLDER ?= docs
BOOKS_DIST ?= dist/books
ifeq ($(KBUILD_VERBOSE),1)
SPHINX_VERBOSE = "-v"
else
SPHINX_VERBOSE =
endif
## SPHINXVERS variable
## ===================
##
## .. _requirement-specifiers: https://pip.pypa.io/en/stable/reference/pip_install/#requirement-specifiers
##
## Sphinx version to use, when building documentation. Set this when calling
## build target. The default value is empty (install latest), to select a
## specific version use a requirement-specifiers_. E.g. to build your target
## 'doc' with a select sphinx-doc_ version 1.7.9::
##
## make SPHINXVERS='==1.7.9' docs
##
## To build with latest 1.7::
##
## make SPHINXVERS='>=1.7,<1.8' docs
##
SPHINXVERS ?=
docs-help:
@echo 'makefile.sphinx:'
@echo ' docs-clean - clean intermediate doc objects'
@echo ' $(GH_PAGES) - create & upload github pages'
@echo ' sphinx-pdf - run sphinx latex & pdf targets'
@echo ''
@echo ' books/{name}.html : build only the HTML of document {name}'
@echo ' valid values for books/{name}.html are:'
@echo ' $(BOOKS_HTML)' | $(FMT)
@echo ' books/{name}.pdf : build only the PDF of document {name}'
@echo ' valid values for books/{name}.pdf are:'
@echo ' $(BOOKS_PDF) ' | $(FMT)
# ------------------------------------------------------------------------------
# requirements
# ------------------------------------------------------------------------------
sphinx-doc: $(PY_ENV)
@echo "PYENV installing Sphinx$(SPHINXVERS)"
$(Q)$(PY_ENV_BIN)/pip install $(PIP_VERBOSE) 'Sphinx$(SPHINXVERS)'
sphinx-live: $(PY_ENV)
@echo "PYENV installing Sphinx$(SPHINXVERS)"
$(Q)$(PY_ENV_BIN)/pip install $(PIP_VERBOSE) 'Sphinx$(SPHINXVERS)' sphinx-autobuild
PHONY += msg-texlive texlive
ifeq ($(shell which xelatex >/dev/null 2>&1; echo $$?), 1)
texlive: msg-texlive
$(error The 'xelatex' command was not found)
else
texlive:
@:
endif
msg-texlive:
$(Q)echo "\n\
The TeX/PDF output and the *math* extension require TexLive and latexmk:\n\n\
Make sure you have an updated TeXLive with the XeTeX engine installed, grab\n\
it from https://www.tug.org/texlive or install it from your package manager.\n\n\
Install latexmk from your package manager or visit https://ctan.org/pkg/latexmk\n\n\
Sphinx-doc produces (Xe)LaTeX files which might use additional TeX-packages\n\
and fonts. To process these LaTeX files, a TexLive installation with the\n\
additional packages is required. On debian based OS these requirements\n\
are installed by::\n\n\
sudo -H apt-get install\n\
latexmk\n\
texlive-base texlive-xetex texlive-latex-recommended\n\
texlive-extra-utils dvipng ttf-dejavu\n"
# ------------------------------------------------------------------------------
# commands
# ------------------------------------------------------------------------------
# $2 sphinx builder e.g. "html"
# $3 path where configuration file (conf.py) is located
# $4 sourcedir
# $5 dest subfolder e.g. "man" for man pages at $(DOCS_DIST)/man
quiet_cmd_sphinx = SPHINX $@ --> file://$(abspath $(DOCS_DIST)/$5)
cmd_sphinx = SPHINX_CONF=$(abspath $4/$(SPHINX_CONF))\
$(SPHINXBUILD) $(SPHINX_VERBOSE) $(SPHINXOPTS)\
-b $2 -c $3 -d $(DOCS_BUILD)/.doctrees $4 $(DOCS_DIST)/$5
quiet_cmd_sphinx_autobuild = SPHINX $@ --> file://$(abspath $(DOCS_DIST)/$5)
cmd_sphinx_autobuild = PATH="$(PY_ENV_BIN):$(PATH)" $(PY_ENV_BIN)/sphinx-autobuild $(SPHINX_VERBOSE) --poll -B --host 0.0.0.0 --port 8080 $(SPHINXOPTS)\
-b $2 -c $3 -d $(DOCS_BUILD)/.doctrees $4 $(DOCS_DIST)/$5
quiet_cmd_sphinx_clean = CLEAN $@
cmd_sphinx_clean = rm -rf $(DOCS_BUILD) $(DOCS_DIST) $(GH_PAGES)/* $(GH_PAGES)/.buildinfo
# ------------------------------------------------------------------------------
# targets
# ------------------------------------------------------------------------------
# build PDF of whole documentation in: $(DOCS_DIST)/pdf
PHONY += sphinx-pdf
sphinx-pdf: sphinx-latex
$(Q)cd $(DOCS_BUILD)/latex/; make all-pdf
$(Q)mkdir -p $(DOCS_DIST)/pdf
$(Q)cp $(DOCS_BUILD)/latex/*.pdf $(DOCS_DIST)/pdf
@echo "SPHINX *.pdf --> file://$(abspath $(DOCS_DIST)/pdf)"
PHONY += sphinx-latex
sphinx-latex: texlive sphinx-doc
$(SPHINXBUILD) $(SPHINX_VERBOSE) $(SPHINXOPTS)\
-b latex \
-c $(DOCS_FOLDER) \
-d $(DOCS_BUILD)/.doctrees \
$(DOCS_FOLDER) \
$(DOCS_BUILD)/latex
# Sphinx projects, we call them *books* (which is more common). Books are
# folders under $(BOOKS_FOLDER) containing a conf.py file. The HTML output goes
# to folder $(BOOKS_DIST)/<name> while the PDF is placed in $(BOOKS_DIST)/<name>/pdf
BOOKS=$(patsubst $(BOOKS_FOLDER)/%/conf.py,books/%,$(wildcard $(BOOKS_FOLDER)/*/conf.py))
# fine grained targets
BOOKS_HTML = $(patsubst %,%.html,$(BOOKS))
BOOKS_CLEAN = $(patsubst %,%.clean,$(BOOKS))
BOOKS_LATEX = $(patsubst %,%.latex,$(BOOKS))
BOOKS_PDF = $(patsubst %,%.pdf,$(BOOKS))
BOOKS_LIVE = $(patsubst %,%.live,$(BOOKS))
$(BOOKS_DIST):
mkdir -p $(BOOKS_DIST)
PHONY += $(BOOKS_HTML)
$(BOOKS_HTML): sphinx-doc | $(BOOKS_DIST)
SPHINX_CONF=$(patsubst books/%.html,%,$@)/conf.py \
$(SPHINXBUILD) $(SPHINX_VERBOSE) $(SPHINXOPTS)\
-b html \
-c $(DOCS_FOLDER) \
-d $(DOCS_BUILD)/books/$(patsubst books/%.html,%,$@)/.doctrees \
$(patsubst books/%.html,%,$@) \
$(BOOKS_DIST)/$(patsubst books/%.html,%,$@)
@echo "SPHINX $@ --> file://$(abspath $(BOOKS_DIST)/$(patsubst books/%.html,%,$@))"
PHONY += $(BOOKS_LIVE)
$(BOOKS_LIVE): sphinx-live | $(BOOKS_DIST)
PATH="$(PY_ENV_BIN):$(PATH)" \
SPHINX_CONF=$(patsubst books/%.live,%,$@)/conf.py \
$(PY_ENV_BIN)/sphinx-autobuild --poll -B --host 0.0.0.0 --port 8080 $(SPHINX_VERBOSE) $(SPHINXOPTS)\
-b html \
-c $(DOCS_FOLDER) \
-d $(DOCS_BUILD)/books/$(patsubst books/%.live,%,$@)/.doctrees \
$(patsubst books/%.live,%,$@) \
$(BOOKS_DIST)/$(patsubst books/%.live,%,$@)
$(BOOKS_PDF): %.pdf : %.latex
$(Q)cd $(DOCS_BUILD)/latex/$(patsubst books/%.pdf,%,$@); make all-pdf
$(Q)mkdir -p $(BOOKS_DIST)/$(patsubst books/%.pdf,%,$@)/pdf
$(Q)cp -v $(DOCS_BUILD)/latex/$(patsubst books/%.pdf,%,$@)/*.pdf $(BOOKS_DIST)/$(patsubst books/%.pdf,%,$@)/pdf
@echo "SPHINX $@ --> file://$(abspath $(BOOKS_DIST)/$(patsubst books/%.pdf,%,$@))/pdf"
PHONY += $(BOOKS_LATEX)
$(BOOKS_LATEX): sphinx-doc | $(BOOKS_DIST)
SPHINX_CONF=$(patsubst books/%.latex,%,$@)/conf.py \
$(SPHINXBUILD) $(SPHINX_VERBOSE) $(SPHINXOPTS)\
-b latex \
-c $(DOCS_FOLDER) \
-d $(DOCS_BUILD)/books/$(patsubst books/%.latex,%,$@)/.doctrees \
$(patsubst books/%.latex,%,$@) \
$(DOCS_BUILD)/latex/$(patsubst books/%.latex,%,$@)
@echo "SPHINX $@ --> file://$(abspath $(DOCS_BUILD)/latex/$(patsubst books/%.latex,%,$@))"
$(BOOKS_CLEAN):
$(Q)rm -rf $(BOOKS_DIST)/$(patsubst books/%.clean,%,$@) \
$(DOCS_BUILD)/books/$(patsubst books/%.clean,%,$@) \
$(DOCS_BUILD)/latex/$(patsubst books/%.clean,%,$@)
# github pages
PHONY += $(GH_PAGES)
$(GH_PAGES)::
$(MAKE) docs
[ -d "gh-pages/.git" ] || git clone $(GIT_URL) gh-pages
-cd $(GH_PAGES); git checkout gh-pages >/dev/null
-cd $(GH_PAGES); ls -A | grep -v '.git$$' | xargs rm -rf
cp -r $(DOCS_DIST)/* $(GH_PAGES)/
touch $(GH_PAGES)/.nojekyll
echo "<html><head><META http-equiv='refresh' content='0;URL=index.html'></head></html>" > $(GH_PAGES)/404.html
cd $(GH_PAGES);\
git add --all . ;\
git commit -m "gh-pages: updated" ;\
git push origin gh-pages
PHONY += docs-clean
docs-clean: $(BOOKS_CLEAN)
$(call cmd,sphinx_clean)
.PHONY: $(PHONY)