Skip to content

Commit

Permalink
Fixed CLI upload issue
Browse files Browse the repository at this point in the history
  • Loading branch information
tikazyq committed Jun 5, 2022
1 parent 94f5a02 commit 1a8f821
Show file tree
Hide file tree
Showing 5 changed files with 32 additions and 23 deletions.
19 changes: 12 additions & 7 deletions _ext/python/cli/actions/upload.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@
from print_color import print as print_color

from cli.client.request import http_put, http_post
from cli.constants import CLI_DEFAULT_UPLOAD_IGNORE_PATTERNS, CLI_DEFAULT_UPLOAD_SPIDER_MODE
from cli.constants import CLI_DEFAULT_UPLOAD_IGNORE_PATTERNS, CLI_DEFAULT_UPLOAD_SPIDER_MODE, \
CLI_DEFAULT_UPLOAD_SPIDER_CMD
from cli.errors import MissingIdException, HttpException
from crawlab.config.spider import get_spider_config

Expand Down Expand Up @@ -76,25 +77,29 @@ def upload(args: Namespace):
print_color(f'failed: {stats["error"]}', tag='info', tag_color='cyan', color='white')


def create_spider(name: str, description: str = None, mode: str = None, priority: int = None, cmd: str = None,
                  param: str = None, col_name: str = None) -> str:
    """Create a new spider via the Crawlab API and return its ID.

    Args:
        name: Spider name (required).
        description: Optional human-readable description.
        mode: Default task-run mode; falls back to
            CLI_DEFAULT_UPLOAD_SPIDER_MODE when None.
        priority: Default task priority (passed through as-is, may be None).
        cmd: Execute command; falls back to CLI_DEFAULT_UPLOAD_SPIDER_CMD
            when None.
        param: Execute parameters (passed through as-is, may be None).
        col_name: Results collection name; when None, derived from the
            spider name as 'results_<name lower-cased, spaces -> underscores>'.

    Returns:
        The '_id' string of the newly created spider, taken from the
        API response payload.

    NOTE(review): this span was a diff rendering containing both the old
    and new versions of the function (duplicate signatures and dict keys);
    this body is the reconstructed post-commit version.
    """
    # results collection name: derive a collection-safe default from the name
    if col_name is None:
        col_name = f'results_{"_".join(name.lower().split(" "))}'

    # mode: apply the CLI-wide default when the caller did not choose one
    if mode is None:
        mode = CLI_DEFAULT_UPLOAD_SPIDER_MODE

    # cmd: apply the CLI-wide default execute command when unset
    if cmd is None:
        cmd = CLI_DEFAULT_UPLOAD_SPIDER_CMD

    # create the spider via HTTP PUT against the Crawlab server
    res = http_put(url='/spiders', data={
        'name': name,
        'mode': mode,
        'cmd': cmd,
        'param': param,
        'priority': priority,
        'description': description,
        'col_name': col_name,
    })

    # the server wraps the created document under 'data'; '_id' is its ID
    return res.json().get('data').get('_id')
Expand Down
1 change: 1 addition & 0 deletions _ext/python/cli/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,3 +22,4 @@
r'\.idea/'
]
CLI_DEFAULT_UPLOAD_SPIDER_MODE = 'random'
CLI_DEFAULT_UPLOAD_SPIDER_CMD = 'echo "hello crawlab"'
18 changes: 13 additions & 5 deletions _ext/python/cli/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,16 +28,23 @@
default=None, type=str)
upload_parser.add_argument('--create', '-c', help='Whether to create a new spider. Default: false', action='store_true',
default=False)
upload_parser.add_argument('--name', '-n', help='Spider name if creating a new spider. Default: directory name',
type=str)
upload_parser.add_argument('--id', '-i', help='Spider ID if uploading to an existing spider.',
type=str)
upload_parser.add_argument('--name', '-n', help='Spider name if creating a new spider. Default: directory name',
type=str)
upload_parser.add_argument('--description', '-D',
help='Spider description if creating a new spider. Default: spider name', type=str)
upload_parser.add_argument('--mode', '-M',
help='Default spider running task mode. Default: random', type=str, default='random')
upload_parser.add_argument('--priority', '-p',
help='Default spider running task priority. Default: 5', type=int, default=5)
upload_parser.add_argument('--cmd', '-m',
help='Spider execute command if creating a new spider')
upload_parser.add_argument('--param', '-P',
help='Spider execute params if creating a new spider')
upload_parser.add_argument('--col_name', '-C',
help='Spider results collection name if creating a new spider. Default: results_<spider_name>',
type=str)
upload_parser.add_argument('--cmd', '-m',
help='Spider execute command if creating a new spider. Default: echo "hello crawlab"',
default='echo "hello crawlab"')
upload_parser.set_defaults(func=upload, action=CLI_ACTION_UPLOAD)

# config parser
Expand All @@ -55,6 +62,7 @@ def main():
try:
args.func(args)
except Exception as e:
print(e)
if getattr(args, 'action') == CLI_ACTION_LOGIN:
login_parser.print_help()
elif getattr(args, 'action') == CLI_ACTION_UPLOAD:
Expand Down
4 changes: 2 additions & 2 deletions _ext/python/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
requests>=2.27
pathspec==0.8.0
grpcio==1.39.0
grpcio-tools==1.39.0
grpc-interceptor-headers==0.1.0
print-color==0.4.5
pyyaml==6.0
requests==2.27.1
pyyaml==6.0
13 changes: 4 additions & 9 deletions _ext/python/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,9 @@
with open('README.md', 'r') as f:
long_description = f.read()

with open('requirements.txt', 'r') as f:
install_requires = f.read().split('\n')

setup(
name='crawlab-sdk',
version='0.6.0-1',
Expand All @@ -20,15 +23,7 @@
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
],
install_requires=[
'requests==2.22.0',
'prettytable==0.7.2',
'pathspec==0.8.0',
'grpcio==1.39.0',
'grpcio-tools==1.39.0',
'grpc-interceptor-headers==0.1.0',
'print-color==0.4.5',
],
install_requires=install_requires,
entry_points={
'console_scripts': [
'crawlab-cli=cli.main:main'
Expand Down

0 comments on commit 1a8f821

Please sign in to comment.