desc
stringlengths 3
26.7k
| decl
stringlengths 11
7.89k
| bodies
stringlengths 8
553k
|
---|---|---|
''
def reload(self, *args, **kwds):
    """Re-read the directory currently shown by loading self.path again."""
    self.load(self.path)
|
'è¿é, äŒçŽæ¥ææä»¶äžäŒ å°åœåç®åœ(self.path).
ææŸäºä»¶å·²ç»è¢«å€ç, æä»¥äžäŒè§Šåself.app.windowçææŸåšäœ.'
def do_drag_data_received(self, drag_context, x, y, data, info, time):
    """Handle files dropped on this view: upload them into self.path.

    Because the drop is fully handled here, the main window's own drop
    handler is not triggered.
    """
    if not self.app.profile:
        return
    if info != TargetInfo.URI_LIST:
        return
    dropped_paths = util.uris_to_paths(data.get_uris())
    if dropped_paths:
        self.app.upload_page.upload_files(dropped_paths, self.path)
|
'æåŒsocket'
def get_req(self, start_size, end_size):
    """Open an HTTP connection for the byte range [start_size, end_size].

    Returns the opened response object, or None after RETRIES failed
    attempts. On final failure a BATCH_ERROR message is pushed onto the
    queue so the coordinator can react.
    """
    logger.debug(('DownloadBatch.get_req: %s, %s' % (start_size, end_size)))
    opener = request.build_opener()
    content_range = 'bytes={0}-{1}'.format(start_size, end_size)
    opener.addheaders = [('Range', content_range),
                         ('User-Agent', const.USER_AGENT),
                         ('Referer', const.PAN_REFERER)]
    for i in range(RETRIES):
        try:
            return opener.open(self.url, timeout=self.timeout)
        except Exception:
            # The original returned on the very first error, which made the
            # RETRIES loop pointless; log and retry instead.
            logger.error(traceback.format_exc())
    # All attempts failed: report once and give up.
    self.queue.put((self.id_, BATCH_ERROR), block=False)
    return None
|
''
def destroy(self):
    """Tear this worker down by pausing it."""
    self.pause()
|
'å®ç°äºThreadçæ¹æ³, 线çšå¯åšå
¥å£'
def run(self):
    """Thread entry point (threading.Thread API): start the download."""
    self.download()
|
''
def pause(self):
    """Mark this task as paused by updating its state column."""
    self.row[STATE_COL] = State.PAUSED
|
''
def stop(self):
    """Mark this task as canceled by updating its state column."""
    self.row[STATE_COL] = State.CANCELED
|
'Try to connect to the given dbus services. If successful it will
return a callable dbus proxy and those arguments.'
def _prepair(self):
    """Try to connect to the known dbus shutdown services.

    Returns (proxy, arguments) for the first service that answers, where
    proxy is a callable dbus method; returns (None, None) when no bus or
    service is reachable.
    """
    try:
        sessionbus = dbus.SessionBus()
        systembus = dbus.SystemBus()
    except:
        # NOTE(review): bare except — any failure to reach dbus is treated
        # as "no shutdown support available".
        return (None, None)
    for dbus_props in self.DBUS_SHUTDOWN.values():
        try:
            # Pick the bus this service lives on.
            if (dbus_props['bus'] == SESSION_BUS):
                bus = sessionbus
            else:
                bus = systembus
            interface = bus.get_object(dbus_props['service'], dbus_props['objectPath'])
            proxy = interface.get_dbus_method(dbus_props['method'], dbus_props['interface'])
            return (proxy, dbus_props['arguments'])
        except dbus.exceptions.DBusException:
            # Service not present on this system; try the next candidate.
            continue
    return (None, None)
|
'Call the dbus proxy to start the shutdown.'
def shutdown(self):
    """Call the dbus proxy to start the system shutdown.

    Flushes filesystem buffers first so finished downloads hit the disk.
    """
    if self._proxy:
        os.sync()
        self._proxy(*self._args)
|
'å 䞺Gtk没æååšQtäžé£ä¹æ¹äŸ¿ç䜿çšSQLite, èå¿
é¡»å°æææ°æ®è¯»å
¥äžäžª
liststoreäžæè¡.'
def init_db(self):
    """Open (creating if needed) the per-user task database.

    Gtk has no SQLite-backed model like Qt's, so all rows are later read
    into a liststore; this only prepares the on-disk table.
    """
    cache_path = os.path.join(Config.CACHE_DIR, self.app.profile['username'])
    if (not os.path.exists(cache_path)):
        os.makedirs(cache_path, exist_ok=True)
    db = os.path.join(cache_path, TASK_FILE)
    self.conn = sqlite3.connect(db)
    self.cursor = self.conn.cursor()
    sql = 'CREATE TABLE IF NOT EXISTS tasks (\n name CHAR NOT NULL,\n path CHAR NOT NULL,\n fsid CHAR NOT NULL,\n size INTEGER NOT NULL,\n currsize INTEGER NOT NULL,\n link CHAR,\n isdir INTEGER,\n savename CHAR NOT NULL,\n savedir CHAR NOT NULL,\n state INT NOT NULL,\n statename CHAR NOT NULL,\n humansize CHAR NOT NULL,\n percent INT NOT NULL,\n tooltip CHAR\n )\n '
    self.cursor.execute(sql)
|
''
def add_task_db(self, task):
    """Insert one task record (a full 14-column tuple) into the tasks table."""
    self.cursor.execute('INSERT INTO tasks VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?)', task)
    self.check_commit()
|
'仿°æ®åºäžæ¥è¯¢fsidçä¿¡æ¯.
åŠææ²¡æçè¯, å°±è¿åNone'
def get_task_db(self, fs_id):
    """Look up the task record for *fs_id* in the database.

    Returns the first matching row, or None when the query yields nothing.
    """
    sql = 'SELECT * FROM tasks WHERE fsid=?'
    req = self.cursor.execute(sql, [fs_id])
    if req:
        return req.fetchone()
    # The original had a bare `None` expression here (a no-op); make the
    # intended return explicit.
    return None
|
'åœä¿®æ¹æ°æ®åºè¶
è¿100次å, å°±èªåšcommitæ°æ®.'
def check_commit(self, force=False):
    """Commit automatically once 100 modifications have accumulated.

    Pass force=True to commit immediately regardless of the counter.
    """
    self.commit_count += 1
    if force or self.commit_count >= 100:
        self.commit_count = 0
        self.conn.commit()
|
''
def update_task_db(self, row):
    """Persist the mutable columns of *row* (keyed by fsid) to the database."""
    sql = 'UPDATE tasks SET \n currsize=?, state=?, statename=?, humansize=?, percent=?\n WHERE fsid=?\n '
    self.cursor.execute(sql, [row[CURRSIZE_COL], row[STATE_COL], row[STATENAME_COL], row[HUMANSIZE_COL], row[PERCENT_COL], row[FSID_COL]])
    self.check_commit()
|
''
def remove_task_db(self, fs_id):
    """Delete the task record whose fsid matches *fs_id*."""
    self.cursor.execute('DELETE FROM tasks WHERE fsid=?', [fs_id])
    self.check_commit()
|
'确讀åšListstoreäžæ¯åŠååšè¿æ¡ä»»å¡. åŠæååš, è¿åTreeModelRow,
åŠåå°±è¿åNone'
def get_row_by_fsid(self, fs_id):
    """Return the liststore row whose FSID column equals *fs_id*.

    Returns the TreeModelRow when found, else None.
    """
    return next((row for row in self.liststore if row[FSID_COL] == fs_id), None)
|
''
def add_tasks(self, pcs_files, dirname=''):
    """Queue download tasks for *pcs_files*.

    Plain files become tasks immediately; folders are listed
    asynchronously and their contents re-queued via recursion.
    """
    def on_list_dir(info, error=None):
        # Callback for the async folder listing.
        (path, pcs_files) = info
        if (error or (not pcs_files)):
            dialog = Gtk.MessageDialog(self.app.window, Gtk.DialogFlags.MODAL, Gtk.MessageType.ERROR, Gtk.ButtonsType.CLOSE, _('Failed to scan folder to download'))
            dialog.format_secondary_text(_('Please download {0} again').format(path))
            dialog.run()
            dialog.destroy()
            return
        # Recurse with the folder's contents.
        self.add_tasks(pcs_files, dirname)
    self.check_first()
    for pcs_file in pcs_files:
        if pcs_file['isdir']:
            gutil.async_call(pcs.list_dir_all, self.app.cookie, self.app.tokens, pcs_file['path'], callback=on_list_dir)
        else:
            self.add_task(pcs_file, dirname)
    self.check_commit(force=True)
|
''
def add_task(self, pcs_file, dirname=''):
    """Create one download task row for a single remote file.

    Directories are ignored here (add_tasks expands them). If a task for
    the same fs_id already exists a toast is shown instead; a finished
    duplicate is opened with the default application.
    """
    if pcs_file['isdir']:
        return
    fs_id = str(pcs_file['fs_id'])
    row = self.get_row_by_fsid(fs_id)
    if row:
        self.app.toast(_('Task exists: {0}').format(pcs_file['server_filename']))
        if (row[STATE_COL] == State.FINISHED):
            self.launch_app(fs_id)
        return
    if (not dirname):
        dirname = self.app.profile['save-dir']
    # Mirror the remote directory layout under the save dir (strip the
    # leading '/' from the remote path).
    save_dir = os.path.dirname(os.path.join(dirname, pcs_file['path'][1:]))
    save_name = pcs_file['server_filename']
    human_size = util.get_human_size(pcs_file['size'])[0]
    tooltip = gutil.escape(_('From {0}\nTo {1}').format(pcs_file['path'], save_dir))
    task = (pcs_file['server_filename'], pcs_file['path'], fs_id, pcs_file['size'], 0, '', pcs_file['isdir'], save_name, save_dir, State.WAITING, StateNames[State.WAITING], human_size, 0, tooltip)
    self.liststore.append(task)
    self.add_task_db(task)
    self.scan_tasks()
|
''
def scan_tasks(self, ignore_shutdown=False):
    """Start waiting tasks up to the concurrency limit.

    When the shutdown toggle is active and every task has reached a
    terminal/paused state, trigger the system shutdown (unless
    ignore_shutdown is set).
    """
    for row in self.liststore:
        if (len(self.workers.keys()) >= self.app.profile['concurr-download']):
            break
        if (row[STATE_COL] == State.WAITING):
            self.start_worker(row)
    if ((not self.shutdown_button.get_active()) or ignore_shutdown):
        return
    # Only shut down once no task is still active.
    for row in self.liststore:
        if (row[STATE_COL] not in (State.PAUSED, State.FINISHED, State.CANCELED)):
            return
    self.shutdown.shutdown()
|
'䞺taskæ°å»ºäžäžªåå°äžèœœçº¿çš, å¹¶åŒå§äžèœœ.'
def start_worker(self, row):
    """Create a background Downloader thread for *row* and start it.

    The nested on_worker_* handlers run on the worker thread and bounce
    their real work onto the GTK main loop via GLib.idle_add.
    """
    def on_worker_started(worker, fs_id):
        pass
    def on_worker_received(worker, fs_id, received, received_total):
        GLib.idle_add(do_worker_received, fs_id, received, received_total)
    def do_worker_received(fs_id, received, received_total):
        # Update speed/percent display and persist progress.
        self.download_speed_add(received)
        row = None
        if (fs_id in self.workers):
            row = self.workers[fs_id][1]
        else:
            row = self.get_row_by_fsid(fs_id)
        if (not row):
            return
        row[CURRSIZE_COL] = received_total
        curr_size = util.get_human_size(row[CURRSIZE_COL], False)[0]
        total_size = util.get_human_size(row[SIZE_COL])[0]
        row[PERCENT_COL] = int(((row[CURRSIZE_COL] / row[SIZE_COL]) * 100))
        row[HUMANSIZE_COL] = '{0} / {1}'.format(curr_size, total_size)
        self.update_task_db(row)
    def on_worker_downloaded(worker, fs_id):
        GLib.idle_add(do_worker_downloaded, fs_id)
    def do_worker_downloaded(fs_id):
        # Finalize the row, drop the worker, open the file, refill the queue.
        row = None
        if (fs_id in self.workers):
            row = self.workers[fs_id][1]
        else:
            row = self.get_row_by_fsid(fs_id)
        if (not row):
            return
        row[CURRSIZE_COL] = row[SIZE_COL]
        row[STATE_COL] = State.FINISHED
        row[PERCENT_COL] = 100
        total_size = util.get_human_size(row[SIZE_COL])[0]
        row[HUMANSIZE_COL] = '{0} / {1}'.format(total_size, total_size)
        row[STATENAME_COL] = StateNames[State.FINISHED]
        self.update_task_db(row)
        self.check_commit(force=True)
        self.workers.pop(row[FSID_COL], None)
        # NOTE(review): _() is applied to the already-formatted string, so
        # gettext lookup will likely miss the translation template.
        self.app.toast(_('{0} downloaded'.format(row[NAME_COL])))
        self.launch_app(fs_id)
        self.scan_tasks()
    def on_worker_network_error(worker, fs_id):
        GLib.idle_add(do_worker_network_error, fs_id)
    def do_worker_network_error(fs_id):
        # Mark the row errored; optionally schedule an automatic retry.
        row = self.workers.get(fs_id, None)
        if row:
            row = row[1]
        else:
            row = self.get_row_by_fsid(fs_id)
        if (not row):
            return
        row[STATE_COL] = State.ERROR
        row[STATENAME_COL] = StateNames[State.ERROR]
        self.update_task_db(row)
        self.remove_worker(row[FSID_COL], stop=False)
        if self.app.profile['retries-each']:
            # retries-each is in minutes; timeout_add takes milliseconds.
            GLib.timeout_add((self.app.profile['retries-each'] * 60000), self.restart_task, row)
        else:
            self.app.toast(_('Error occurs will downloading {0}').format(row[NAME_COL]))
        self.scan_tasks()
    def do_worker_disk_error(fs_id, tmp_filepath):
        self.app.toast(_('Disk Error: failed to read/write {0}').format(tmp_filepath))
    def on_worker_disk_error(worker, fs_id, tmp_filepath):
        GLib.idle_add(do_worker_disk_error, fs_id, tmp_filepath)
    # Don't start a second worker for a row that already has one.
    if ((not row) or (row[FSID_COL] in self.workers)):
        return
    row[STATE_COL] = State.DOWNLOADING
    row[STATENAME_COL] = StateNames[State.DOWNLOADING]
    worker = Downloader(self, row)
    self.workers[row[FSID_COL]] = (worker, row)
    worker.connect('started', on_worker_started)
    worker.connect('received', on_worker_received)
    worker.connect('downloaded', on_worker_downloaded)
    worker.connect('network-error', on_worker_network_error)
    worker.connect('disk-error', on_worker_disk_error)
    worker.start()
|
'忢è¿äžªtaskçåå°äžèœœçº¿çš'
def stop_worker(self, row):
    """Stop the background download thread attached to this task row."""
    self.remove_worker(row[FSID_COL], stop=True)
|
''
def restart_task(self, row):
    """Re-queue an errored task (used by the automatic retry timer)."""
    self.start_task(row)
|
'å°ä»»å¡ç¶æè®Ÿå®äžºDownloading, åŠææ²¡æè¶
è¿æå€§ä»»å¡æ°çè¯;
åŠåå°å®è®Ÿå®äžºWaiting.'
def start_task(self, row, scan=True):
    """Queue *row* for downloading.

    The row is set to Waiting; scan_tasks() promotes it to Downloading
    when a worker slot is free. Already-running rows are left alone.
    """
    if ((not row) or (row[STATE_COL] in RUNNING_STATES)):
        return
    row[STATE_COL] = State.WAITING
    row[STATENAME_COL] = StateNames[State.WAITING]
    self.update_task_db(row)
    if scan:
        self.scan_tasks()
|
''
def pause_tasks(self):
    """Pause every task in the list (no-op before the page is first used)."""
    if self.first_run:
        return
    for row in self.liststore:
        self.pause_task(row, scan=False)
|
'operator - å€çåœæ°'
def operate_selected_rows(self, operator):
    """Apply *operator* to every selected task row.

    operator - a handler such as start/pause/remove, called as
    operator(row, scan=False). fs_ids are collected up front because the
    operator may mutate the model.
    """
    (model, tree_paths) = self.selection.get_selected_rows()
    if (not tree_paths):
        return
    fs_ids = [model[tree_path][FSID_COL] for tree_path in tree_paths]
    for fs_id in fs_ids:
        row = self.get_row_by_fsid(fs_id)
        if (not row):
            # A vanished row used to abort the whole loop (and skip the
            # commit/scan below); skip just that entry instead.
            continue
        operator(row, scan=False)
    self.check_commit(force=True)
    self.scan_tasks(ignore_shutdown=True)
|
'Dump profile content to disk'
def on_app_shutdown(self, app):
    """Flush state to disk when the application quits.

    Stops the file watcher and lets both task pages persist themselves.
    """
    if self.filewatcher:
        self.filewatcher.stop()
    if self.profile:
        self.upload_page.on_destroy()
        self.download_page.on_destroy()
|
''
def on_main_window_drag_data_received(self, window, drag_context, x, y, data, info, time):
    """Upload files dropped anywhere on the main window (destination chosen later)."""
    if (not self.profile):
        return
    if (info == TargetInfo.URI_LIST):
        uris = data.get_uris()
        source_paths = util.uris_to_paths(uris)
        if source_paths:
            self.upload_page.upload_files(source_paths)
|
''
def on_signout_action_activated(self, action, params):
    """Sign out: pause all transfers, then show the sign-in dialog."""
    if self.profile:
        self.upload_page.pause_tasks()
        self.download_page.pause_tasks()
    self.show_signin_dialog(auto_signin=False)
|
''
def update_quota(self, quota_info, error=None):
    """Refresh the capacity label and progress bar from quota info.

    quota_info - dict with 'errno', 'used', 'total' (bytes); ignored when
    missing or when errno != 0.
    """
    if ((not quota_info) or (quota_info['errno'] != 0)):
        return
    used = quota_info['used']
    total = quota_info['total']
    if not total:
        # Guard against a zero total, which would crash the fraction below.
        return
    used_size = util.get_human_size(used)[0]
    total_size = util.get_human_size(total)[0]
    self.capicity_label.set_text('{0} / {1}'.format(used_size, total_size))
    self.progressbar.set_fraction((used / total))
|
''
def update_avatar(self):
    """Fetch and show the user's avatar image (if enabled in the profile)."""
    def do_update_avatar(info, error=None):
        # Callback with (uk, uname, img_path) from the async fetch.
        if (error or (not info)):
            logger.error(('Failed to get user avatar: %s, %s' % (info, error)))
        else:
            (uk, uname, img_path) = info
            self.img_avatar.set_from_file(img_path)
            self.img_avatar.props.tooltip_text = '\n'.join([self.profile['username'], uname])
    if (not self.profile['display-avatar']):
        return
    self.img_avatar.props.tooltip_text = ''
    cache_path = Config.get_cache_path(self.profile['username'])
    gutil.async_call(gutil.update_avatar, self.cookie, self.tokens, cache_path, callback=do_update_avatar)
|
'ææç页é¢éœåºè¯¥å®ç°reload()æ¹æ³.'
def reload_current_page(self, *args, **kwds):
    """Reload the notebook page that is currently visible.

    Every page is expected to implement a reload() method.
    """
    current = self.notebook.get_current_page()
    page = self.notebook.get_nth_page(current)
    page.reload()
|
''
def update_clipboard(self, text):
    """Copy *text* to the system clipboard and confirm with a toast."""
    clipboard = Gtk.Clipboard.get(Gdk.SELECTION_CLIPBOARD)
    clipboard.set_text(text, (-1))
    # Translate the template first, then interpolate; the original called
    # _() on the already-formatted string, defeating gettext lookup.
    self.toast(_('{0} copied to clipboard').format(text))
|
'å¯ä»¥äœ¿çšç³»ç»æäŸçNotificationå·¥å
·, ä¹å¯ä»¥åšçªå£çæäžæ¹æ»åšåŒ¹åº'
def toast(self, text):
    """Show *text* via the desktop notification service, when available."""
    if self.notify:
        self.notify.update(Config.APPNAME, text, Config.NAME)
        self.notify.show()
|
''
def load(self, pcs_files):
    """Replace the current listing with *pcs_files*."""
    self.liststore.clear()
    self.display_files(pcs_files)
|
''
def load_next(self, pcs_files):
    """Append *pcs_files* to the current listing (pagination)."""
    self.display_files(pcs_files)
|
'æä»¶çpathéœè¢«æååºæ¥, ç¶åæŸå°äºäžäžªlistäž.'
def display_files(self, pcs_files):
    """Append *pcs_files* to the liststore and queue thumbnail updates."""
    tree_iters = []
    for pcs_file in pcs_files:
        path = pcs_file['path']
        (pixbuf, type_) = self.app.mime.get(path, pcs_file['isdir'], icon_size=self.ICON_SIZE)
        # NOTE(review): NAME_COL is used as a tuple index into
        # os.path.split's (head, tail) pair — presumably it equals 1; confirm.
        name = os.path.split(path)[NAME_COL]
        tooltip = gutil.escape(name)
        size = pcs_file.get('size', 0)
        if pcs_file['isdir']:
            human_size = '--'
        else:
            human_size = util.get_human_size(pcs_file['size'])[0]
        mtime = pcs_file.get('server_mtime', 0)
        human_mtime = time.ctime(mtime)
        tree_iter = self.liststore.append([pixbuf, name, path, tooltip, size, human_size, pcs_file['isdir'], mtime, human_mtime, type_, json.dumps(pcs_file)])
        tree_iters.append(tree_iter)
    cache_path = Config.get_cache_path(self.app.profile['username'])
    # Thumbnails are filled in asynchronously after the rows exist.
    gutil.async_call(gutil.update_liststore_image, self.liststore, tree_iters, PIXBUF_COL, pcs_files, cache_path, self.ICON_SIZE)
|
'è·ååå§çpcsæä»¶ä¿¡æ¯'
def get_pcs_file(self, tree_path):
    """Return the raw pcs file info dict stored (as JSON) in the row."""
    return json.loads(self.liststore[tree_path][PCS_FILE_COL])
|
''
def on_drag_data_get(self, widget, context, data, info, time):
    """Serialize the selected items as a JSON file list for a drag."""
    tree_paths = self.iconview.get_selected_items()
    if (not tree_paths):
        return
    filelist = []
    for tree_path in tree_paths:
        filelist.append({'path': self.liststore[tree_path][PATH_COL], 'newname': self.liststore[tree_path][NAME_COL]})
    filelist_str = json.dumps(filelist)
    if (info == TargetInfo.PLAIN_TEXT):
        data.set_text(filelist_str, (-1))
    elif (info == TargetInfo.URI_LIST):
        # URI requests are answered with an empty list.
        data.set_uris([])
|
''
def on_drag_data_received(self, widget, context, x, y, data, info, time):
    """Move dragged files into the folder row they were dropped on.

    Only folder targets and PLAIN_TEXT (JSON file list) payloads are
    accepted; dropping a folder onto itself is rejected.
    """
    if (not data):
        return
    tree_path = self.iconview.get_path_at_pos(x, y)
    if (tree_path is None):
        return
    target_path = self.liststore[tree_path][PATH_COL]
    is_dir = self.liststore[tree_path][ISDIR_COL]
    if ((not is_dir) or (info != TargetInfo.PLAIN_TEXT)):
        return
    filelist_str = data.get_text()
    if (not filelist_str):
        return
    filelist = json.loads(filelist_str)
    for file_item in filelist:
        if (file_item['path'] == target_path):
            self.app.toast(_('Error: Move folder to itself!'))
            return
    for file_item in filelist:
        file_item['dest'] = target_path
    gutil.async_call(pcs.move, self.app.cookie, self.app.tokens, filelist, callback=self.parent.reload)
|
''
def launch_app(self, tree_path):
    """Open the file at *tree_path* with the system's preferred application.

    Does nothing when no application is registered for the file's MIME type.
    """
    file_type = self.liststore[tree_path][TYPE_COL]
    app_infos = Gio.AppInfo.get_recommended_for_type(file_type)
    if app_infos:
        self.launch_app_with_app_info(app_infos[0])
    # Dead `else: pass` removed.
|
'å建犻线äžèœœä»»å¡, äžèœœéäžçBTç§å.'
def on_cloud_download_item_activated(self, menu_item):
    """Create an offline (cloud) download task from the selected BT seed."""
    tree_paths = self.iconview.get_selected_items()
    if (not tree_paths):
        return
    self.app.cloud_page.add_cloud_bt_task(self.liststore[tree_paths[0]][PATH_COL])
|
''
def on_download_to_activated(self, menu_item):
    """Download the selected files into a user-chosen local folder."""
    tree_paths = self.iconview.get_selected_items()
    if (not tree_paths):
        return
    dialog = Gtk.FileChooserDialog(_('Save to...'), self.app.window, Gtk.FileChooserAction.SELECT_FOLDER, (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OK, Gtk.ResponseType.OK))
    response = dialog.run()
    if (response != Gtk.ResponseType.OK):
        dialog.destroy()
        return
    dirname = dialog.get_filename()
    dialog.destroy()
    pcs_files = [self.get_pcs_file(p) for p in tree_paths]
    self.app.blink_page(self.app.download_page)
    self.app.download_page.add_tasks(pcs_files, dirname)
|
''
def on_props_activated(self, menu_item):
    """Show a properties dialog.

    With no selection, shows the current folder's properties; otherwise
    one dialog per selected item, in sequence.
    """
    tree_paths = self.iconview.get_selected_items()
    if (not tree_paths):
        dialog = FolderPropertyDialog(self, self.app, self.parent.path)
        dialog.run()
        dialog.destroy()
    else:
        for tree_path in tree_paths:
            pcs_file = self.get_pcs_file(tree_path)
            dialog = PropertiesDialog(self.parent, self.app, pcs_file)
            dialog.run()
            dialog.destroy()
|
''
def on_drag_data_received(self, widget, context, x, y, data, info, time):
    """Move dragged files into the folder row they were dropped on (TreeView variant)."""
    if (not data):
        return
    (bx, by) = self.iconview.convert_widget_to_bin_window_coords(x, y)
    selected = Gtk.TreeView.get_path_at_pos(self.iconview, bx, by)
    if (not selected):
        return
    tree_path = selected[0]
    if (tree_path is None):
        return
    target_path = self.liststore[tree_path][PATH_COL]
    is_dir = self.liststore[tree_path][ISDIR_COL]
    if ((not is_dir) or (info != TargetInfo.PLAIN_TEXT)):
        return
    filelist_str = data.get_text()
    if (not filelist_str):
        # Guard missing in the original: json.loads(None) would raise.
        # The IconView drop handler already checks for an empty payload.
        return
    filelist = json.loads(filelist_str)
    for file_item in filelist:
        if (file_item['path'] == target_path):
            self.app.toast(_('Error: Move folder to itself!'))
            return
    for file_item in filelist:
        file_item['dest'] = target_path
    gutil.async_call(pcs.move, self.app.cookie, self.app.tokens, filelist, callback=self.parent.reload)
|
'åœä¿®æ¹æ°æ®åºè¶
è¿50次å, å°±èªåšcommitæ°æ®.'
def check_commit(self, force=False):
    """Commit automatically once 50 modifications have accumulated.

    Pass force=True to commit immediately regardless of the counter.
    """
    self.commit_count += 1
    if force or self.commit_count >= 50:
        self.commit_count = 0
        self.conn.commit()
|
'åæ°æ®åºäžåå
¥äžäžªæ°çä»»å¡è®°åœ, å¹¶è¿åå®çfid'
def add_task_db(self, task, force=True):
    """Insert one upload task record and return its row id (fid)."""
    sql = 'INSERT INTO upload (\n name, source_path, path, size, curr_size, state, state_name,\n human_size, percent, tooltip, threshold)\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
    cursor = self.cursor.execute(sql, task)
    self.check_commit(force=force)
    return cursor.lastrowid
|
''
def add_slice_db(self, fid, slice_end, md5):
    """Record one uploaded slice (fid, slice_end, md5) in the slice table."""
    self.cursor.execute('INSERT INTO slice VALUES(?, ?, ?)', (fid, slice_end, md5))
    self.check_commit()
|
'仿°æ®åºäžæ¥è¯¢source_pathçä¿¡æ¯.
åŠææ²¡æçè¯, å°±è¿åNone'
def get_task_db(self, source_path):
    """Look up the upload record for *source_path*.

    Returns the first matching row, or None when the query yields nothing.
    """
    sql = 'SELECT * FROM upload WHERE source_path=?'
    req = self.cursor.execute(sql, [source_path])
    if req:
        return req.fetchone()
    # The original had a bare `None` expression here (a no-op); make the
    # intended return explicit.
    return None
|
'仿°æ®åºäžååŸfidçææåç.
è¿åçæ¯äžäžªlist, é颿¯æé¡ºåºæå¥œçmd5çåŒ'
def get_slice_db(self, fid):
    """Return the ordered list of md5 values for *fid*'s uploaded slices.

    Returns None when the query fails.
    """
    req = self.cursor.execute('SELECT md5 FROM slice WHERE fid=?', [fid])
    if not req:
        return None
    return [record[0] for record in req]
|
''
def update_task_db(self, row, force=False):
    """Persist the mutable columns of *row* (keyed by fid) to the database."""
    sql = 'UPDATE upload SET \n curr_size=?, state=?, state_name=?, human_size=?, percent=?\n WHERE fid=?\n '
    self.cursor.execute(sql, [row[CURRSIZE_COL], row[STATE_COL], row[STATENAME_COL], row[HUMANSIZE_COL], row[PERCENT_COL], row[FID_COL]])
    self.check_commit(force=force)
|
''
def remove_task_db(self, fid, force=False):
    """Delete the upload record for *fid*, along with its slice records."""
    self.remove_slice_db(fid)
    self.cursor.execute('DELETE FROM upload WHERE fid=?', [fid])
    self.check_commit(force=force)
|
''
def remove_slice_db(self, fid):
    """Delete all slice records belonging to *fid*."""
    self.cursor.execute('DELETE FROM slice WHERE fid=?', [fid])
    self.check_commit()
|
''
def add_file_task(self, dir_name=None):
    """Let the user pick files with a chooser dialog and upload them."""
    file_dialog = Gtk.FileChooserDialog(_('Choose Files..'), self.app.window, Gtk.FileChooserAction.OPEN, (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OK, Gtk.ResponseType.OK))
    file_dialog.set_modal(True)
    file_dialog.set_select_multiple(True)
    file_dialog.set_default_response(Gtk.ResponseType.OK)
    response = file_dialog.run()
    if (response != Gtk.ResponseType.OK):
        file_dialog.destroy()
        return
    source_paths = file_dialog.get_filenames()
    file_dialog.destroy()
    if source_paths:
        self.upload_files(source_paths, dir_name)
|
''
def add_folder_task(self, dir_name=None):
    """Let the user pick folders with a chooser dialog and upload them."""
    folder_dialog = Gtk.FileChooserDialog(_('Choose Folders..'), self.app.window, Gtk.FileChooserAction.SELECT_FOLDER, (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OK, Gtk.ResponseType.OK))
    folder_dialog.set_modal(True)
    folder_dialog.set_select_multiple(True)
    folder_dialog.set_default_response(Gtk.ResponseType.OK)
    folder_dialog.set_current_folder(Config.HOME_DIR)
    response = folder_dialog.run()
    if (response != Gtk.ResponseType.OK):
        folder_dialog.destroy()
        return
    source_paths = folder_dialog.get_filenames()
    folder_dialog.destroy()
    if source_paths:
        self.upload_files(source_paths, dir_name)
|
'source_path - æ¬å°æä»¶çç»å¯¹è·¯åŸ
dir_name - æä»¶åšæå¡åšäžçç¶ç®åœ, åŠæäžºNoneçè¯, äŒåŒ¹åºäžäžª'
def upload_files(self, source_paths, dir_name=None):
    """Queue local files/folders for upload.

    source_paths - absolute local paths.
    dir_name - destination folder on the server; when None a folder
    browser dialog is shown to pick one.
    Paths failing validation are collected and reported in a dialog.
    """
    def scan_folders(folder_path):
        # Recurse into a local folder, mirroring it under dir_name.
        file_list = os.listdir(folder_path)
        source_paths = [os.path.join(folder_path, f) for f in file_list]
        self.upload_files(source_paths, os.path.join(dir_name, os.path.split(folder_path)[1]))
    self.check_first()
    if (not dir_name):
        folder_dialog = FolderBrowserDialog(self, self.app)
        response = folder_dialog.run()
        if (response != Gtk.ResponseType.OK):
            folder_dialog.destroy()
            return
        dir_name = folder_dialog.get_path()
        folder_dialog.destroy()
    invalid_paths = []
    for source_path in source_paths:
        if (util.validate_pathname(source_path) != ValidatePathState.OK):
            invalid_paths.append(source_path)
            continue
        # Skip hidden files unless the profile allows uploading them.
        if (os.path.split(source_path)[1].startswith('.') and (not self.app.profile['upload-hidden-files'])):
            continue
        if os.path.isfile(source_path):
            self.upload_file(source_path, dir_name)
        elif os.path.isdir(source_path):
            scan_folders(source_path)
    self.app.blink_page(self)
    self.scan_tasks()
    if (not invalid_paths):
        return
    # Report every path that failed validation, with the rules below.
    dialog = Gtk.Dialog(_('Invalid Filepath'), self.app.window, Gtk.DialogFlags.MODAL, (Gtk.STOCK_CLOSE, Gtk.ResponseType.OK))
    dialog.set_default_size(640, 480)
    dialog.set_border_width(10)
    box = dialog.get_content_area()
    scrolled_window = Gtk.ScrolledWindow()
    box.pack_start(scrolled_window, True, True, 0)
    text_buffer = Gtk.TextBuffer()
    textview = Gtk.TextView.new_with_buffer(text_buffer)
    scrolled_window.add(textview)
    for invalid_path in invalid_paths:
        text_buffer.insert_at_cursor(invalid_path)
        text_buffer.insert_at_cursor('\n')
    infobar = Gtk.InfoBar()
    infobar.set_message_type(Gtk.MessageType.ERROR)
    box.pack_end(infobar, False, False, 0)
    info_label = Gtk.Label()
    infobar.get_content_area().pack_start(info_label, False, False, 0)
    info_label.set_label(''.join(['* ', ValidatePathStateText[1], '\n', '* ', ValidatePathStateText[2], '\n', '* ', ValidatePathStateText[3], '\n']))
    box.show_all()
    dialog.run()
    dialog.destroy()
|
''
def upload_file(self, source_path, dir_name):
    """Create one upload task row for a single local file.

    Files above 10G are rejected with a toast. The slice threshold grows
    with file size so the slice count stays manageable.
    """
    # NOTE(review): the looked-up existing record is never used — confirm
    # whether duplicate uploads were meant to be skipped here.
    row = self.get_task_db(source_path)
    (source_dir, filename) = os.path.split(source_path)
    path = os.path.join(dir_name, filename)
    size = os.path.getsize(source_path)
    total_size = util.get_human_size(size)[0]
    tooltip = gutil.escape(_('From {0}\nTo {1}').format(source_path, path))
    # Pick a slice size: 128K under 128M, 512K under 512M, otherwise
    # roughly size/1000 up to the 10G hard limit.
    if (size < (2 ** 27)):
        threshold = (2 ** 17)
    elif (size < (2 ** 29)):
        threshold = (2 ** 19)
    elif (size < (10 * (2 ** 30))):
        threshold = math.ceil((size / 1000))
    else:
        self.app.toast(_('{0} is too large to upload (>10G).').format(path))
        return
    task = [filename, source_path, path, size, 0, State.WAITING, StateNames[State.WAITING], '0 / {0}'.format(total_size), 0, tooltip, threshold]
    row_id = self.add_task_db(task, force=False)
    # The database row id (fid) becomes the first liststore column.
    task.insert(0, row_id)
    self.liststore.append(task)
|
'å°ä»»å¡ç¶æè®Ÿå®äžºUploading, åŠææ²¡æè¶
è¿æå€§ä»»å¡æ°çè¯;
åŠåå°å®è®Ÿå®äžºWaiting.'
def start_task(self, row, scan=True):
    """Queue *row* for uploading.

    The row is set to Waiting; scan_tasks() promotes it to Uploading when
    a worker slot is free. Already-running rows only trigger a rescan.
    """
    if (row[STATE_COL] in RUNNING_STATES):
        self.scan_tasks()
        return
    row[STATE_COL] = State.WAITING
    row[STATENAME_COL] = StateNames[State.WAITING]
    self.update_task_db(row)
    if scan:
        self.scan_tasks()
|
''
def pause_tasks(self):
    """Pause every upload task (no-op before the page is first used)."""
    if self.first_run:
        return
    for row in self.liststore:
        self.pause_task(row, scan=False)
|
''
def pause_task(self, row, scan=True):
    """Pause an uploading or waiting task and persist its new state."""
    if (row[STATE_COL] == State.UPLOADING):
        self.remove_worker(row[FID_COL], stop=False)
    if (row[STATE_COL] in (State.UPLOADING, State.WAITING)):
        row[STATE_COL] = State.PAUSED
        row[STATENAME_COL] = StateNames[State.PAUSED]
        self.update_task_db(row)
        if scan:
            self.scan_tasks()
|
''
def remove_task(self, row, scan=True):
    """Remove an upload task: stop its worker, delete its db record,
    and drop its liststore row."""
    if (row[STATE_COL] == State.UPLOADING):
        self.remove_worker(row[FID_COL], stop=True)
    self.remove_task_db(row[FID_COL])
    tree_iter = row.iter
    if tree_iter:
        self.liststore.remove(tree_iter)
    if scan:
        self.scan_tasks()
|
'operator - å€çåœæ°'
def operate_selected_rows(self, operator):
    """Apply *operator* (a handler such as start/pause/remove) to every
    selected upload row.

    fids are collected up front because the operator may mutate the model.
    """
    (model, tree_paths) = self.selection.get_selected_rows()
    if (not tree_paths):
        return
    fids = []
    for tree_path in tree_paths:
        fids.append(model[tree_path][FID_COL])
    for fid in fids:
        row = self.get_row_by_fid(fid)
        if row:
            operator(row)
|
'parent - UploadPage
row - UploadPage.liststoreäžçäžäžªè®°åœ'
def __init__(self, parent, row, cookie, tokens):
    """Upload worker thread.

    parent - the UploadPage.
    row - one record from UploadPage.liststore.
    """
    threading.Thread.__init__(self)
    GObject.GObject.__init__(self)
    self.daemon = True
    self.parent = parent
    self.cookie = cookie
    self.tokens = tokens
    self.upload_mode = self.parent.app.profile['upload-mode']
    # Shallow copy of the row — presumably to detach this thread from the
    # live liststore row; confirm.
    self.row = row[:]
|
''
def upload(self):
    """Upload the whole file in one request; emit 'uploaded' on success
    or 'network-error' on failure, both with the task fid."""
    info = pcs.upload(self.cookie, self.row[SOURCEPATH_COL], self.row[PATH_COL], self.upload_mode)
    if info:
        self.emit('uploaded', self.row[FID_COL])
    else:
        self.emit('network-error', self.row[FID_COL])
|
''
def rapid_upload(self):
    """Try server-side rapid upload (hash match); fall back to slice
    upload when the server does not already have the file."""
    info = pcs.rapid_upload(self.cookie, self.tokens, self.row[SOURCEPATH_COL], self.row[PATH_COL], self.upload_mode)
    if (info and info['md5'] and info['fs_id']):
        self.emit('uploaded', self.row[FID_COL])
    else:
        self.slice_upload()
|
''
def slice_upload(self):
    """Upload the file piece by piece, resuming from CURRSIZE_COL.

    Emits 'slice-sent' per slice, 'merge-files' when all bytes are sent,
    'uploaded' when resume shows the file already complete, and
    'disk-error'/'network-error' on failure. The loop stops as soon as
    the row's state leaves UPLOADING (pause/cancel).
    """
    self.is_slice_upload = True
    fid = self.row[FID_COL]
    slice_start = self.row[CURRSIZE_COL]
    slice_end = self.row[CURRSIZE_COL]
    file_size = os.path.getsize(self.row[SOURCEPATH_COL])
    if (file_size < slice_start):
        # File shrank since the task was created — cannot resume.
        self.emit('disk-error', fid)
        return
    elif ((file_size == slice_start) and (slice_start == self.row[SIZE_COL])):
        # Everything already sent.
        self.emit('uploaded', fid)
        return
    # NOTE(review): fh is not closed on an exception path (no try/finally);
    # normal exits close it below.
    fh = open(self.row[SOURCEPATH_COL], 'rb')
    fh.seek(slice_start)
    while (self.row[STATE_COL] == State.UPLOADING):
        if (slice_end >= file_size):
            self.emit('merge-files', self.row[FID_COL])
            break
        slice_start = slice_end
        slice_end = min((slice_start + self.row[THRESHOLD_COL]), file_size)
        data = fh.read((slice_end - slice_start))
        # Correct slice_end in case read() returned fewer bytes.
        slice_end = (slice_start + len(data))
        info = pcs.slice_upload(self.cookie, data)
        if (info and ('md5' in info)):
            self.emit('slice-sent', fid, slice_end, info['md5'])
        else:
            self.emit('network-error', fid)
            break
    if (not fh.closed):
        fh.close()
    return
|
'åªèŸåºcookieçkey-valueåäž².
æ¯åŠ: HISTORY=21341; PHPSESSION=3289012u39jsdijf28; token=233129'
def header_output(self):
    """Serialize all cookies as one 'key=value; ...' header string.

    e.g. 'HISTORY=21341; PHPSESSION=3289012u39jsdijf28; token=233129'
    """
    return '; '.join('{0}={1}'.format(key, self.get(key).value)
                     for key in self.keys())
|
'è·åäžéšåcookie, å¹¶å°å®èŸåºäžºå笊䞲'
def sub_output(self, *keys):
    """Serialize only the requested cookie *keys* as a 'k=v; ...' string.

    Keys that are not present are silently skipped.
    """
    parts = []
    for key in keys:
        if self.get(key):
            parts.append('{0}={1}'.format(key, self.get(key).value))
    return '; '.join(parts)
|
'读åå€äžªä»¥å笊䞲圢åŒåæŸçcookie.'
def load_list(self, raw_items):
    """Load multiple cookies given as raw strings; no-op for empty input."""
    for raw in (raw_items or ()):
        self.load(raw)
|
''
def do_response(self, response_id):
    """Apply the renames entered in the dialog when OK is pressed.

    Rows whose new name equals the current name are skipped.
    """
    if (response_id != Gtk.ResponseType.OK):
        return
    filelist = []
    for row in self.rows:
        if (row[1].get_text() == row[2].get_text()):
            continue
        filelist.append({'path': row[0], 'newname': row[2].get_text()})
    if (len(filelist) == 0):
        return
    # The rename used to be issued twice — once synchronously and once via
    # async_call. Keep only the async call (it carries the reload callback).
    gutil.async_call(pcs.rename, self.app.cookie, self.app.tokens, filelist, callback=self.app.reload_current_page)
|
'åå§åBTç§åæ¥è¯¢å¯¹è¯æ¡.
source_url - åŠææ¯BTç§åçè¯, å°±æ¯ç§åçç»å¯¹è·¯åŸ.
åŠææ¯ç£éŸçè¯, å°±æ¯ä»¥magent:åŒå€Žçç£éŸéŸæ¥.'
def __init__(self, parent, app, title, source_url, save_path):
    """BT seed browser dialog.

    source_url - absolute path of a torrent file, or a magnet: link.
    save_path - destination folder for the cloud download.
    """
    super().__init__(title, app.window, Gtk.DialogFlags.MODAL, (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OK, Gtk.ResponseType.OK))
    self.app = app
    self.source_url = source_url
    self.save_path = save_path
    self.set_default_response(Gtk.ResponseType.OK)
    self.set_default_size(520, 480)
    self.set_border_width(10)
    box = self.get_content_area()
    select_all_button = Gtk.ToggleButton.new_with_label(_('Select All'))
    select_all_button.props.halign = Gtk.Align.START
    select_all_button.props.margin_bottom = 5
    select_all_button.connect('toggled', self.on_select_all_toggled)
    box.pack_start(select_all_button, False, False, 0)
    scrolled_win = Gtk.ScrolledWindow()
    box.pack_start(scrolled_win, True, True, 0)
    # Columns: checked, name, size (int64), human-readable size.
    self.liststore = Gtk.ListStore(bool, str, GObject.TYPE_INT64, str)
    self.treeview = Gtk.TreeView(model=self.liststore)
    self.treeview.set_tooltip_column(NAME_COL)
    scrolled_win.add(self.treeview)
    check_cell = Gtk.CellRendererToggle()
    check_cell.connect('toggled', self.on_check_cell_toggled)
    check_col = Gtk.TreeViewColumn('', check_cell, active=CHECK_COL)
    self.treeview.append_column(check_col)
    name_cell = Gtk.CellRendererText(ellipsize=Pango.EllipsizeMode.END, ellipsize_set=True)
    name_col = Gtk.TreeViewColumn(_('Name'), name_cell, text=NAME_COL)
    name_col.set_expand(True)
    self.treeview.append_column(name_col)
    size_cell = Gtk.CellRendererText()
    size_col = Gtk.TreeViewColumn(_('Size'), size_cell, text=HUMANSIZE_COL)
    self.treeview.append_column(size_col)
    box.show_all()
    self.request_data()
|
'åšè°çšdialog.run()ä¹åå
è°çšè¿äžªåœæ°æ¥è·åæ°æ®'
def request_data(self):
    """Fetch the magnet/torrent file list; call this before dialog.run().

    Fills the liststore with (checked, name, size, human size). Files
    larger than MIN_SIZE_TO_CHECK or matching CHECK_EXT are pre-selected.
    """
    def on_tasks_received(info, error=None):
        if (error or (not info)):
            logger.error(('BTBrowserDialog.on_tasks_received: %s, %s.' % (info, error)))
            return
        if ('magnet_info' in info):
            tasks = info['magnet_info']
        elif ('torrent_info' in info):
            tasks = info['torrent_info']['file_info']
            # Remember the torrent's sha1 for get_selected().
            self.file_sha1 = info['torrent_info']['sha1']
        elif ('error_code' in info):
            logger.error(('BTBrowserDialog.on_tasks_received: %s, %s.' % (info, error)))
            self.app.toast(info.get('error_msg', ''))
            return
        else:
            logger.error(('BTBrowserDialog.on_tasks_received: %s, %s.' % (info, error)))
            self.app.toast((_('Unknown error occured: %s') % info))
            return
        for task in tasks:
            size = int(task['size'])
            human_size = util.get_human_size(size)[0]
            select = ((size > MIN_SIZE_TO_CHECK) or task['file_name'].endswith(CHECK_EXT))
            self.liststore.append([select, task['file_name'], size, human_size])
    if self.source_url.startswith('magnet'):
        gutil.async_call(pcs.cloud_query_magnetinfo, self.app.cookie, self.app.tokens, self.source_url, self.save_path, callback=on_tasks_received)
    else:
        gutil.async_call(pcs.cloud_query_sinfo, self.app.cookie, self.app.tokens, self.source_url, callback=on_tasks_received)
|
'è¿åéäžèŠäžèœœçæä»¶ççŒå·åsha1åŒ, ä»1åŒå§è®¡æ°.'
def get_selected(self):
    """Return ([1-based indices of checked rows], torrent sha1)."""
    selected_idx = [pos + 1 for pos, row in enumerate(self.liststore)
                    if row[CHECK_COL]]
    return (selected_idx, self.file_sha1)
|
''
def load_next(self):
    """Advance to the next result page and fetch it."""
    self.page = self.page + 1
    self.load_url()
|
''
def load_url(self):
    """Fetch and display one page of a shared link's file list."""
    def on_load_url(filelist, error=None):
        self.url_entry.props.secondary_icon_name = REFRESH_ICON
        # A newer request has superseded this one; ignore stale results.
        if (timestamp != self.url_entry.timestamp):
            logger.debug('SharePage.load_url, dirname not match, ignored')
            return
        if (error or (not filelist)):
            self.app.toast(_('Failed to get files, please reload this page'))
            logger.warn(('SharePage.load_url: %s, %s, %s' % (self.curr_url, filelist, error)))
            self.has_next = False
            return
        state = self.select_all_button.get_active()
        tree_iters = []
        # Insert a '..' row to navigate back up, except at the share root.
        if (filelist and self.dirname and (self.dirname != '/')):
            parent_dirname = os.path.dirname(self.dirname)
            (pixbuf, type_) = self.app.mime.get(parent_dirname, True, icon_size=ICON_SIZE)
            (large_pixbuf, type_) = self.app.mime.get(parent_dirname, True, icon_size=LARGE_ICON_SIZE)
            self.liststore.append([state, pixbuf, large_pixbuf, '..', parent_dirname, True, 0, '0', 0, ''])
        for file_ in filelist:
            isdir = (file_['isdir'] == '1')
            (pixbuf, type_) = self.app.mime.get(file_['path'], isdir, icon_size=ICON_SIZE)
            (large_pixbuf, type_) = self.app.mime.get(file_['path'], isdir, icon_size=LARGE_ICON_SIZE)
            size = int(file_.get('size', 0))
            human_size = util.get_human_size(size)[0]
            mtime = int(file_.get('server_mtime', 0))
            human_mtime = time.ctime(mtime)
            tree_iter = self.liststore.append([state, pixbuf, large_pixbuf, file_['server_filename'], file_['path'], isdir, size, human_size, mtime, human_mtime])
            tree_iters.append(tree_iter)
        cache_path = Config.get_cache_path(self.app.profile['username'])
        gutil.async_call(gutil.update_share_image, self.liststore, tree_iters, ICON_COL, LARGE_ICON_COL, filelist, cache_path, ICON_SIZE, LARGE_ICON_SIZE)
    # Show the abort icon while the request is in flight.
    self.url_entry.props.secondary_icon_name = ABORT_ICON
    if ((not self.uk) or (not self.shareid)):
        self.app.toast(_('Invalid link: {0}!').format(self.curr_url))
        self.has_next = False
        self.url_entry.props.secondary_icon_name = REFRESH_ICON
        return
    # Tag this request so late callbacks from older requests are dropped.
    timestamp = time.time()
    self.url_entry.timestamp = timestamp
    gutil.async_call(pcs.list_share_files, self.app.cookie, self.app.tokens, self.uk, self.shareid, self.dirname, self.page, callback=on_load_url)
|
''
def get_mime(self, path, isdir):
    """Return the MIME type for *path*: FOLDER for directories, UNKNOWN
    when the type cannot be guessed from the name."""
    if isdir:
        return FOLDER
    return mimetypes.guess_type(path)[0] or UNKNOWN
|
'path - the file path; may be an absolute path or just a file name.
isdir - whether the entry is a directory.
icon_size - the icon size; 48 is fine when displaying in an IconView,
Gtk.IconSize.MENU can be used when displaying in a TreeView.
@return returns a Pixbuf object, and the MIME type of the file'
def get(self, path, isdir, icon_size=ICON_SIZE):
    """Look up (and cache) the themed icon pixbuf and MIME type of a file.

    path      - file path; may be absolute or just a file name.
    isdir     - whether the entry is a directory.
    icon_size - requested icon size in pixels.

    Returns a (pixbuf, mime_type) tuple.  When the icon theme has no
    match, falls back to the cached UNKNOWN icon, or ultimately to a
    generic placeholder lookup.
    """
    mime = self.get_mime(path, isdir)
    cache_key = (mime, icon_size)
    if cache_key in self._data:
        return (self._data[cache_key], mime)
    # Ask the icon theme for candidate names derived from the MIME type.
    icon_names = Gio.content_type_get_icon(mime).to_string().split(' ')[2:]
    icon_info = self.app.icon_theme.choose_icon(
        icon_names, icon_size, Gtk.IconLookupFlags.GENERIC_FALLBACK)
    if icon_info:
        pixbuf = icon_info.load_icon()
        self._data[cache_key] = pixbuf
        return (pixbuf, mime)
    # No themed icon: reuse the UNKNOWN icon for this size if cached,
    # otherwise resolve it via a placeholder lookup.
    fallback = self._data.get((UNKNOWN, icon_size), None)
    if not fallback:
        fallback = self.get('/placeholder', isdir, icon_size)[0]
    return (fallback, mime)
|
''
def get_path(self):
    """Return the path of the currently selected row, or '/' if nothing
    is selected."""
    model, tree_iter = self.selection.get_selected()
    return model[tree_iter][PATH_COL] if tree_iter else '/'
|
'Log with level debug.'
def debug(self, txt):
    """Log *txt* with level debug."""
    self(txt, 'debug')
|
'Log with level info.'
def info(self, txt):
    """Log *txt* with level info."""
    self(txt, 'info')
|
'Log with level alert. Alerts have the same urgency as info, but
signals to interctive tools that the user\'s attention should be
drawn to the output even if they\'re not currently looking at the
event log.'
def alert(self, txt):
    """Log *txt* with level alert.

    Alerts have the same urgency as info, but signal interactive
    tools to draw the user's attention to the output even if they
    are not currently looking at the event log.
    """
    self(txt, 'alert')
|
'Log with level warn.'
def warn(self, txt):
    """Log *txt* with level warn."""
    self(txt, 'warn')
|
'Log with level error.'
def error(self, txt):
    """Log *txt* with level error."""
    self(txt, 'error')
|
'onclick is called on click with the tab offset as argument'
def __init__(self, offset, content, attr, onclick):
    """A clickable tab label.

    onclick is called on click with the tab offset as argument.
    """
    text = urwid.Text(content, align='center')
    padded = urwid.Padding(text, align='center', width=('relative', 100))
    wrapped = urwid.AttrWrap(padded, attr)
    urwid.WidgetWrap.__init__(self, wrapped)
    self.offset = offset
    self.onclick = onclick
|
'We are just about to push a window onto the stack.'
def layout_pushed(self, prev):
    """We are just about to push a window onto the stack.

    Remembers the previous window's key context for contextual help,
    then renders this window.
    """
    self.helpctx = prev.keyctx
    self.show()
|
'vspace: how much vertical space to keep clear'
def __init__(self, master, name, vals, vspace):
    """Overlay hosting an options grid editor.

    vspace: how much vertical space to keep clear around the box.
    """
    cols, rows = master.ui.get_cols_rows()
    self.ge = grideditor.OptionsEditor(master, name, vals)
    boxed = urwid.LineBox(
        urwid.BoxAdapter(self.ge, rows - vspace),
        title=name,
    )
    super().__init__(urwid.AttrWrap(boxed, 'background'))
    self.width = math.ceil(cols * 0.8)
|
'Returns the object responding to key input. Usually self, but may be
a wrapped object.'
def key_responder(self):
    """Return the object responding to key input.

    Usually self, but subclasses may return a wrapped object.
    """
    return self
|
'The view focus has changed. Layout objects should implement the API
rather than directly subscribing to events.'
def focus_changed(self):
    """Hook: the view focus has changed.

    Layout objects should implement this API rather than directly
    subscribing to events.  The default implementation does nothing.
    """
|
'The view list has changed.'
def view_changed(self):
    """Hook: the view list has changed.

    The default implementation does nothing.
    """
|
'We are just about to pop a window off the stack, or exit an overlay.'
def layout_popping(self):
    """Hook: we are just about to pop a window off the stack, or exit
    an overlay.

    The default implementation does nothing.
    """
|
'We have just pushed a window onto the stack.'
def layout_pushed(self, prev):
    """Hook: we have just pushed a window onto the stack.

    The default implementation does nothing.
    """
|
'_testing: disables reloading of the lookup table to make testing
possible.'
def __init__(self, _testing=False):
    """Tab-completion state for file system paths.

    _testing: disables building the lookup table from the file
    system, to make unit testing possible.
    """
    self.lookup = None
    self.offset = None
    self.final = None
    self._testing = _testing
|
'Returns the next completion for txt, or None if there is no
completion.'
def complete(self, txt):
    """Return the next completion for *txt*, or *txt*'s expanded path
    if there is no completion.

    On the first call the (display, actual) lookup table is built from
    the file system (unless self._testing is set); subsequent calls
    cycle through the sorted candidates, wrapping around to the
    original input which is appended as the last entry.  self.final is
    set to the actual path of the returned candidate.
    """
    path = os.path.expanduser(txt)
    if not self.lookup:
        if not self._testing:
            # Build the candidate table once per completion session.
            self.lookup = []
            if os.path.isdir(path):
                files = glob.glob(os.path.join(path, '*'))
                prefix = txt
            else:
                files = glob.glob(path + '*')
                prefix = os.path.dirname(txt)
            prefix = prefix or './'
            for f in files:
                display = os.path.join(prefix, os.path.basename(f))
                if os.path.isdir(f):
                    display += '/'
                self.lookup.append((display, f))
        if not self.lookup:
            # No candidates at all: hand back the expanded input.
            self.final = path
            return path
        self.lookup.sort()
        # Append the raw input last (after sorting) so cycling wraps
        # back to what the user typed; start just before the first entry.
        self.lookup.append((txt, txt))
        self.offset = -1
    self.offset += 1
    if self.offset >= len(self.lookup):
        self.offset = 0
    ret = self.lookup[self.offset]
    self.final = ret[1]
    return ret[0]
|
'Args:
fields: (optional) list of ``(name, value)`` header byte tuples,
e.g. ``[(b"Host", b"example.com")]``. All names and values must be bytes.
**headers: Additional headers to set. Will overwrite existing values from `fields`.
For convenience, underscores in header names will be transformed to dashes -
this behaviour does not extend to other methods.
If ``**headers`` contains multiple keys that have equal ``.lower()`` s,
the behavior is undefined.'
def __init__(self, fields=(), **headers):
    """HTTP header collection.

    Args:
        fields: (optional) list of ``(name, value)`` header byte tuples,
            e.g. ``[(b"Host", b"example.com")]``.  All names and values
            must be bytes.
        **headers: Additional headers to set; overwrite existing values
            from `fields`.  For convenience, underscores in header names
            are transformed to dashes - this behaviour does not extend
            to other methods.  If ``**headers`` contains multiple keys
            with equal ``.lower()``, the behavior is undefined.

    Raises:
        TypeError: if any field name or value is not bytes.
    """
    super().__init__(fields)
    for key, value in self.fields:
        if not isinstance(key, bytes) or not isinstance(value, bytes):
            raise TypeError('Header fields must be bytes.')
    # _always_bytes yields bytes, so .replace must receive bytes
    # arguments (str arguments would raise TypeError at runtime).
    headers = {
        _always_bytes(name).replace(b'_', b'-'): _always_bytes(value)
        for name, value in headers.items()
    }
    self.update(headers)
|
'Like :py:meth:`get`, but does not fold multiple headers into a single one.
This is useful for Set-Cookie headers, which do not support folding.
See also: https://tools.ietf.org/html/rfc7230#section-3.2.2'
def get_all(self, name):
    """Like :py:meth:`get`, but does not fold multiple headers into a
    single one.

    Useful for Set-Cookie headers, which do not support folding.
    See also: https://tools.ietf.org/html/rfc7230#section-3.2.2
    """
    key = _always_bytes(name)
    return [_native(value) for value in super().get_all(key)]
|
'Explicitly set multiple headers for the given key.
See: :py:meth:`get_all`'
def set_all(self, name, values):
    """Explicitly set multiple headers for the given key.

    See: :py:meth:`get_all`
    """
    encoded = [_always_bytes(v) for v in values]
    return super().set_all(_always_bytes(name), encoded)
|
'Replaces a regular expression pattern with repl in each "name: value"
header line.
Returns:
The number of replacements made.'
def replace(self, pattern, repl, flags=0, count=0):
    """Replace a regular expression *pattern* with *repl* in each
    "name: value" header line.

    str patterns/replacements are converted to bytes via
    strutils.escaped_str_to_bytes; header fields themselves are bytes,
    so all joining and splitting below uses bytes literals.

    Args:
        count: maximum number of replacements (0 means unlimited).

    Returns:
        The number of replacements made.
    """
    if isinstance(pattern, str):
        pattern = strutils.escaped_str_to_bytes(pattern)
    if isinstance(repl, str):
        repl = strutils.escaped_str_to_bytes(repl)
    pattern = re.compile(pattern, flags)
    replacements = 0
    flag_count = count > 0
    fields = []
    for idx, (name, value) in enumerate(self.fields):
        line, n = pattern.subn(repl, name + b': ' + value, count=count)
        try:
            name, value = line.split(b': ', 1)
        except ValueError:
            # The replacement removed the ": " separator; keep the
            # original field untouched.
            pass
        else:
            replacements += n
            if flag_count:
                count -= n
        fields.append((name, value))
        if flag_count and count == 0:
            # Replacement budget exhausted: carry over the remaining
            # fields unmodified instead of dropping them.
            fields.extend(self.fields[idx + 1:])
            break
    self.fields = tuple(fields)
    return replacements
|
':param fail_early: If true, a SocksError will be raised if the first byte does not indicate socks5.'
@classmethod
def from_file(cls, f, fail_early=False):
    """Parse a SOCKS client greeting from the stream *f*.

    :param fail_early: If true, a SocksError will be raised if the
        first byte does not indicate socks5.
    """
    ver, nmethods = struct.unpack('!BB', f.safe_read(2))
    greeting = cls(ver, [])
    if fail_early:
        greeting.assert_socks5()
    greeting.methods.frombytes(f.safe_read(nmethods))
    return greeting
|
'Raises:
ValueError, if the content-encoding is invalid.'
def make_environ(self, flow, errsoc, **extra):
    """Build a WSGI environ dict (PEP 3333) from an HTTP *flow*.

    Args:
        flow: the flow whose request is exposed to the WSGI app.
        errsoc: file-like object used as wsgi.errors.
        **extra: additional environ entries, overriding the defaults.

    Raises:
        ValueError, if the content-encoding is invalid.
    """
    path = strutils.always_str(flow.request.path, 'latin-1')
    if '?' in path:
        path_info, query = strutils.always_str(path, 'latin-1').split('?', 1)
    else:
        path_info = path
        query = ''
    environ = {
        'wsgi.version': (1, 0),
        'wsgi.url_scheme': strutils.always_str(flow.request.scheme, 'latin-1'),
        # BytesIO requires bytes: fall back to b'' (not '') when the
        # request has no content.
        'wsgi.input': io.BytesIO(flow.request.content or b''),
        'wsgi.errors': errsoc,
        'wsgi.multithread': True,
        'wsgi.multiprocess': False,
        'wsgi.run_once': False,
        'SERVER_SOFTWARE': self.sversion,
        'REQUEST_METHOD': strutils.always_str(flow.request.method, 'latin-1'),
        'SCRIPT_NAME': '',
        'PATH_INFO': urllib.parse.unquote(path_info),
        'QUERY_STRING': query,
        'CONTENT_TYPE': strutils.always_str(flow.request.headers.get('Content-Type', ''), 'latin-1'),
        'CONTENT_LENGTH': strutils.always_str(flow.request.headers.get('Content-Length', ''), 'latin-1'),
        'SERVER_NAME': self.domain,
        'SERVER_PORT': str(self.port),
        'SERVER_PROTOCOL': strutils.always_str(flow.request.http_version, 'latin-1'),
    }
    environ.update(extra)
    if flow.client_conn.address:
        environ['REMOTE_ADDR'] = strutils.always_str(flow.client_conn.address[0], 'latin-1')
        environ['REMOTE_PORT'] = flow.client_conn.address[1]
    for key, value in flow.request.headers.items():
        # Expose request headers as HTTP_*, except the two entries the
        # CGI spec reserves for the CONTENT_* keys above.
        env_key = 'HTTP_' + strutils.always_str(key, 'latin-1').upper().replace('-', '_')
        if env_key not in ('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH'):
            environ[env_key] = value
    return environ
|
'Make a best-effort attempt to write an error page. If headers are
already sent, we just bung the error into the page.'
def error_page(self, soc, headers_sent, s):
    """Make a best-effort attempt to write an error page to *soc*.

    If headers are already sent, we just bung the error into the page
    body.  All writes are bytes: a socket-like object rejects str, so
    the status/header lines are byte literals (the original mixed str
    and bytes writes).
    """
    c = '\n    <html>\n        <h1>Internal Server Error</h1>\n        <pre>{err}"</pre>\n    </html>\n    '.format(err=s).strip().encode()
    if not headers_sent:
        soc.write(b'HTTP/1.1 500 Internal Server Error\r\n')
        soc.write(b'Content-Type: text/html\r\n')
        soc.write('Content-Length: {length}\r\n'.format(length=len(c)).encode())
        soc.write(b'\r\n')
    soc.write(c)
|
'A WebSocket frame contains an initial length_code, and an optional
extended length code to represent the actual length if length code is
larger than 125'
@classmethod
def _make_length_code(self, length):
    """Map a WebSocket payload length to its frame length code.

    Lengths up to 125 are encoded directly in the initial length
    field; 126 and 127 signal that an extended (16- or 64-bit) length
    field follows.
    """
    if length <= 125:
        return length
    if 126 <= length <= 65535:
        return 126
    return 127
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.