Dataset columns:
  code      string (length 501 to 5.19M)
  package   string (length 2 to 81)
  path      string (length 9 to 304)
  filename  string (length 4 to 145)
from contextlib import closing
from PIL import Image
import subprocess
from audiotsm import phasevocoder
from audiotsm.io.wav import WavReader, WavWriter
from scipy.io import wavfile
import numpy as np
import re
import math
from shutil import copyfile, rmtree
import os
import argparse
from pytube import YouTube


class SilenceCut():

    @staticmethod
    def getMaxVolume(s):
        maxv = float(np.max(s))
        minv = float(np.min(s))
        return max(maxv, -minv)

    @staticmethod
    def copyFrame(inputFrame, outputFrame, temp_folder):
        src = temp_folder + "/frame{:06d}".format(inputFrame + 1) + ".jpg"
        dst = temp_folder + "/newFrame{:06d}".format(outputFrame + 1) + ".jpg"
        if not os.path.isfile(src):
            return False
        copyfile(src, dst)
        if outputFrame % 20 == 19:
            print(str(outputFrame + 1) + " time-altered frames saved.")
        return True

    @staticmethod
    def inputToOutputFilename(filename):
        dotIndex = filename.rfind(".")
        return filename[:dotIndex] + "_ALTERED" + filename[dotIndex:]

    @staticmethod
    def createPath(s):
        try:
            os.mkdir(s)
        except OSError:
            assert False, ("Creation of the directory %s failed. (The TEMP folder may already exist. "
                           "Delete or rename it, and try again.)") % s

    @staticmethod
    def deletePath(s):  # Dangerous! Watch out!
        try:
            rmtree(s, ignore_errors=False)
        except OSError as e:
            print("Deletion of the directory %s failed" % s)
            print(e)

    @staticmethod
    def operate(FRAME_RATE=30, SAMPLE_RATE=44100, SILENT_THRESHOLD=0.03, FRAME_SPREADAGE=1,
                NEW_SPEED=[5.00, 1.00], FRAME_QUALITY=3, INPUT_FILE=None, OUTPUT_FILE=None):
        '''
        Parameters mirror the original command-line interface:
          --input_file        the video file you want modified
          --url               a youtube url to download and process
          --output_file       the output file (optional; defaults to the input name with "_ALTERED")
          --silent_threshold  the volume a frame's audio must surpass to be considered "sounded",
                              from 0 (silence) to 1 (max volume)
          --sounded_speed     the speed that sounded (spoken) frames should be played at; typically 1
          --silent_speed      the speed that silent frames should be played at; 999999 for jumpcutting
          --frame_margin      how many silent frames on either side of speech to keep for context
          --sample_rate       sample rate of the input and output videos
          --frame_rate        frame rate of the input and output videos (auto-detected when possible)
          --frame_quality     quality of frames extracted from the input video; 1 is highest,
                              31 is lowest, 3 is the default
        '''
        assert INPUT_FILE is not None, "INPUT FILE IS A REQUIRED FIELD"
        if OUTPUT_FILE is None:
            OUTPUT_FILE = SilenceCut.inputToOutputFilename(INPUT_FILE)

        TEMP_FOLDER = "TEMP"
        AUDIO_FADE_ENVELOPE_SIZE = 400  # smooth out transition audio by quickly fading in/out (arbitrary magic number)
        SilenceCut.createPath(TEMP_FOLDER)

        # Extract every frame as a JPEG and the audio track as a stereo WAV.
        command = "ffmpeg -i " + INPUT_FILE + " -qscale:v " + str(FRAME_QUALITY) + " " + TEMP_FOLDER + "/frame%06d.jpg -hide_banner"
        subprocess.call(command, shell=True)
        command = "ffmpeg -i " + INPUT_FILE + " -ab 160k -ac 2 -ar " + str(SAMPLE_RATE) + " -vn " + TEMP_FOLDER + "/audio.wav"
        subprocess.call(command, shell=True)

        # Probe the input file so the real frame rate can be parsed from ffmpeg's stream info.
        command = "ffmpeg -i " + INPUT_FILE + " 2>&1"
        f = open(TEMP_FOLDER + "/params.txt", "w")
        subprocess.call(command, shell=True, stdout=f)
        f.close()

        sampleRate, audioData = wavfile.read(TEMP_FOLDER + "/audio.wav")
        audioSampleCount = audioData.shape[0]
        maxAudioVolume = SilenceCut.getMaxVolume(audioData)

        with open(TEMP_FOLDER + "/params.txt", "r") as f:
            params = f.read().split('\n')
        for line in params:
            m = re.search('Stream #.*Video.* ([0-9]*) fps', line)
            if m is not None:
                FRAME_RATE = float(m.group(1))

        samplesPerFrame = sampleRate / FRAME_RATE
        audioFrameCount = int(math.ceil(audioSampleCount / samplesPerFrame))

        # Mark every video frame whose normalised audio peak exceeds the silence threshold.
        hasLoudAudio = np.zeros((audioFrameCount))
        for i in range(audioFrameCount):
            start = int(i * samplesPerFrame)
            end = min(int((i + 1) * samplesPerFrame), audioSampleCount)
            audiochunks = audioData[start:end]
            maxchunksVolume = float(SilenceCut.getMaxVolume(audiochunks)) / maxAudioVolume
            if maxchunksVolume >= SILENT_THRESHOLD:
                hasLoudAudio[i] = 1

        # Group consecutive frames into (start, end, loud?) chunks, spreading loudness over FRAME_SPREADAGE frames.
        chunks = [[0, 0, 0]]
        shouldIncludeFrame = np.zeros((audioFrameCount))
        for i in range(audioFrameCount):
            start = int(max(0, i - FRAME_SPREADAGE))
            end = int(min(audioFrameCount, i + 1 + FRAME_SPREADAGE))
            shouldIncludeFrame[i] = np.max(hasLoudAudio[start:end])
            if i >= 1 and shouldIncludeFrame[i] != shouldIncludeFrame[i - 1]:  # did we flip?
                chunks.append([chunks[-1][1], i, shouldIncludeFrame[i - 1]])
        chunks.append([chunks[-1][1], audioFrameCount, shouldIncludeFrame[i - 1]])
        chunks = chunks[1:]

        outputAudioData = np.zeros((0, audioData.shape[1]))
        outputPointer = 0
        lastExistingFrame = None
        for chunk in chunks:
            # Time-stretch this chunk's audio with a phase vocoder at the silent or sounded speed.
            audioChunk = audioData[int(chunk[0] * samplesPerFrame):int(chunk[1] * samplesPerFrame)]
            sFile = TEMP_FOLDER + "/tempStart.wav"
            eFile = TEMP_FOLDER + "/tempEnd.wav"
            wavfile.write(sFile, SAMPLE_RATE, audioChunk)
            with WavReader(sFile) as reader:
                with WavWriter(eFile, reader.channels, reader.samplerate) as writer:
                    tsm = phasevocoder(reader.channels, speed=NEW_SPEED[int(chunk[2])])
                    tsm.run(reader, writer)
            _, alteredAudioData = wavfile.read(eFile)
            leng = alteredAudioData.shape[0]
            endPointer = outputPointer + leng
            outputAudioData = np.concatenate((outputAudioData, alteredAudioData / maxAudioVolume))

            # Smooth the transition by quickly fading the chunk's audio in and out.
            if leng < AUDIO_FADE_ENVELOPE_SIZE:
                outputAudioData[outputPointer:endPointer] = 0  # audio is less than 0.01 sec, just remove it
            else:
                premask = np.arange(AUDIO_FADE_ENVELOPE_SIZE) / AUDIO_FADE_ENVELOPE_SIZE
                mask = np.repeat(premask[:, np.newaxis], 2, axis=1)  # make the fade-envelope mask stereo
                outputAudioData[outputPointer:outputPointer + AUDIO_FADE_ENVELOPE_SIZE] *= mask
                outputAudioData[endPointer - AUDIO_FADE_ENVELOPE_SIZE:endPointer] *= 1 - mask

            # Copy (or duplicate) source frames so the video tracks the time-altered audio.
            startOutputFrame = int(math.ceil(outputPointer / samplesPerFrame))
            endOutputFrame = int(math.ceil(endPointer / samplesPerFrame))
            for outputFrame in range(startOutputFrame, endOutputFrame):
                inputFrame = int(chunk[0] + NEW_SPEED[int(chunk[2])] * (outputFrame - startOutputFrame))
                didItWork = SilenceCut.copyFrame(inputFrame, outputFrame, TEMP_FOLDER)
                if didItWork:
                    lastExistingFrame = inputFrame
                else:
                    SilenceCut.copyFrame(lastExistingFrame, outputFrame, TEMP_FOLDER)
            outputPointer = endPointer

        wavfile.write(TEMP_FOLDER + "/audioNew.wav", SAMPLE_RATE, outputAudioData)

        # Re-mux the renamed frames with the new audio track.
        command = "ffmpeg -framerate " + str(FRAME_RATE) + " -i " + TEMP_FOLDER + "/newFrame%06d.jpg -i " + TEMP_FOLDER + "/audioNew.wav -strict -2 " + OUTPUT_FILE
        subprocess.call(command, shell=True)
        SilenceCut.deletePath(TEMP_FOLDER)
zoom-toolkit
/zoom_toolkit-2.0.tar.gz/zoom_toolkit-2.0/zoom_toolkit/silencecut.py
silencecut.py
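The jump-cut logic above reduces to one measurement: for each video frame, does the peak of its audio slice exceed a threshold relative to the file's global peak? Below is a minimal sketch of just that step, assuming a stereo WAV has already been extracted and using the same 30 fps / 0.03 threshold defaults as silencecut.py; the names peak and loud_frames are illustrative and not part of zoom-toolkit.

import numpy as np
from scipy.io import wavfile

def peak(samples):
    # Peak magnitude; max(max, -min) avoids int16 overflow on np.abs(-32768).
    return max(float(np.max(samples)), -float(np.min(samples)))

def loud_frames(wav_path, frame_rate=30, silent_threshold=0.03):
    """Return a boolean array: True for each video frame whose audio peak exceeds the threshold."""
    sample_rate, audio = wavfile.read(wav_path)
    max_volume = peak(audio)                      # global peak used for normalisation
    samples_per_frame = sample_rate / frame_rate
    frame_count = int(np.ceil(audio.shape[0] / samples_per_frame))
    loud = np.zeros(frame_count, dtype=bool)
    for i in range(frame_count):
        start = int(i * samples_per_frame)
        end = min(int((i + 1) * samples_per_frame), audio.shape[0])
        loud[i] = peak(audio[start:end]) / max_volume >= silent_threshold
    return loud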
from __future__ import print_function import os # Standard PySceneDetect imports: from scenedetect.video_manager import VideoManager from scenedetect.scene_manager import SceneManager # For caching detection metrics and saving/loading to a stats file from scenedetect.stats_manager import StatsManager from scenedetect.video_splitter import split_video_ffmpeg, split_video_mkvmerge # For content-aware scene detection: from scenedetect.detectors.content_detector import ContentDetector import cv2 class SceneDetect(): def find_scenes(video_path): # type: (str) -> List[Tuple[FrameTimecode, FrameTimecode]] video_manager = VideoManager([video_path]) cap = cv2.VideoCapture(video_path) #to read images stats_manager = StatsManager() # Construct our SceneManager and pass it our StatsManager. scene_manager = SceneManager(stats_manager) # Add ContentDetector algorithm (each detector's constructor # takes detector options, e.g. threshold). cont_detector = ContentDetector() scene_manager.add_detector(cont_detector) #frame_list = ContentDetector.process_frame(5, [5]) #print("frame_list", frame_list) base_timecode = video_manager.get_base_timecode() # We save our stats file to {VIDEO_PATH}.stats.csv. stats_file_path = '%s.stats.csv' % video_path scene_list = [] try: # If stats file exists, load it. if os.path.exists(stats_file_path): # Read stats from CSV file opened in read mode: with open(stats_file_path, 'r') as stats_file: stats_manager.load_from_csv(stats_file, base_timecode) # Set downscale factor to improve processing speed. video_manager.set_downscale_factor() # Start video_manager. video_manager.start() # Perform scene detection on video_manager. scene_manager.detect_scenes(frame_source=video_manager) # Obtain list of detected scenes. scene_list = scene_manager.get_scene_list(base_timecode) # Each scene is a tuple of (start, end) FrameTimecodes. print('List of scenes obtained:') for i, scene in enumerate(scene_list): print( 'Scene %2d: Start %s / Frame %d, End %s / Frame %d' % ( i+1, scene[0].get_timecode(), scene[0].get_frames(), scene[1].get_timecode(), scene[1].get_frames(),)) cap.set(cv2.CAP_PROP_POS_FRAMES, scene[1].get_frames()) #ending frame ret, frame = cap.read() # Read the frame #resizing the image scale_percent = 50 width = int(frame.shape[1] * scale_percent / 100) height = int(frame.shape[0] * scale_percent / 100) dsize = (width, height) output_frame = cv2.resize(frame, dsize) cv2.imshow('End frame image of one scene', output_frame) # show frame on window cv2.imwrite("frame%d.jpg" % scene[1].get_frames(), output_frame) cv2.waitKey(0) cv2.destroyAllWindows() # We only write to the stats file if a save is required: if stats_manager.is_save_required(): with open(stats_file_path, 'w') as stats_file: stats_manager.save_to_csv(stats_file, base_timecode) finally: video_manager.release() return scene_list def detect(input_vid_name = "video.mp4"): vid_scene_list = find_scenes(input_vid_name) video_name = input_vid_name.split('.')[0] split_video_mkvmerge([input_vid_name], vid_scene_list, video_name + "scene.mp4", "video.mp4") #split_video_ffmpeg(input_vid_name, vid_scene_list, "ffmpeg.mp4", "video_out.mp4")
zoom-toolkit
/zoom_toolkit-2.0.tar.gz/zoom_toolkit-2.0/zoom_toolkit/scene_detector.py
scene_detector.py
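scene_detector.py mixes detection with frame previews and stats caching. Stripped to the detection flow it relies on, the steps are: build a VideoManager and SceneManager, add a ContentDetector, run detect_scenes, then read back the scene list. A minimal sketch under the assumption that the same legacy PySceneDetect API imported above (VideoManager and the frame_source argument, which later releases removed) is installed; list_scenes is an illustrative name.

from scenedetect.video_manager import VideoManager
from scenedetect.scene_manager import SceneManager
from scenedetect.detectors.content_detector import ContentDetector

def list_scenes(video_path):
    # Returns a list of (start, end) FrameTimecode pairs, one per detected scene.
    video_manager = VideoManager([video_path])
    scene_manager = SceneManager()
    scene_manager.add_detector(ContentDetector())
    base_timecode = video_manager.get_base_timecode()
    try:
        video_manager.set_downscale_factor()          # speed up processing
        video_manager.start()
        scene_manager.detect_scenes(frame_source=video_manager)
        return scene_manager.get_scene_list(base_timecode)
    finally:
        video_manager.release()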
from moviepy.editor import * from moviepy.video.fx import resize import sys import cv2 import numpy as np import cv2 class FaceRemover(): cv2_base_dir = os.path.dirname(os.path.abspath(cv2.__file__)) haar_model = os.path.join(cv2_base_dir, 'data/haarcascade_frontalface_default.xml') faceCascade = cv2.CascadeClassifier(haar_model) haar_model = os.path.join(cv2_base_dir, 'data/haarcascade_upperbody.xml') bodyCascade = cv2.CascadeClassifier(haar_model) haar_model = os.path.join(cv2_base_dir, 'data/haarcascade_eye_tree_eyeglasses.xml') glassCascade = cv2.CascadeClassifier(haar_model) blur_or_remove = 1 def blur_right_corner_auto(image): if(last==None): last=image faces = faceCascade.detectMultiScale( image, scaleFactor=1.1, minNeighbors=3, minSize=(5, 5), flags=cv2.CASCADE_SCALE_IMAGE ) bodies = bodyCascade.detectMultiScale( image, scaleFactor=1.1, minNeighbors=3, minSize=(5, 5), flags=cv2.CASCADE_SCALE_IMAGE ) glasses = glassCascade.detectMultiScale( image, scaleFactor=1.1, minNeighbors=3, minSize=(5, 5), flags=cv2.CASCADE_SCALE_IMAGE ) detected = 0 if(len(faces)>0 or len(bodies)>0 or len(glasses)>0): for(x, y, w, h) in faces: if(y<=58 and y>=0 and image.shape[1]-100<=x and image.shape[1]>=x): detected+=1 for (x, y, w, h) in bodies: if(y<=58 and y>=0 and image.shape[1]-100<=x and image.shape[1]>=x): detected+=1 for(x, y, w, h) in glasses: if(y<=58 and y>=0 and image.shape[1]-100<=x and image.shape[1]>=x): detected+=1 if(detected>=1 or image.mean()>50): frame = np.copy(image) frame[0:58, frame.shape[1]-100 : frame.shape[1]]=cv2.GaussianBlur(frame[0:58, frame.shape[1]-100 : frame.shape[1]],(5,5),3) return frame else: return image else: return image def blur_right_corner(image): frame = np.copy(image) frame[0:58, frame.shape[1]-100 : frame.shape[1]]=cv2.GaussianBlur(frame[0:58, frame.shape[1]-100 : frame.shape[1]],(5,5),3) return frame def remove_right_corner_auto(image): if(last==None): last=image faces = faceCascade.detectMultiScale( image, scaleFactor=1.1, minNeighbors=3, minSize=(5, 5), flags=cv2.CASCADE_SCALE_IMAGE ) bodies = bodyCascade.detectMultiScale( image, scaleFactor=1.1, minNeighbors=3, minSize=(5, 5), flags=cv2.CASCADE_SCALE_IMAGE ) glasses = glassCascade.detectMultiScale( image, scaleFactor=1.1, minNeighbors=3, minSize=(5, 5), flags=cv2.CASCADE_SCALE_IMAGE ) detected = 0 if(len(faces)>0 or len(bodies)>0 or len(glasses)>0): for(x, y, w, h) in faces: if(y<=58 and y>=0 and image.shape[1]-100<=x and image.shape[1]>=x): detected+=1 for (x, y, w, h) in bodies: if(y<=58 and y>=0 and image.shape[1]-100<=x and image.shape[1]>=x): detected+=1 for(x, y, w, h) in glasses: if(y<=58 and y>=0 and image.shape[1]-100<=x and image.shape[1]>=x): detected+=1 if(detected>=1 or image.mean()>50): frame = np.copy(image) frame[0:58, frame.shape[1]-100 : frame.shape[1]]=(0,0,0) return frame else: return image else: return image def remove_right_corner(image): frame = np.copy(image) frame[0:58, frame.shape[1]-100 : frame.shape[1]]=(0,0,0) return frame def face_remove(file,auto=True,start=0,end=0,one_for_blur_zero_for_remove=1): if(auto): clip_of_interest = VideoFileClip(file) W = clip_of_interest.w H = clip_of_interest.h print("Width x Height of clip 1 : ", end = " ") print(str(W) + " x ", str(H)) print("---------------------------------------") clip_of_interest = clip_of_interest.resize((852,480)) clip_blurred = clip_of_interest.fl_image(blur_right_corner_auto) final = concatenate_videoclips([clip_blurred]).set_audio(clip_of_interest.audio) final.write_videofile('modified.mp4', bitrate="3000k") else: 
clip_of_interest = VideoFileClip(file).subclip(start,end) W = clip_of_interest.w H = clip_of_interest.h print("Width x Height of clip 1 : ", end = " ") print(str(W) + " x ", str(H)) print("---------------------------------------") clip_of_interest = clip_of_interest.resize((852,480)) clip_blurred = clip_of_interest.fl_image(blur_right_corner) final = concatenate_videoclips([clip_blurred]).set_audio(clip_of_interest.audio) final.write_videofile('modified.mp4', bitrate="3000k")
zoom-toolkit
/zoom_toolkit-2.0.tar.gz/zoom_toolkit-2.0/zoom_toolkit/face_remove.py
face_remove.py
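Both the automatic and the manual paths in face_remove.py end in the same operation: Gaussian-blurring (or blanking) a hard-coded 58x100-pixel box in the top-right corner of each 852x480 frame, where the self-view thumbnail sits. Here is that core step in isolation, assuming a NumPy image array such as the frames MoviePy's fl_image or OpenCV hands over; blur_top_right is an illustrative name.

import cv2
import numpy as np

def blur_top_right(frame, box_h=58, box_w=100):
    # Blur a fixed box in the top-right corner without modifying the input frame in place.
    out = frame.copy()
    region = out[0:box_h, out.shape[1] - box_w:out.shape[1]]
    out[0:box_h, out.shape[1] - box_w:out.shape[1]] = cv2.GaussianBlur(region, (5, 5), 3)
    return out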
TorMySQL
========

|Build Status|

The highest performance asynchronous MySQL driver.

PyPI page: https://pypi.python.org/pypi/tormysql

About
=====

Presents a Future-based API and uses greenlet for non-blocking access to MySQL.
Supports both `tornado <https://github.com/tornadoweb/tornado>`__ and
`asyncio <https://docs.python.org/3/library/asyncio.html>`__.

Installation
============

::

    pip install TorMySQL

Using Tornado
=============

example pool
------------

::

    from tornado.ioloop import IOLoop
    from tornado import gen
    import tormysql

    pool = tormysql.ConnectionPool(
        max_connections = 20,         # max open connections
        idle_seconds = 7200,          # connection idle timeout in seconds, 0 disables the timeout
        wait_connection_timeout = 3,  # wait-for-connection timeout in seconds
        host = "127.0.0.1",
        user = "root",
        passwd = "TEST",
        db = "test",
        charset = "utf8"
    )

    @gen.coroutine
    def test():
        with (yield pool.Connection()) as conn:
            try:
                with conn.cursor() as cursor:
                    yield cursor.execute("INSERT INTO test(id) VALUES(1)")
            except:
                yield conn.rollback()
            else:
                yield conn.commit()

            with conn.cursor() as cursor:
                yield cursor.execute("SELECT * FROM test")
                datas = cursor.fetchall()

        print(datas)

        yield pool.close()

    ioloop = IOLoop.instance()
    ioloop.run_sync(test)

example helpers
---------------

::

    from tornado.ioloop import IOLoop
    from tornado import gen
    import tormysql

    pool = tormysql.helpers.ConnectionPool(
        max_connections = 20,         # max open connections
        idle_seconds = 7200,          # connection idle timeout in seconds, 0 disables the timeout
        wait_connection_timeout = 3,  # wait-for-connection timeout in seconds
        host = "127.0.0.1",
        user = "root",
        passwd = "TEST",
        db = "test",
        charset = "utf8"
    )

    @gen.coroutine
    def test():
        tx = yield pool.begin()
        try:
            yield tx.execute("INSERT INTO test(id) VALUES(1)")
        except:
            yield tx.rollback()
        else:
            yield tx.commit()

        cursor = yield pool.execute("SELECT * FROM test")
        datas = cursor.fetchall()

        print(datas)

        yield pool.close()

    ioloop = IOLoop.instance()
    ioloop.run_sync(test)

Using asyncio alone
===================

example pool
------------

::

    from asyncio import events
    import tormysql

    pool = tormysql.ConnectionPool(
        max_connections = 20,         # max open connections
        idle_seconds = 7200,          # connection idle timeout in seconds, 0 disables the timeout
        wait_connection_timeout = 3,  # wait-for-connection timeout in seconds
        host = "127.0.0.1",
        user = "root",
        passwd = "TEST",
        db = "test",
        charset = "utf8"
    )

    async def test():
        async with await pool.Connection() as conn:
            try:
                async with conn.cursor() as cursor:
                    await cursor.execute("INSERT INTO test(id) VALUES(1)")
            except:
                await conn.rollback()
            else:
                await conn.commit()

            async with conn.cursor() as cursor:
                await cursor.execute("SELECT * FROM test")
                datas = cursor.fetchall()

        print(datas)

        await pool.close()

    ioloop = events.get_event_loop()
    ioloop.run_until_complete(test())

example helpers
---------------

::

    from asyncio import events
    import tormysql

    pool = tormysql.helpers.ConnectionPool(
        max_connections = 20,         # max open connections
        idle_seconds = 7200,          # connection idle timeout in seconds, 0 disables the timeout
        wait_connection_timeout = 3,  # wait-for-connection timeout in seconds
        host = "127.0.0.1",
        user = "root",
        passwd = "TEST",
        db = "test",
        charset = "utf8"
    )

    async def test():
        async with await pool.begin() as tx:
            await tx.execute("INSERT INTO test(id) VALUES(1)")

        cursor = await pool.execute("SELECT * FROM test")
        datas = cursor.fetchall()

        print(datas)

        await pool.close()

    ioloop = events.get_event_loop()
    ioloop.run_until_complete(test())

Resources
=========

You can read the `PyMySQL Documentation <http://pymysql.readthedocs.io/>`__ online for more information.

License
=======

TorMySQL uses the MIT license, see the LICENSE file for the details.

.. |Build Status| image:: https://travis-ci.org/snower/TorMySQL.svg?branch=master
   :target: https://travis-ci.org/snower/TorMySQL
zoom-tormysql
/zoom-tormysql-0.4.2.tar.gz/zoom-tormysql-0.4.2/README.rst
README.rst
import sys import time from collections import deque from . import platform from pymysql._compat import text_type from .client import Client from . import log from .util import py3 class ConnectionPoolClosedError(Exception): pass class ConnectionPoolUsedError(Exception): pass class ConnectionNotFoundError(Exception): pass class ConnectionNotUsedError(Exception): pass class ConnectionUsedError(Exception): pass class WaitConnectionTimeoutError(Exception): pass class Connection(Client): def __init__(self, pool, *args, **kwargs): self._pool = pool self.idle_time = time.time() self.used_time = time.time() super(Connection, self).__init__(*args, **kwargs) def close(self, remote_close=False): if remote_close: return self.do_close() return self._pool.release_connection(self) def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): self.close() if py3: exec(""" async def __aenter__(self): return self async def __aexit__(self, exc_type, exc_val, exc_tb): if self._connection.autocommit_mode: self.close() else: try: if exc_type: await self.rollback() else: await self.commit() except: exc_info = sys.exc_info() self.close(True) try: raise exc_info[1].with_traceback(exc_info[2]) finally: exc_info = None else: self.close() """) def __del__(self): try: self.close() except ConnectionNotUsedError: pass def do_close(self): return super(Connection, self).close() class RecordQueryConnection(Connection): def __init__(self, *args, **kwargs): super(RecordQueryConnection, self).__init__(*args, **kwargs) self._last_query_sql = "" def connect(self): future = super(RecordQueryConnection, self).connect() origin_query = self._connection.query def query(sql, unbuffered=False): self._last_query_sql = sql return origin_query(sql, unbuffered) self._connection.query = query return future def query(self, sql, unbuffered=False): self._last_query_sql = sql return super(RecordQueryConnection, self).query(sql, unbuffered) def get_last_query_sql(self): if isinstance(self._last_query_sql, text_type): return self._last_query_sql.encode("utf-8") return self._last_query_sql class ConnectionPool(object): def __init__(self, *args, **kwargs): self._loop = None self._max_connections = kwargs.pop("max_connections", 32) self._idle_seconds = kwargs.pop("idle_seconds", 7200) self._wait_connection_timeout = kwargs.pop("wait_connection_timeout", 8) self._debug_connection_used = kwargs.pop("debug_connection_used", False) if self._debug_connection_used: self._connection_cls = RecordQueryConnection else: self._connection_cls = Connection self._args = args self._kwargs = kwargs self._connections = deque(maxlen = self._max_connections) self._used_connections = {} self._connections_count = 0 self._wait_connections = deque() self._wait_connection_timeout_futures = deque() self._closed = False self._close_future = None self._check_idle_callback = False @property def closed(self): return self._closed def connection_connected_callback(self, future, connection_future): if (hasattr(connection_future, "_exc_info") and connection_future._exc_info is not None) \ or (hasattr(connection_future, "_exception") and connection_future._exception is not None): future.set_exception(connection_future.exception()) else: future.set_result(connection_future.result()) while self._wait_connections and self._connections: connection = self._connections.pop() if connection.open: if self.continue_next_wait(connection): self._used_connections[id(connection)] = connection else: self._connections.append(connection) break if self._wait_connections and 
self._connections_count < self._max_connections: wait_future, create_time = self._wait_connections.popleft() wait_time = time.time() - create_time if wait_time >= self._wait_connection_timeout: self._loop.call_soon(wait_future.set_exception, WaitConnectionTimeoutError( "Wait connection timeout, used time %.2fs." % wait_time)) else: self._loop.call_soon(self.init_connection, wait_future) def init_connection(self, future = None): self._loop = platform.current_ioloop() future = future or platform.Future() connection = self._connection_cls(self, *self._args, **self._kwargs) connection.set_close_callback(self.connection_close_callback) connection_future = connection.connect() self._connections_count += 1 self._used_connections[id(connection)] = connection connection_future.add_done_callback(lambda connection_future: self.connection_connected_callback(future, connection_future)) if self._idle_seconds > 0 and not self._check_idle_callback: self._loop.call_later(min(self._idle_seconds, 60), self.check_idle_connections) self._check_idle_callback = True return future def get_connection(self): if self._closed: raise ConnectionPoolClosedError("Connection pool closed.") while self._connections: connection = self._connections.pop() self._used_connections[id(connection)] = connection connection.used_time = time.time() if connection.open: future = platform.Future() future.set_result(connection) return future if self._connections_count < self._max_connections: future = self.init_connection() else: future = platform.Future() self._wait_connections.append((future, time.time())) return future Connection = get_connection connect = get_connection def release_connection(self, connection): if self._closed: return connection.do_close() if not connection.open: future = platform.Future() future.set_result(None) return future if self.continue_next_wait(connection): while self._wait_connections and self._connections: connection = self._connections.pop() if connection.open: if self.continue_next_wait(connection): self._used_connections[id(connection)] = connection else: self._connections.append(connection) break else: try: del self._used_connections[id(connection)] self._connections.append(connection) connection.idle_time = time.time() except KeyError: if connection not in self._connections: self._loop.call_soon(connection.do_close) raise ConnectionNotFoundError("Connection not found.") else: raise ConnectionNotUsedError("Connection is not used, you maybe close wrong connection.") future = platform.Future() future.set_result(None) return future def continue_next_wait(self, connection): now = time.time() if self._wait_connections: wait_future, create_time = self._wait_connections.popleft() wait_time = now - create_time if wait_time >= self._wait_connection_timeout: self._wait_connection_timeout_futures.append((wait_future, wait_time)) self._loop.call_soon(self.do_wait_future_exception_timeout) return False else: connection.used_time = now self._loop.call_soon(wait_future.set_result, connection) return True def do_wait_future_exception_timeout(self): while self._wait_connection_timeout_futures: wait_future, wait_time = self._wait_connection_timeout_futures.popleft() self._loop.call_soon(wait_future.set_exception, WaitConnectionTimeoutError("Wait connection timeout, used time %.2fs." 
% wait_time)) def close_connection(self, connection): try: self._connections.remove(connection) self._used_connections[id(connection)] = connection return connection.do_close() except ValueError: raise ConnectionUsedError("Connection is used, you can not close it.") def connection_close_callback(self, connection): try: del self._used_connections[id(connection)] self._connections_count -= 1 except KeyError: try: self._connections.remove(connection) self._connections_count -= 1 except ValueError: log.get_log().warning("Close unknown Connection %s.", connection) if self._close_future and not self._used_connections and not self._connections: self._loop.call_soon(self._close_future.set_result, None) self._close_future = None def close(self, timeout = None): self._loop = platform.current_ioloop() if self._closed: raise ConnectionPoolClosedError("Connection pool closed.") self._closed = True self._close_future = close_future = platform.Future() if self._used_connections: if timeout: def on_timeout(): if self._closed and self._close_future and not self._close_future.done(): close_future, self._close_future = self._close_future, None close_future.set_exception( ConnectionPoolUsedError("Connection pool is used, you must wait all query is finish.")) self._loop.call_later(timeout, on_timeout) while len(self._wait_connections): future, create_time = self._wait_connections.popleft() wait_time = time.time() - create_time if wait_time >= self._wait_connection_timeout: self._loop.call_soon(future.set_exception, WaitConnectionTimeoutError( "Wait connection timeout, used time %.2fs." % wait_time)) else: self._loop.call_soon(future.set_exception, ConnectionPoolClosedError("Connection pool closed.")) while len(self._connections): connection = self._connections.popleft() self._used_connections[id(connection)] = connection connection.do_close() if not self._connections_count: close_future.set_result(None) self._close_future = None return close_future def check_idle_connections(self): self._loop = platform.current_ioloop() now = time.time() while self._wait_connections: wait_future, create_time = self._wait_connections[0] wait_time = now - create_time if wait_time < self._wait_connection_timeout: break self._wait_connections.popleft() self._loop.call_soon(wait_future.set_exception, WaitConnectionTimeoutError( "Wait connection timeout, used time %.2fs." 
% wait_time)) for connection in self._used_connections.values(): if now - connection.used_time > (self._wait_connection_timeout * 4) ** 2: connection.do_close() if self._debug_connection_used: log.get_log().error("Connection used timeout close, used time %.2fs %s %s.\n%s", now - connection.used_time, connection, self, connection.get_last_query_sql()) else: log.get_log().error("Connection used timeout close, used time %.2fs %s %s.", now - connection.used_time, connection, self) elif now - connection.used_time > self._wait_connection_timeout ** 2 * 2: if self._debug_connection_used: log.get_log().warning("Connection maybe not release, used time %.2fs %s %s.\n%s", now - connection.used_time, connection, self, connection.get_last_query_sql()) else: log.get_log().warning("Connection maybe not release, used time %.2fs %s %s.", now - connection.used_time, connection, self) elif self._debug_connection_used: log.get_log().warning("Connection used time %.2fs %s %s.\n%s", now - connection.used_time, connection, self, connection.get_last_query_sql()) next_check_time = now + self._idle_seconds for connection in tuple(self._connections): if now - connection.idle_time > self._idle_seconds: self.close_connection(connection) elif connection.idle_time + self._idle_seconds < next_check_time: next_check_time = connection.idle_time + self._idle_seconds if not self._closed and (self._connections or self._used_connections): self._loop.call_later(min(next_check_time - now, 60), self.check_idle_connections) else: self._check_idle_callback = False def __str__(self): return "%s <%s,%s>" % ( super(ConnectionPool, self).__str__(), len(self._connections), len(self._used_connections))
zoom-tormysql
/zoom-tormysql-0.4.2.tar.gz/zoom-tormysql-0.4.2/tormysql/pool.py
pool.py
from . import platform from .util import async_call_method from .connections import Connection from .cursor import Cursor from .util import py3 class Client(object): def __init__(self, *args, **kwargs): self._args = args self._kwargs = kwargs self._connection = None self._closed = False self._close_callback = None if "cursorclass" in kwargs and issubclass(kwargs["cursorclass"], Cursor): kwargs["cursorclass"] = kwargs["cursorclass"].__delegate_class__ def connect(self): future = platform.Future() def on_connected(connection_future): if (hasattr(connection_future, "_exc_info") and connection_future._exc_info is not None) \ or (hasattr(connection_future, "_exception") and connection_future._exception is not None): future.set_exception(connection_future.exception()) else: future.set_result(self) self._connection = Connection(defer_connect = True, *self._args, **self._kwargs) self._connection.set_close_callback(self.connection_close_callback) connection_future = async_call_method(self._connection.connect) connection_future.add_done_callback(on_connected) return future def connection_close_callback(self): self._closed = True if self._close_callback and callable(self._close_callback): close_callback, self._close_callback = self._close_callback, None close_callback(self) def set_close_callback(self, callback): self._close_callback = callback def close(self): if self._closed: return if not self._connection: return return async_call_method(self._connection.close) def autocommit(self, value): return async_call_method(self._connection.autocommit, value) def begin(self): return async_call_method(self._connection.begin) def commit(self): return async_call_method(self._connection.commit) def rollback(self): return async_call_method(self._connection.rollback) def show_warnings(self): return async_call_method(self._connection.show_warnings) def select_db(self, db): return async_call_method(self._connection.select_db, db) def cursor(self, cursor_cls=None): if cursor_cls is None: cursor_cls = self._connection.cursorclass cursor = self._connection.cursor( cursor_cls.__delegate_class__ if cursor_cls and issubclass(cursor_cls, Cursor) else cursor_cls ) if issubclass(cursor_cls, Cursor): return cursor_cls(cursor) else: return cursor.__tormysql_class__(cursor) def query(self, sql, unbuffered=False): return async_call_method(self._connection.query, sql, unbuffered) def next_result(self): return async_call_method(self._connection.next_result) def kill(self, thread_id): return async_call_method(self._connection.kill, thread_id) def ping(self, reconnect=True): return async_call_method(self._connection.ping, reconnect) def set_charset(self, charset): return async_call_method(self._connection.set_charset, charset) def __getattr__(self, name): return getattr(self._connection, name) def __del__(self): try: self.close() except: pass def __enter__(self): return self.cursor() def __exit__(self, exc_type, exc_val, exc_tb): pass if py3: exec(""" async def __aenter__(self): return self.cursor() async def __aexit__(self, exc_type, exc_val, exc_tb): if exc_type: await self.rollback() else: await self.commit() """) def __str__(self): return str(self._connection)
zoom-tormysql
/zoom-tormysql-0.4.2.tar.gz/zoom-tormysql-0.4.2/tormysql/client.py
client.py
import sys from . import platform try: from tornado.util import raise_exc_info except ImportError: def raise_exc_info(exc_info): try: raise exc_info[1].with_traceback(exc_info[2]) finally: exc_info = None from .pool import ConnectionPool as BaseConnectionPool from . import log try: from tornado.gen import Return except ImportError: pass from .util import py3 class TransactionClosedError(Exception): pass class Transaction(object): def __init__(self, pool, connection): self._pool = pool self._connection = connection def _ensure_conn(self): if self._connection is None: raise TransactionClosedError("Transaction is closed already.") if py3: exec(""" async def execute(self, query, params=None, cursor_cls=None): self._ensure_conn() async with self._connection.cursor(cursor_cls) as cursor: await cursor.execute(query, params) return cursor async def executemany(self, query, params=None, cursor_cls=None): self._ensure_conn() async with self._connection.cursor(cursor_cls) as cursor: await cursor.executemany(query, params) return cursor async def commit(self): self._ensure_conn() try: await self._connection.commit() except: exc_info = sys.exc_info() self._connection.close(True) raise_exc_info(exc_info) else: self._connection.close() finally: self._connection = None async def rollback(self): self._ensure_conn() try: await self._connection.rollback() except: exc_info = sys.exc_info() self._connection.close(True) raise_exc_info(exc_info) else: self._connection.close() finally: self._connection = None async def __aenter__(self): return self async def __aexit__(self, exc_type, exc_val, exc_tb): if exc_type: await self.rollback() else: await self.commit() """) else: @platform.coroutine def execute(self, query, params=None, cursor_cls=None): self._ensure_conn() cursor = self._connection.cursor(cursor_cls) try: yield cursor.execute(query, params) finally: yield cursor.close() raise Return(cursor) @platform.coroutine def executemany(self, query, params=None, cursor_cls=None): self._ensure_conn() cursor = self._connection.cursor(cursor_cls) try: yield cursor.executemany(query, params) finally: yield cursor.close() raise Return(cursor) @platform.coroutine def commit(self): self._ensure_conn() try: yield self._connection.commit() except: exc_info = sys.exc_info() self._connection.close(True) raise_exc_info(exc_info) else: self._connection.close() finally: self._connection = None @platform.coroutine def rollback(self): self._ensure_conn() try: yield self._connection.rollback() except: exc_info = sys.exc_info() self._connection.close(True) raise_exc_info(exc_info) else: self._connection.close() finally: self._connection = None def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): if self._connection: log.get_log().warning("Transaction has not committed or rollbacked %s.", self._connection) def __del__(self): if self._connection: log.get_log().warning("Transaction has not committed or rollbacked %s.", self._connection) self._connection.do_close() self._connection = None class ConnectionPool(BaseConnectionPool): def __init__(self, *args, **kwargs): super(ConnectionPool, self).__init__(*args, **kwargs) if py3: exec(""" async def execute(self, query, params=None, cursor_cls=None): async with await self.Connection() as connection: async with connection.cursor(cursor_cls) as cursor: await cursor.execute(query, params) return cursor async def executemany(self, query, params=None, cursor_cls=None): async with await self.Connection() as connection: async with connection.cursor(cursor_cls) as 
cursor: await cursor.executemany(query, params) return cursor async def begin(self): connection = await self.Connection() try: await connection.begin() except: exc_info = sys.exc_info() connection.close() raise_exc_info(exc_info) transaction = Transaction(self, connection) return transaction """) else: @platform.coroutine def execute(self, query, params=None, cursor_cls=None): with (yield self.Connection()) as connection: cursor = connection.cursor(cursor_cls) try: yield cursor.execute(query, params) if not connection._connection.autocommit_mode: yield connection.commit() except: exc_info = sys.exc_info() if not connection._connection.autocommit_mode: yield connection.rollback() raise_exc_info(exc_info) finally: yield cursor.close() raise Return(cursor) @platform.coroutine def executemany(self, query, params=None, cursor_cls=None): with (yield self.Connection()) as connection: cursor = connection.cursor(cursor_cls) try: yield cursor.executemany(query, params) if not connection._connection.autocommit_mode: yield connection.commit() except: exc_info = sys.exc_info() if not connection._connection.autocommit_mode: yield connection.rollback() raise_exc_info(exc_info) finally: yield cursor.close() raise Return(cursor) @platform.coroutine def begin(self): connection = yield self.Connection() try: yield connection.begin() except: exc_info = sys.exc_info() connection.close() raise_exc_info(exc_info) transaction = Transaction(self, connection) raise Return(transaction)
zoom-tormysql
/zoom-tormysql-0.4.2.tar.gz/zoom-tormysql-0.4.2/tormysql/helpers.py
helpers.py
from __future__ import absolute_import, division, print_function, with_statement import greenlet import sys import struct import traceback from pymysql import err from pymysql.constants import CR from pymysql.connections import Connection as _Connection from . import platform if sys.version_info[0] >= 3: import io StringIO = io.BytesIO else: import cStringIO StringIO = cStringIO.StringIO class SSLCtx(object): _ctx = None _connection = None def __init__(self, connection, ctx): self._ctx = ctx self._connection = connection def __getattr__(self, item): return getattr(self._ctx, item) def __setattr__(self, key, value): if not self or not self._ctx or not self._connection: return super(SSLCtx, self).__setattr__(key, value) return setattr(self._ctx, key, value) def __getitem__(self, item): return self._ctx[item] def wrap_socket(self, sock, server_side=False, do_handshake_on_connect=True, suppress_ragged_eofs=True, server_hostname=None, session=None): child_gr = greenlet.getcurrent() main = child_gr.parent assert main is not None, "Execut must be running in child greenlet" def finish(future): if (hasattr(future, "_exc_info") and future._exc_info is not None) \ or (hasattr(future, "_exception") and future._exception is not None): child_gr.throw(future.exception()) else: child_gr.switch(future.result()) future = sock.start_tls(False, self._ctx, server_hostname=server_hostname, connect_timeout=self._connection.connect_timeout) future.add_done_callback(finish) return main.switch() class Connection(_Connection): def __init__(self, *args, **kwargs): super(Connection, self).__init__(*args, **kwargs) self._close_callback = None self._rbuffer = StringIO(b'') self._rbuffer_size = 0 self._loop = None def set_close_callback(self, callback): self._close_callback = callback def stream_close_callback(self): if self._close_callback and callable(self._close_callback): close_callback, self._close_callback = self._close_callback, None close_callback() if self._sock: self._sock.set_close_callback(None) self._sock = None self._rfile = None self.ctx = None @property def open(self): return self._sock and not self._sock.closed() def _force_close(self): if self._sock: try: sock = self._sock self._sock = None self._rfile = None sock.close() except: pass self.ctx = None __del__ = _force_close def _create_ssl_ctx(self, sslp): ctx = super(Connection, self)._create_ssl_ctx(sslp) return SSLCtx(self, ctx) def connect(self): self._closed = False self._loop = platform.current_ioloop() try: if self.unix_socket: self.host_info = "Localhost via UNIX socket" address = self.unix_socket self._secure = True else: self.host_info = "socket %s:%d" % (self.host, self.port) address = (self.host, self.port) sock = platform.IOStream(address, self.bind_address) sock.set_close_callback(self.stream_close_callback) child_gr = greenlet.getcurrent() main = child_gr.parent assert main is not None, "Execut must be running in child greenlet" def connected(future): if (hasattr(future, "_exc_info") and future._exc_info is not None) \ or (hasattr(future, "_exception") and future._exception is not None): child_gr.throw(future.exception()) else: self._sock = sock child_gr.switch() future = sock.connect(address, self.connect_timeout) future.add_done_callback(connected) main.switch() self._rfile = self._sock self._next_seq_id = 0 self._get_server_information() self._request_authentication() if self.sql_mode is not None: c = self.cursor() c.execute("SET sql_mode=%s", (self.sql_mode,)) if self.init_command is not None: c = self.cursor() 
c.execute(self.init_command) self.commit() if self.autocommit_mode is not None: self.autocommit(self.autocommit_mode) except Exception as e: if self._sock: self._rfile = None self._sock.close() self._sock = None exc = err.OperationalError( 2003, "Can't connect to MySQL server on %s (%r)" % ( self.unix_socket or ("%s:%s" % (self.host, self.port)), e)) # Keep original exception and traceback to investigate error. exc.original_exception = e exc.traceback = traceback.format_exc() raise exc def _read_bytes(self, num_bytes): if num_bytes <= self._rbuffer_size: self._rbuffer_size -= num_bytes return self._rbuffer.read(num_bytes) if self._rbuffer_size > 0: self._sock._read_buffer = self._rbuffer.read() + self._sock._read_buffer self._sock._read_buffer_size += self._rbuffer_size self._rbuffer_size = 0 if num_bytes <= self._sock._read_buffer_size: data, data_len = self._sock._read_buffer, self._sock._read_buffer_size self._sock._read_buffer = bytearray() self._sock._read_buffer_size = 0 if data_len == num_bytes: return bytes(data) self._rbuffer_size = data_len - num_bytes self._rbuffer = StringIO(data) return self._rbuffer.read(num_bytes) child_gr = greenlet.getcurrent() main = child_gr.parent assert main is not None, "Execut must be running in child greenlet" def read_callback(future): try: data = future.result() if len(data) == num_bytes: return child_gr.switch(bytes(data)) self._rbuffer_size = len(data) - num_bytes self._rbuffer = StringIO(data) return child_gr.switch(self._rbuffer.read(num_bytes)) except Exception as e: self._force_close() return child_gr.throw(err.OperationalError( CR.CR_SERVER_LOST, "Lost connection to MySQL server during query (%s)" % (e,))) try: future = self._sock.read_bytes(num_bytes) future.add_done_callback(read_callback) except (AttributeError, IOError) as e: self._force_close() raise err.OperationalError( CR.CR_SERVER_LOST, "Lost connection to MySQL server during query (%s)" % (e,)) return main.switch() def _write_bytes(self, data): try: self._sock.write(data) except (AttributeError, IOError) as e: self._force_close() raise err.OperationalError( CR.CR_SERVER_GONE_ERROR, "MySQL server has gone away (%r)" % (e,)) def _request_authentication(self): super(Connection, self)._request_authentication() self._rfile = self._sock def __str__(self): return "%s %s" % (super(Connection, self).__str__(), {"host": self.host or self.unix_socket, "user": self.user, "database": self.db, "port": self.port})
zoom-tormysql
/zoom-tormysql-0.4.2.tar.gz/zoom-tormysql-0.4.2/tormysql/connections.py
connections.py
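connections.py makes PyMySQL's blocking protocol code cooperative by running it inside a child greenlet: whenever a connect or read would block, it registers a callback on a Future and switches back to the parent greenlet, which keeps the event loop running; the callback later switches the child back in with the result. A minimal, self-contained sketch of that switching pattern on top of asyncio follows; wait_for, blocking_style and demo are illustrative names, not TorMySQL API.

import asyncio
import greenlet

def wait_for(future):
    """Suspend the current child greenlet until `future` resolves, without blocking the event loop."""
    child = greenlet.getcurrent()
    main = child.parent
    assert main is not None, "must be called from a child greenlet"
    future.add_done_callback(lambda f: child.switch(f.result()))
    return main.switch()  # hand control back to the parent until the callback fires

async def demo():
    loop = asyncio.get_running_loop()
    fut = loop.create_future()
    loop.call_later(0.1, fut.set_result, "done")

    result = {}

    def blocking_style():
        # Reads like synchronous code, but actually suspends inside wait_for().
        result["value"] = wait_for(fut)

    greenlet.greenlet(blocking_style).switch()  # start the child; it suspends immediately
    await fut                                   # let the loop run; the done-callback resumes the child
    print(result["value"])                      # -> "done"

asyncio.run(demo())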
from . import platform from pymysql.cursors import ( Cursor as OriginCursor, DictCursor as OriginDictCursor, SSCursor as OriginSSCursor, SSDictCursor as OriginSSDictCursor) from .util import async_call_method, py3 class CursorNotReadAllDataError(Exception): pass class CursorNotIterError(Exception): pass class Cursor(object): __delegate_class__ = OriginCursor def __init__(self, cursor): self._cursor = cursor def __del__(self): if self._cursor: try: self.close() except: pass def close(self): if self._cursor.connection is None or not self._cursor._result or not self._cursor._result.has_next: self._cursor.close() future = platform.Future() future.set_result(None) else: future = async_call_method(self._cursor.close) return future def nextset(self): return async_call_method(self._cursor.nextset) def mogrify(self, query, args=None): return self._cursor.mogrify(query, args) def execute(self, query, args=None): return async_call_method(self._cursor.execute, query, args) def executemany(self, query, args): return async_call_method(self._cursor.executemany, query, args) def callproc(self, procname, args=()): return async_call_method(self._cursor.callproc, procname, args) def fetchone(self): return self._cursor.fetchone() def fetchmany(self, size=None): return self._cursor.fetchmany(size) def fetchall(self): return self._cursor.fetchall() def scroll(self, value, mode='relative'): return self._cursor.scroll(value, mode) def __iter__(self): return self._cursor.__iter__() def __getattr__(self, name): return getattr(self._cursor, name) def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): "WARING: if cursor not read all data, the connection next query is error" if self._cursor._result and self._cursor._result.has_next: raise CursorNotReadAllDataError("If cursor not read all data, the connection next query is error.") self.close() if py3: exec(""" def __aiter__(self): return self._cursor.__iter__() async def __aenter__(self): return self async def __aexit__(self, exc_type, exc_val, exc_tb): await self.close() """) setattr(OriginCursor, "__tormysql_class__", Cursor) class DictCursor(Cursor): __delegate_class__ = OriginDictCursor setattr(OriginDictCursor, "__tormysql_class__", DictCursor) class SSCursor(Cursor): __delegate_class__ = OriginSSCursor def close(self): if self._cursor.connection is None: future = platform.Future() future.set_result(None) else: future = async_call_method(self._cursor.close) return future def read_next(self): return async_call_method(self._cursor.read_next) def fetchone(self): return async_call_method(self._cursor.fetchone) def fetchmany(self, size=None): return async_call_method(self._cursor.fetchmany, size) def fetchall(self): return async_call_method(self._cursor.fetchall) def scroll(self, value, mode='relative'): return async_call_method(self._cursor.scroll, value, mode) def __iter__(self): def next(): future = async_call_method(self._cursor.fetchone) if future.done() and future._result is None: return None return future return iter(next, None) def __enter__(self): raise AttributeError("SSCursor not support with statement") def __exit__(self, exc_type, exc_val, exc_tb): raise AttributeError("SSCursor not support with statement") if py3: exec(""" def __aiter__(self): return self async def __anext__(self): result = await async_call_method(self._cursor.fetchone) if result is None: raise StopAsyncIteration() return result async def __aenter__(self): return self async def __aexit__(self, exc_type, exc_val, exc_tb): await self.close() """) 
setattr(OriginSSCursor, "__tormysql_class__", SSCursor) class SSDictCursor(SSCursor): __delegate_class__ = OriginSSDictCursor setattr(OriginSSDictCursor, "__tormysql_class__", SSDictCursor)
zoom-tormysql
/zoom-tormysql-0.4.2.tar.gz/zoom-tormysql-0.4.2/tormysql/cursor.py
cursor.py
from __future__ import absolute_import, division, print_function import sys import socket import errno from tornado.iostream import IOStream as BaseIOStream, SSLIOStream as BaseSSLIOStream, StreamClosedError, _ERRNO_WOULDBLOCK, ssl, ssl_wrap_socket, _client_ssl_defaults from tornado.concurrent import Future from tornado.gen import coroutine from tornado.ioloop import IOLoop if sys.version_info[0] >= 3: import io StringIO = io.BytesIO else: import cStringIO StringIO = cStringIO.StringIO def current_ioloop(): return IOLoop.current() class IOStream(BaseIOStream): _read_callback = None _write_callback = None _connect_callback = None _pending_callbacks = None def __init__(self, address, bind_address, socket = None, *args, **kwargs): if socket is None: socket = self.init_socket(address, bind_address) super(IOStream, self).__init__(socket, *args, **kwargs) self._write_buffer = bytearray() self._write_buffer_pos = 0 self._write_buffer_size = 0 def init_socket(self, address, bind_address): if not isinstance(address, tuple): sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) else: sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) if bind_address is not None: sock.bind((bind_address, 0)) sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) return sock def connect(self, address, connect_timeout = 0, server_hostname = None): future = Future() if connect_timeout: def timeout(): self._loop_connect_timeout = None if not self._connecting: self.close((None, IOError("Connect timeout"), None)) self._loop_connect_timeout = self.io_loop.call_later(connect_timeout, timeout) def connected(connect_future): if self._loop_connect_timeout: self.io_loop.remove_timeout(self._loop_connect_timeout) self._loop_connect_timeout = None if (hasattr(connect_future, "_exc_info") and connect_future._exc_info is not None) \ or (hasattr(connect_future, "_exception") and connect_future._exception is not None): future.set_exception(connect_future.exception()) else: future.set_result(connect_future.result()) connect_future = super(IOStream, self).connect(address, server_hostname = server_hostname) connect_future.add_done_callback(connected) return future def _handle_events(self, fd, events): if self._closed: return try: if self._connecting: self._handle_connect() if self._closed: return if events & self.io_loop.READ: self._handle_read() if self._closed: return if events & self.io_loop.WRITE: self._handle_write() if self._closed: return if events & self.io_loop.ERROR: self.error = self.get_fd_error() self.io_loop.add_callback(self.close) return except Exception: self.close(exc_info=True) raise def _handle_connect(self): super(IOStream, self)._handle_connect() if not self.closed(): self._state = self.io_loop.ERROR | self.io_loop.READ if self._write_buffer: self._state = self._state | self.io_loop.WRITE self.io_loop.update_handler(self.fileno(), self._state) def _handle_read(self): chunk = True while True: try: chunk = self.socket.recv(self.read_chunk_size) if not chunk: break if self._read_buffer_size: self._read_buffer += chunk else: self._read_buffer = bytearray(chunk) self._read_buffer_size += len(chunk) except (socket.error, IOError, OSError) as e: en = e.errno if hasattr(e, 'errno') else e.args[0] if en in _ERRNO_WOULDBLOCK: break if en == errno.EINTR: continue self.close(exc_info=True) return if self._read_future is not None and self._read_buffer_size >= self._read_bytes: future, self._read_future = self._read_future, None self._read_buffer, data = 
bytearray(), self._read_buffer self._read_buffer_size = 0 self._read_bytes = 0 future.set_result(data) if not chunk: self.close() return def read(self, num_bytes): assert self._read_future is None, "Already reading" if self._closed: raise StreamClosedError(real_error=self.error) future = self._read_future = Future() self._read_bytes = num_bytes self._read_partial = False if self._read_buffer_size >= self._read_bytes: future, self._read_future = self._read_future, None self._read_buffer, data = bytearray(), self._read_buffer self._read_buffer_size = 0 self._read_bytes = 0 future.set_result(data) return future read_bytes = read def _handle_write(self): try: num_bytes = self.socket.send(memoryview(self._write_buffer)[self._write_buffer_pos: self._write_buffer_pos + self._write_buffer_size]) self._write_buffer_pos += num_bytes self._write_buffer_size -= num_bytes except (socket.error, IOError, OSError) as e: en = e.errno if hasattr(e, 'errno') else e.args[0] if en not in _ERRNO_WOULDBLOCK: self.close(exc_info=True) return if not self._write_buffer_size: if self._write_buffer_pos > 0: self._write_buffer = bytearray() self._write_buffer_pos = 0 if self._state & self.io_loop.WRITE: self._state = self._state & ~self.io_loop.WRITE self.io_loop.update_handler(self.fileno(), self._state) def write(self, data): assert isinstance(data, (bytes, bytearray)) if self._closed: raise StreamClosedError(real_error=self.error) if data: if self._write_buffer_size: self._write_buffer += data else: self._write_buffer = bytearray(data) self._write_buffer_size += len(data) if not self._connecting: self._handle_write() if self._write_buffer_size: if not self._state & self.io_loop.WRITE: self._state = self._state | self.io_loop.WRITE self.io_loop.update_handler(self.fileno(), self._state) def start_tls(self, server_side, ssl_options=None, server_hostname=None, connect_timeout = None): if (self._read_callback or self._read_future or self._write_callback or self._write_futures or self._connect_callback or self._connect_future or self._pending_callbacks or self._closed or self._read_buffer or self._write_buffer): raise ValueError("IOStream is not idle; cannot convert to SSL") if ssl_options is None: ssl_options = _client_ssl_defaults socket = self.socket self.io_loop.remove_handler(socket) self.socket = None socket = ssl_wrap_socket(socket, ssl_options, server_hostname=server_hostname, server_side=server_side, do_handshake_on_connect=False) orig_close_callback = self._close_callback self._close_callback = None future = Future() ssl_stream = SSLIOStream(socket, ssl_options=ssl_options) # Wrap the original close callback so we can fail our Future as well. # If we had an "unwrap" counterpart to this method we would need # to restore the original callback after our Future resolves # so that repeated wrap/unwrap calls don't build up layers. def close_callback(): if not future.done(): # Note that unlike most Futures returned by IOStream, # this one passes the underlying error through directly # instead of wrapping everything in a StreamClosedError # with a real_error attribute. This is because once the # connection is established it's more helpful to raise # the SSLError directly than to hide it behind a # StreamClosedError (and the client is expecting SSL # issues rather than network issues since this method is # named start_tls). 
future.set_exception(ssl_stream.error or StreamClosedError()) if orig_close_callback is not None: orig_close_callback() if connect_timeout: def timeout(): ssl_stream._loop_connect_timeout = None if not future.done(): ssl_stream.close((None, IOError("Connect timeout"), None)) ssl_stream._loop_connect_timeout = self.io_loop.call_later(connect_timeout, timeout) ssl_stream.set_close_callback(close_callback) ssl_stream._ssl_connect_callback = lambda: future.set_result(ssl_stream) ssl_stream.max_buffer_size = self.max_buffer_size ssl_stream.read_chunk_size = self.read_chunk_size return future class SSLIOStream(IOStream, BaseSSLIOStream): def __init__(self, socket, *args, **kwargs): self._ssl_options = kwargs.pop('ssl_options', _client_ssl_defaults) IOStream.__init__(self, None, None, socket, *args, **kwargs) self._ssl_accepting = True self._handshake_reading = False self._handshake_writing = False self._ssl_connect_callback = None self._loop_connect_timeout = None self._server_hostname = None # If the socket is already connected, attempt to start the handshake. try: self.socket.getpeername() except socket.error: pass else: # Indirectly start the handshake, which will run on the next # IOLoop iteration and then the real IO state will be set in # _handle_events. self._add_io_state(self.io_loop.WRITE) def _handle_read(self): if self._ssl_accepting: self._do_ssl_handshake() return chunk = True while True: try: chunk = self.socket.recv(self.read_chunk_size) if not chunk: break if self._read_buffer_size: self._read_buffer += chunk else: self._read_buffer = bytearray(chunk) self._read_buffer_size += len(chunk) except ssl.SSLError as e: if e.args[0] == ssl.SSL_ERROR_WANT_READ: break self.close(exc_info=True) return except (socket.error, IOError, OSError) as e: en = e.errno if hasattr(e, 'errno') else e.args[0] if en in _ERRNO_WOULDBLOCK: break if en == errno.EINTR: continue self.close(exc_info=True) return if self._read_future is not None and self._read_buffer_size >= self._read_bytes: future, self._read_future = self._read_future, None self._read_buffer, data = bytearray(), self._read_buffer self._read_buffer_size = 0 self._read_bytes = 0 future.set_result(data) if not chunk: self.close() return def _handle_write(self): if self._ssl_accepting: self._do_ssl_handshake() return try: num_bytes = self.socket.send(memoryview(self._write_buffer)[ self._write_buffer_pos: self._write_buffer_pos + self._write_buffer_size]) self._write_buffer_pos += num_bytes self._write_buffer_size -= num_bytes except ssl.SSLError as e: if e.args[0] != ssl.SSL_ERROR_WANT_WRITE: self.close(exc_info=True) return except (socket.error, IOError, OSError) as e: en = e.errno if hasattr(e, 'errno') else e.args[0] if en not in _ERRNO_WOULDBLOCK: self.close(exc_info=True) return if not self._write_buffer_size: if self._write_buffer_pos > 0: self._write_buffer = bytearray() self._write_buffer_pos = 0 if self._state & self.io_loop.WRITE: self._state = self._state & ~self.io_loop.WRITE self.io_loop.update_handler(self.fileno(), self._state) def _run_ssl_connect_callback(self): if self._state & self.io_loop.WRITE: self._state = self._state & ~self.io_loop.WRITE self.io_loop.update_handler(self.fileno(), self._state) if hasattr(BaseSSLIOStream, "_finish_ssl_connect"): BaseSSLIOStream._finish_ssl_connect(self) else: BaseSSLIOStream._run_ssl_connect_callback(self) def makefile(self, mode): return self
zoom-tormysql
/zoom-tormysql-0.4.2.tar.gz/zoom-tormysql-0.4.2/tormysql/platform/tornado.py
tornado.py
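The `read()`/`_handle_read()` pair in the tornado-backed IOStream above follows one simple contract: `read(n)` registers a Future and remembers how many bytes are wanted, and whenever data arrives the pending Future is resolved with the entire buffer as soon as it holds at least `n` bytes. Below is a minimal, self-contained sketch of that pattern; it is an illustration only, not the tormysql API, and the class and method names are invented for the example.

```python
# Standalone illustration of the Future-based buffered read pattern used by
# the IOStream above. Not part of tormysql; names are invented for clarity.
from concurrent.futures import Future


class BufferedReader:
    def __init__(self):
        self._buffer = bytearray()
        self._pending = None          # (future, wanted_bytes) or None

    def feed(self, chunk: bytes):
        """Called whenever the transport delivers data."""
        self._buffer += chunk
        self._try_resolve()

    def read(self, num_bytes: int) -> Future:
        assert self._pending is None, "Already reading"
        future = Future()
        self._pending = (future, num_bytes)
        self._try_resolve()           # data may already be buffered
        return future

    def _try_resolve(self):
        if self._pending is None:
            return
        future, wanted = self._pending
        if len(self._buffer) >= wanted:
            # Hand the whole buffer over and swap in a fresh one, mirroring
            # the `bytearray(), self._read_buffer` swap in the code above.
            data, self._buffer = bytes(self._buffer), bytearray()
            self._pending = None
            future.set_result(data)


reader = BufferedReader()
pending = reader.read(5)
reader.feed(b"he")
reader.feed(b"llo world")
print(pending.result())               # b'hello world' -- the whole buffer
```

Note that, exactly like the original, the Future resolves with everything buffered so far, so callers receive at least `num_bytes`, not exactly `num_bytes`.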
from __future__ import absolute_import, division, print_function from asyncio import coroutine, Future, events, Protocol, ensure_future def current_ioloop(): return events.get_event_loop() try: from tornado.iostream import StreamClosedError except ImportError: class StreamClosedError(IOError): def __init__(self, real_error=None): super(StreamClosedError, self).__init__('Stream is closed') self.real_error = real_error class IOStream(Protocol): def __init__(self, address, bind_address): self._loop = None self._address = address self._bind_address = bind_address self._sock = None self._transport = None self._close_callback = None self._connect_future = None self._connect_ssl_future = None self._read_future = None self._read_bytes = 0 self._closed = False self._read_buffer_size = 0 self._read_buffer = bytearray() def closed(self): return self._closed def set_close_callback(self, callback): self._close_callback = callback def on_closed(self, exc_info = False): if self._connect_future: if exc_info: self._connect_future.set_exception(exc_info[1] if isinstance(exc_info, tuple) else exc_info) else: self._connect_future.set_exception(StreamClosedError(None)) self._connect_future = None if self._connect_ssl_future: if exc_info: self._connect_ssl_future.set_exception(exc_info[1] if isinstance(exc_info, tuple) else exc_info) else: self._connect_ssl_future.set_exception(StreamClosedError(None)) self._connect_ssl_future = None if self._read_future: if exc_info: self._read_future.set_exception(exc_info[1] if isinstance(exc_info, tuple) else exc_info) else: self._read_future.set_exception(StreamClosedError(None)) self._read_future = None if self._close_callback: close_callback, self._close_callback = self._close_callback, None self._loop.call_soon(close_callback) self._closed = True def close(self, exc_info=False): if self._closed: return if self._transport: self._transport.close() else: self.on_closed(exc_info) @coroutine def _connect(self, address, server_hostname=None): if isinstance(address, (str, bytes)): self._transport, _ = yield from self._loop.create_unix_connection(lambda : self, address, sock=self._sock, server_hostname=server_hostname) else: self._transport, _ = yield from self._loop.create_connection(lambda : self, address[0], address[1], sock=self._sock, server_hostname=server_hostname, local_addr=self._bind_address) def connect(self, address, connect_timeout = 0, server_hostname = None): assert self._connect_future is None, 'Already connecting' self._loop = current_ioloop() future = self._connect_future = Future(loop=self._loop) if connect_timeout: def on_timeout(): self._loop_connect_timeout = None if self._connect_future: self.close((None, IOError("Connect timeout"), None)) self._loop_connect_timeout = self._loop.call_later(connect_timeout, on_timeout) def connected(connect_future): if self._loop_connect_timeout: self._loop_connect_timeout.cancel() self._loop_connect_timeout = None if connect_future._exception is not None: self.on_closed(connect_future.exception()) self._connect_future = None else: self._connect_future = None future.set_result(connect_future.result()) connect_future = ensure_future(self._connect(address, server_hostname)) connect_future.add_done_callback(connected) return self._connect_future def connection_made(self, transport): self._transport = transport if self._connect_future is None and self._connect_ssl_future is None: transport.close() else: self._transport.set_write_buffer_limits(1024 * 1024 * 1024) def data_received(self, data): if self._read_buffer_size: 
self._read_buffer += data else: self._read_buffer = bytearray(data) self._read_buffer_size += len(data) if self._read_future and self._read_buffer_size >= self._read_bytes: future, self._read_future = self._read_future, None self._read_buffer, data = bytearray(), self._read_buffer self._read_buffer_size = 0 self._read_bytes = 0 future.set_result(data) def connection_lost(self, exc): self.on_closed(exc) self._transport = None def eof_received(self): return False def read_bytes(self, num_bytes): assert self._read_future is None, "Already reading" if self._closed: raise StreamClosedError(IOError('Already Closed')) future = self._read_future = Future() self._read_bytes = num_bytes if self._read_buffer_size >= self._read_bytes: future, self._read_future = self._read_future, None self._read_buffer, data = bytearray(), self._read_buffer self._read_buffer_size = 0 self._read_bytes = 0 future.set_result(data) return future def write(self, data): if self._closed: raise StreamClosedError(IOError('Already Closed')) self._transport.write(data) def start_tls(self, server_side, ssl_options=None, server_hostname=None, connect_timeout=None): if not self._transport or self._read_future: raise ValueError("IOStream is not idle; cannot convert to SSL") self._connect_ssl_future = connect_ssl_future = Future(loop=self._loop) waiter = Future(loop=self._loop) def on_connected(future): if self._loop_connect_timeout: self._loop_connect_timeout.cancel() self._loop_connect_timeout = None if connect_ssl_future._exception is not None: self.on_closed(future.exception()) self._connect_ssl_future = None else: self._connect_ssl_future = None connect_ssl_future.set_result(self) waiter.add_done_callback(on_connected) if connect_timeout: def on_timeout(): self._loop_connect_timeout = None if not waiter.done(): self.close((None, IOError("Connect timeout"), None)) self._loop_connect_timeout = self._loop.call_later(connect_timeout, on_timeout) self._transport.pause_reading() sock, self._transport._sock = self._transport._sock, None self._transport = self._loop._make_ssl_transport( sock, self, ssl_options, waiter, server_side=False, server_hostname=server_hostname) return connect_ssl_future def makefile(self, mode): return self
zoom-tormysql
/zoom-tormysql-0.4.2.tar.gz/zoom-tormysql-0.4.2/tormysql/platform/asyncio.py
asyncio.py
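For orientation, here is a rough sketch of how the Protocol-based stream above could be driven from a coroutine: construct it with the target address, connect, write, and await the Future returned by `read_bytes()`. This is an assumption pieced together from the methods shown (tormysql normally calls them internally during its MySQL handshake); the host, port and payload are placeholders.

```python
# Hedged usage sketch for the asyncio-based IOStream above. The import path
# is taken from the file path shown; the address and payload are placeholders.
import asyncio
from tormysql.platform.asyncio import IOStream


async def probe(host, port):
    stream = IOStream((host, port), None)        # (address, bind_address)
    # connect() returns a Future; passing a timeout also ensures the timeout
    # handle referenced in its completion callback is initialised.
    await stream.connect((host, port), connect_timeout=5)
    stream.write(b"PING\r\n")                    # writes go straight to the transport
    greeting = await stream.read_bytes(4)        # resolves once >= 4 bytes are buffered
    stream.close()
    return greeting


# asyncio.get_event_loop().run_until_complete(probe("127.0.0.1", 3306))
```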
from __future__ import absolute_import, division, print_function try: import asyncio from .asyncio import StreamClosedError except ImportError: asyncio = None from tornado.iostream import StreamClosedError class IOLoop(object): _instance = None def __init__(self): self.ioloop = None self.call_soon = None self.call_at = None self.call_later = None self.cancel_timeout = None def __getattr__(self, name): return getattr(self.ioloop, name) IOLoop._instance = IOLoop() Future, coroutine, IOStream = None, None, None current_ioloop = None is_reset = False def use_tornado(reset = True): global Future, coroutine, IOStream, current_ioloop, is_reset if not reset and is_reset: return is_reset = reset from .tornado import Future, coroutine, IOStream def tornado_current_ioloop(): global current_ioloop if IOLoop._instance.ioloop is None: from .tornado import current_ioloop as _current_ioloop IOLoop._instance.ioloop = _current_ioloop() IOLoop._instance.call_soon = IOLoop._instance.ioloop.add_callback IOLoop._instance.call_at = IOLoop._instance.ioloop.call_at IOLoop._instance.call_later = IOLoop._instance.ioloop.call_later IOLoop._instance.cancel_timeout = IOLoop._instance.ioloop.remove_timeout current_ioloop = lambda : IOLoop._instance return IOLoop._instance current_ioloop = tornado_current_ioloop return current_ioloop def use_asyncio(reset = True): global Future, coroutine, IOStream, current_ioloop, is_reset if not reset and is_reset: return is_reset = reset from .asyncio import Future, coroutine, IOStream def asyncio_current_ioloop(): global current_ioloop if IOLoop._instance.ioloop is None: try: from tornado.ioloop import IOLoop as TornadoIOLoop from tornado.platform.asyncio import AsyncIOMainLoop tornado_ioloop = TornadoIOLoop.current(False) if isinstance(tornado_ioloop, TornadoIOLoop) and not isinstance(tornado_ioloop, AsyncIOMainLoop): return use_tornado(False)() except: pass from .asyncio import current_ioloop as _current_ioloop IOLoop._instance.ioloop = _current_ioloop() IOLoop._instance.call_soon = IOLoop._instance.ioloop.call_soon IOLoop._instance.call_at = IOLoop._instance.ioloop.call_at IOLoop._instance.call_later = IOLoop._instance.ioloop.call_later def cancel_timeout(timeout): timeout.cancel() IOLoop._instance.cancel_timeout = cancel_timeout current_ioloop = lambda: IOLoop._instance return IOLoop._instance current_ioloop = asyncio_current_ioloop return current_ioloop if asyncio is None: use_tornado(False) else: use_asyncio(False)
zoom-tormysql
/zoom-tormysql-0.4.2.tar.gz/zoom-tormysql-0.4.2/tormysql/platform/__init__.py
__init__.py
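This `__init__` module is what lets tormysql run on either event loop: `use_asyncio()` / `use_tornado()` rebind `Future`, `coroutine` and `IOStream` to one backend and install a `current_ioloop()` factory that wraps the active loop in the shared `IOLoop` proxy. A hedged sketch of forcing a backend explicitly (by default the module picks asyncio whenever it is importable):

```python
# Hedged sketch: explicitly selecting tormysql's event-loop backend.
# Assumes the subpackage is importable as tormysql.platform, per the path
# above; normally the asyncio backend is chosen automatically.
from tormysql import platform

platform.use_asyncio()               # or: platform.use_tornado()

# current_ioloop() now returns the shared IOLoop proxy, which forwards
# call_soon / call_at / call_later / cancel_timeout to the active loop.
loop_proxy = platform.current_ioloop()
print(type(loop_proxy).__name__)     # -> IOLoop
```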
# zoomus [![Build Status](https://img.shields.io/travis/prschmid/zoomus)](https://travis-ci.org/prschmid/zoomus) [![PyPI Downloads](https://img.shields.io/pypi/dm/zoomus)](https://pypi.org/project/zoomus/) [![Python Versions](https://img.shields.io/pypi/pyversions/zoomus)](https://pypi.org/project/zoomus/) [![PyPI Version](https://img.shields.io/pypi/v/zoomus)](https://pypi.org/project/zoomus/) [![PyPI License](https://img.shields.io/pypi/l/zoomus)](https://pypi.org/project/zoomus/) [![Code Style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black/) [https://github.com/prschmid/zoomus](https://github.com/prschmid/zoomus) Python wrapper around the [Zoom.us](http://zoom.us) REST API v1 and v2. This work is heavily inspired by the Ruby GEM of the same name, [Zoomus](https://github.com/mllocs/zoomus) ## Installation ### The easy way ```sh pip install zoomus ``` ## Compatibility `zoomus` has been tested for Python 3.6, 3.7, and 3.8 using [Travis CI](https://travis-ci.org/prschmid/zoomus) Note, as this library heavily depends on the [requests](https://pypi.org/project/requests/) library, official compatibility is limited to the official compatibility of `requests`. ## Example Usage ### Create the client v2 (default) As Zoom's default is now the V2 API, the client will default to the V2 version of the API. ```python import json from zoomus import ZoomClient client = ZoomClient('API_KEY', 'API_SECRET') user_list_response = client.user.list() user_list = json.loads(user_list_response.content) for user in user_list['users']: user_id = user['id'] print(json.loads(client.meeting.list(user_id=user_id).content)) ``` What one will note is that the returned object from a call using the client is a [requests](https://pypi.org/project/requests/) `Response` object. This is done so that if there is any error working with the API that one has complete control of handling all errors. As such, to actually get the list of users in the example above, one will have to load the JSON from the content of the `Response` object that is returned. ### Create the client v1 Zoom has yet to officially remove support for the V1 API, and so to use the V1 API one can instantiate a client as follows. ```python import json from zoomus import ZoomClient client = ZoomClient('API_KEY', 'API_SECRET', version=1) user_list_response = client.user.list() user_list = json.loads(user_list_response.content) for user in user_list['users']: user_id = user['id'] print(json.loads(client.meeting.list(host_id=user_id).content)) ``` ### Using with a manage context ```python with ZoomClient('API_KEY', 'API_SECRET') as client: user_list_response = client.users.list() ... ``` ## Available methods * client.user.create(...) * client.user.cust_create(...) * client.user.update(...)* * client.user.list(...) * client.user.pending(...) * client.user.get(...) * client.user.get_by_email(...) * client.meeting.get(...) * client.meeting.end(...) * client.meeting.create(...) * client.meeting.delete(...) * client.meeting.list(...) * client.meeting.update(...) * client.meeting.add_registrant(...) * client.meeting.list_registrants(...) * client.report.get_account_report(...) * client.report.get_user_report(...) * client.webinar.create(...) * client.webinar.update(...) * client.webinar.delete(...) * client.webinar.list(...) * client.webinar.get(...) * client.webinar.end(...) * client.webinar.register(...) * client.webinar.add_panelists(...) * client.webinar.list_panelists(...) * client.webinar.remove_panelists(...) 
* client.phone.call_logs(...) * client.phone.calling_plans(...) * client.phone.numbers_get(...) * client.phone.numbers_list(...) * client.phone.users(...) ## Running the Tests ### Simple First, make sure to install the testing requirements ```sh pip install -r requirements-tests.txt ``` Then run the tests via nose ```sh nosetests ``` ## Contributing Please see the [CONTRIBUTING.md](./CONTRIBUTING.md) for the contribution guidelines for this project.
zoom-us
/zoom-us-1.1.4.tar.gz/zoom-us-1.1.4/README.md
README.md
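Because every zoomus call returns a plain `requests` `Response`, it is worth checking the status code before decoding the body. A short sketch of that, using the same placeholder credentials as the examples above:

```python
# Minimal error-handling sketch for zoomus: every client call returns a
# requests.Response, so inspect the status before decoding the JSON body.
# API_KEY / API_SECRET are placeholders.
import json
from zoomus import ZoomClient

client = ZoomClient('API_KEY', 'API_SECRET')

response = client.user.list()
if response.status_code == 200:
    users = json.loads(response.content).get('users', [])
    print("Found {} users".format(len(users)))
else:
    # The raw Response is returned precisely so the caller decides how to
    # handle API errors (auth failures, rate limits, and so on).
    print("Zoom API error {}: {}".format(response.status_code, response.content))
```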
![zoomaker_social_keyvisual](https://github.com/hfg-gmuend/zoomaker/assets/480224/75d3d492-fe54-4711-afbf-02768bbb4033) Zoomaker - Friendly house keeping for your AI model zoo and related resources. ======== Zoomaker is a command-line tool that helps install AI models, git repositories and run scripts. - **single source of truth**: all resources are neatly defined in the `zoo.yaml` file - **freeze versions**: know exactly which revision of a resources is installed at any time - **only download once**: optimize bandwidth and cache your models locally - **optimize disk usage**: downloaded models are symlinked to the installation folder (small files <5MB are duplicate) ## 😻 TL;DR 1. Install Zoomaker `pip install zoomaker` 2. Define your resources in the `zoo.yaml` file 3. Run `zoomaker install` to install them (on Windows: `zoomaker install --no-symlinks`, see [hints](https://github.com/hfg-gmuend/zoomaker#%EF%B8%8F-limitations-on-windows) below) ## 📦 Installation ```bash pip install zoomaker ``` ## 🦁 zoo.yaml Examples Example of the `zoo.yaml` of a Stable Diffusion project with the [Automatic1111](https://github.com/AUTOMATIC1111/stable-diffusion-webui) image generator: ```yaml name: my-automatic1111-model-zoo version: 1.0 description: Lorem ipsum author: your name resources: image_generator: - name: automatic1111 src: https://github.com/AUTOMATIC1111/stable-diffusion-webui.git type: git revision: 22bcc7be428c94e9408f589966c2040187245d81 install_to: ./ models: - name: v2-1_768-ema-pruned src: stabilityai/stable-diffusion-2-1/v2-1_768-ema-pruned.safetensors type: huggingface install_to: ./stable-diffusion-webui/models/Stable-diffusion/ ``` <details> <summary>`zoo.yaml` example long</summary> ```yaml name: my-automatic1111-model-zoo version: 1.0 description: Lorem ipsum author: your name aliases: image_generator: &image_generator ./ models: &models ./stable-diffusion-webui/models/Stable-diffusion/ controlnet: &controlnet ./stable-diffusion-webui/models/ControlNet/ embeddings: &embeddings ./stable-diffusion-webui/embeddings/ extensions: &extensions ./stable-diffusion-webui/extensions/ resources: image_generator: - name: automatic1111 src: https://github.com/AUTOMATIC1111/stable-diffusion-webui.git type: git revision: 22bcc7be428c94e9408f589966c2040187245d81 install_to: *image_generator models: - name: v1-5-pruned-emaonly src: runwayml/stable-diffusion-v1-5/v1-5-pruned-emaonly.safetensors type: huggingface install_to: *models controlnet: - name: control_sd15_canny src: lllyasviel/ControlNet/models/control_sd15_canny.pth type: huggingface install_to: *controlnet embeddings: - name: midjourney-style src: sd-concepts-library/midjourney-style/learned_embeds.bin type: huggingface install_to: *embeddings rename_to: midjourney-style.bin - name: moebius src: sd-concepts-library/moebius/learned_embeds.bin type: huggingface install_to: *embeddings rename_to: moebius.bin extensions: - name: sd-webui-tunnels src: https://github.com/Bing-su/sd-webui-tunnels.git type: git install_to: *extensions ``` </details> <details> <summary>`zoo.yaml` with script snippets</summary> Here are a few examples of how to run scripts snippets from the `zoo.yaml` file. For example for starting the Automatic1111's webui, you could setup snippets like these and then run them with `zoomaker run start_webui`. All scripts are run from the root of the project, please adjust the paths accordingly. 
```yaml scripts: start_webui: | cd .\stable-diffusion-webui && call webui.bat ``` ```yaml scripts: start_webui: | conda activate automatic1111 cd /home/$(whoami)/stable-diffusion-webui/ ./webui.sh --theme dark --xformers --no-half ``` </details> <details> <summary>`zoo.yaml` with web download</summary> ```yaml resources: models: - name: analog-diffusion-v1 src: https://civitai.com/api/download/models/1344 type: download install_to: ./stable-diffusion-webui/models/Stable-diffusion/ rename_to: analog-diffusion-v1.safetensors ``` Please note: The resource `type: download` can be seen as the last resort. Currently there is no caching or symlinking of web downloads. Recommended to avoid it :) </details> ## 🧮 zoo.yaml Structure <details> <summary>Top level:</summary> - `name` (mandatory) - `version`, `description`, `author`, `aliases` (optional) - `resources` (mandatory) : `<group-name>` : `[]` (array of resources) - `scripts` (optional) : `<script-name>` </details> <details> <summary>Resource:</summary> - `name`, `src`, `type`, `install_to` (mandatory) - `rename_to` (optional) - `revision` (optional), if none is defined the latest version from the main branch is downloaded - `type` can either be `git`, `huggingface` or `download` </details> ## 🧞 Zoomaker Commands All commands are run from the root of the project, where also your `zoo.yaml` file is located. | Command | Action | | :--------------------- | :----------------------------------------------- | | `zoomaker install` | Installs resources as defined in `zoo.yaml` | | `zoomaker run <script_name>` | Run CLI scripts as defined in `zoo.yaml` | | `zoomaker --help` | Get help using the Zoomaker CLI | | `zoomaker --version` | Show current Zoomaker version | | `zoomaker --no-symlinks` | Do not use symlinks for installing resources | ## ⚠️ Limitations on Windows Symlinks are not widely supported on Windows, which limits the caching mechanism used by Zoomaker. To work around this limitation, you can disable symlinks by using the `--no-symlinks` flag with the install command: ```bash zoomaker install --no-symlinks ``` This will still use the cache directory for checking if files are already cached, but if not, they will be downloaded and duplicated directly to the installation directory, saving bandwidth but increasing disk usage. Alternatively, you can use the [Windows Subsystem for Linux "WSL"](https://docs.microsoft.com/en-us/windows/wsl/install-win10) (don't forget to [enable developer mode](https://docs.microsoft.com/en-us/windows/apps/get-started/enable-your-device-for-development)) or run Zoomaker as an administrator to enable symlink support on Windows. ## 🤗 Hugging Face Access Token You might be asked for a [Hugging Face Access Token](https://huggingface.co/docs/hub/security-tokens) during `zoomaker install`. Some resources on Hugging Face require accepting the terms of use of the model. You can set your access token by running this command in a terminal. The command `huggingface-cli` is automatically shipped alongside zoomaker. ```bash huggingface-cli login ``` ## 🙏 Acknowledgements - Most of the internal heavy lifting is done be the [huggingface_hub library](https://huggingface.co/docs/huggingface_hub/guides/download) by Hugging Face. Thanks! - "Zoomaker Safari Hacker Cat" cover image by Alia Tasler, based on this [OpenMoji](https://openmoji.org/library/emoji-1F431-200D-1F4BB/). Thanks!
zoomaker
/zoomaker-0.7.0.tar.gz/zoomaker-0.7.0/README.md
README.md
import os import subprocess import yaml import argparse from huggingface_hub import hf_hub_download import git import requests from tqdm import tqdm import unicodedata import re class Zoomaker: def __init__(self, yaml_file: str): self.yaml_file = yaml_file with open(yaml_file, "r") as f: self.data = yaml.safe_load(f) self._check_yaml() def _check_yaml(self): if "name" not in self.data: raise Exception("❌ 'name' is missing in zoo.yaml") if "resources" not in self.data: raise Exception("❌ 'resources' is missing in zoo.yaml") for group, resources in self.data["resources"].items(): for resource in resources: if "name" not in resource: raise Exception("❌ Resource must have 'name' attribute") if "src" not in resource: raise Exception("❌ Resource must have 'src' attribute") if "type" not in resource: raise Exception("❌ Resource must have 'type' attribute") if "install_to" not in resource: raise Exception("❌ Resource must have 'install_to' attribute") type = resource["type"] if type not in ["huggingface", "git", "download"]: raise Exception(f"❌ Unknown resource type: {type}") def install(self, no_symlinks: bool = False): print(f"👋 ===> {self.yaml_file} <===") print(f"name: {self.data.get('name', 'N/A')}") print(f"version: {self.data.get('version', 'N/A')}\n") if no_symlinks: print(f"⛔️ installing resources without symlinks ...") print(f"👇 installing resources ...") counter = 0; for group, resources in self.data["resources"].items(): print(f"\n{group}:") for resource in resources: name = resource["name"] src = resource["src"] type = resource["type"] revision = resource.get("revision", None) rename_to = resource.get("rename_to", None) install_to = os.path.abspath(resource["install_to"]) counter += 1 print(f"\t{counter}. {name} to {install_to}") os.makedirs(install_to, exist_ok=True) # Hugging Face Hub if type == "huggingface": repo_id = "/".join(src.split("/")[0:2]) repo_filepath = "/".join(src.split("/")[2:]) downloaded = hf_hub_download(repo_id=repo_id, filename=repo_filepath, local_dir=install_to, revision=revision, local_dir_use_symlinks=False if no_symlinks else "auto") if rename_to: self._rename_file(downloaded, os.path.join(install_to, rename_to)) # Git elif type == "git": repo_path = os.path.join(install_to, self._get_repo_name(src)) if rename_to: print(f"\trename_to is not supported for git repos. Ignoring rename_to: {rename_to}") # existing repo if os.path.exists(repo_path): repo = git.Repo(repo_path) if revision: repo.git.checkout(revision) print(f"\tgit checkout revision: {repo.head.object.hexsha}") else: repo.remotes.origin.pull() print(f"\tgit pull: {repo.head.object.hexsha}") # new repo else: repo = git.Repo.clone_from(src, repo_path, allow_unsafe_protocols=True, allow_unsafe_options=True) if revision: repo.git.checkout(revision) print(f"\tgit checkout revision: {repo.head.object.hexsha}") else: repo.remotes.origin.pull() print(f"\tgit pull latest: {repo.head.object.hexsha}") # Download else: filename = self._slugify(os.path.basename(src)) downloaded = self._download_file(src, os.path.join(install_to, filename)) if rename_to: self._rename_file(downloaded, os.path.join(install_to, rename_to)) if revision: print(f"\trevision is not supported for download. 
Ignoring revision: {revision}") print(f"\n✅ {counter} resources installed.") def run(self, script_name: str): if script_name not in self.data["scripts"]: print(f"No script found with name: '{script_name}'") if self.data["scripts"]: print(f"\nAvailable scripts:") for script_name in self.data["scripts"]: print(f"zoomaker run {script_name}") return script_string = self.data["scripts"][script_name] subprocess.check_call(script_string, shell=True) def _get_repo_name(self, src: str): if src.endswith(".git"): return os.path.basename(src).replace(".git", "") else: return os.path.basename(src) def _rename_file(self, src, dest): # remove dest if exists due to os.rename limitation in Windows if os.path.exists(dest): os.remove(dest) os.rename(src, dest) else: os.rename(src, dest) def _download_file(self, url, filename): response = requests.get(url, stream=True) total_size_in_bytes = int(response.headers.get('content-length', 0)) block_size = 1024 progress_bar = tqdm(desc="\tdownloading", total=total_size_in_bytes, unit='iB', unit_scale=True, ncols=100) with open(filename, 'wb') as file: for data in response.iter_content(block_size): progress_bar.update(len(data)) file.write(data) progress_bar.close() if total_size_in_bytes != 0 and progress_bar.n != total_size_in_bytes: print("Error: Failed to download the complete file.") return None return filename def _slugify(self, value, allow_unicode=False): """ Makes a filename safe for usage on all filesystems. Taken from https://github.com/django/django/blob/master/django/utils/text.py Convert to ASCII if 'allow_unicode' is False. Convert spaces or repeated dashes to single dashes. Remove characters that aren't alphanumerics, underscores, or hyphens. Convert to lowercase. Also strip leading and trailing whitespace, dashes, and underscores. """ value = str(value) if allow_unicode: value = unicodedata.normalize('NFKC', value) else: value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode('ascii') value = re.sub(r'[^\w\s-]', '', value.lower()) return re.sub(r'[-\s]+', '-', value).strip('-_') def main(): parser = argparse.ArgumentParser(description="Install models, git repos and run scripts defined in the zoo.yaml file.") parser.add_argument("command", nargs="?", choices=["install", "run"], help="The command to execute.") parser.add_argument("script", nargs="?", help="The script name to execute.") parser.add_argument("--no-symlinks", action='store_true', help="Do not create symlinks for the installed resources.") parser.add_argument("-v", "--version", action='version', help="The current version of the zoomaker.", version="0.7.0") args = parser.parse_args() if args.command == "install": Zoomaker("zoo.yaml").install(args.no_symlinks) elif args.command == "run": Zoomaker("zoo.yaml").run(args.script) else: parser.print_help() if __name__ == "__main__": main()
zoomaker
/zoomaker-0.7.0.tar.gz/zoomaker-0.7.0/zoomaker.py
zoomaker.py
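The CLI in `main()` is a thin wrapper around the `Zoomaker` class, so the same operations can be driven programmatically. A small sketch, assuming the module is importable as `zoomaker` and a valid `zoo.yaml` sits in the working directory (exactly what `main()` assumes):

```python
# Programmatic equivalent of `zoomaker install` and `zoomaker run <script>`.
# Assumes a valid zoo.yaml in the current working directory; "start_webui"
# is only an example script name that the zoo.yaml would have to define.
from zoomaker import Zoomaker

maker = Zoomaker("zoo.yaml")

# Same as `zoomaker install` (pass no_symlinks=True on Windows):
maker.install(no_symlinks=False)

# Same as `zoomaker run start_webui`:
maker.run("start_webui")
```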
# zoomascii

Faster versions of Python ASCII functions, in the theme of binascii with added zoom. These functions only work on ASCII strings and will trade memory for speed (within reason). Currently targeting Python 2.7.x, but patches welcome for Python 3.x.

## Currently Implemented

b2a_qp - Hot code for email sending apps that use QP encoding, and the reason I started this library. Around 3x faster than binascii.b2a_qp.

swapcase - over 10x faster than Python's builtin swapcase() for ASCII strings. (I don't expect this is actually useful, just did it as a proof-of-concept.)

## Install

Get it with pip:

    $ pip install zoomascii

Or clone it from github and install manually:

    $ git clone https://github.com/samtregar/zoomascii.git
    $ cd zoomascii
    $ sudo python setup.py install

## Example Code

    import zoomascii

    # encode data as QP, zoom style
    encoded_data = zoomascii.b2a_qp(text_data)

    # optionally turn off encoding leading periods, which is nice
    # for SMTP but probably your lib already handles this
    encoded_data = zoomascii.b2a_qp(text_data, encode_leading_dot=False)

    # swapcase so fast
    text = zoomascii.swapcase(text)

## Benchmarks

![Benchmark Chart](http://i.imgur.com/QBV9z7h.png)

You can find the benchmark script in bin/bench.py. It runs each encoder across a series of sample files in data/ and the results are runs per second. The total input size for each run is 472k. The quopri module is being forced to use its Python implementation rather than binascii, which it will use if installed.

## Implementation Notes

The implementation of b2a_qp follows the specification for Quoted-Printable encoding in RFC 2045 (https://www.ietf.org/rfc/rfc2045.txt). The only exception is that periods at the beginning of lines are always encoded, which is useful for SMTP and allowed by the spec.

The implementation does not exactly match binascii's implementation - in particular it does not attempt to pass through CR or LF characters that are not part of CRLF pairs. In my reading of the specification that is illegal, although decoders don't appear to care either way.

No attempt has been made to provide a non-text mode of operation with respect to CRLF handling. I can't imagine why anyone would be using QP encoding with non-text data, but if you do want that then you shouldn't use this module.
zoomascii
/zoomascii-0.8.tar.gz/zoomascii-0.8/README.md
README.md
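The "around 3x faster" claim for zoomascii's `b2a_qp` is easy to sanity-check with `timeit`. A rough, hedged micro-benchmark (the input below is synthetic, so expect different ratios than the README's corpus-based numbers):

```python
# Rough micro-benchmark comparing zoomascii.b2a_qp with binascii.b2a_qp.
# The input is synthetic; the README's figures come from the larger corpus
# driven by bin/bench.py, so absolute numbers and ratios will differ.
from __future__ import print_function
import binascii
import timeit

import zoomascii

data = b"foo = bar, trailing space \r\nplain ascii text.\r\n" * 2000

zoom_time = timeit.timeit(lambda: zoomascii.b2a_qp(data), number=200)
std_time = timeit.timeit(lambda: binascii.b2a_qp(data), number=200)

print("zoomascii: %.3fs  binascii: %.3fs  speedup: %.1fx"
      % (zoom_time, std_time, std_time / zoom_time))
```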
# zoombot

[![PyPI](https://img.shields.io/pypi/v/zoombot)](https://pypi.org/project/zoombot/) [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/zoombot)](https://pypi.org/project/zoombot/) [![PyPI License](https://img.shields.io/pypi/l/zoombot)](https://pypi.org/project/zoombot/) [![Code Style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black/)

Python wrapper for Zoom Chatbot API

### Usage

```python
from zoombot.core import Bot, Message


class MyBot(Bot):
    async def on_message(self, message: Message):
        await message.reply("Hello", f"You sent {message.content}")


if __name__ == "__main__":
    bot = MyBot(
        client_id="CLIENT_ID",
        client_secret="CLIENT_SECRET",
        bot_jid="BOT_JID",
        verification_token="VERIFICATION_TOKEN",
    )
    bot.run()
```
zoombot
/zoombot-0.1.2.tar.gz/zoombot-0.1.2/README.md
README.md
from zoomconnect_sdk.base_client import BaseClient class Client(BaseClient): """ Python SDK for the ZoomConnect.com API. Example usage: from zoomconnect.client import Client c = Client(api_token='api_token', account_email='account_email') try: message = c.send_sms("0000000000", "Welcome to ZoomConnect") except Exception as e: print(e) else: print(res) """ # account : Core information related to your account def get_account_balance(self): """Makes a call to GET api/rest/v1/account/balance. Returns your account's credit balance https://www.zoomconnect.com/interactive-api/#!/account/getBalance """ return self.do('GET', '/api/rest/v1/account/balance', req=None) def get_account_statistics(self): """Makes a call to GET /api/rest/v1/account/statistics. Returns data from the statistics report. Note that by default the statistics shown are based on the number of messages, use the calculateCreditValue should you wish to calculate the statistics based on credit value. https://www.zoomconnect.com/interactive-api/#!/account/getStatistics """ return self.do('GET', '/api/rest/v1/account/statistics', req=None) def account_transfer(self, transferToEmail, numberOfCredits, transferFromEmail): """Makes a call to POST /api/rest/v1/account/transfer. Transfers credits between two users in the same team. The account email address fields as well as the number of credits to transfer are required. https://www.zoomconnect.com/interactive-api/#!/account/getStatistics """ isInt, numberOfCredits = self.isInt(numberOfCredits) if isInt is False: raise Exception(f"numberOfCredits is not a valid number") req = { 'transferToEmailAddress': transferToEmail, 'numberOfCreditsToTransfer': numberOfCredits, 'transferFromEmailAddress': transferFromEmail } return self.do('POST', '/api/rest/v1/account/transfer', req=req) def get_account_user_by_email(self, email): """Makes a call to GET api/rest/v1/account/user. Find a user for a particular email address https://www.zoomconnect.com/interactive-api/#!/account/search """ return self.do('GET', '/api/rest/v1/account/user', req=None, param={"searchEmail": email}) def create_account_user(self, firstName, lastName, password, emailAddress, contactNumber, company=None, userId=None, creditBalance=None): """Makes a call to PUT /api/rest/v1/account/user. Creates a new sub-account in your team. The following fields are required firstname, lastname, email address, contact number and password. https://www.zoomconnect.com/interactive-api/#!/account/create """ if creditBalance: isInt, creditBalance = self.isInt(creditBalance) if isInt is False: raise Exception(f"creditBalance is not a valid number") req = { "firstName": firstName, "lastName": lastName, "password": password, "emailAddress": emailAddress, "contactNumber": contactNumber, "company": company, "userId": userId, "creditBalance": creditBalance } return self.do('PUT', '/api/rest/v1/account/user', req=req) def update_account_user(self, userId, firstName, lastName, password, contactNumber): """Makes a call to POST /api/rest/v1/account/user/{userId}. Updates a sub-account in your team. The following fields can be updated firstname, lastname, contact number and password. https://www.zoomconnect.com/interactive-api/#!/account/update """ req = { "firstName": firstName, "lastName": lastName, "password": password, "contactNumber": contactNumber, "userId": int(userId) } return self.do('POST', f'/api/rest/v1/account/user/{userId}', req=req) def get_account_user_by_userId(self, userId): """Makes a call to GET api/rest/v1/account/user/{userId}. 
Gets a user from a given user id https://www.zoomconnect.com/interactive-api/#!/account/getUser """ isInt, userId = self.isInt(userId) if isInt is False: raise Exception(f"userId is not a valid number") else: return self.do('GET', f'/api/rest/v1/account/user/{userId}') # sms : Send and schedule messages def get_sms(self): """Makes a call to GET /api/rest/v1/sms/send. Returns an example of the data to POST to send a single message. https://www.zoomconnect.com/interactive-api/#!/sms/send """ return self.do('GET', '/api/rest/v1/sms/send') def send_sms(self, recipientNumber, message, campaign=None, dateToSend=None, dataField=None): """Makes a call to POST /api/rest/v1/sms/send. Sends a single message. The recipientNumber and message fields are required. All other fields are optional https://www.zoomconnect.com/interactive-api/#!/sms/send_0 """ validNumber, recipientNumber = self.testRecipientNumber(recipientNumber) if validNumber is False: raise Exception( f"recipientNumber is not a valid mobile number. recipientNumber must be numeric and length of 10 and more") req = { 'campaign': campaign, 'recipientNumber': recipientNumber, 'dateToSend': dateToSend, 'dataField': dataField, 'message': message } # 5eea0cc1c0fa4d7d7d19501e return self.do('POST', '/api/rest/v1/sms/send', req=req) def get_sms_bulk(self): """Makes a call to GET /api/rest/v1/sms/send-bulk. Returns an example of the data to POST to send multiple messages in one transaction. https://www.zoomconnect.com/interactive-api/#!/sms/sendBulk """ return self.do('GET', '/api/rest/v1/sms/send-bulk') def send_sms_bulk(self, recipientNumbers, messages, campaign=None, dateToSend=None, dataField=None, defaultDateToSend=None, messagesPerMinute=0): """Makes a call to POST /api/rest/v1/sms/send-bulk. Send multiple messages in one transaction. https://www.zoomconnect.com/interactive-api/#!/sms/send_0 """ smsRequestList = [] if isinstance(recipientNumbers, list) and isinstance(messages, list): if len(recipientNumbers) == len(messages): for number, message in zip(recipientNumbers, messages): validNumber, recipientNumber = self.testRecipientNumber(number) if validNumber is False: raise Exception( f"recipientNumber({number}) is not a valid mobile number. recipientNumber must be numeric and length of 10 and more") smsRequestList.append({"recipientNumber": number, "message": message, 'campaign': campaign, 'dateToSend': dateToSend, 'dataField': dataField}) else: raise Exception(f"recipientNumbers list and messages list are not the same length") elif isinstance(recipientNumbers, list) and messages: for number in recipientNumbers: validNumber, recipientNumber = self.testRecipientNumber(number) if validNumber is False: raise Exception( f"recipientNumber({number}) is not a valid mobile number. recipientNumber must be numeric and length of 10 and more") smsRequestList.append({"recipientNumber": number, "message": messages, 'campaign': campaign, 'dateToSend': dateToSend, 'dataField': dataField}) elif isinstance(recipientNumbers, str) and isinstance(messages, str): validNumber, recipientNumbers = self.testRecipientNumber(recipientNumbers) if validNumber is False: raise Exception( f"recipientNumber({recipientNumbers}) is not a valid mobile number. 
recipientNumber must be numeric and length of 10 and more") smsRequestList.append({"recipientNumber": recipientNumbers, "message": messages, 'campaign': campaign, 'dateToSend': dateToSend, 'dataField': dataField}) else: raise Exception(f"recipientNumbers and messages parameters doesn`t contain valid values") isInt, messagesPerMinute = self.isInt(messagesPerMinute) if isInt is False: raise Exception(f"messagesPerMinute is not a valid number") req = { "defaultDateToSend": defaultDateToSend, "sendSmsRequests": smsRequestList, "messagesPerMinute": messagesPerMinute } return self.do('POST', '/api/rest/v1/sms/send-bulk', req=req) # contacts : Manage contacts def get_contacts_all(self): """Makes a call to GET /api/rest/v1/contacts/all. Returns all contacts https://www.zoomconnect.com/interactive-api/#!/contacts/getAll """ return self.do('GET', '/api/rest/v1/contacts/all') def get_contact(self, contactId): """Makes a call to GET /api/rest/v1/contacts/{contactId}. Returns details for a single contact https://www.zoomconnect.com/interactive-api/#!/contacts/getAll """ return self.do('GET', f'/api/rest/v1/contacts/{contactId}') def create_contact(self, firstName, lastName, contactNumber, title, links=None): """Makes a call to POST /api/rest/v1/contacts/create. Creates a contact https://www.zoomconnect.com/interactive-api/#!/contacts/create """ validNumber, contactNumber = self.testRecipientNumber(contactNumber) if validNumber is False: raise Exception( f"contactNumber({contactNumber}) is not a valid mobile number. contactNumber must be numeric and length of 10 and more") req = { "firstName": firstName, "lastName": lastName, "contactNumber": contactNumber, "links": links, "title": title } return self.do('POST', '/api/rest/v1/contacts/create', req=req) def delete_contact(self, contactId): """Makes a call to DELETE /api/rest/v1/contacts/{contactId}. Delete a contact https://www.zoomconnect.com/interactive-api/#!/contacts/delete """ return self.do('DELETE', f'/api/rest/v1/contacts/{contactId}',text=True) def update_contact(self, contactId, firstName, lastName, contactNumber, title, links=None): """Makes a call to POST /api/rest/v1/contacts/{contactId}. Updates a contact https://www.zoomconnect.com/interactive-api/#!/contacts/update """ validNumber, contactNumber = self.testRecipientNumber(contactNumber) if validNumber is False: raise Exception( f"contactNumber({contactNumber}) is not a valid mobile number. contactNumber must be numeric and length of 10 and more") req = { "firstName": firstName, "lastName": lastName, "contactNumber": contactNumber, "links": links, "title": title } return self.do('POST', f'/api/rest/v1/contacts/{contactId}', req=req) def get_remove_contact_from_group(self, contactId, groupId): """Makes a call to GET /api/rest/v1/contacts/{contactId}/addFromGroup/{groupId}. Remove a contact from a group https://www.zoomconnect.com/interactive-api/#!/contacts/removeFromGroup """ return self.do('GET', f'/api/rest/v1/contacts/{contactId}/addFromGroup/{groupId}',text=True) def remove_contact_from_group(self, contactId, groupId): """Makes a call to POST /api/rest/v1/contacts/{contactId}/addFromGroup/{groupId}. Remove a contact from a group https://www.zoomconnect.com/interactive-api/#!/contacts/removeFromGroup_0 """ return self.do('POST', f'/api/rest/v1/contacts/{contactId}/addFromGroup/{groupId}',text=True) def add_contact_to_group(self, contactId, groupId): """Makes a call to GET /api/rest/v1/contacts/{contactId}/addToGroup/{groupId}. 
Add a contact to a group https://www.zoomconnect.com/interactive-api/#!/contacts/addToGroup """ return self.do('GET', f'/api/rest/v1/contacts/{contactId}/addToGroup/{groupId}',text=True) # def add_contact_to_group(self, contactId, groupId): # """Makes a call to POST /api/rest/v1/contacts/{contactId}/addToGroup/{groupId}. # Add a contact to a group # # https://www.zoomconnect.com/interactive-api/#!/contacts/addToGroup_0 # """ # return self.do('POST', f'/api/rest/v1/contacts/{contactId}/addToGroup/{groupId}') # groups: Manage groups def get_groups_all(self): """ Makes a call to GET /api/rest/v1/groups/all Returns all groups https://www.zoomconnect.com/interactive-api/#!/groups/getAll """ return self.do('GET', f'/api/rest/v1/groups/all') def get_group(self, groupId): """ Makes a call to GET /api/rest/v1/groups/{groupId} Returns details for a single group https://www.zoomconnect.com/interactive-api/#!/groups/get """ return self.do('GET', f'/api/rest/v1/groups/{groupId}') def create_group(self, name, links=None): """ Makes a call to POST /api/rest/v1/groups/create Create a group https://www.zoomconnect.com/interactive-api/#!/groups/create """ req = { "name": name, "links": links } return self.do('POST', f'/api/rest/v1/groups/create', req=req) def update_group(self, name, groupId, links=None): """ Makes a call to POST /api/rest/v1/groups/{groupId} Update a group https://www.zoomconnect.com/interactive-api/#!/groups/update """ req = { "name": name, "links": links, "groupId": groupId } return self.do('POST', f'/api/rest/v1/groups/{groupId}', req=req) def delete_group(self, groupId): """ Makes a call to DELETE /api/rest/v1/groups/{groupId} Delete a group https://www.zoomconnect.com/interactive-api/#!/groups/delete """ return self.do('DELETE', f'/api/rest/v1/groups/{groupId}',text=True) def add_group_to_contact(self, groupId, contactId): """ Makes a call to GET /api/rest/v1/groups/{groupId}/addContact/{contactId} Add a contact to a group https://www.zoomconnect.com/interactive-api/#!/groups/addContact """ return self.do('GET', f'/api/rest/v1/groups/{groupId}/addContact/{contactId}',text=True) # def add_group_to_contact(self, groupId, contactId): # """ Makes a call to POST /api/rest/v1/groups/{groupId}/addContact/{contactId} # Add a contact to a group # # https://www.zoomconnect.com/interactive-api/#!/groups/addContact_0 # """ # # return self.do('POST', f'/api/rest/v1/groups/{groupId}/addContact/{contactId}') def remove_group_from_contact(self, groupId, contactId): """ Makes a call to GET /api/rest/v1/groups/{groupId}/removeContact/{contactId} Remove a contact from a group https://www.zoomconnect.com/interactive-api/#!/groups/removeContact """ return self.do('GET', f'/api/rest/v1/groups/{groupId}/removeContact/{contactId}',text=True) # def remove_group_from_contact(self, groupId, contactId): # """ Makes a call to POST /api/rest/v1/groups/{groupId}/removeContact/{contactId} # Remove a contact from a group # # https://www.zoomconnect.com/interactive-api/#!/groups/removeContact_0 # """ # # return self.do('POST ', f'/api/rest/v1/groups/{groupId}/removeContact/{contactId}') # messages: Manage your messages def get_all_messages(self): """ Makes a call to GET /api/rest/v1/messages/all Returns all messages https://www.zoomconnect.com/interactive-api/#!/messages/getAll """ return self.do('GET', f'/api/rest/v1/messages/all') def get_message_analyses(self, message, recipientNumber): """ Makes a call to POST /api/rest/v1/messages/analyse/full Returns full analysis of message 
https://www.zoomconnect.com/interactive-api/#!/messages/analyse_full """ validNumber, recipientNumber = self.testRecipientNumber(recipientNumber) if validNumber is False: raise Exception( f"recipientNumber({recipientNumber}) is not a valid mobile number. recipientNumber must be numeric and length of {str(self.valid_mobile_number_length)} and more") req = { "message": message, "recipientNumber": recipientNumber } return self.do('POST', f'/api/rest/v1/messages/analyse/full', req=req) def get_message_credit_cost(self, message, recipientNumber): """ Makes a call to POST /api/rest/v1/messages/analyse/message-credit-cost Returns the number of credit which would be required to send the request message to the requested recipient number https://www.zoomconnect.com/interactive-api/#!/messages/analyse_message_credit_cost """ validNumber, recipientNumber = self.testRecipientNumber(recipientNumber) if validNumber is False: raise Exception( f"recipientNumber({recipientNumber}) is not a valid mobile number. recipientNumber must be numeric and length of {str(self.valid_mobile_number_length)} and more") req = { "message": message, "recipientNumber": recipientNumber } return self.do('POST', f'/api/rest/v1/messages/analyse/message-credit-cost', req=req,text=True) def get_message_encoding(self, message, recipientNumber): """ Makes a call to POST /api/rest/v1/messages/analyse/message-encoding Returns the message encoding that would be required to send the requested message https://www.zoomconnect.com/interactive-api/#!/messages/analyse_message_encoding """ validNumber, recipientNumber = self.testRecipientNumber(recipientNumber) if validNumber is False: raise Exception( f"recipientNumber({recipientNumber}) is not a valid mobile number. recipientNumber must be numeric and length of {str(self.valid_mobile_number_length)} and more") req = { "message": message, "recipientNumber": recipientNumber } return self.do('POST', f'/api/rest/v1/messages/analyse/message-encoding', req=req,text=True) def get_message_length(self, message, recipientNumber): """ Makes a call to POST /api/rest/v1/messages/analyse/message-length Returns the number of characters the requested message consists of https://www.zoomconnect.com/interactive-api/#!/messages/analyse_message_length """ validNumber, recipientNumber = self.testRecipientNumber(recipientNumber) if validNumber is False: raise Exception( f"recipientNumber({recipientNumber}) is not a valid mobile number. recipientNumber must be numeric and length of {str(self.valid_mobile_number_length)} and more") req = { "message": message, "recipientNumber": recipientNumber } return self.do('POST', f'/api/rest/v1/messages/analyse/message-length', req=req,text=True) def check_message_length_within_max(self, message, recipientNumber): """ Makes a call to POST /api/rest/v1/messages/analyse/message-length-within-max-allowed Returns details for a single message https://www.zoomconnect.com/interactive-api/#!/messages/analyse """ validNumber, recipientNumber = self.testRecipientNumber(recipientNumber) if validNumber is False: raise Exception( f"recipientNumber({recipientNumber}) is not a valid mobile number. 
recipientNumber must be numeric and length of {str(self.valid_mobile_number_length)} and more") req = { "message": message, "recipientNumber": recipientNumber } return self.do('POST', f'/api/rest/v1/messages/analyse/message-length-within-max-allowed', req=req,text=True) def get_number_of_messages(self, message, recipientNumber): """ Makes a call to POST /api/rest/v1/messages/analyse/number-of-messages Returns the number of SMS parts which would be sent when sending the requested message https://www.zoomconnect.com/interactive-api/#!/messages/analyse_number_of_messages """ validNumber, recipientNumber = self.testRecipientNumber(recipientNumber) if validNumber is False: raise Exception( f"recipientNumber({recipientNumber}) is not a valid mobile number. recipientNumber must be numeric and length of {str(self.valid_mobile_number_length)} and more") req = { "message": message, "recipientNumber": recipientNumber } return self.do('POST', f'/api/rest/v1/messages/analyse/number-of-messages', req=req,text=True) def get_message(self, messageId): """ Makes a call to GET /api/rest/v1/messages/{messageId} Returns details for a single message https://www.zoomconnect.com/interactive-api/#!/messages/get """ return self.do('GET', f'/api/rest/v1/messages/{messageId}') def delete_message(self, messageId): """ Makes a call to DELETE /api/rest/v1/messages/{messageId} Deletes a message https://www.zoomconnect.com/interactive-api/#!/messages/delete """ return self.do('DELETE', f'/api/rest/v1/messages/{messageId}',text=True) def mark_message_as_read(self, messageId): """ Makes a call to PUT /api/rest/v1/messages/{messageId}/markRead Marks a message as read https://www.zoomconnect.com/interactive-api/#!/messages/markRead """ return self.do('PUT', f'/api/rest/v1/messages/{messageId}/markRead') # def post_mark_message_as_read(self, messageId): # """ Makes a call to POST /api/rest/v1/messages/{messageId}/markRead # # Marks a message as read # # https://www.zoomconnect.com/interactive-api/#!/messages/markRead_0 # """ # # return self.do('POST', f'/api/rest/v1/messages/{messageId}/markRead') def mark_message_as_unread(self, messageId): """ Makes a call to PUT /api/rest/v1/messages/{messageId}/markUnread Marks a message as read https://www.zoomconnect.com/interactive-api/#!/messages/markUnread """ return self.do('PUT', f'/api/rest/v1/messages/{messageId}/markUnread') # def post_mark_message_as_unread(self, messageId): # """ Makes a call to POST /api/rest/v1/messages/{messageId}/markUnread # # Marks a message as read # # https://www.zoomconnect.com/interactive-api/#!/messages/markUnread_0 # """ # # return self.do('POST', f'/api/rest/v1/messages/{messageId}/markUnread') # templates : Manage your templates def get_all_templates(self): """ Makes a call to GET /api/rest/v1/templates/all Returns all templates https://www.zoomconnect.com/interactive-api/#!/templates/getAll """ return self.do('GET', f'/api/rest/v1/templates/all') def get_template(self, templateId): """ Makes a call to GET /api/rest/v1/templates/{templateId} Returns details for a single template https://www.zoomconnect.com/interactive-api/#!/templates/get """ return self.do('GET', f'/api/rest/v1/templates/{templateId}') def delete_template(self, templateId): """ Makes a call to DELETE /api/rest/v1/templates/{templateId} Returns details for a single template https://www.zoomconnect.com/interactive-api/#!/templates/delete """ return self.do('DELETE', f'/api/rest/v1/templates/{templateId}',text=True) # Pre flight static methods @staticmethod def isInt(i): try: return 
True, int(i) except ValueError: return False, i def testRecipientNumber(self, number): try: number = str(number).replace("+", "") int(number) if len(str(number)) >= self.valid_mobile_number_length: return True, number else: return False, number except Exception: return False, number
zoomconnect-sdk
/zoomconnect_sdk-0.0.3-py3-none-any.whl/zoomconnect_sdk/client.py
client.py
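Putting the ZoomConnect client above together: a hedged end-to-end sketch. The token and email are placeholders, the numbers are dummies that merely satisfy the 10-digit check in `testRecipientNumber()`, and the import path follows the wheel layout shown above (`zoomconnect_sdk`), which differs from the older path in the class docstring.

```python
# Hedged usage sketch for the ZoomConnect SDK client defined above.
# api_token / account_email are placeholders; the numbers below only need
# to pass the length validation and are not real recipients.
from zoomconnect_sdk.client import Client

client = Client(api_token='api_token', account_email='account_email')

# Check remaining credits before sending anything.
print(client.get_account_balance())

# Single message.
client.send_sms("0000000000", "Welcome to ZoomConnect")

# Bulk send: either parallel lists (numbers[i] gets messages[i]) or, as here,
# one message broadcast to every number in the list.
client.send_sms_bulk(
    recipientNumbers=["0000000001", "0000000002"],
    messages="Same message for everyone",
    messagesPerMinute=10,
)
```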
import json, platform, requests, six, urllib.parse try: from json.decoder import JSONDecodeError except ImportError: JSONDecodeError = ValueError from . import VERSION from .error import APIError DEFAULT_BASE_URL = 'https://www.zoomconnect.com/app/' DEFAULT_TIMEOUT = 10 PYTHON_VERSION = platform.python_version() SYSTEM = platform.system() ARCH = platform.machine() class BaseClient: def __init__(self, base_url='', timeout=0, api_token='', account_email='', valid_mobile_number_length=10): """ :type base_url: str :type timeout: float :type api_token: str :type account_email: str """ self.set_auth(api_token, account_email) self.set_base_url(base_url) self.set_timeout(timeout) self.valid_mobile_number_length = valid_mobile_number_length self.session = requests.Session() def set_auth(self, api_token, account_email): """Provides the client with an API token and account email. :type api_token: str :type account_email: str """ self.api_token = api_token self.account_email = account_email def set_base_url(self, base_url): """Overrides the default base URL. For internal use. :type base_url: str """ if base_url == '': base_url = DEFAULT_BASE_URL self.base_url = base_url.rstrip('/') def set_timeout(self, timeout): """Sets the timeout, in seconds, for requests made by the client. :type timeout: float """ if timeout == 0: timeout = DEFAULT_TIMEOUT self.timeout = timeout def do(self, method, path, req=None, param=None, text=False): """Performs an API request and returns the response. :type method: str :type path: str :type req: dict :type param: dict :type text: bool :return JSON body from API endpoint """ try: body = json.dumps(req) except Exception: body = None res = self.session.request(method, self.make_url(path,param), headers={ "Content-Type": "application/json", "Accept": "application/json", "User-Agent": self.make_user_agent() }, data=body, timeout=self.timeout) try: if text and res.status_code == 200: # e = res.json() str object not callable return True elif text: return False else: e = res.json() if 'error' in e and 'error_code' in e: raise APIError(e['error_code'], e['error']) return e except JSONDecodeError: raise Exception(f'zoomconnect: Response error ({res.status_code})') except Exception as e: raise Exception(f'zoomconnect: API error ({res.status_code} - {e})') def make_url(self, path, params): """ :type path: str :type params: dict :return API endpoint URL """ if params: params.update({"token":self.api_token, "email": self.account_email}) else: params = {"token": self.api_token, "email": self.account_email} return f"{self.base_url}{path}?{'&'.join([f'{k}={urllib.parse.quote(v)}' for k, v in six.iteritems(params)])}" def make_user_agent(self): """ :return: user agent string """ return f"ZoomConnectPythonSDK/{VERSION} python/{PYTHON_VERSION} {SYSTEM} {ARCH}"
zoomconnect-sdk
/zoomconnect_sdk-0.0.3-py3-none-any.whl/zoomconnect_sdk/base_client.py
base_client.py
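`make_url()` is where authentication happens: the API token and account email are appended to every request as query parameters. A small illustration of the resulting URL shape (values are placeholders, and parameter order may vary):

```python
# Illustration of how BaseClient.make_url() builds request URLs: the token
# and email are always appended as query parameters. Values are placeholders.
from zoomconnect_sdk.base_client import BaseClient

bc = BaseClient(api_token='my-token', account_email='me@example.com')
print(bc.make_url('/api/rest/v1/account/balance', params=None))
# Expected shape (parameter order may differ):
# https://www.zoomconnect.com/app/api/rest/v1/account/balance?token=my-token&email=me%40example.com
```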
"""Define useful methods to be used globally.""" from http.cookiejar import MozillaCookieJar import collections import io import sys class ZoomdlCookieJar(MozillaCookieJar): """ Code freely adapted from Youtube-DL's YoutubeCookieJar https://github.com/ytdl-org/youtube-dl/ For file format, see https://curl.haxx.se/docs/http-cookies.html """ _HTTPONLY_PREFIX = '#HttpOnly_' _ENTRY_LEN = 7 _CookieFileEntry = collections.namedtuple( 'CookieFileEntry', ('domain_name', 'include_subdomains', 'path', 'https_only', 'expires_at', 'name', 'value')) def load(self, filename=None, ignore_discard=True, ignore_expires=True): """Load cookies from a file.""" if filename is None: if self.filename is not None: filename = self.filename else: raise ValueError() def prepare_line(line): # print("Prepping line '{}'".format(line)) if line.startswith(self._HTTPONLY_PREFIX): line = line[len(self._HTTPONLY_PREFIX):] # comments and empty lines are fine if line.startswith('#') or not line.strip(): return line cookie_list = line.split('\t') if len(cookie_list) != self._ENTRY_LEN: raise ValueError('invalid length %d' % len(cookie_list)) cookie = self._CookieFileEntry(*cookie_list) if cookie.expires_at and not cookie.expires_at.isdigit(): raise ValueError('invalid expires at %s' % cookie.expires_at) return line cf = io.StringIO() with io.open(filename, encoding='utf-8') as f: for line in f: try: cf.write(prepare_line(line)) except ValueError as e: print( 'WARNING: skipping cookie file entry due to %s: %r\n' % (e, line), sys.stderr) continue cf.seek(0) self._really_load(cf, filename, ignore_discard, ignore_expires) # Session cookies are denoted by either `expires` field set to # an empty string or 0. MozillaCookieJar only recognizes the former # (see [1]). So we need force the latter to be recognized as session # cookies on our own. # Session cookies may be important for cookies-based authentication, # e.g. usually, when user does not check 'Remember me' check box while # logging in on a site, some important cookies are stored as session # cookies so that not recognizing them will result in failed login. # 1. https://bugs.python.org/issue17164 for cookie in self: # Treat `expires=0` cookies as session cookies if cookie.expires == 0: cookie.expires = None cookie.discard = True
zoomdl-2
/zoomdl_2-1970.1.1-py3-none-any.whl/zoom_dl/cookiejar.py
cookiejar.py
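`ZoomdlCookieJar` is consumed by zoomdl by loading a Netscape/curl-format `cookies.txt` and merging it into a `requests` session. The same pattern in isolation (the file path is a placeholder for a browser-exported cookie file):

```python
# Loading a Netscape/curl-format cookies.txt into a requests session with
# the ZoomdlCookieJar above, mirroring what ZoomDL.__init__ does.
# "cookies.txt" is a placeholder path.
import requests
from zoom_dl.cookiejar import ZoomdlCookieJar

session = requests.Session()

jar = ZoomdlCookieJar("cookies.txt")
jar.load()                        # skips malformed lines, keeps session cookies
session.cookies.update(jar)       # merge into the requests session

print(len(session.cookies), "cookies loaded")
```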
"""Define the main ZoomDL class and its methods.""" import os import re import sys import demjson import requests from tqdm import tqdm from zoom_dl.cookiejar import ZoomdlCookieJar class ZoomDL(): """Class for ZoomDL.""" def __init__(self, args): """Init the class.""" self.args = args self.loglevel = args.log_level self.page = None self.url, self.domain, self.subdomain = "", "", "" self.metadata = None self.session = requests.session() self.loglevel = self.args.log_level if self.args.cookies: cookiejar = ZoomdlCookieJar(self.args.cookies) cookiejar.load() self.session.cookies.update(cookiejar) def _print(self, message, level=0): """Print to console, if level is sufficient. This is meant to override the default print. When you need to print something, you use this and specify a level. If the level is sufficient, it will be printed, otherwise it will be discarded. Levels are: * 0: Debug * 1: Info * 2: Warning * 3: Errors * 4: Critical * 5: Quiet (nothing to print) By default, only display Info and higher. Don't input level > 5 Args: level (int, optional): Level of verbosity of the message. Defaults to 2. """ if level < 5 and level >= self.loglevel: print(message) def _change_page(self, url): """Change page, with side methods.""" self._print("Changing page to {}".format(url), 0) self.page = self.session.get(url) self.check_captcha() def get_page_meta(self): """Get metadata by trying multiple ways.""" # default case text = self.page.text meta = dict(re.findall(r'type="hidden" id="([^"]*)" value="([^"]*)"', text)) # if javascript was correctly loaded, look for injected metadata meta2_match = re.search("window.__data__ = ({(?:.*\n)*});", self.page.text) if meta2_match is not None: try: meta2 = demjson.decode(meta2_match.group(1)) except demjson.JSONDecodeError: self._print("[WARNING] Error with the meta parsing. This " "should not be critical. Please contact a dev.", 2) meta.update(meta2) else: self._print("Advanced meta failed", 2) # self._print(self.page.text) self._print("Metas are {}".format(meta), 0) if len(meta) == 0: self._print("Unable to gather metadata in page") return None if "viewMp4Url" not in meta: self._print("No video URL in meta, going bruteforce", 2) vid_url_match = re.search((r"source src=[\"']" "(https?://ssrweb[^\"']+)[\"']"), text) if vid_url_match is None: self._print("[ERROR] Video not found in page. " "Is it login-protected? 
", 4) self._print( "Try to refresh the webpage, and export cookies again", 4) return None meta["url"] = vid_url_match.group(1) return meta def download_vid(self, fname, clip=None): """Download one recording, and save it at fname.""" all_urls = {self.metadata.get("viewMp4Url"), self.metadata.get("url"), self.metadata.get("shareMp4Url")} try: all_urls.remove(None) except KeyError: pass if len(all_urls) > 1: self._print("Found {} screens, downloading all of them".format(len(all_urls)), 1) for vid_num, vid_url in enumerate(all_urls): extension = vid_url.split("?")[0].split("/")[-1].split(".")[1] name = (self.metadata.get("topic") or self.metadata.get("r_meeting_topic")).replace(" ", "_") if (self.args.filename_add_date and self.metadata.get("r_meeting_start_time")): name = name + "-" + self.metadata.get("r_meeting_start_time") self._print("Found name is {}, extension is {}" .format(name, extension), 0) name = name if clip is None else "{}-clip{}".format(name, clip) if len(all_urls) > 1: name += f"screen{vid_num}" filepath = get_filepath(fname, name, extension) filepath_tmp = filepath + ".part" self._print("Full filepath is {}, temporary is {}".format( filepath, filepath_tmp), 0) self._print("Downloading '{}'...".format( filepath.split("/")[-1]), 1) vid_header = self.session.head(vid_url) total_size = int(vid_header.headers.get('content-length')) # unit_int, unit_str = ((1024, "KiB") if total_size < 30*1024**2 # else (1024**2, "MiB")) start_bytes = int(os.path.exists(filepath_tmp) and os.path.getsize(filepath_tmp)) if start_bytes > 0: self._print("Incomplete file found ({:.2f}%), resuming..." .format(100*start_bytes/total_size), 1) headers = {"Range": "bytes={}-".format(start_bytes)} vid = self.session.get(vid_url, headers=headers, stream=True) if vid.status_code in [200, 206] and total_size > 0: with open(filepath_tmp, "ab") as f, tqdm(total=total_size, unit='B', initial=start_bytes, dynamic_ncols=True, unit_scale=True, unit_divisor=1024) as pbar: for data in vid.iter_content(1024): if data: pbar.update(len(data)) f.write(data) f.flush() self._print("Done!", 1) os.rename(filepath_tmp, filepath) else: self._print( "Woops, error downloading: '{}'".format(vid_url), 3) self._print("Status code: {}, file size: {}".format( vid.status_code, total_size), 0) sys.exit(1) def download(self, all_urls): """Exposed class to download a list of urls.""" for url in all_urls: self.url = url try: regex = r"(?:https?:\/\/)?([^.]*\.?)(zoom[^.]*\.(?:us|com))" self.subdomain, self.domain = re.findall(regex, self.url)[0] except IndexError: self._print("Unable to extract domain and subdomain " "from url {}, exitting".format(self.url), 4) sys.exit(1) self.session.headers.update({ # set referer 'referer': "https://{}{}/".format(self.subdomain, self.domain), }) if self.args.user_agent is None: if self.args.filename_add_date: self._print("Forcing custom UA to have the date") # if date is required, need invalid UA # 'invalid' User-Agent ua = "ZoomDL http://github.com/battleman/zoomdl" else: self._print("Using standard Windows UA") # somehow standard User-Agent ua = ("Mozilla/5.0 (Windows NT 10.0; Win64; x64) " "AppleWebKit/537.36 (KHTML, like Gecko) " "Chrome/74.0.3729.169 Safari/537.36") else: ua = self.args.user_agent self.session.headers.update({ "User-Agent": ua }) self._change_page(url) if self.args.password is not None: self.authenticate() self.metadata = self.get_page_meta() if self.metadata is None: self._print("Unable to find metadata, aborting.", 4) return None total_clips = self.metadata["totalClips"] 
current_clip = self.metadata["currentClip"] count_clips = self.args.count_clips filename = self.args.filename if count_clips == 1: # only download this self.download_vid(filename) else: # download multiple if count_clips == 0: to_download = total_clips # download this and all nexts else: # download as many as asked (or possible) to_download = min(count_clips, total_clips) for clip in range(current_clip, to_download+1): self.download_vid(filename, clip) url = self.page.url next_time = str(self.metadata["nextClipStartTime"]) curr_time = str(self.metadata["clipStartTime"]) if curr_time in url: url = url.replace(curr_time, next_time) else: url += "&startTime={}".format(next_time) self._change_page(url) def check_captcha(self): """Check whether or not a page is protected by CAPTCHA. TO BE IMPLEMENTED!! """ self._print("Checking CAPTCHA", 0) captcha = False # FIXME if captcha: self._print("The page {} is captcha-protected. Unable to download" .format(self.page.url)) sys.exit(1) def authenticate(self): # that shit has a password # first look for the meet_id self._print("Using password '{}'".format(self.args.password)) meet_id_regex = re.compile("<input[^>]*") input_tags = meet_id_regex.findall(self.page.text) meet_id = None for inp in input_tags: input_split = inp.split() if input_split[2] == 'id="meetId"': meet_id = input_split[3][7:-1] break if meet_id is None: self._print("[CRITICAL]Unable to find meetId in the page", 4) if self.loglevel > 0: self._print("Please re-run with option -v 0 " "and report it " "to http://github.com/battleman/zoomdl", 4) self._print("\n".join(input_tags), 0) sys.exit(1) # create POST request data = {"id": meet_id, "passwd": self.args.password, "action": "viewdetailpage"} check_url = ("https://{}{}/rec/validate_meet_passwd" .format(self.subdomain, self.domain)) self.session.post(check_url, data=data) self._change_page(self.url) # get as if nothing def confirm(message): """ Ask user to enter Y or N (case-insensitive). Inspired and adapted from https://gist.github.com/gurunars/4470c97c916e7b3c4731469c69671d06 `return` {bool} True if the answer is Y. """ answer = None while answer not in ["y", "n", ""]: answer = input(message + " Continue? [y/N]: ").lower() # nosec return answer == "y" def get_filepath(user_fname, file_fname, extension): """Create an filepath.""" if user_fname is None: basedir = os.getcwd() # remove illegal characters name = os.path.join(basedir, re.sub( r"[/\\\?*:\"|><]+", "_", file_fname)) else: name = os.path.abspath(user_fname) filepath = "{}.{}".format(name, extension) # check file doesn't already exist if os.path.isfile(filepath): if not confirm("File {} already exists. This will erase it" .format(filepath)): sys.exit(0) os.remove(filepath) return filepath
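# --- Example usage (not part of the original module) ---
# A minimal sketch of driving ZoomDL programmatically instead of via the CLI.
# The attribute names mirror what parser.py defines; the URL is a placeholder
# and this is an illustrative sketch, not an official entry point.
def _example_programmatic_download():
    import argparse
    args = argparse.Namespace(
        url="https://zoom.us/rec/play/example",  # placeholder recording URL
        filename=None,             # let zoomdl derive the name from the meeting topic
        filename_add_date=False,
        user_agent=None,
        password=None,
        count_clips=1,
        log_level=1,               # info and above
        cookies=None,              # or a path to a Netscape cookies.txt
    )
    downloader = ZoomDL(args)
    downloader.download([args.url])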
zoomdl-2
/zoomdl_2-1970.1.1-py3-none-any.whl/zoom_dl/zoomdl.py
zoomdl.py
"""Provide parsing method for the command line arguments.""" import argparse import os def _check_positive(value): """Ensure a given value is a positive integer.""" int_value = int(value) if int_value < 0: raise argparse.ArgumentTypeError("%s is an invalid positive int value" % value) return int_value def _valid_path(value): if not (os.path.exists(value) and os.path.isfile(value)): raise argparse.ArgumentTypeError("%s doesn't seem to be a valid file." % value) return value def parseOpts(): """Parse command line arguments. Returns: argparse.Namespace: Namespace of the parsed arguments. """ PARSER = argparse.ArgumentParser( description="Utility to download zoom videos", prog="zoomdl", formatter_class=(lambda prog: argparse.HelpFormatter(prog, max_help_position=10, width=200) )) PARSER.add_argument("-u", "--url", help=("Enter the url of the video to download. " "Looks like 'zoom.us/rec/play/...'"), type=str, required=True, metavar="url") PARSER.add_argument("-f", "--filename", help=("The name of the output video file without " "extension. Default to the filename according " "to Zoom. Extension is automatic."), metavar="filename") PARSER.add_argument("-d", "--filename-add-date", help=("Add video meeting date if it is specified. " "Default is not to include the date."), default=False, action='store_true') PARSER.add_argument("--user-agent", help=("Use custom user agent." "Default is real browser user agent."), type=str) PARSER.add_argument("-p", "--password", help="Password of the video (if any)", metavar="password") PARSER.add_argument("-c", "--count-clips", help=("If multiple clips, how many to download. " "1 = only current URL (default). " "0 = all of them. " "Other positive integer, count of clips to " "download, starting from the current one"), metavar="Count", type=_check_positive, default=1) PARSER.add_argument("-v", "--log-level", help=("Chose the level of verbosity. 0=debug, 1=info " "(default), 2=warning 3=Error, 4=Critical, " "5=Quiet (nothing printed)"), metavar="level", type=int, default=1) PARSER.add_argument("--cookies", help="Provide a Netscape-format cookies file", metavar="cookies.txt", type=_valid_path, required=False) return PARSER.parse_args()
zoomdl-2
/zoomdl_2-1970.1.1-py3-none-any.whl/zoom_dl/parser.py
parser.py
# zoome ### Getting Started `$ pip install zoome` #### create ZoomClient object ```python from zoome.api import ZoomClient zc = ZoomClient(api_key='<api_key>', secret_api_key='<secret_api_key>') ``` ##### or ```python from zoome.api import ZoomClient zc = ZoomClient(jwt_token='<jwt_token>') ``` #### get meetings list ```python meetings = zc.get_meetings_list() ``` #### download file ```python zc.download_file(full_path='<full_path>', url='<url>') ``` ---- ### Utils #### get download urls from list of meetings ```python from zoome.api import ZoomClient from zoome.utils import get_meetings_download_urls zc = ZoomClient(jwt_token='<jwt_token>') meetings = zc.get_meetings_list() links = get_meetings_download_urls(meetings) ``` ##### links: ```json [ [ { "download_url": "<download_url>", "recording_type": "<recording_type>", "file_type": "<file_type>" }, ... ], ... ] ``` #### get download links from one meeting ```python from zoome.api import ZoomClient from zoome.utils import get_download_urls_from_meeting zc = ZoomClient(jwt_token='<jwt_token>') meetings = zc.get_meetings_list() links = get_download_urls_from_meeting(meetings[0]) ``` ##### links: ```json [ { "download_url": "<download_url>", "recording_type": "<recording_type>", "file_type": "<file_type>" }, ... ] ```
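The pieces above compose into a small download loop. The following is a hedged sketch: it relies on the `download_url` and `file_type` keys shown in the JSON above, while the local directory and file naming are purely illustrative.

```python
import os
from zoome.api import ZoomClient
from zoome.utils import get_meetings_download_urls

zc = ZoomClient(jwt_token='<jwt_token>')
meetings = zc.get_meetings_list()

os.makedirs('recordings', exist_ok=True)
# One list of download descriptors per meeting, as shown above
for i, links in enumerate(get_meetings_download_urls(meetings)):
    for j, link in enumerate(links):
        # Local name is illustrative; the extension comes from the descriptor
        full_path = os.path.join('recordings', f'meeting_{i}_{j}.{link["file_type"].lower()}')
        zc.download_file(full_path=full_path, url=link['download_url'])
```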
zoome
/zoome-0.0.5.tar.gz/zoome-0.0.5/README.md
README.md
ZoomEye API ============== ZoomEye is a search engine for cyberspace that lets the user find specific network components (ip, services, etc.). ZoomEye API is a web service that provides convenient access to ZoomEye features, data and information over HTTPS. The platform API empowers developers to automate, extend and connect with ZoomEye. You can use the ZoomEye platform API to programmatically create apps, provision add-ons and perform automated tasks. Just imagine the amazing things you could do with ZoomEye. How to install ZoomEye SDK ------------------------------ :: $ sudo easy_install zoomeye-SDK or :: $ sudo pip install git+https://github.com/knownsec/ZoomEye.git How to use ZoomEye SDK ------------------------------ locate zoomeye.py, and try to execute it as follows: :: # use API-KEY $ python zoomeye.py ZoomEye API-KEY(If you don't use API-KEY , Press Enter): 3******f-b**9-a***c-3**5-28******fd8 ZoomEye Username: ZoomEye Password: {'plan': 'developer', 'resources': {'search': 9360, 'stats': 100, 'interval': 'month'}} ec2-1*7-**-***-116.compute-1.amazonaws.com ['1*7.**.***.116'] myh****life.com ['**.35.*.5'] ... 113.**.**.161 1611 113.**.***.63 1611 or :: # use username and password to login $ python zoomeye.py ZoomEye API-KEY(If you don't use API-KEY , Press Enter): ZoomEye Username: [email protected] ZoomEye Password: {'plan': 'developer', 'resources': {'search': 9280, 'stats': 100, 'interval': 'month'}} ec2-1*7-**-***-116.compute-1.amazonaws.com ['1*7.**.***.116'] myh****life.com ['**.35.*.5'] ... 113.***.*.35 1611 113.***.**.162 1611 zoomeye.py can also be used as a library. You can choose to log in with your account Username and Password or use API-KEY to search. API-KEY can be found at https://www.zoomeye.org/profile. ex: :: >>> print(zoomeye.ZoomEye(username=username, password=password).login()) or >>> zm = zoomeye.ZoomEye(api_key="3******f-b**9-a***c-3**5-28******fd8") :: $ python3 Python 3.8.5 (default, Aug 19 2020, 14:11:20) [Clang 11.0.3 (clang-1103.0.32.62)] on darwin Type "help", "copyright", "credits" or "license" for more information. >>> import zoomeye >>> dir(zoomeye) ['ZoomEye', '__author__', '__builtins__', '__cached__', '__classes__', '__description__', '__doc__', '__file__', '__funcs__', '__license__', '__loader__', '__name__', '__package__', '__spec__', '__version__', 'getpass', 'raw_input', 'requests', 'show_ip_port', 'show_site_ip', 'sys', 'zoomeye_api_test'] >>> # Use username and password to login >>> zm = zoomeye.ZoomEye() >>> zm.username = '[email protected]' >>> zm.password = 'password' >>> print(zm.login()) ....JIUzI1NiIsInR5cCI6IkpXVCJ9.....
>>> data = zm.dork_search('apache country:cn') >>> zoomeye.show_site_ip(data) 213.***.***.46.rev.vo***one.pt ['46.***.***.213'] me*****on.o****e.net.pg ['203.***.***.114'] soft********63221110.b***c.net ['126.***.***.110'] soft********26216022.b***c.net ['126.***.***.22'] soft********5084068.b***c.net ['126.***.***.68'] soft********11180040.b***c.net ['126.***.***.40'] >>> # Use API-KEY >>> zm = zoomeye.ZoomEye(api_key="3******f-b**9-a***c-3**5-28******fd8") >>> data = zm.dork_search('apache country:cn') >>> zoomeye.show_site_ip(data) 213.***.***.46.rev.vo***one.pt ['46.***.***.213'] me*****on.o****e.net.pg ['203.***.***.114'] soft********63221110.b***c.net ['126.***.***.110'] soft********26216022.b***c.net ['126.***.***.22'] soft********5084068.b***c.net ['126.***.***.68'] soft********11180040.b***c.net ['126.***.***.40'] How to use ZoomEye API ------------------------------ 1) Authenticate If you have a valid ZoomEye credential (username and password), use it for authentication. :: curl -XPOST https://api.zoomeye.org/user/login -d '{ "username": "[email protected]", "password": "foobar" }' 2) ZoomEye Dorks When everything goes OK, you can search ZoomEye Dorks with your ZoomEye API token. :: curl -X GET https://api.zoomeye.org/host/search?query="port:21"&page=1&facet=app,os \ -H "Authorization: JWT eyJhbGciOiJIUzI1NiIsInR5..." If you want more, please see the ZoomEye API References. Change Log ------------------------------ v1.0.6 (10 Nov 2020): Add API-KEY usage; change default search resource type to "host" Links ------------------------------ https://www.zoomeye.org/ https://www.zoomeye.org/doc
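The two curl calls above translate directly to Python. The following is a hedged sketch using ``requests`` with placeholder credentials; the endpoint URLs and the ``Authorization: JWT`` header come from the examples above, everything else is illustrative. ::

    import requests

    # 1) Authenticate with username/password to obtain a JWT access token
    resp = requests.post("https://api.zoomeye.org/user/login",
                         json={"username": "user@example.com", "password": "foobar"})
    token = resp.json()["access_token"]

    # 2) Search ZoomEye Dorks with the token
    resp = requests.get("https://api.zoomeye.org/host/search",
                        params={"query": "port:21", "page": 1, "facet": "app,os"},
                        headers={"Authorization": "JWT %s" % token})
    print(resp.json().get("total"))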
zoomeye-sdk
/zoomeye-sdk-1.0.6.tar.gz/zoomeye-sdk-1.0.6/README.rst
README.rst
__author__ = "nixawk" __version__ = "1.0.6" __license__ = "GPL-2.0" __description__ = ("ZoomEye is a search engine for cyberspace " "that lets the user find specific network components" "(ip, services, etc.).") __classes__ = ["ZoomEye"] __funcs__ = [ "login", "dork_search", "resources_info", "show_site_ip", "show_ip_port", "zoomeye_api_test" ] import requests import getpass import sys raw_input = raw_input if sys.version_info.major <= 2 else input class ZoomEye(object): def __init__(self, username=None, password=None, api_key=''): self.username = username self.password = password self.token = '' self.api_key = api_key self.zoomeye_login_api = "https://api.zoomeye.org/user/login" self.zoomeye_dork_api = "https://api.zoomeye.org/{}/search" self.zoomeye_history_api = "https://api.zoomeye.org/both/search?history=true&ip={}" def login(self): """Please access https://www.zoomeye.org/api/doc#login """ data = '{{"username": "{}", "password": "{}"}}'.format(self.username, self.password) resp = requests.post(self.zoomeye_login_api, data=data) if resp and resp.status_code == 200 and 'access_token' in resp.json(): self.token = resp.json().get('access_token') return self.token def dork_search(self, dork, page=0, resource='host', facet=['ip']): """Search records with ZoomEye dorks. param: dork ex: country:cn access https://www.zoomeye.org/search/dorks for more details. param: page total page(s) number param: resource set a search resource type, ex: [web, host] param: facet ex: [app, device] A comma-separated list of properties to get summary information """ result = [] if isinstance(facet, (tuple, list)): facet = ','.join(facet) zoomeye_api = self.zoomeye_dork_api.format(resource) headers = {'Authorization': 'JWT %s' % self.token, 'API-KEY': self.api_key, } params = {'query': dork, 'page': page + 1, 'facet': facet} resp = requests.get(zoomeye_api, params=params, headers=headers) if resp and resp.status_code == 200 and 'matches' in resp.json(): matches = resp.json().get('matches') # total = resp.json().get('total') # all matches items num result = matches # Every match item incudes the following information: # geoinfo # description # check_time # title # ip # site # system # headers # keywords # server # domains return result def history_ip(self, ip): """Query IP History Information . param: ip """ result = [] zoomeye_api = self.zoomeye_history_api.format(ip) headers = {'Authorization': 'JWT %s' % self.token, 'API-KEY': self.api_key, } resp = requests.get(zoomeye_api, headers=headers) if resp and resp.status_code == 200 and 'data' in resp.json(): matches = resp.json() print(matches.get('count')) result = matches return result def resources_info(self): """Resource info shows us available search times. 
host-search: total number of available host records to search web-search: total number of available web records to search """ data = None zoomeye_api = "https://api.zoomeye.org/resources-info" headers = {'Authorization': 'JWT %s' % self.token, 'API-KEY': self.api_key, } resp = requests.get(zoomeye_api, headers=headers) if resp and resp.status_code == 200 and 'plan' in resp.json(): data = resp.json() return data def show_site_ip(data): if data: for i in data: print(i.get('site'), i.get('ip')) def show_ip_port(data): if data: for i in data: print(i.get('ip'), i.get('portinfo').get('port')) def zoomeye_api_test(): zoomeye = ZoomEye() zoomeye.api_key = raw_input('ZoomEye API-KEY(If you don\'t use API-KEY , Press Enter): ') zoomeye.username = raw_input('ZoomEye Username: ') zoomeye.password = getpass.getpass(prompt='ZoomEye Password: ') if zoomeye.username != "" and zoomeye.password != "": zoomeye.login() print(zoomeye.resources_info()) data = zoomeye.dork_search('solr') show_site_ip(data) data = zoomeye.dork_search('country:cn') show_site_ip(data) data = zoomeye.dork_search('solr country:cn') show_site_ip(data) data = zoomeye.dork_search('solr country:cn', resource='web') show_ip_port(data) if __name__ == "__main__": zoomeye_api_test()
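# --- Example usage (not part of the original module) ---
# A minimal sketch of the IP history query implemented above. The API key is a
# placeholder; as with dork_search, each returned record consumes user quota.
def _example_history_lookup():
    zm = ZoomEye(api_key="01234567-acbd-00000-1111-22222222222")
    history = zm.history_ip("8.8.8.8")
    if history:
        print(history.get("count"))  # number of historical records returned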
zoomeye-sdk
/zoomeye-sdk-1.0.6.tar.gz/zoomeye-sdk-1.0.6/zoomeye.py
zoomeye.py
ZoomEye-python -------------- English | `中文文档 <docs/README_CN.md>`_ ``ZoomEye`` is a cyberspace search engine, users can search for network devices using a browser https://www.zoomeye.org. ``ZoomEye-python`` is a Python library developed based on the ``ZoomEye API``. It provides the ``ZoomEye command line`` mode and can also be integrated into other tools as an ``SDK``. The library allows technicians to **search**, **filter**, and **export** ``ZoomEye`` data more conveniently. 0x01 installation ~~~~~~~~~~~~~~~~~ It can be installed directly from ``pypi``: :: pip3 install zoomeye or installed from ``github``: :: pip3 install git+https://github.com/knownsec/ZoomEye-python.git 0x02 how to use cli ~~~~~~~~~~~~~~~~~~~ After successfully installing ``ZoomEye-python``, you can use the ``zoomeye`` command directly, as follows: :: $ zoomeye -h usage: zoomeye [-h] [-v] {info,search,init,ip,history,clear} ... positional arguments: {info,search,init,ip,history,clear} info Show ZoomEye account info search Search the ZoomEye database init Initialize the token for ZoomEye-python ip Query IP information history Query device history clear Manually clear the cache and user information optional arguments: -h, --help show this help message and exit -v, --version show program's version number and exit 1.initialize token ^^^^^^^^^^^^^^^^^^ Before using the ``ZoomEye-python cli``, the user ``token`` needs to be initialized. The credential is used to verify the user’s identity to query data from ``ZoomEye``; only support API-KEY authentication methods. You can view the help through ``zoomeye init -h``, and use ``APIKEY`` to demonstrate below: :: $ zoomeye init -apikey "01234567-acbd-00000-1111-22222222222" successfully initialized Role: developer Quota: 10000 Users can login to ``ZoomEye`` and obtain ``APIKEY`` in personal information (https://www.zoomeye.org/profile); ``APIKEY`` will not expire, users can reset in personal information according to their needs. 2.query quota ^^^^^^^^^^^^^ Users can query personal information and data quota through the ``info`` command, as follows: :: $ zoomeye info user_info: { "email": "", "name": "", "nick_name": "", "api_key": "", "role": "", # service level "phone", "", "expired_at": "" } quota: { "remain_free_quota": "", # This month remaining free amount "remain_pay_quota": "", # Amount of remaining payment this month "remain_total_quota": "" # Total amount remaining by the service date } 3.search ^^^^^^^^ Search is the core function of ``ZoomEye-python``, which is used through the ``search`` command. the ``search`` command needs to specify the search keyword (``dork``), let's perform a simple search below: :: $ zoomeye search "telnet" -num 1 ip:port service country app banner 222.*.*.*:23 telnet Japan Pocket CMD telnetd \xff\xfb\x01\xff\xfb\x03\xff\x... total: 1 Using the ``search`` command is as simple as using a browser to search in ``ZoomEye``. by default, we display five more important fields. users can use these data to understand the target information: :: 1.ip:port ip address and port 2.service the service that the port is open 3.country country of this ip address 4.app application type 5.banner characteristic response of the port In the above example, the number to be displayed is specified using the ``-num`` parameter. in addition, ``search`` also supports the following parameters (``zoomeye search -h``) so that users can handle the data. we will explain and demonstrate below. 
:: -num set the number of displays/searches, support 'all' -count query the total amount of this dork in the ZoomEye database -facet query the distribution of the full data of the dork -stat the distribution of statistical data result sets -filter query the list of a certain area in the data result set, or filter according to the content -save the result set can be exported according to the filter conditions -force ignore the local cache and force the data to be obtained from the API -type select web or host search 4.number of data ^^^^^^^^^^^^^^^^ Through the ``-num`` parameter, we can specify how many results to search and display; this number is also the amount of quota consumed. You can query the volume of the ``dork`` in the ZoomEye database through the ``-count`` parameter, as follows: :: $ zoomeye search "telnet" -count 56903258 .. One thing to note: the quota consumed by the ``-num`` parameter is an integer multiple of 20, because the minimum size of a single ``ZoomEye API`` query is 20 records. 5.statistics ^^^^^^^^^^^^ We can use ``-facet`` and ``-stat`` to perform data statistics: use ``-facet`` to query the statistics of the dork's full data (computed by ``ZoomEye`` and obtained through the ``API``), and ``-stat`` to compute statistics on the query result set. The fields supported by the two commands include: :: # host search app statistics by application type device statistics by device type service statistics by service type os statistics by operating system type port statistics by port country statistics by country city statistics by city # web search webapp statistics by Web application component statistics by Web container framework statistics by Web framework server statistics by Web server waf statistics by Web firewall(WAF) os statistics by operating system country statistics by country use ``-facet`` to count the application types of all ``telnet`` devices: :: $ zoomeye search "telnet" -facet app app count [unknown] 28317914 BusyBox telnetd 10176313 Linux telnetd 3054856 Cisco IOS telnetd 1505802 Huawei Home Gateway telnetd 1229112 MikroTik router config httpd 1066947 Huawei telnetd 965378 Busybox telnetd 962470 Netgear broadband router... 593346 NASLite-SMB/Sveasoft Alc... 491957 use ``-stat`` to count the application types of 20 queried ``telnet`` devices: :: $ zoomeye search "telnet" -stat app app count Cisco IOS telnetd 7 [unknown] 5 BusyBox telnetd 4 Linux telnetd 3 Pocket CMD telnetd 1 6.data filter ^^^^^^^^^^^^^ Use the ``-filter`` parameter to query the list of partial segments in the data result set, or filter based on content.
The segments supported by this command include: :: # host/search app show application type details version show version information details device show device type details port show port information details city show city details country show country details asn show as number details banner show details of characteristic response timestamp show record data time * when this symbol is included, show all field details # web/search app show application type details headers HTTP header keywords meta keyword title HTTP Title information site site search city show city details country show country details webapp Web application component Web container framework Web framework server Web server waf Web firewall(WAF) os operating system timestamp updated timestamp * when this symbol is included, show all field details Compared to the omitted display by default, the complete data can be viewed through ``-filter``, as follows: :: $ zoomeye search "telnet" -num 1 -filter banner ip banner 222.*.*.* \xff\xfb\x01\xff\xfb\x03\xff\xfd\x03TELNET session now in ESTABLISHED state\r\n\r\n total: 1 When using ``-filter`` to filter, the syntax is: ``key1,key2,key3=value``, where ``key3=value`` is the filter condition, and the displayed content is ``key1,key2`` Example: :: $ zoomeye search telnet -num 1 -filter port,app,banner=Telnet ip port app 240e:*:*:*::3 23 LANDesk remote management In the above example: ``banner=Telnet`` is the filter condition, and ``port,app`` is the displayed content. If you need to display ``banner``, the filter statement is like this :: $ zoomeye search telnet -num 1 -filter port,app,banner,banner=Telnet 7.data export ^^^^^^^^^^^^^ The ``-save`` parameter can export data. the syntax of this parameter is the same as that of ``-filter``, and the result is saved to a file in the format of line json, as follows: :: $ zoomeye search "telnet" -save banner=telnet save file to telnet_1_1610446755.json successful! $ cat telnet_1_1610446755.json {'ip': '218.223.21.91', 'banner': '\\xff\\xfb\\x01\\xff\\xfb\\x03\\xff\\xfd\\x03TELNET session now in ESTABLISHED state\\r\\n\\r\\n'} .. if you use ``-save`` without any parameters, the query result will be saved as a file according to the json format of ``ZoomEye API``. this method is generally used to integrate data while retaining metadata; the file can be as input, it is parsed and processed again through ``cli``, such as ``zoomeye search "xxxxx.json"``. 8.graphical data ^^^^^^^^^^^^^^^^ The ``-figure`` parameter is a data visualization parameter. This parameter provides two display methods: ``pie (pie chart)`` and ``hist (histogram)``. The data will still be displayed without specifying it. When ``-figure`` is specified , Only graphics will be displayed. The pie chart is as follows: .. figure:: https://raw.githubusercontent.com/knownsec/ZoomEye-python/master/images/image-20210205004653480.png :width: 500px .. figure:: https://raw.githubusercontent.com/knownsec/ZoomEye-python/master/images/image-20210205005016399.png :width: 500px The histogram is as follows: .. figure:: https://raw.githubusercontent.com/knownsec/ZoomEye-python/master/images/image-20210205004806739.png :width: 500px .. figure:: https://raw.githubusercontent.com/knownsec/ZoomEye-python/master/images/image-20210205005117712.png :width: 500px 9. IP history ^^^^^^^^^^^^^ ``ZoomEye-python`` provides the function of querying IP historical device data. Use the command ``history [ip]`` to query the historical data of IP devices. 
The usage is as follows: :: $zoomeye history "207.xx.xx.13" -num 1 207.xx.xx.13 Hostnames: [unknown] Country: United States City: Lake Charles Organization: fulair.com Lastupdated: 2021-02-18T03:44:06 Number of open ports: 1 Number of historical probes: 1 timestamp port/service app raw_data 2021-02-18 03:44:06 80/http Apache httpd HTTP/1.0 301 Moved Permanently... By default, five fields are shown to users: :: 1. time recorded time 2. service Open service 3. port port 4. app web application 5. raw fingerprint information Use ``zoomeye history -h`` to view the parameters provided by ``history``. :: $zoomeye history -h usage: zoomeye history [-h] [-filter filed=regexp] [-force] ip positional arguments: ip search historical device IP optional arguments: -h, --help show this help message and exit -filter filed=regexp filter data and print raw data detail. field: [time,port,service,app,raw] -force ignore the local cache and force the data to be obtained from the API The following is a demonstration of ``-filter``: :: $zoomeye history "207.xx.xx.13" -filter "time=^2019-08,port,service" 207.xx.xx.13 Hostnames: [unknown] Country: United States City: Lake Charles Organization: fulair.com Lastupdated: 2019-08-16T10:53:46 Number of open ports: 3 Number of historical probes: 3 time port service 2019-08-16 10:53:46 389 ldap 2019-08-08 23:32:30 22 ssh 2019-08-03 01:55:59 80 http The `-filter` parameter supports the filtering of the following five fields: :: 1.time scan time 2.port port information 3.service open service 4.app web application 5.banner original fingerprint information * when this symbol is included, show all field details A display of the ``id`` field is added during the display. ``id`` is the serial number. For the convenience of viewing, it cannot be used as a filtered field. .. Note: At present, only the above five fields are allowed to filter. The user quota will also be consumed when using the ``history`` command. The user quota will be deducted for the number of pieces of data returned in the ``history`` command. For example: IP "8.8.8.8" has a total of ``944`` historical records, and the user quota of ``944`` is deducted for one query. 10. search IP information ^^^^^^^^^^^^^^^^^^^^^^^^^ You can query the information of the specified IP through the ``zoomeye ip`` command, for example: :: $ zoomeye ip 185.*.*.57 185.*.*.57 Hostnames: [unknown] Isp: [unknown] Country: Saudi Arabia City: [unknown] Organization: [unknown] Lastupdated: 2021-03-02T11:14:33 Number of open ports: 4{2002, 9002, 123, 25} port service app banner 9002 telnet \xff\xfb\x01\xff\xfb\x0... 123 ntp ntpd \x16\x82\x00\x01\x05\x0... 2002 telnet Pocket CMD telnetd \xff\xfb\x01\xff\xfb\x0... 25 smtp Cisco IOS NetWor... 220 10.1.10.2 Cisco Net... The ``zoomeye ip`` command also supports the filter parameter ``-filter``, and the syntax is the same as that of ``zoomeye search``. E.g: :: $ zoomeye ip "185.*.*.57" -filter "app,app=ntpd" Hostnames: [unknown] Isp: [unknown] Country: Saudi Arabia City: [unknown] Organization: [unknown] Lastupdated: 2021-02-17T02:15:06 Number of open ports: 0 Number of historical probes: 1 app ntpd The fields supported by the ``filter`` parameter are: :: 1.port port information 2.service open service 3.app web application 4.banner original fingerprint information .. Note: This function limits the number of queries per user per day based on different user levels. 
Registered users and developers can query 10 times a day Advanced users can query 20 times a day VIP users can query 30 times a day After the number of times per day is used up, it will be refreshed after 24 hours, that is, counting from the time of the first IP check, and the number of refreshes after 24 hours. 11.cleanup function ^^^^^^^^^^^^^^^^^^^^ Users search for a large amount of data every day, which causes the storage space occupied by the cache folder to gradually increase; if users use ``ZoomEye-python`` on a public server, it may cause their own ``API KEY`` and ``ACCESS TOKEN`` to leak . For this reason, ``ZoomEye-python`` provides the clear command ``zoomeye clear``, which can clear the cached data and user configuration. The usage is as follows: :: $zoomeye clear -h usage: zoomeye clear [-h] [-setting] [-cache] optional arguments: -h, --help show this help message and exit -setting clear user api key and access token -cache clear local cache file 11.data cache ^^^^^^^^^^^^^ ``ZoomEye-python`` provides a caching in ``cli`` mode, which is located under ``~/.config/zoomeye/cache`` to save user quota as much as possible; the data set that the user has queried will be cached locally for 5 days. when users query the same data set, quotas are not consumed. 13.domain name query ^^^^^^^^^^^^^^^^^^^^ ``ZoomEye-python`` provides the domain name query function (including associated domain name query and subdomain name query). To query a domain name, run the domain [domain name] [query type] command as follows: :: $ python cli.py domain baidu.com 0 name timestamp ip zszelle.baidu30a72.bf.3dtops.com 2021-06-27 204.11.56.48 zpvpcxa.baidu.3dtops.com 2021-06-27 204.11.56.48 zsrob.baidu.3dtops.com 2021-06-27 204.11.56.48 zw8uch.7928.iwo7y0.baidu82.com 2021-06-27 59.188.232.88 zydsrdxd.baidu.3dtops.com 2021-06-27 204.11.56.48 zycoccz.baidu.3dtops.com 2021-06-27 204.11.56.48 ... total: 30/79882 By default, the user is presented with three more important fields: :: 1. name 域名全称 2. timestamp 建立时间戳 3. ip ip地址 Use ``zoomeye domain -h`` to view parameters provided by the ``domain``. :: $ python cli.py domain -h usage: zoomeye domain [-h] [-page PAGE] [-dot] q {0,1} positional arguments: q search key word(eg:baidu.com) {0,1} 0: search associated domain;1: search sub domain optional arguments: -h, --help show this help message and exit -page PAGE view the page of the query result -dot generate a network map of the domain name The following is a demonstration of ``-page`` :(default query for the first page when not specified) :: $ python cli.py domain baidu.com 0 -page 3 name timestamp ip zvptcfua.baidu6c7be.mm.3dtops.com 2021-06-27 204.11.56.48 zmukxtd.baidu65c78.iw.3dtops.com 2021-06-27 204.11.56.48 zhengwanghuangguanxianjinkaihu.baidu.fschangshi.com 2021-06-27 23.224.194.175 zibo-baidu.com 2021-06-27 194.56.78.148 zuwxb4.jingyan.baidu.66players.com 2021-06-27 208.91.197.46 zhannei.baidu.com.hypestat.com 2021-06-27 67.212.187.108 zrr.sjz-baidu.com 2021-06-27 204.11.56.48 zp5hd1.baidu.com.ojsdi.cn 2021-06-27 104.149.242.155 ... zhidao.baidu.com.39883.wxeve.cn 2021-06-27 39.98.202.39 zhizhao.baidu.com 2021-06-27 182.61.45.108 zfamnje.baidu.3dtops.com 2021-06-27 204.11.56.48 zjnfza.baidu.3dtops.com 2021-06-27 204.11.56.48 total: 90/79882 The ``-dot`` parameter can generate a network map of domain name and IP,Before using this function, you need to install ``grapvhiz``. Please refer to `grapvhiz <https://graphviz.org/download/>`_ for the installation tutorial. It is supported on Windows/Linux/Mac. 
The ``-dot`` parameter will generate a picture in ``png`` format and save the original dot language script at the same time. .. figure:: images/image-20211208112710711.png :width: 500px 0x03 video ~~~~~~~~~~ `ZoomEye-python is demonstrated under Windows, Mac, Linux, FreeBSD <https://weibo.com/tv/show/1034:4597603044884556?from=old_pc_videoshow>`_ |asciicast| 0x04 use SDK ~~~~~~~~~~~~ .. _initialize-token-1: 1.initialize token ^^^^^^^^^^^^^^^^^^ Similarly, the SDK also supports API-KEY authentication methods, ``APIKEY``, as follows: **APIKEY** .. code:: python from zoomeye.sdk import ZoomEye zm = ZoomEye(api_key="01234567-acbd-00000-1111-22222222222") 2.SDK API ^^^^^^^^^ The following are the interfaces and instructions provided by the SDK: :: 1.dork_search(dork, page=0, resource="host", facets=None) search the data of the specified page according to dork 2.multi_page_search(dork, page=1, resource="host", facets=None) search multiple pages of data according to dork 3.resources_info() get current user information 4.show_count() get the number of all matching results under the current dork 5.dork_filter(keys) extract the data of the specified field from the search results 6.get_facet() get statistical results of all data from search results 7.history_ip(ip) query historical data information of an ip 8.show_site_ip(data) traverse the web-search result set, and output the domain name and ip address 9.show_ip_port(data) traverse the host-search result set and output the ip address and port 10.generate_dot(self, q, source=0, page=1) Generate graphviz files and pictures written in the domain center 3.SDK example ^^^^^^^^^^^^^ .. code:: python $ python3 >>> import zoomeye.sdk as zoomeye >>> dir(zoomeye) ['ZoomEye', 'ZoomEyeDict', '__builtins__', '__cached__', '__doc__', '__file__', '__loader__', '__name__', '__package__', '__spec__', 'fields_tables_host', 'fields_tables_web', 'getpass', 'requests', 'show_ip_port', 'show_site_ip', 'zoomeye_api_test'] >>> # Use API-KEY search >>> zm = zoomeye.ZoomEye(api_key="01234567-acbd-00000-1111-22222222222") >>> data = zm.dork_search('apache country:cn') >>> zoomeye.show_site_ip(data) 213.***.***.46.rev.vo***one.pt ['46.***.***.213'] me*****on.o****e.net.pg ['203.***.***.114'] soft********63221110.b***c.net ['126.***.***.110'] soft********26216022.b***c.net ['126.***.***.22'] soft********5084068.b***c.net ['126.***.***.68'] soft********11180040.b***c.net ['126.***.***.40'] ... .. _search-1: 4.search ^^^^^^^^ As in the above example, we use ``dork_search()`` to search, and we can also set the ``facets`` parameter to obtain the aggregated statistical results of the full data of the dork. for the fields supported by ``facets``, please refer to **2.use cli - 5.statistics**. as follows: .. code:: python >>> data = zm.dork_search('telnet', facets='app') >>> zm.get_facet() {'product': [{'name': '', 'count': 28323128}, {'name': 'BusyBox telnetd', 'count': 10180912}, {'name': 'Linux telnetd', ...... .. ``multi_page_search()`` can also search. use this function when you need to obtain a large amount of data, where the ``page`` field indicates how many pages of data are obtained; and ``dork_search()`` only obtains the data of a specified page. .. _data-filter-1: 5.data filter ^^^^^^^^^^^^^ the ``dork_filter()`` function is provided in the SDK, we can filter the data more conveniently and extract the specified data fields as follows: .. code:: python >>> data = zm.dork_search("telnet") >>> zm.dork_filter("ip,port") [['180.*.*.166', 5357], ['180.*.*.6', 5357], ...... .. 
since the fields returned by ``web-search`` and ``host-search`` interfaces are different, you need to fill in the correct fields when filtering. the fields included in ``web-search``: app / headers / keywords / title / ip / site / city / country the fields included in ``host-search``: app / version / device / ip / port / hostname / city / country / asn / banner 0x05 contributions ~~~~~~~~~~~~~~~~~~ | `r0oike@knownsec 404 <https://github.com/r0oike>`__ | `0x7F@knownsec 404 <https://github.com/0x7Fancy>`__ | `fenix@knownsec 404 <https://github.com/13ph03nix>`__ | `dawu@knownsec 404 <https://github.com/d4wu>`__ 0x06 issue ~~~~~~~~~~ | **1.The minimum number of requests for SDK and command line tools is 20** | Due to API limitations, the minimum unit of our query is 20 pieces of data at a time. for a new dork, whether it is to view the total number or specify to search for only 1 piece of data, there will be an overhead of 20 pieces; of course, in the cli, we provide a cache, the data that has been searched is cached locally (``~/.config/zoomeye/cache``), and the validity period is 5 days, which can greatly save quota. | **2.How to enter dork with quotes?** | When using cli to search, you will encounter dork with quotes, for example: ``"<body style=\"margin:0;padding:0\"> <p align=\"center\"> <iframe src=\ "index.xhtml\""``, when dork contains quotation marks or multiple quotation marks, the outermost layer of dork must be wrapped in quotation marks to indicate a parameter as a whole, otherwise command line parameter parsing will cause problems. Then the correct search method for the following dork should be: ``'"<body style=\"margin:0;padding:0\"> <p align=\"center\"> <iframe src=\"index.xhtml\" "'``. .. figure:: https://raw.githubusercontent.com/knownsec/ZoomEye-python/master/images/image-20210205131713799.png :width: 500px .. figure:: https://raw.githubusercontent.com/knownsec/ZoomEye-python/master/images/image-20210205131802799.png :width: 500px | **3.Why is there inconsistent data in facet?** | The following figure shows the full data statistics results of ``telnet``. the result of the first query is that 20 data query requests (including the statistical results) were initiated by cli one day ago by default, and cached in a local folder; the second time We set the number of queries to 21, cli will read 20 cached data and initiate a new query request (actually the smallest unit is 20, which also contains statistical results), the first query and the second query a certain period of time is in between. during this period of time, ``ZoomEye`` periodically scans and updates the data, resulting in the above data inconsistency, so cli will use the newer statistical results. .. figure:: https://raw.githubusercontent.com/knownsec/ZoomEye-python/master/images/image-20210111111035187.png :width: 500px | **4.Why may the total amount of data in ZoomEye-python and the browser search the same dork be different?** | ``ZoomEye`` provides two search interfaces: ``/host/search`` and ``/web/search``. In ``ZoomEye-python``, only ``/host/search`` is used by default, and ``/web/search`` is not used. Users can choose the search method according to their needs by specifying the ``type`` parameter. .. figure:: https://raw.githubusercontent.com/knownsec/ZoomEye-python/master/images/image-20210111141028072.png :width: 500px .. 
figure:: https://raw.githubusercontent.com/knownsec/ZoomEye-python/master/images/image-20210111141114558.png :width: 500px | **5.The quota information obtained by the info command may be inconsistent with the browser side?** | The browser side displays the free quota and recharge quota (https://www.zoomeye.org/profile/record), but only the free quota information is displayed in ``ZoomEye-python``, we will fix it in the subsequent version This question. 0x07 404StarLink Project ~~~~~~~~~~~~~~~~~~~~~~~~ .. figure:: https://github.com/knownsec/404StarLink-Project/raw/master/logo.png ``ZoomEye-python`` is a part of 404Team `Starlink Project <https://github.com/knownsec/404StarLink-Project>`__. If you have any questions about ``ZoomEye-python`` or want to talk to a small partner, you can refer to The way to join the group of Starlink Project. - https://github.com/knownsec/404StarLink-Project#community -------------- | References: | https://www.zoomeye.org/doc | knownsec 404 | Time: 2021.01.12 .. |asciicast| image:: https://asciinema.org/a/qyDaJw9qQc7UjffD04HzMApWa.svg :target: https://asciinema.org/a/qyDaJw9qQc7UjffD04HzMApWa
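To round off the SDK section (0x04), here is a hedged sketch that strings the documented calls together; the API key and dork are placeholders, and quota is consumed per 20 records as noted above.

.. code:: python

    from zoomeye.sdk import ZoomEye

    zm = ZoomEye(api_key="01234567-acbd-00000-1111-22222222222")

    # One page (20 host records) for this dork
    zm.dork_search("telnet country:cn")

    # Keep only the fields of interest from the last search
    for ip, port in zm.dork_filter("ip,port"):
        print(ip, port)

    # For larger pulls, multi_page_search("telnet country:cn", page=5)
    # fetches several pages at once.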
zoomeye
/zoomeye-2.2.0.tar.gz/zoomeye-2.2.0/README.rst
README.rst
|PyPI version| |License: MIT| |Downloads| zoomg Docs ========== バーチャル背景適用済み動画からバーチャル背景を復元するライブラリ .. figure:: https://github.com/Tsuku43/zoomg/blob/master/images/icon.png?raw=true :alt: icon icon System Requirements / 動作保証環境 ---------------------------------- - python 3.6.4 - pip 20.2.4 - OS - macOS Catalina 10.15.7 (x86_64) - macOS Bug Sur 11.4 (aarch64) - Ubuntu 20.04.1 LTS / 18.04.4 LTS Installing / インストール ------------------------- - aarch64環境の場合 .. code:: shell pip install zoomg - x86_64環境の場合 .. code:: shell pip install git+https://github.com/Tsuku43/zoomg **Windows では動作保証をしていません** How to use / 使用方法 --------------------- 簡単な使用方法 ~~~~~~~~~~~~~~ .. code:: sh python3 sample.py sample_video.mp4 .. code:: python import zoomg import cv2 import sys import numpy # ビデオ読み込みの初期設定 filename = sys.argv[1] video = cv2.VideoCapture(filename) height = int(video.get(cv2.CAP_PROP_FRAME_HEIGHT)) width = int(video.get(cv2.CAP_PROP_FRAME_WIDTH)) # zoomgの初期化 zoom = zoomg.Zoomg(height, width) while True: # フレーム読み込み ret, frame = video.read() if not ret: break # zoomgに画像を追加 zoom.add_image(frame) # 背景画像を生成 zoom.generate_image() # 背景画像を取得 image = zoom.get_image() # 背景画像を保存 cv2.imwrite("room.png", numpy.array(image)) Zoomg クラス ~~~~~~~~~~~~ 背景画像の生成を行います. **コンストラクタ** ^^^^^^^^^^^^^^^^^^ .. code:: python zoom = zoomg.Zoomg(height, width) - 引数 ========== === ============== ==== ============== パラメータ 型 キーワード引数 省略 説明 ========== === ============== ==== ============== height int 可(h) - 入力画像の高さ width int 可(w) - 入力画像の幅 ========== === ============== ==== ============== - 返り値 ========== ===== ================== パラメータ 型 説明 ========== ===== ================== zoom Zoomg Zoomg オブジェクト ========== ===== ================== -------------- **Zoomg.add_image** ^^^^^^^^^^^^^^^^^^^ zoomg オブジェクトに画像を追加します.動画を入力したい場合はフレーム単位で分割する必要があります. .. code:: python zoom.add_image(frame) - 引数 +------------+---------------+---------------+------+---------------+ | パラメータ | 型 | キ | 省略 | 説明 | | | | ーワード引数 | | | +============+===============+===============+======+===============+ | frame | 3 次元 | - | - | zoomg | | | numpy.ndarray | | | オ | | | | | | ブジェクトに | | | | | | 追加する画像 | +------------+---------------+---------------+------+---------------+ - 返り値 - なし -------------- **Zoomg.generate_image** ^^^^^^^^^^^^^^^^^^^^^^^^ ``Zoomg.add_image``\ で追加した画像から背景画像を復元する. .. code:: python zoom.generate_image(comp, param, noise_frame) - 引数 +---+---+-----+--------------+-----------------------------------------+ | パ | 型 | キ | 省略 | 説明 | | ラ | | ー | | | | メ | | ワ | | | | ー | | ー | | | | タ | | ド | | | | | | 引 | | | | | | 数 | | | +===+===+=====+==============+=========================================+ | c | s | 可 | 可(default= | 色差を決定する.詳 | | o | t | (co | “ciede2000”) | 細は表「\ `色差 <#色差>`__\ 」を参照. | | m | r | mp) | | | | p | i | | | | | | n | | | | | | g | | | | +---+---+-----+--------------+-----------------------------------------+ | p | f | 可( | 可(defau | 各ピクセルが部屋の背景かバーチ | | a | l | par | lt=表「\ `色 | ャル背景か判定するためのパラメータ.詳 | | r | o | am) | 差 <#色差>`_ | 細は表「\ `色差 <#色差>`__\ 」を参照. 
| | a | a | | _\ 」を参照) | | | m | t | | | | +---+---+-----+--------------+-----------------------------------------+ | n | i | 可( | 可 | 画像にノイズが多く含まれてい | | o | n | noi | (default=1) | る場合,ノイズを軽減するためのパラメータ | | i | t | se_ | | | | s | | fra | | | | e | | me) | | | | _ | | | | | | f | | | | | | r | | | | | | a | | | | | | m | | | | | | e | | | | | +---+---+-----+--------------+-----------------------------------------+ 色差 '''' +---------+----+--------+-------+------------------------------------+ | 色 | co | param | 省略 | 説明 | | 差計算 | mp | 指定可 | | | | アルゴ | | 能範囲 | | | | リズム | | | | | +=========+====+========+=======+====================================+ | コ | ` | [0.0, | 可 | 高くす | | サイン | `c | 1.0] | (def | ると復元できるピクセル数が\ **減り | | 類似度 | os | | ault= | **,低くすると誤判定が\ **増える**. | | | _s | | 0.75) | | | | im | | | | | | `` | | | | +---------+----+--------+-------+------------------------------------+ | CI | ` | [0.0, | 可 | 高くす | | EDE2000 | `c | 100] | (def | ると誤判定が\ **増え**,低くすると | | | ie | | ault= | 復元できるピクセル数が\ **減る**. | | | de | | 5.06) | | | | 20 | | | | | | 00 | | | | | | `` | | | | +---------+----+--------+-------+------------------------------------+ - 返り値 - なし -------------- **Zoomg.get_image** ^^^^^^^^^^^^^^^^^^^ ``Zoomg.generate_image``\ で生成した画像を取得します. .. code:: python image = zoom.get_image() - 引数 - なし - 返り値 ========== ================== ==================== パラメータ 型 説明 ========== ================== ==================== image int 型 3 次元 list 復元した画像ファイル ========== ================== ==================== -------------- **Zoomg.get_omgc** ^^^^^^^^^^^^^^^^^^ ``Zoomg.generate_image``\ を使用した際,正しく復元できたか拘らず,復元したピクセル数を返します.この数は\ ``Zoomg.generate_image``\ の引数\ ``param``\ に影響されます. .. code:: python omgc = zoom.get_omgc() - 引数 - なし - 返り値 ========== === ================== パラメータ 型 説明 ========== === ================== omgc int 復元したピクセル数 ========== === ================== -------------- **Zoomg.verify** ^^^^^^^^^^^^^^^^ ``Zoomg.generate_image``\ の\ ``param``\ 引数の値がどれだけいいものか検証します.部屋の画像と復元した部屋の画像からどれだけ二つの画像が似ているか計測し,復元精度を返します. .. code:: python ok, ng, acc = zoomg.verify(room_image, comp, param) - 引数 +---+-------+----+-------------+--------------------------------------+ | パ | 型 | キ | 省略 | 説明 | | ラ | | ー | | | | メ | | ワ | | | | ー | | ー | | | | タ | | ド | | | | | | 引 | | | | | | 数 | | | +===+=======+====+=============+======================================+ | r | 3 | - | - | 部屋の画像 | | o | 次元 | | | | | o | num | | | | | m | py.nd | | | | | _ | array | | | | | i | | | | | | m | | | | | | a | | | | | | g | | | | | | e | | | | | +---+-------+----+-------------+--------------------------------------+ | c | s | 可 | 可 | 色差を決定する.詳細 | | o | tring | (c | (default=“ | は表「\ `色差 <#色差>`__\ 」を参照. | | m | | om | ciede2000”) | | | p | | p) | | | +---+-------+----+-------------+--------------------------------------+ | p | float | 可 | 可(default= | 各ピクセルが部屋の背景かバーチャル | | a | | ( | 表「\ `色差 | 背景か判定するためのパラメータ.詳細 | | r | | pa | <#色差>`__ | は表「\ `色差 <#色差>`__\ 」を参照. | | a | | ra | \ 」を参照) | | | m | | m) | | | +---+-------+----+-------------+--------------------------------------+ - 返り値 ========== ===== ====================================== パラメータ 型 説明 ========== ===== ====================================== ok int 部屋の画像と復元画像のピクセル一致数 ng int 部屋の画像と復元画像のピクセル不一致数 acc float 精度(``ok / (ok + ng)``) ========== ===== ====================================== -------------- **Zoomg.get_height** ^^^^^^^^^^^^^^^^^^^^ 生成画像の高さを取得します.\ ``zoomg.Zoomg(height, width)``\ の\ ``height``\ と同一の値になります. .. 
code:: python height = zoomg.get_height() - 引数 - なし - 返り値 ========== === ============== パラメータ 型 説明 ========== === ============== height int 生成画像の高さ ========== === ============== -------------- **Zoomg.get_width** ^^^^^^^^^^^^^^^^^^^ 生成画像の幅を取得します.\ ``zoomg.Zoomg(height, width)``\ の\ ``width``\ と同一の値になります. .. code:: python width = zoomg.get_width() - 引数 - なし - 返り値 ========== === ============ パラメータ 型 説明 ========== === ============ width int 生成画像の幅 ========== === ============ -------------- **Zoomg.get_shape** ^^^^^^^^^^^^^^^^^^^ 生成画像の高さと幅を取得します.\ ``zoomg.Zoomg(height, width)``\ の\ ``height``\ と同一の値になります. .. code:: python height, width = zoomg.get_shape() - 引数 - なし - 返り値 ========== === ============== パラメータ 型 説明 ========== === ============== height int 生成画像の高さ width int 生成画像の幅 ========== === ============== -------------- Function ~~~~~~~~ その他,利用できる関数をこちらに記載します. **add_noise()** ^^^^^^^^^^^^^^^ 背景画像の生成を妨害します.画像に対してノイズを入ります. .. code:: python zoomg.get_shape(image&, height, width, rate) - 引数 +------------+-------------+-------------+-------------+-------------+ | パラメータ | 型 | キー | 省略 | 説明 | | | | ワード引数 | | | +============+=============+=============+=============+=============+ | image | 3 次元 | - | - | ノイズを入 | | | nu | | | れたい画像 | | | mpy.ndarray | | | | | | の参照 | | | | +------------+-------------+-------------+-------------+-------------+ | height | int | 可(h) | - | ``imag | | | | | | e``\ の高さ | +------------+-------------+-------------+-------------+-------------+ | width | int | 可(w) | - | ``im | | | | | | age``\ の幅 | +------------+-------------+-------------+-------------+-------------+ | rate | float | 可(rate) | 可(defa | ノイズの | | | | | ult=0.0003) | 入れる割合 | +------------+-------------+-------------+-------------+-------------+ - 返り値 ========== === ============== パラメータ 型 説明 ========== === ============== height int 生成画像の高さ width int 生成画像の幅 ========== === ============== -------------- Build / ビルド -------------- - 本ソースコードのコンパイル .. code:: sh > pwd ほにゃらら/zoomg/zoomg > clang++ -O3 -Wall -shared -std=c++17 -fPIC `python -m pybind11 --includes` -undefined dynamic_lookup zoomg.cpp ColorSpace/src/Comparison.cpp ColorSpace/src/ColorSpace.cpp ColorSpace/src/Conversion.cpp -o zoomg`python3-config --extension-suffix` - 実行 .. code:: sh > cd test > pip uninstall zoomg # いらないかも > python test.py sample_02.mp4 .. |PyPI version| image:: https://badge.fury.io/py/zoomg.svg :target: https://badge.fury.io/py/zoomg .. |License: MIT| image:: https://img.shields.io/badge/License-MIT-yellow.svg :target: https://opensource.org/licenses/MIT .. |Downloads| image:: https://pepy.tech/badge/zoomg :target: https://pepy.tech/project/zoomg
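As a closing illustration, here is a hedged sketch of checking a ``param`` value with ``Zoomg.verify`` as documented above; calling ``verify`` on the ``Zoomg`` instance after ``generate_image``, and the ground-truth file name, are assumptions made for the example.

.. code:: python

    import cv2
    import numpy

    def verify_param(zoom, room_path="room_truth.png"):
        # zoom: a zoomg.Zoomg instance that has already run generate_image()
        room_image = numpy.array(cv2.imread(room_path))  # ground-truth room image
        ok, ng, acc = zoom.verify(room_image, comp="ciede2000", param=5.06)
        print("matched:", ok, "mismatched:", ng, "accuracy:", acc)
        return acc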
zoomg
/zoomg-2.0.5.tar.gz/zoomg-2.0.5/README.rst
README.rst
from datetime import datetime default_levels = {"DEBUG": 4, "ERROR": 1, "WARNING": 2, "INFO": 3, "IMPORTANT": 0} default_format = "[[DATE]] [[LEVEL]]:[[LOG_NAME]] >> [[CONTENT]]" DEBUG = 4 ERROR = 1 WARNING = 2 INFO = 3 IMPORTANT = 0 OFF = -1 class Logger: def __init__(self, file=None, level=3, name="root", console_print=True, format=default_format, autoclear=False): self.levels = default_levels.copy() self.file = file self.level = level self.name = name self.print = console_print self.format = format if not autoclear: file = open(str(self.file), encoding="cp1251", mode="a") file.write("\n\n%s\n\n\n" % datetime.now().strftime("%H:%M:%S %d.%m.%y")) else: file = open(str(self.file), encoding="cp1251", mode="w") file.close() def addLevel(self, name, level=1): if not isinstance(level, int): return False self.levels[name] = level return name def removeLevel(self, name): if name not in self.levels: return False del self.levels[name] return name def log(self, name, content, req_print=False): if name not in self.levels or self.levels[name] > self.level: return False date = datetime.now().strftime("%H:%M:%S %d.%m.%y") text = self.format.replace("[[DATE]]", date).replace("[[LEVEL]]", name).replace("[[LOG_NAME]]", self.name).replace("[[CONTENT]]", str(content)) if self.print or req_print: print(text) if self.file: file = open(str(self.file), encoding="cp1251", mode="a") file.write(text + '\n') file.close() return content def debug(self, content, req_print=False): return self.log("DEBUG", content, req_print) def error(self, content, req_print=False): return self.log("ERROR", content, req_print) def warning(self, content, req_print=False): return self.log("WARNING", content, req_print) def info(self, content, req_print=False): return self.log("INFO", content, req_print) def important(self, content, req_print=False): return self.log("IMPORTANT", content, req_print)
zoomlog-ZOOM-DEVELOPER
/zoomlog_ZOOM_DEVELOPER-1.0-py3-none-any.whl/zoomlog/main.py
main.py
<h1>ZOOM LOG</h1> <h3>Simple python logger</h3> ***Quick start:*** ```python from zoomlog import Logger, DEBUG, INFO logger = Logger("test.log", name="test", level=DEBUG) logger.debug("My logger is work!") name = "Sergey" logger.addLevel("NEW USER", INFO) logger.log("NEW USER", "Created new user - %s" % name) ``` ``` 00:00:00 01.01.22 DEBUG:test >> My logger is work! 00:00:01 01.01.22 NEW USER:test >> Created new user - Sergey ``` *test.log* ``` 00:00:00 01.01.22 00:00:00 01.01.22 DEBUG:test >> My logger is work! 00:00:01 01.01.22 NEW USER:test >> Created new user - Sergey ```
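The format placeholders come straight from `main.py` (`[[DATE]]`, `[[LEVEL]]`, `[[LOG_NAME]]`, `[[CONTENT]]`). Below is a hedged sketch of a customised, file-only logger; it assumes `WARNING` is exported alongside `DEBUG` and `INFO`, and the format string is just an illustrative choice.

```python
from zoomlog import Logger, WARNING

# Console printing off, log file truncated on start, custom line format
logger = Logger(
    "app.log",
    name="app",
    level=WARNING,
    console_print=False,
    autoclear=True,
    format="[[DATE]] | [[LOG_NAME]] | [[LEVEL]] | [[CONTENT]]",
)

logger.warning("disk usage above 90%")
logger.debug("below the WARNING level, so this line is dropped")
```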
zoomlog
/zoomlog-1.0.2.tar.gz/zoomlog-1.0.2/README.md
README.md
# zoomus [![CircleCI](https://dl.circleci.com/status-badge/img/gh/prschmid/zoomus/tree/main.svg?style=shield)](https://dl.circleci.com/status-badge/redirect/gh/prschmid/zoomus/tree/main) [![PyPI Downloads](https://img.shields.io/pypi/dm/zoomus)](https://pypi.org/project/zoomus/) [![Python Versions](https://img.shields.io/pypi/pyversions/zoomus)](https://pypi.org/project/zoomus/) [![PyPI Version](https://img.shields.io/pypi/v/zoomus)](https://pypi.org/project/zoomus/) [![PyPI License](https://img.shields.io/pypi/l/zoomus)](https://pypi.org/project/zoomus/) [![Code Style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black/) [https://github.com/prschmid/zoomus](https://github.com/prschmid/zoomus) Python wrapper around the [Zoom.us](http://zoom.us) REST API v1 and v2. This work is heavily inspired by the Ruby GEM of the same name, [Zoomus](https://github.com/mllocs/zoomus) ## Installation ### The easy way ```sh pip install zoomus ``` ## Compatibility `zoomus` has been tested for Python 3.6, 3.7, and 3.8 using [Travis CI](https://travis-ci.com/github/prschmid/zoomus) Note, as this library heavily depends on the [requests](https://pypi.org/project/requests/) library, official compatibility is limited to the official compatibility of `requests`. ## Example Usage ### Create the client v2 (default) As Zoom's default is now the V2 API, the client will default to the V2 version of the API. ```python import json from zoomus import ZoomClient client = ZoomClient('CLIENT_ID', 'CLIENT_SECRET', 'ACCOUNT_ID') user_list_response = client.user.list() user_list = json.loads(user_list_response.content) for user in user_list['users']: user_id = user['id'] print(json.loads(client.meeting.list(user_id=user_id).content)) ``` Note that the object returned from a call using the client is a [requests](https://pypi.org/project/requests/) `Response` object. This is done so that, if there is any error working with the API, one has complete control over handling it. As such, to actually get the list of users in the example above, one has to load the JSON from the content of the `Response` object that is returned. ### Create the client for EU users needing GDPR compliance Zoom has EU specific endpoints that can be used to meet GDPR compliance. In order for you to make use of those, simply set the base_uri to the appropriate one when initializing the client. For more details on the Zoom API, please refer to the [Zoom API documentation](https://marketplace.zoom.us/docs/api-reference/introduction) Caution: the EU endpoint will not function unless your account is an EU account and has been set up as such with Zoom. ```python import json from zoomus import ZoomClient client = ZoomClient('CLIENT_ID', 'CLIENT_SECRET', 'ACCOUNT_ID', base_uri="https://eu01api-www4local.zoom.us/v2") ``` ### Create the client v1 Zoom has yet to officially remove support for the V1 API, and so to use the V1 API one can instantiate a client as follows. Note, we have stopped support for the V1 API, so there is only limited functionality and no new V1 API functionality is likely to be added. ```python import json from zoomus import ZoomClient client = ZoomClient('CLIENT_ID', 'CLIENT_SECRET', 'ACCOUNT_ID', version=1) ``` ### Using with a managed context ```python with ZoomClient('CLIENT_ID', 'CLIENT_SECRET', 'ACCOUNT_ID') as client: user_list_response = client.user.list() ... ``` ## Available methods * client.user.create(...) * client.user.cust_create(...)
* client.user.update(...)* * client.user.check_email(...) * client.user.update_email(...) * client.user.list(...) * client.user.pending(...) * client.user.get(...) * client.user.get_by_email(...) * client.user.get_settings(...) * client.user.update_settings(...) * client.meeting.get(...) * client.meeting.end(...) * client.meeting.create(...) * client.meeting.delete(...) * client.meeting.list(...) * client.meeting.update(...) * client.meeting.add_registrant(...) * client.meeting.list_registrants(...) * client.meeting.update_registrant_status(...) * client.meeting.update_status(...) * client.report.get_account_report(...) * client.report.get_user_report(...) * client.webinar.create(...) * client.webinar.update(...) * client.webinar.delete(...) * client.webinar.list(...) * client.webinar.get(...) * client.webinar.end(...) * client.webinar.register(...) * client.webinar.add_panelists(...) * client.webinar.list_panelists(...) * client.webinar.remove_panelists(...) * client.phone.call_logs(...) * client.phone.calling_plans(...) * client.phone.numbers_get(...) * client.phone.numbers_list(...) * client.phone.users(...) * client.group.list(...) * client.group.create(...) * client.group.get(...) * client.group.delete(...) * client.group.list_members(...) * client.group.add_members(...) * client.group.delete_member(...) * client.room.list(...) * client.room.create(...) * client.room.get(...) * client.room.get_settings(...) * client.room.get_devices(...) * client.room.delete(...) * client.room.check_in_or_out(...) * client.room.update(...) * client.role.assign(...) * client.role.create(...) * client.role.delete(...) * client.role.get(...) * client.role.get_members(...) * client.role.list(...) * client.role.unassign(...) * client.role.update(...) ## Running the Tests ### Simple First, make sure to install the testing requirements ```sh pip install -r requirements-tests.txt ``` Then run the tests via nose ```sh nosetests ``` ## Contributing Please see the [CONTRIBUTING.md](./CONTRIBUTING.md) for the contribution guidelines for this project.
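## Handling errors from API calls (illustrative sketch)

Because every client call returns a [requests](https://pypi.org/project/requests/) `Response`, error handling is entirely up to the caller. The snippet below shows one hedged way to do it; the status-code check and the exception raised here are choices made for this example, not behaviour provided by `zoomus` itself.

```python
import json

from zoomus import ZoomClient

client = ZoomClient('CLIENT_ID', 'CLIENT_SECRET', 'ACCOUNT_ID')

response = client.user.list()
if response.status_code != 200:
    # zoomus leaves failures to the caller: log, retry, or raise as appropriate
    raise RuntimeError(
        "Zoom API call failed with status {}: {}".format(
            response.status_code, response.content
        )
    )

user_list = json.loads(response.content)
print(len(user_list.get('users', [])))
```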
zoomus
/zoomus-1.2.1.tar.gz/zoomus-1.2.1/README.md
README.md
zoomus ========== ![](https://travis-ci.org/exolever/zoomus.svg?branch=master) [https://github.com/exolever/zoomus](https://github.com/exolever/zoomus) Python wrapper around the [Zoom.us](http://zoom.us) REST API v1 and v2. This work is heavily inspired by the Ruby GEM of the same name, [Zoomus](https://github.com/mllocs/zoomus) Installation ------------ ### The easy way ```sh pip install zoomus2 ``` Compatibility ------------- Zoomus has been tested for Python 2.6, 3.2, 3.3, 3.4, and pypy using [Travis CI](https://travis-ci.org/exolever/zoomus) Example Usage ------------- ### Create the client v1 ```python import json from zoomus import ZoomClient client = ZoomClient('API_KEY', 'API_SECRET') for user in json.loads(client.user.list().content)['users']: user_id = user['id'] print(client.meeting.list(host_id=user_id)) ``` ### Create the client v2 ```python import json from zoomus import ZoomClient client = ZoomClient('API_KEY', 'API_SECRET', version=2) for user in json.loads(client.user.list().content)['users']: user_id = user['id'] print(client.meeting.list(host_id=user_id)) ``` ### Using with a managed context ```python with ZoomClient('API_KEY', 'API_SECRET') as client: users = client.user.list() ... ``` Available methods ----------------- * client.user.create(...) * client.user.cust_create(...) * client.user.update(...) * client.user.list(...) * client.user.pending(...) * client.user.get(...) * client.user.get_by_email(...) * client.meeting.get(...) * client.meeting.end(...) * client.meeting.create(...) * client.meeting.delete(...) * client.meeting.list(...) * client.meeting.update(...) * client.report.get_account_report(...) * client.report.get_user_report(...) * client.webinar.create(...) * client.webinar.update(...) * client.webinar.delete(...) * client.webinar.list(...) * client.webinar.get(...) * client.webinar.end(...) * client.webinar.register(...) Running the Tests ----------------- ### Simple First, make sure to install the testing requirements ```sh pip install -r requirements-tests.txt ``` Then run the tests via nose ```sh nosetests ``` ### Running the tests across multiple python versions in parallel If you don't trust our Travis CI badge above, you can run all of the tests across multiple python versions by using [pyenv](https://github.com/yyuu/pyenv) and [detox](https://pypi.python.org/pypi/detox). A good writeup for what you need to do to set this up can be found [here](http://blog.pinaxproject.com/2015/12/08/how-test-against-multiple-python-versions-parallel/). Note: If you are using OS X and installed pyenv with brew, make sure to follow [these instructions](https://github.com/yyuu/pyenv#homebrew-on-mac-os-x) as well. You'll want to make sure that all of the different python versions are installed so that they can be tested: ```sh # Install the versions pyenv install 2.7.10 pyenv install 3.3.6 pyenv install 3.4.3 pyenv install 3.5.0 # Set all these to be global versions pyenv global system 2.7.10 3.3.6 3.4.3 3.5.0 # Make sure that they are all there (they should all have a * next to them) pyenv versions ``` Once you get everything installed, you can run the tests across the different versions as follows. ```sh detox ``` Note this assumes that you have detox installed globally.
Assuming all goes well, you should see a result akin to ```sh py27-1.7: commands succeeded py27-1.8: commands succeeded py27-1.9: commands succeeded py27-master: commands succeeded py33-1.7: commands succeeded py33-1.8: commands succeeded py34-1.7: commands succeeded py34-1.8: commands succeeded py34-1.9: commands succeeded py34-master: commands succeeded py35-1.8: commands succeeded py35-1.9: commands succeeded py35-master: commands succeeded congratulations :) ``` If you run into an issue with running detox, make sure that you have the latest version of pip, as there are [some issues](https://github.com/yyuu/pyenv/issues/531) with pyenv and older versions of pip. Contributing ------------ If you would like to contribute to this project, you will need to use [git flow](https://github.com/nvie/gitflow). This way, any and all changes happen on the development branch and not on the master branch. As such, after you have git-flow-ified your zoomus git repo, create a pull request for your branch, and we'll take it from there.
zoomus2
/zoomus2-0.2.0.tar.gz/zoomus2-0.2.0/README.md
README.md
from __future__ import absolute_import import contextlib import json import requests import time import jwt class ApiClient(object): """Simple wrapper for REST API requests""" def __init__(self, base_uri=None, timeout=15, **kwargs): """Setup a new API Client :param base_uri: The base URI to the API :param timeout: The timeout to use for requests :param \*\*kwargs: Any other attributes. These will be added as attributes to the ApiClient object. """ self.base_uri = base_uri self.timeout = timeout for k, v in kwargs.items(): setattr(self, k, v) @property def timeout(self): """The timeout""" return self._timeout @timeout.setter def timeout(self, value): """The default timeout""" if value is not None: try: value = int(value) except: raise ValueError("timeout value must be an integer") self._timeout = value @property def base_uri(self): """The base_uri""" return self._base_uri @base_uri.setter def base_uri(self, value): """The default base_uri""" if value and value.endswith("/"): value = value[:-1] self._base_uri = value def url_for(self, endpoint): """Get the URL for the given endpoint :param endpoint: The endpoint :return: The full URL for the endpoint """ if not endpoint.startswith("/"): endpoint = "/{}".format(endpoint) if endpoint.endswith("/"): endpoint = endpoint[:-1] return self.base_uri + endpoint def get_request(self, endpoint, params=None, headers=None): """Helper function for GET requests :param endpoint: The endpoint :param params: The URL parameters :param headers: request headers :return: The :class:``requests.Response`` object for this request """ if headers is None and self.config.get('version') == 2: headers = {'Authorization': 'Bearer {}'.format(self.config.get('token'))} return requests.get( self.url_for(endpoint), params=params, headers=headers, timeout=self.timeout) def post_request( self, endpoint, params=None, data=None, headers=None, cookies=None): """Helper function for POST requests :param endpoint: The endpoint :param params: The URL parameters :param data: The data (either as a dict or dumped JSON string) to include with the POST :param headers: request headers :param cookies: request cookies :return: The :class:``requests.Response`` object for this request """ if data and not is_str_type(data): data = json.dumps(data) if headers is None and self.config.get('version') == 2: headers = { 'Authorization': 'Bearer {}'.format(self.config.get('token')), 'content-type': 'application/json'} return requests.post( self.url_for(endpoint), params=params, data=data, headers=headers, cookies=cookies, timeout=self.timeout) def patch_request( self, endpoint, params=None, data=None, headers=None, cookies=None): """Helper function for PATCH requests :param endpoint: The endpoint :param params: The URL parameters :param data: The data (either as a dict or dumped JSON string) to include with the PATCH :param headers: request headers :param cookies: request cookies :return: The :class:``requests.Response`` object for this request """ if data and not is_str_type(data): data = json.dumps(data) if headers is None and self.config.get('version') == 2: headers = {'Authorization': 'Bearer {}'.format(self.config.get('token'))} return requests.patch( self.url_for(endpoint), params=params, data=data, headers=headers, cookies=cookies, timeout=self.timeout) def delete_request( self, endpoint, params=None, data=None, headers=None, cookies=None): """Helper function for DELETE requests :param endpoint: The endpoint :param params: The URL parameters :param data: The data (either as a dict or dumped JSON 
string) to include with the DELETE :param headers: request headers :param cookies: request cookies :return: The :class:``requests.Response`` object for this request """ if data and not is_str_type(data): data = json.dumps(data) if headers is None and self.config.get('version') == 2: headers = {'Authorization': 'Bearer {}'.format(self.config.get('token'))} return requests.delete( self.url_for(endpoint), params=params, data=data, headers=headers, cookies=cookies, timeout=self.timeout) @contextlib.contextmanager def ignored(*exceptions): """Simple context manager to ignore expected Exceptions :param \*exceptions: The exceptions to safely ignore """ try: yield except exceptions: pass def is_str_type(val): """Check whether the input is of a string type. We use this method to ensure python 2-3 capatibility. :param val: The value to check wither it is a string :return: In python2 it will return ``True`` if :attr:`val` is either an instance of str or unicode. In python3 it will return ``True`` if it is an instance of str """ with ignored(NameError): return isinstance(val, basestring) return isinstance(val, str) def require_keys(d, keys, allow_none=True): """Require that the object have the given keys :param d: The dict the check :param keys: The keys to check :attr:`obj` for. This can either be a single string, or an iterable of strings :param allow_none: Whether ``None`` values are allowed :raises: :ValueError: If any of the keys are missing from the obj """ if is_str_type(keys): keys = [keys] for k in keys: if k not in d: raise ValueError("'{}' must be set".format(k)) if not allow_none and d[k] is None: raise ValueError("'{}' cannot be None".format(k)) return True def date_to_str(d): """Convert date and datetime objects to a string Note, this does not do any timezone conversion. :param d: The :class:`datetime.date` or :class:`datetime.datetime` to convert to a string :returns: The string representation of the date """ return d.strftime("%Y-%m-%dT%H:%M:%SZ") def generate_jwt(key, secret): header = { "alg": "HS256", "typ": "JWT" } payload = { "iss": key, "exp": int(time.time() + 3600) } token = jwt.encode(payload, secret, algorithm='HS256', headers=header) return token.decode('utf-8')
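# Illustrative usage sketch for the helpers above. The key/secret strings and
# the sample dict are placeholder values, not real credentials.
if __name__ == "__main__":
    import datetime

    # require_keys raises ValueError when a required key is missing
    # (or when it is None and allow_none=False)
    require_keys({"host_id": "abc123"}, "host_id")

    # date_to_str renders dates in the format the Zoom API expects;
    # note that no timezone conversion is performed
    print(date_to_str(datetime.datetime(2020, 1, 31, 12, 0, 0)))  # 2020-01-31T12:00:00Z

    # generate_jwt builds the HS256 bearer token that the V2 request
    # helpers send in the Authorization header
    print(generate_jwt("API_KEY", "API_SECRET"))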
zoomus2
/zoomus2-0.2.0.tar.gz/zoomus2-0.2.0/zoomus/util.py
util.py
from __future__ import absolute_import from zoomus import ( components, util) class ZoomClient(util.ApiClient): """Zoom.us REST API Python Client""" BASE_URI_V1 = 'https://api.zoom.us/v1' BASE_URI_V2 = 'https://api.zoom.us/v2' """Base URL for Zoom API""" def __init__( self, api_key, api_secret, data_type='json', timeout=15, version=1): """Create a new Zoom client :param api_key: The Zoom.us API key :param api_secret: The Zoom.us API secret :param data_type: The expected return data type. Either 'json' or 'xml' :param timeout: The timeout to use for API requests """ BASE_URI = ZoomClient.BASE_URI_V1 if version == 1 else ZoomClient.BASE_URI_V2 super(ZoomClient, self).__init__( base_uri=BASE_URI, timeout=timeout) # Setup the config details self.config = { 'api_key': api_key, 'api_secret': api_secret, 'data_type': data_type, 'version': version, 'token': util.generate_jwt(api_key, api_secret), } class_user_component = components.user.UserComponent if version == 1 else components.user.UserComponentV2 class_meeting_component = components.meeting.MeetingComponent if version == 1 else components.meeting.MeetingComponentV2 class_recording_component = components.recording.RecordingComponent if version == 1 else components.recording.RecordingComponentV2 class_webinar_component = components.webinar.WebinarComponent if version == 1 else components.webinar.WebinarComponentV2 class_report_component = components.report.ReportComponent if version == 1 else components.report.ReportComponentV2 # Register all of the components self.components = { 'meeting': class_meeting_component( base_uri=BASE_URI, config=self.config), 'report': class_report_component( base_uri=BASE_URI, config=self.config), 'user': class_user_component( base_uri=BASE_URI, config=self.config), 'webinar': class_webinar_component( base_uri=BASE_URI, config=self.config), 'recording': class_recording_component( base_uri=BASE_URI, config=self.config) } def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): return def refresh_token(self): self.config['token'] = util.generate_jwt( self.config['api_key'], self.config['api_secret']) @property def api_key(self): """The Zoom.us api_key""" return self.config.get('api_key') @api_key.setter def api_key(self, value): """Set the api_key""" self.config['api_key'] = value self.refresh_token() @property def api_secret(self): """The Zoom.us api_secret""" return self.config.get('api_secret') @api_secret.setter def api_secret(self, value): """Set the api_secret""" self.config['api_secret'] = value self.refresh_token() @property def meeting(self): """Get the meeting component""" return self.components.get('meeting') @property def report(self): """Get the report component""" return self.components.get('report') @property def user(self): """Get the user component""" return self.components.get('user') @property def webinar(self): """Get the webinar component""" return self.components.get('webinar') @property def recording(self): """Get the recording component""" return self.components.get('recording')
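# Illustrative usage sketch. The credential strings below are placeholders and
# the call will only succeed against the live Zoom API with real credentials.
if __name__ == "__main__":
    # A V2 client; the JWT bearer token is generated from the key/secret at
    # construction time and stored in self.config['token']
    client = ZoomClient("API_KEY", "API_SECRET", version=2)

    # Components are exposed as read-only properties
    response = client.user.list()
    print(response.status_code)

    # Assigning a new key or secret regenerates the token via refresh_token()
    client.api_secret = "NEW_API_SECRET"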
zoomus2
/zoomus2-0.2.0.tar.gz/zoomus2-0.2.0/zoomus/client.py
client.py
from __future__ import absolute_import from zoomus import util from zoomus.components import base class MeetingComponent(base.BaseComponent): """Component dealing with all meeting related matters""" def list(self, **kwargs): util.require_keys(kwargs, 'host_id') if kwargs.get('start_time'): kwargs['start_time'] = util.date_to_str(kwargs['start_time']) return self.post_request("/meeting/list", params=kwargs) def create(self, **kwargs): util.require_keys(kwargs, ['host_id', 'topic', 'type']) if kwargs.get('start_time'): kwargs['start_time'] = util.date_to_str(kwargs['start_time']) return self.post_request("/meeting/create", params=kwargs) def update(self, **kwargs): util.require_keys(kwargs, ['id', 'host_id']) if kwargs.get('start_time'): kwargs['start_time'] = util.date_to_str(kwargs['start_time']) return self.post_request("/meeting/update", params=kwargs) def delete(self, **kwargs): util.require_keys(kwargs, ['id', 'host_id']) return self.post_request("/meeting/delete", params=kwargs) def end(self, **kwargs): util.require_keys(kwargs, ['id', 'host_id']) return self.post_request("/meeting/end", params=kwargs) def get(self, **kwargs): util.require_keys(kwargs, ['id', 'host_id']) return self.post_request("/meeting/get", params=kwargs) class MeetingComponentV2(base.BaseComponent): def list(self, **kwargs): util.require_keys(kwargs, 'user_id') return self.get_request( "users/{}/meetings".format(kwargs.get('user_id')), params=kwargs) def create(self, **kwargs): util.require_keys(kwargs, 'user_id') user_id = kwargs.pop('user_id') return self.post_request( "users/{}/meetings".format(user_id), data=kwargs) def retrieve(self, **kwargs): util.require_keys(kwargs, 'id') return self.get_request( "meetings/{}".format(kwargs.get('id')), params=kwargs) def update(self, **kwargs): util.require_keys(kwargs, 'id') return self.patch_request( "meetings/{}".format(kwargs.get('id')), params=kwargs) def delete(self, **kwargs): util.require_keys(kwargs, 'id') return self.delete_request( "meetings/{}".format(kwargs.get('id')), params=kwargs)
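# Illustrative usage sketch. The credentials and host_id are placeholders, and
# type=2 follows Zoom's convention for a scheduled meeting (an assumption of
# this example, not something enforced by the component).
if __name__ == "__main__":
    import datetime

    from zoomus import ZoomClient

    client = ZoomClient("API_KEY", "API_SECRET", version=1)

    # V1 create() requires host_id, topic and type; a datetime start_time is
    # converted to a string by util.date_to_str before being sent
    response = client.meeting.create(
        host_id="some-host-id",
        topic="Weekly sync",
        type=2,
        start_time=datetime.datetime(2020, 1, 31, 12, 0),
    )
    print(response.content)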
zoomus2
/zoomus2-0.2.0.tar.gz/zoomus2-0.2.0/zoomus/components/meeting.py
meeting.py
from __future__ import absolute_import from zoomus import util from zoomus.components import base class ReportComponent(base.BaseComponent): """Component dealing with all report related matters""" def get_account_report(self, **kwargs): util.require_keys(kwargs, ['start_time', 'end_time'], kwargs) if kwargs.get('start_time'): kwargs['from'] = util.date_to_str(kwargs['start_time']) del kwargs['start_time'] if kwargs.get('end_time'): kwargs['to'] = util.date_to_str(kwargs['end_time']) del kwargs['end_time'] return self.post_request("/report/getaccountreport", params=kwargs) def get_user_report(self, **kwargs): util.require_keys(kwargs, ['start_time', 'end_time'], kwargs) if kwargs.get('start_time'): kwargs['from'] = util.date_to_str(kwargs['start_time']) del kwargs['start_time'] if kwargs.get('end_time'): kwargs['to'] = util.date_to_str(kwargs['end_time']) del kwargs['end_time'] return self.post_request("/report/getuserreport", params=kwargs) class ReportComponentV2(base.BaseComponent): def get_user_report(self, **kwargs): util.require_keys(kwargs, ['user_id', 'start_time', 'end_time']) if kwargs.get('start_time'): kwargs['from'] = util.date_to_str(kwargs['start_time']) del kwargs['start_time'] if kwargs.get('end_time'): kwargs['to'] = util.date_to_str(kwargs['end_time']) del kwargs['end_time'] return self.get_request( "/report/users/{}/meetings".format(kwargs.get('user_id')), params=kwargs) def get_account_report(self, **kwargs): util.require_keys(kwargs, ['start_time', 'end_time']) if kwargs.get('start_time'): kwargs['from'] = util.date_to_str(kwargs['start_time']) del kwargs['start_time'] if kwargs.get('end_time'): kwargs['to'] = util.date_to_str(kwargs['end_time']) del kwargs['end_time'] return self.get_request( "/report/users", params=kwargs)
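# Illustrative usage sketch. The credentials and user_id are placeholders.
if __name__ == "__main__":
    import datetime

    from zoomus import ZoomClient

    client = ZoomClient("API_KEY", "API_SECRET", version=2)

    # start_time/end_time are required and are rewritten to the API's
    # `from`/`to` query parameters before the GET request is issued
    response = client.report.get_user_report(
        user_id="some-user-id",
        start_time=datetime.datetime(2020, 1, 1),
        end_time=datetime.datetime(2020, 1, 31),
    )
    print(response.status_code)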
zoomus2
/zoomus2-0.2.0.tar.gz/zoomus2-0.2.0/zoomus/components/report.py
report.py
from __future__ import absolute_import from zoomus import util from zoomus.components import base class UserComponent(base.BaseComponent): """Component dealing with all user related matters""" def list(self, **kwargs): return self.post_request("/user/list", params=kwargs) def pending(self, **kwargs): return self.post_request("/user/pending", params=kwargs) def create(self, **kwargs): return self.post_request("/user/create", params=kwargs) def update(self, **kwargs): util.require_keys(kwargs, 'id') return self.post_request("/user/update", params=kwargs) def delete(self, **kwargs): util.require_keys(kwargs, 'id') return self.post_request("/user/delete", params=kwargs) def cust_create(self, **kwargs): util.require_keys(kwargs, ['type', 'email']) return self.post_request("/user/custcreate", params=kwargs) def get(self, **kwargs): util.require_keys(kwargs, 'id') return self.post_request("/user/get", params=kwargs) def get_by_email(self, **kwargs): util.require_keys(kwargs, ['email', 'login_type']) return self.post_request("/user/getbyemail", params=kwargs) class UserComponentV2(base.BaseComponent): def list(self, **kwargs): return self.get_request("/users", params=kwargs) def create(self, **kwargs): return self.post_request("/users", params=kwargs) def update(self, **kwargs): util.require_keys(kwargs, 'id') return self.patch_request( "/users/{}".format(kwargs.get('id')), params=kwargs) def delete(self, **kwargs): util.require_keys(kwargs, 'id') return self.delete_request( "/users/{}".format(kwargs.get('id')), params=kwargs) def retrieve(self, **kwargs): util.require_keys(kwargs, 'id') return self.get_request( "/users/{}".format(kwargs.get('id')), params=kwargs)
zoomus2
/zoomus2-0.2.0.tar.gz/zoomus2-0.2.0/zoomus/components/user.py
user.py
from __future__ import absolute_import from zoomus import util from zoomus.components import base class WebinarComponent(base.BaseComponent): """Component dealing with all webinar related matters""" def list(self, **kwargs): util.require_keys(kwargs, 'host_id') if kwargs.get('start_time'): kwargs['start_time'] = util.date_to_str(kwargs['start_time']) return self.post_request("/webinar/list", params=kwargs) def upcoming(self, **kwargs): util.require_keys(kwargs, 'host_id') if kwargs.get('start_time'): kwargs['start_time'] = util.date_to_str(kwargs['start_time']) return self.post_request("/webinar/list/registration", params=kwargs) def create(self, **kwargs): util.require_keys(kwargs, ['host_id', 'topic']) if kwargs.get('start_time'): kwargs['start_time'] = util.date_to_str(kwargs['start_time']) return self.post_request("/webinar/create", params=kwargs) def update(self, **kwargs): util.require_keys(kwargs, ['id', 'host_id']) if kwargs.get('start_time'): kwargs['start_time'] = util.date_to_str(kwargs['start_time']) return self.post_request("/webinar/update", params=kwargs) def delete(self, **kwargs): util.require_keys(kwargs, ['id', 'host_id']) return self.post_request("/webinar/delete", params=kwargs) def end(self, **kwargs): util.require_keys(kwargs, ['id', 'host_id']) return self.post_request("/webinar/end", params=kwargs) def get(self, **kwargs): util.require_keys(kwargs, ['id', 'host_id']) return self.post_request("/webinar/get", params=kwargs) def register(self, **kwargs): util.require_keys(kwargs, ['id', 'email', 'first_name', 'last_name']) if kwargs.get('start_time'): kwargs['start_time'] = util.date_to_str(kwargs['start_time']) return self.post_request("/webinar/register", params=kwargs) class WebinarComponentV2(base.BaseComponent): """Component dealing with all webinar related matters""" def list(self, **kwargs): util.require_keys(kwargs, 'user_id') return self.get_request( "/users/{}/webinars".format(kwargs.get('user_id')), params=kwargs) def create(self, **kwargs): util.require_keys(kwargs, 'user_id') return self.post_request( "/users/{}/webinars".format(kwargs.get('user_id')), params=kwargs) def update(self, **kwargs): util.require_keys(kwargs, 'meeting_id') return self.patch_request( "/webinars/{}".format(kwargs.get('meeting_id')), params=kwargs) def delete(self, **kwargs): util.require_keys(kwargs, 'meeting_id') return self.delete_request( "/webinars/{}".format(kwargs.get('meeting_id')), params=kwargs)
zoomus2
/zoomus2-0.2.0.tar.gz/zoomus2-0.2.0/zoomus/components/webinar.py
webinar.py
# zoomwrap A module containing classes that can be easily serialized to JSON for use with Zoom's APIs. ```py import zoomwrap client = zoomwrap.WebhookClient("your_endpoint_url", "your_auth_str") msg = zoomwrap.Message(head=zoomwrap.messageElements.Head("hello, world")) client.send(msg) ``` I have only tested this with Zoom's [incoming webhook API](https://zoomappdocs.docs.stoplight.io/incoming-webhook-chatbot) but it should work with the Chatbot API as well since they use the same object structures. For convenience, there is a `WebhookClient` class with methods for sending API requests using your credentials. ## Documentation Currently, there is no documentation for this module because it is still very much a work in progress. For now, have a look at Zoom's [incoming webhook API docs](https://zoomappdocs.docs.stoplight.io/incoming-webhook-chatbot) and their more detailed docs for both the Chatbot and Webhook APIs [here](https://marketplace.zoom.us/docs/guides/chatbots/customizing-messages). Most of the different types of messages and their attributes have been implemented in this module, with the exception of messages with buttons, because they only work with the Chatbot API. ## Contributions This is my first pypi package and it is not very sophisticated (or neat, for that matter). Contributions are certainly welcome but may not get checked often.
zoomwrap
/zoomwrap-0.1.1.tar.gz/zoomwrap-0.1.1/README.md
README.md
======================================= Zoonado: Async Tornado Zookeeper Client ======================================= .. image:: https://img.shields.io/pypi/v/zoonado.svg :alt: Python Package Version :target: http://pypi.python.org/pypi/zoonado .. image:: https://readthedocs.org/projects/zoonado/badge/?version=latest :alt: Documentation Status :target: http://zoonado.readthedocs.org/en/latest/ .. image:: https://travis-ci.org/wglass/zoonado.svg?branch=master :alt: Build Status :target: https://travis-ci.org/wglass/zoonado .. image:: https://codeclimate.com/github/wglass/zoonado/badges/gpa.svg :alt: Code Climate :target: https://codeclimate.com/github/wglass/zoonado .. image:: https://codeclimate.com/github/wglass/zoonado/badges/coverage.svg :alt: Test Coverage :target: https://codeclimate.com/github/wglass/zoonado/coverage .. Zoonado is a Zookeeper_ python client using Tornado_ to achieve async I/O. .. contents:: :local: Installation ~~~~~~~~~~~~ Zoonado is available via PyPI_, installation is as easy as:: pip install zoonado Quick Example ~~~~~~~~~~~~~ :: from tornado import gen from zoonado import Zoonado @gen.coroutine def run(): zk = Zoonado("zk01,zk02,zk03", chroot="/shared/namespace") yield zk.start() yield zk.create("/foo/bar", data="bazz", ephemeral=True) yield zk.set_data("/foo/bar", "bwee") yield zk.close() Development ~~~~~~~~~~~ The code is hosted on GitHub_ To file a bug or possible enhancement see the `Issue Tracker`_, also found on GitHub. License ~~~~~~~ Zoonado is licensed under the terms of the Apache license (2.0). See the LICENSE_ file for more details. .. _Zookeeper: https://zookeeper.apache.org .. _Tornado: http://tornadoweb.org .. _PyPI: https://pypi.python.org/pypi/zoonado .. _GitHub: https://github.com/wglass/zoonado .. _`Issue Tracker`: https://github.com/wglass/zoonado/issues .. _LICENSE: https://github.com/wglass/zoonado/blob/master/LICENSE
zoonado
/zoonado-0.9.2.tar.gz/zoonado-0.9.2/README.rst
README.rst
import logging from tornado import ioloop, gen from zoonado import exc log = logging.getLogger() def arguments(_): pass @gen.coroutine def run(client, args): config_path = "/exampleconfig" loop = ioloop.IOLoop.current() yield client.start() config = client.recipes.TreeCache(config_path) yield config.start() try: yield client.create(config_path + "/running", data="yes") except exc.NodeExists: yield client.set_data(config_path + "/running", data="yes") for path in ["foo", "bar", "bazz", "bloo"]: try: yield client.create(config_path + "/" + path, data="1") except exc.NodeExists: yield client.set_data(config_path + "/" + path, data="1") loop.add_callback(foo, config) loop.add_callback(bar, config) loop.add_callback(bazz, config) loop.add_callback(bloo, config) yield gen.sleep(1) yield client.set_data(config_path + "/foo", "3") yield gen.sleep(1) yield client.set_data(config_path + "/bar", "2") yield client.set_data(config_path + "/bazz", "5") yield gen.sleep(6) yield client.set_data(config_path + "/running", data="no") yield gen.sleep(2) yield client.close() @gen.coroutine def foo(config): while config.running.value == "yes": log.info("[FOO] doing work for %s seconds!", config.foo.value) yield gen.sleep(int(config.foo.value)) log.info("[FOO] no longer working.") @gen.coroutine def bar(config): while config.running.value == "yes": log.info("[BAR] doing work for %s seconds!", config.bar.value) yield gen.sleep(int(config.bar.value)) log.info("[BAR] no longer working.") @gen.coroutine def bazz(config): while config.running.value == "yes": log.info("[BAZZ] doing work for %s seconds!", config.bazz.value) yield gen.sleep(int(config.bazz.value)) log.info("[BAZZ] no longer working.") @gen.coroutine def bloo(config): while config.running.value == "yes": log.info("[BLOO] doing work for %s seconds!", config.bloo.value) yield gen.sleep(int(config.bloo.value)) log.info("[BLOO] no longer working.")
zoonado
/zoonado-0.9.2.tar.gz/zoonado-0.9.2/examples/runtime_config.py
runtime_config.py
import logging import random import threading from tornado import gen, ioloop from zoonado import Zoonado log = logging.getLogger() monitor_ioloop = None def arguments(_): pass @gen.coroutine def run(client, args): yield client.start() yield client.create("/shared-znode", ephemeral=True) monitor_thread = threading.Thread(target=monitor_data, args=(args,)) monitor_thread.start() threads = [ threading.Thread(name="A", target=launch_loop, args=(args,)), threading.Thread(name="B", target=launch_loop, args=(args,)), threading.Thread(name="C", target=launch_loop, args=(args,)), ] for thread in threads: thread.start() for thread in threads: thread.join() monitor_ioloop.stop() monitor_thread.join() yield client.close() def monitor_data(args): global monitor_ioloop name = threading.current_thread().name log.info("Launching loop in thread %s", name) io_loop = ioloop.IOLoop() io_loop.make_current() monitor_ioloop = io_loop @gen.coroutine def monitor(): client = Zoonado(args.servers, chroot=args.chroot) yield client.start() def data_callback(new_data): log.info("Shared data set to '%s'", new_data) watcher = client.recipes.DataWatcher() watcher.add_callback("/shared-znode", data_callback) yield gen.moment io_loop.add_callback(monitor) io_loop.start() def launch_loop(args): name = threading.current_thread().name log.info("Launching loop in thread %s", name) io_loop = ioloop.IOLoop() io_loop.make_current() io_loop.add_callback(update_loop, name, args, io_loop) io_loop.start() @gen.coroutine def update_loop(name, args, io_loop): log.info("[LOOP %s] starting up!", name) client = Zoonado(args.servers, chroot=args.chroot) try: yield client.start() for i in range(5): yield client.exists("/shared-znode") expected_version = client.stat_cache["/shared-znode"].version yield gen.sleep(random.choice([.2, .4, .5])) log.info( "[LOOP %s] I expect the shared znode to have version %s", name, client.stat_cache["/shared-znode"].version ) txn = client.begin_transaction() txn.create("/znode-" + name, ephemeral=True) txn.check_version("/shared-znode", expected_version) txn.set_data("/shared-znode", "altered by loop %s!" % name) txn.delete("/znode-" + name) log.info("[LOOP %s] committing...", name) result = yield txn.commit() if not result: log.info("[LOOP %s] rolled back!", name) yield client.close() finally: io_loop.stop()
zoonado
/zoonado-0.9.2.tar.gz/zoonado-0.9.2/examples/transactions.py
transactions.py
import collections import itertools import logging import random from tornado import gen, ioloop from zoonado.recipes.allocator import round_robin log = logging.getLogger(__name__) ANIMALS = ["cat", "dog", "mouse", "human"] def arguments(parser): parser.add_argument( "znode", type=str, help="Path of the base znode to use." ) parser.add_argument( "--workers", "-w", type=int, default=5, help="Number of worker coroutines to launch." ) parser.add_argument( "--items", "-n", type=int, default=17, help="Number of items to allocate amongst the workers." ) parser.add_argument( "--alloc-func", "-a", default="round_robin", choices=allocation_functions.keys(), help="Which allocation function to use." ) @gen.coroutine def run(client, args): items = set([ "%s::%s" % (i, random.choice(ANIMALS)) for i in range(args.items) ]) allocation_function = allocation_functions[args.alloc_func] yield client.start() for i in range(args.workers): ioloop.IOLoop.current().add_callback( worker, i, client, args.znode, allocation_function, items ) yield gen.sleep(10) @gen.coroutine def worker(number, client, znode_path, allocation_fn, items): name = "worker_%s" % number allocator = client.recipes.Allocator(znode_path, name, allocation_fn) yield allocator.start() yield allocator.update(items) while True: log.info("[WORKER %d] My set: %s", number, allocator.allocation) yield gen.sleep(2) def animal_buckets(members, items): animal_assignment = {} for member, animal in zip(itertools.cycle(members), ANIMALS): animal_assignment[animal] = member allocation = collections.defaultdict(set) for member, item in zip(itertools.cycle(members), items): animal = item.split("::")[1] allocation[animal_assignment[animal]].add(item) return allocation allocation_functions = { "round_robin": round_robin, "buckets": animal_buckets, }
zoonado
/zoonado-0.9.2.tar.gz/zoonado-0.9.2/examples/allocator.py
allocator.py
from .base import ( BusinessOrIndividualModel, Address, VerificationModel, ) from ..exceptions import FieldError, ValidationError class BankAccountVerificationModel(VerificationModel): """ Have some bank account verification attributes. Attributes: deposit_check: boolean of verification """ @classmethod def get_required_fields(cls): fields = super().get_required_fields() return fields.union({"deposit_check"}) class BankAccount(BusinessOrIndividualModel): """ Represent a Bank Account. https://docs.zoop.co/reference#conta-banc%C3%A1ria The :attr:`RESOURCE` is used to identify this Model. Used to check against :attr:`.resource`! Attributes: account_number: account number bank_code: code of bank holder_name: name of owner routing_number: agency code in BR type: type of account address: Address model bank_name: name of bank country_code: country code customer: id of owner description: description debitable: boolean of verification fingerprint: ? is_active: boolean of verification is_verified: boolean of verification last4_digits: last 4 digits of account number phone_number: phone number verification_checklist: VerificationCheckList model """ RESOURCE = "bank_account" SAVING_TYPE = "Savings" CHECKING_TYPE = "Checking" TYPES = {SAVING_TYPE, CHECKING_TYPE} def init_custom_fields( self, type=None, address=None, verification_checklist=None, **kwargs ): """ Initialize :attr:`address` as :class:`.Address`.\n Initialize :attr:`verification_checklist` as :class:`.BankAccountVerificationModel`. Args: type (str): value containing type address (dict or :class:`.Address`): address verification_checklist (dict or :class:`.BankAccountVerificationModel`): verifications # noqa **kwargs: """ self.set_identifier(**kwargs) self.validate_type(type) setattr( self, "address", Address.from_dict_or_instance(address, allow_empty=True) ) setattr( self, "verification_checklist", BankAccountVerificationModel.from_dict_or_instance( verification_checklist, allow_empty=True ), ) @classmethod def validate_type(cls, type): """ Validate bank account ``type`` Args: type (str): value of type to be validated Raises: ValidationError: when ``type`` is not in :attr:`TYPES` """ if type not in cls.TYPES: raise ValidationError( cls, FieldError("type", f"type must one of {cls.TYPES}") ) @classmethod def get_required_fields(cls): fields = super().get_required_fields() return fields.union({"type", "holder_name", "bank_code", "routing_number"}) @classmethod def get_non_required_fields(cls): fields = super().get_non_required_fields() return fields.union( { "account_number", "address", "bank_name", "country_code", "customer", "debitable", "description", "fingerprint", "is_active", "is_verified", "last4_digits", "phone_number", "verification_checklist", } )
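# Illustrative sketch of the type validation above: any value outside
# {"Savings", "Checking"} is rejected with a ValidationError.
if __name__ == "__main__":
    BankAccount.validate_type(BankAccount.SAVING_TYPE)  # passes silently

    try:
        BankAccount.validate_type("Current")
    except ValidationError as err:
        print(err)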
zoop-wrapper
/zoop_wrapper-0.5.2-py3-none-any.whl/zoop_wrapper/models/bank_account.py
bank_account.py
from .base import ResourceModel from ..exceptions import FieldError class Event(ResourceModel): """ Evento assíncrono enviado pela Zoop. https://docs.zoop.co/docs/sobre-os-webhooks#corpo-de-um-evento https://docs.zoop.co/docs/eventos-dispon%C3%ADveis """ # @classmethod # def get_non_required_fields(cls): # fields = super().get_non_required_fields() # return fields.union({"payload"}) class Webhook(ResourceModel): """ Webhook para cadastro de eventos assíncronos enviados pela Zoop. https://docs.zoop.co/reference#webhook Attributes: url: url para o envio da requisição do webhook method: Método da requisição do webhook events: Lista de eventos que acionarão o webhook description: Descrição do webhook authorization: ?? dflag: ?? status: situação do webhook na zoop events_sent: quantidade de vezes que o wehbook foi acionado """ EVENTS = { "buyer.transaction.canceled", "buyer.transaction.charged_back", "buyer.transaction.commission.succeeded", "buyer.transaction.created", "buyer.transaction.dispute.succeeded", "buyer.transaction.disputed", "buyer.transaction.failed", "buyer.transaction.pre_authorized", "buyer.transaction.reversed", "buyer.transaction.succeeded", "buyer.transaction.updated", "document.created", "document.updated", "invoice.canceled", "invoice.created", "invoice.expired", "invoice.overdue", "invoice.paid", "invoice.refunded", "invoice.updated", "plan.created", "plan.deleted", "plan.updated", "seller.activated", "seller.created", "seller.deleted", "seller.denied", "seller.enabled", "seller.tef.disable", "seller.tef.enable", "seller.tef.pending", "seller.updated", "subscription.active", "subscription.canceled", "subscription.created", "subscription.deleted", "subscription.expired", "subscription.overdue", "transaction.failed", "transaction.succeeded", "transaction.canceled", "transaction.created", "transaction.failed", "transaction.charged_back", "transaction.reversed", "transaction.updated", "transaction.capture.failed", "transaction.capture.succeeded", "transaction.dispute.succeeded", "transaction.disputed", "transaction.pre_authorization.failed", "transaction.pre_authorization.succeeded", "transaction.pre_authorized", "transaction.void.failed", "transaction.void.succeeded", "transaction.commission.succeeded", } RESOURCE = "webhook" def init_custom_fields(self, method="POST", events=None, **kwargs) -> None: """ Declara o campo :attr:`method` com o valor padrão se não tiver sido passado. Trata e declara o campo :attr:`events`. .. note:: Se o `events` passado não for uma lista, faz um parse! Args: method: Método da requisição do webhook events: Lista de eventos que acionarão o webhook **kwargs: kwargs """ setattr(self, "method", method) if events is None: setattr(self, "events", []) elif not isinstance(events, list): setattr(self, "events", [events]) def validate_custom_fields(self, **kwargs): """ Valida se o campo :attr:`events` é vazio ou se os valores dele não são eventos válidos. Args: **kwargs: kwargs Returns: lista de erros ocorridos/identificados """ errors = [] events_set = set(self.events) if set().issuperset(events_set): errors.append(FieldError("events", "A lista de eventos não pode ser vazia")) elif not events_set.issubset(self.EVENTS): errors.append( FieldError( "events", f"Os eventos {events_set-self.EVENTS} não são válidos! 
\n" f"Os possíveis eventos são: {self.EVENTS}", ) ) return errors def get_original_different_fields_mapping(self): return {"events": "event"} @classmethod def get_required_fields(cls) -> set: fields = super().get_required_fields() return fields.union({"method", "url", "events"}) @classmethod def get_non_required_fields(cls) -> set: fields = super().get_non_required_fields() return fields.union( {"description", "authorization", "dflag", "status", "events_sent"} )
zoop-wrapper
/zoop_wrapper-0.5.2-py3-none-any.whl/zoop_wrapper/models/webhook.py
webhook.py
from .base import ZoopObject, ResourceModel from .card import Card from .invoice import Invoice from .token import Token from ..exceptions import ValidationError, FieldError from zoop_wrapper.utils import convert_currency_float_value_to_cents class PointOfSale(ZoopObject): """ Represents something (?) Attributes: entry_mode: ?? identification_number: ?? """ @classmethod def get_non_required_fields(cls): fields = super().get_non_required_fields() return fields.union({"entry_mode", "identification_number"}) class History(ZoopObject): """ Represents a update for :class:`.Transaction` Attributes: amount: amount value for the update authorization_code: ?? authorization_nsu: ?? authorizer: ?? authorizer_id: ?? created_at: datetime for the update gatewayResponseTime: ?? id: uuid identifier operation_type: type for the update response_code: ?? response_message: ?? status: status for the update transaction: transaction uuid identifier """ @classmethod def get_non_required_fields(cls): fields = super().get_non_required_fields() return fields.union( { "amount", "authorization_code", "authorization_nsu", "authorizer", "authorizer_id", "created_at", "gatewayResponseTime", "id", "operation_type", "response_code", "response_message", "status", "transaction", } ) class Transaction(ResourceModel): """ Represents a transaction https://docs.zoop.co/reference#transa%C3%A7%C3%A3o The :attr:`RESOURCE` is used to identify this Model. Used to check against :attr:`.resource`! Attributes: amount (int): integer amount value in 'centavos' app_transaction_uid: ?? business: ?? capture (bool): flag que designa se será uma transação simples {true} ou uma composta (com pre autorização) {false} # noqa captured (bool): flag indica se a transação foi capturada ou não confirmed (str): value of cofirmation currency (str): coin currency string customer (str): customer uuid identifier description (str): value description discounts: ?? expected_on (str):datetime string fee_details: ?? fees: ?? gateway_authorizer: ?? history (list of :class:`.History`): transaction updates individual: ?? installment_plan: ?? location_latitude: ?? location_longitude: ?? on_behalf_of (str): seller uuid identifier original_amount (int): original amount value payment_method (:class:`.Card` or :class:`.Invoice`): payment method used payment_type (str): payment type point_of_sale (:class:`.PointOfSale`): ?? pre_authorization: ?? reference_id: ?? refunded (bool): boolean of verification refunds: ?? rewards: ?? sales_receipt: statement_descriptor (str): value description status (str): value for status transaction_number: ?? voided (bool): boolean of verification """ RESOURCE = "transaction" CARD_TYPE = "credit" BOLETO_TYPE = "boleto" PAYMENT_TYPES = {CARD_TYPE, BOLETO_TYPE} def init_custom_fields( self, amount=None, currency="BRL", history=None, id=None, payment_method=None, payment_type=None, point_of_sale=None, source=None, **kwargs, ): """ Initialize :attr:`payment_method` as :class:`.Card` or :class:`.Invoice` based on data. Initialize :attr:`point_of_sale` as :class:`.PointOfSale`. Initialize :attr:`history` as list of :class:`.History`. Args: currency (str): default currency is 'BRL'. So users may not need to pass currency! history (dict or :class:`.History` or list of either): history data. 
May be a list of dict or list of :class:`.History` # noqa payment_method (dict or :class:`.Card` or :class:`.Invoice`): payment method data # noqa payment_type (str): value for payment type point_of_sale (dict or :class:`.PointOfSale`): point of sale data **kwargs: kwargs """ setattr(self, "currency", currency) if payment_type not in Transaction.PAYMENT_TYPES: raise ValidationError( self, f"payment_type precisa ser um valor " f"do conjunto {Transaction.PAYMENT_TYPES}", ) if amount is not None: amount = convert_currency_float_value_to_cents(amount) setattr(self, "amount", amount) if id is not None and payment_type == Transaction.CARD_TYPE: setattr( self, "payment_method", Card.from_dict_or_instance( payment_method, allow_empty=self._allow_empty ), ) elif id is None and payment_type == Transaction.CARD_TYPE: setattr( self, "source", Source.from_dict_or_instance(source, allow_empty=self._allow_empty), ) elif payment_type == Transaction.BOLETO_TYPE: setattr( self, "payment_method", Invoice.from_dict_or_instance( payment_method, allow_empty=self._allow_empty ), ) else: raise ValidationError(self, "Alguma coisa muito errada aconteceu!!") setattr(self, "payment_type", payment_type) setattr( self, "point_of_sale", PointOfSale.from_dict_or_instance(point_of_sale, allow_empty=True), ) if isinstance(history, list): setattr( self, "history", [ History.from_dict_or_instance(item, allow_empty=True) for item in history ], ) else: setattr( self, "history", [History.from_dict_or_instance(history, allow_empty=True)], ) def get_validation_fields(self): """ Pega os ``campos de validação`` para uma instância.\n O conjunto de campos é feito com base no :attr:`payment_type`. Se for :attr:`CARD_TYPE` utiliza o :meth:`get_card_required_fields`. Se não, ele é :attr:`payment_type` é :attr:`BOLETO_TYPE`! Utiliza o :meth:`get_boleto_required_fields`. Returns: ``set`` de campos para serem validados """ fields = set() if self.payment_type == self.CARD_TYPE: return fields.union(self.get_card_required_fields()) else: return fields.union(self.get_boleto_required_fields()) def get_all_fields(self): """ Pega ``todos os campos`` para instância. O conjunto de campos é construído com base no :meth:`get_validation_fields` com a união do :meth:`get_non_required_fields`. 
Returns: ``set`` de todos os campos """ fields = self.get_validation_fields() return fields.union(self.get_non_required_fields()) @classmethod def get_required_fields(cls): fields = super().get_required_fields() return fields.union( {"currency", "customer", "description", "on_behalf_of", "payment_type"} ) @classmethod def get_card_required_fields(cls): fields = cls.get_required_fields() return fields.union({"source", "capture"}) @classmethod def get_boleto_required_fields(cls): fields = cls.get_required_fields() return fields.union({"payment_method", "amount"}) @classmethod def get_non_required_fields(cls): fields = super().get_non_required_fields() return fields.union( { "app_transaction_uid", "business", "captured", "confirmed", "discounts", "expected_on", "fee_details", "fees", "gateway_authorizer", "history", "individual", "installment_plan", "location_latitude", "location_longitude", "original_amount", "point_of_sale", "pre_authorization", "reference_id", "refunded", "refunds", "rewards", "sales_receipt", "statement_descriptor", "status", "transaction_number", "voided", } ) class Source(ZoopObject): CARD_PRESENT_TYPE = "card_present_type" CARD_NOT_PRESENT_TYPE = "card_not_present_type" SOURCE_TYPES = {CARD_PRESENT_TYPE, CARD_NOT_PRESENT_TYPE} def init_custom_fields( self, card=None, type="card", currency="BRL", installment_plan=None, **kwargs, ): setattr(self, "type", type) setattr(self, "currency", currency) kwargs["amount"] = convert_currency_float_value_to_cents(kwargs["amount"]) """ Ver documentação do :meth:`.from_dict_or_instance`. Precisamos pegar o atributo `id` para identificar o tipo. """ token_for_card = Token.from_dict_or_instance(card, allow_empty=True) if token_for_card.id is not None: card_type = Source.CARD_NOT_PRESENT_TYPE else: try: token_for_card = Token.from_dict_or_instance(card) card_type = Source.CARD_PRESENT_TYPE except ValidationError as e: raise ValidationError( self, f"Tipo do source não identificado! " f"Utilize um dos tipos {Source.SOURCE_TYPES}", ) from e if installment_plan: installment_plan = InstallmentPlan.from_dict_or_instance(installment_plan) setattr(self, "installment_plan", installment_plan) setattr(self, "card", token_for_card) setattr(self, "card_type", card_type) def get_validation_fields(self): """ Pega ``campos de validação`` da instâcia.\n O conjunto de campos é construído com base no :attr:`card_type`. Se for :attr:`CARD_PRESENT_TYPE` utiliza o :meth:`get_card_present_required_fields`. Se não, utiliza o :meth:`get_card_not_present_required_fields`. Returns: ``set`` de campos para ser validados """ fields = set() if self.card_type == self.CARD_PRESENT_TYPE: return fields.union(self.get_card_present_required_fields()) else: return fields.union(self.get_card_not_present_required_fields()) def get_all_fields(self): """ Pega ``todos os campos`` da instância. Returns: ``set`` de todos os campos """ fields = set() return fields.union( self.get_validation_fields(), self.get_non_required_fields() ) @classmethod def get_required_fields(cls): fields = super().get_required_fields() return fields.union({"card", "type", "currency", "usage", "amount"}) @classmethod def get_non_required_fields(cls) -> set: fields = super().get_non_required_fields() return fields.union({"installment_plan"}) @classmethod def get_card_not_present_required_fields(cls): """ Método get do ``set`` de ``required fields`` para :attr:`CARD_TYPE` quando o cartão é presente. 
Returns: ``set`` de campos """ return cls.get_required_fields() @classmethod def get_card_present_required_fields(cls): """ Método get do ``set`` de ``non required fields`` para :attr:`CARD_TYPE`. Returns: ``set`` de campos """ fields = cls.get_required_fields() return fields.union({"amount", "usage"}) class InstallmentPlan(ZoopObject): INTEREST_FREE_MODE = "interest_free" WITH_INTEREST_MODE = "with_interest" INSTALLMENT_PLAN_MODES = {INTEREST_FREE_MODE, WITH_INTEREST_MODE} @classmethod def get_required_fields(cls): fields = super().get_required_fields() return fields.union({"mode", "number_installments"}) def validate_custom_fields(self, **kwargs): errors = [] if self.mode not in self.INSTALLMENT_PLAN_MODES: errors.append( FieldError( "mode", f"O mode é inválido! Deveria ser um dos " f"dois tipos: {self.INSTALLMENT_PLAN_MODES}", ) ) if not InstallmentPlan._validate_number_installments(self.number_installments): errors.append( FieldError( "number_installments", f"O number_installments é inválido! Deveria ser de 1 até 12, " f"e não {self.number_installments}", ) ) return errors @classmethod def _validate_number_installments(cls, number_installments): """ Esse método verifica se: - number_installments é inteiro - number_installments é um valor inteiro entre 1 e 12 incluindo as bordas :return: bool """ if not isinstance(number_installments, int): return False return number_installments >= 1 and number_installments <= 12
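# Illustrative sketch of installment-plan validation; the values are arbitrary.
if __name__ == "__main__":
    # Valid: an interest-free plan split into 3 installments
    plan = InstallmentPlan(
        mode=InstallmentPlan.INTEREST_FREE_MODE, number_installments=3
    )
    print(plan.to_dict())

    # Invalid: more than 12 installments fails validation at construction time
    try:
        InstallmentPlan(
            mode=InstallmentPlan.WITH_INTEREST_MODE, number_installments=15
        )
    except ValidationError as err:
        print(err)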
zoop-wrapper
/zoop_wrapper-0.5.2-py3-none-any.whl/zoop_wrapper/models/transaction.py
transaction.py
from .base import PaymentMethod, VerificationModel class CardVerificationChecklist(VerificationModel): """ Represent a credit card verification Attributes: security_code_check: boolean of verification """ @classmethod def get_required_fields(cls): fields = super().get_required_fields() return fields.union({"security_code_check"}) class Card(PaymentMethod): """ Represent a Card. https://docs.zoop.co/reference#cart%C3%A3o The :attr:`RESOURCE` is used to identify this Model. Used to check against :attr:`.resource`! Attributes: card_brand: company name expiration_month: month of expiration expiration_year: year of expiration fingerprint: unique card identifier from company of card ? first4_digits: first 4 digits of card holder_name: owner name is_active: boolean of verification is_valid: boolean of verification is_verified: boolean of verification last4_digits: last 4 digits of card verification_checklist: CardVerificationChecklist model """ RESOURCE = "card" def init_custom_fields(self, verification_checklist=None, **kwargs): """ Initialize :attr:`verification_checklist` as :class:`CardVerificationChecklist` Args: verification_checklist: dict of data or :class:`CardVerificationChecklist` **kwargs: kwargs """ setattr( self, "verification_checklist", CardVerificationChecklist.from_dict_or_instance( verification_checklist, allow_empty=True ), ) @classmethod def get_required_fields(cls): fields = super().get_required_fields() return fields.union({"expiration_month", "expiration_year", "holder_name"}) @classmethod def get_non_required_fields(cls): fields = super().get_non_required_fields() return fields.union( { "card_brand", "fingerprint", "first4_digits", "is_active", "is_valid", "is_verified", "last4_digits", "verification_checklist", } )
zoop-wrapper
/zoop_wrapper-0.5.2-py3-none-any.whl/zoop_wrapper/models/card.py
card.py
from .base import PaymentMethod, ZoopObject from ..exceptions import FieldError, ValidationError class BaseModeObject(ZoopObject): """ Um objeto base que possui modos de quantia e porcentagem """ MODES = set() def init_custom_fields(self, mode=None, **kwargs): """ É necessário configurar o :attr:`mode` antes pois ele influência no :meth:`get_validation_fields` """ if mode not in self.MODES: raise ValidationError( self, FieldError( "mode", f"o valor {mode} é inválido! Possíveis modos são {self.MODES}", ), ) setattr(self, "mode", mode) def get_mode_required_fields_mapping(self): raise NotImplementedError("Implemente o mapeamento!") def get_validation_fields(self): modes_required_fields_mapping = self.get_mode_required_fields_mapping() required_method = modes_required_fields_mapping.get(self.mode) return required_method() @classmethod def get_required_fields(cls): return {"mode"} @classmethod def get_percentage_required_fields(cls): fields = cls.get_required_fields() return fields.union({"percentage"}) @classmethod def get_fixed_required_fields(cls): fields = cls.get_required_fields() return fields.union({"amount"}) class Fine(BaseModeObject): """ Representa a multa! https://docs.zoop.co/docs/multa-juros-e-descontos#multa """ FIXED = "FIXED" PERCENTAGE = "PERCENTAGE" MODES = {FIXED, PERCENTAGE} def get_mode_required_fields_mapping(self): return { self.FIXED: self.get_fixed_required_fields, self.PERCENTAGE: self.get_percentage_required_fields, } @classmethod def get_non_required_fields(cls): return {"start_date"} class Interest(BaseModeObject): """ Representa um juros! https://docs.zoop.co/docs/multa-juros-e-descontos#juros """ DAILY_AMOUNT = "DAILY_AMOUNT" DAILY_PERCENTAGE = "DAILY_PERCENTAGE" MONTHLY_PERCENTAGE = "MONTHLY_PERCENTAGE" MODES = {DAILY_AMOUNT, DAILY_PERCENTAGE, MONTHLY_PERCENTAGE} def get_mode_required_fields_mapping(self): return { self.DAILY_AMOUNT: self.get_fixed_required_fields, self.DAILY_PERCENTAGE: self.get_percentage_required_fields, self.MONTHLY_PERCENTAGE: self.get_percentage_required_fields, } @classmethod def get_non_required_fields(cls): return {"start_date"} class Discount(BaseModeObject): """ Representa um desconto! https://docs.zoop.co/docs/multa-juros-e-descontos#descontos """ FIXED = "FIXED" PERCENTAGE = "PERCENTAGE" MODES = {FIXED, PERCENTAGE} def get_mode_required_fields_mapping(self): return { self.FIXED: self.get_fixed_required_fields, self.PERCENTAGE: self.get_percentage_required_fields, } @classmethod def get_required_fields(cls): fields = super().get_required_fields() return fields.union({"limit_date"}) class BillingInstructions(ZoopObject): """ Represents billing instructions (fine, interest and discount) Attributes: discount (list of :class:`.BillingConfiguration`): list of optional discount rules # noqa interest (:class:`.BillingConfiguration`): optional interest rules late_fee (:class:`.BillingConfiguration`): optional fine rules """ def init_custom_fields(self, late_fee=None, interest=None, discount=None, **kwargs): """ Inicializa late_fee, interest e discount. 
Args: discount: dict or instance of BillingConfiguration model interest: dict or instance of BillingConfiguration model late_fee: dict or instance of BillingConfiguration model **kwargs: kwargs """ if late_fee: setattr( self, "late_fee", Fine.from_dict_or_instance(late_fee), ) if interest: setattr( self, "interest", Interest.from_dict_or_instance(interest), ) if discount: if not isinstance(discount, list): discount = [discount] setattr( self, "discount", [Discount.from_dict_or_instance(item) for item in discount], ) @classmethod def get_non_required_fields(cls): """ Conjunto de campos não obrigatórios Returns: ``set`` de campos """ fields = super().get_non_required_fields() return fields.union({"late_fee", "interest", "discount"}) class Invoice(PaymentMethod): """ Represents a invoice ('boleto' in BR). https://docs.zoop.co/reference#boleto Attributes: billing_instructions (:class:`.BillingInstructions`): optional billing instructions # noqa security_code_check (bool): verification of security code """ RESOURCE = "boleto" def init_custom_fields(self, billing_instructions=None, **kwargs): """ initialize :attr:`billing_instructions` with :class:`.BillingInstructions` Args: billing_instructions (dict or :class:`.BillingInstructions`): data **kwargs: """ super().init_custom_fields(**kwargs) if billing_instructions: setattr( self, "billing_instructions", BillingInstructions.from_dict_or_instance(billing_instructions), ) @classmethod def get_required_fields(cls): fields = super().get_required_fields() return fields.union({"expiration_date", "payment_limit_date"}) @classmethod def get_non_required_fields(cls): fields = super().get_non_required_fields() return fields.union( { "accepted", "bank_code", "barcode", "billing_instructions", "body_instructions", "document_number", "downloaded", "fingerprint", "paid_at", "printed", "recipient", "reference_number", "sequence", "status", "url", "zoop_boleto_id", } )
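# Illustrative sketch: billing instructions combining a fixed late fee with a
# percentage discount. The amount, percentage and date are placeholder values.
if __name__ == "__main__":
    instructions = BillingInstructions(
        late_fee={"mode": Fine.FIXED, "amount": 300},
        discount=[
            {"mode": Discount.PERCENTAGE, "percentage": 5, "limit_date": "2020-06-01"}
        ],
    )

    # Nested Fine/Discount objects are validated on construction and
    # serialized recursively by to_dict()
    print(instructions.to_dict())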
zoop-wrapper
/zoop_wrapper-0.5.2-py3-none-any.whl/zoop_wrapper/models/invoice.py
invoice.py
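The models above compose into a complete boleto payload. Below is a minimal sketch of that composition (not taken from the package docs), assuming the classes are imported straight from the module shown (zoop_wrapper.models.invoice); dates and amounts are placeholder values.

from zoop_wrapper.models.invoice import (
    BillingInstructions,
    Discount,
    Fine,
    Interest,
    Invoice,
)

# Fine in FIXED mode requires "amount"; in PERCENTAGE mode it would require "percentage".
late_fee = Fine(mode=Fine.FIXED, amount=300, start_date="2020-06-21")

# Interest in MONTHLY_PERCENTAGE (or DAILY_PERCENTAGE) mode requires "percentage";
# DAILY_AMOUNT mode requires "amount".
interest = Interest(mode=Interest.MONTHLY_PERCENTAGE, percentage=2, start_date="2020-06-21")

# Discount always requires "limit_date" on top of the mode-specific field.
discount = Discount(mode=Discount.PERCENTAGE, percentage=5, limit_date="2020-06-15")

invoice = Invoice(
    expiration_date="2020-06-20",      # required
    payment_limit_date="2020-06-30",   # required
    billing_instructions=BillingInstructions(
        late_fee=late_fee,
        interest=interest,
        discount=[discount],           # discount accepts a single rule or a list
    ),
)

# Nested models are serialized recursively by ZoopObject.to_dict().
print(invoice.to_dict())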
import copy from pycpfcnpj import cpf, cnpj from ..utils import get_logger from ..exceptions import ValidationError, FieldError logger = get_logger("models") class ZoopObject(object): """ This class represent a bare Zoop object. Attributes: _allow_empty: boolean """ def __init__(self, allow_empty=False, **kwargs): """ initialize ``all fields`` from :meth:`get_all_fields` as ``attributes`` from ``kwargs`` on instance. Then call :meth:`validate_fields`. Args: allow_empty: boolean which disable validation of required fields **kwargs: dictionary of args """ self._allow_empty = allow_empty self.init_custom_fields(**kwargs) for field_name in self.get_all_fields(): my_value = getattr(self, field_name, None) if my_value is not None: continue value = kwargs.get(field_name, None) setattr(self, field_name, value) self.validate_fields(**kwargs) def init_custom_fields(self, **kwargs): """ this method exists to set custom attributes such as :class:`ZoopObject` instances. Since all attributes set on :meth:`__init__` are ``dict's`` or ``variables``. Args: **kwargs: dictionary of args """ pass @staticmethod def make_data_copy_with_kwargs(data, **kwargs): """ make a new data dict from previous data dict with added ``kwargs`` if ``data`` is ``None`` create a ``new empty dict``.\n ``data`` may be ``None`` for the cases we are explicitly calling with ``allow_empty=True`` on :meth:`init_custom_fields` for some custom :class:`ZoopObject` instance set. Such as:: instance = ZoopObject() setattr( instance, 'address', Address.from_dict_or_instance(None, allow_empty=True) ) Args: data: dict of data may be None **kwargs: dict of kwargs Returns: new dict of data """ data = copy.deepcopy(data) data.update(kwargs) return data @classmethod def from_dict(cls, data, allow_empty=False, **kwargs): """ to construct a instance of this ``class`` from ``dict`` Args: data: dict of data allow_empty: boolean **kwargs: kwargs data: dict of data may be None allow_empty: boolean **kwargs: kwargs Raises: :class:`.ValidationError`: se data não for do tipo``dict`` ou for ``None`` Returns: instance initialized of cls """ if data is None: data = {} if not isinstance(data, dict): raise ValidationError( cls, f"A variável data deveria ser um dicionário! " f"Mas é do tipo {type(data).__name__} " f"e o valor foi ({data})", ) _data = cls.make_data_copy_with_kwargs(data, allow_empty=allow_empty, **kwargs) return cls(**_data) @classmethod def from_dict_or_instance(cls, data, **kwargs): """ Esse método existe para fazer um tratamento dos inputs de dados. O atributo :attr:`data` pode ser um dict ou um :class:`.ZoopOject`. Verifica se :attr:`data` já é uma instância da classse :class:`ZoopObject` or uma ``subclasse``.\n Se não for, chama :meth:`from_dict`. Args: data: dict of data or instance **kwargs: kwargs Returns: instance initialized of ``cls`` """ if isinstance(data, cls): return data else: return cls.from_dict(data, **kwargs) @staticmethod def is_value_empty(value): """ Verify if ``value`` passed is considered ``empty``! ``value`` may be ``None``. As we set on :meth:`__init__`:: value = kwargs.get(field_name, None) ``value`` may be ``{}`` if it was a :class:`ZoopObject` with allow_empty! \n ``value`` may be ``[{}]`` if it was a ``list`` of :class:`ZoopObject`'s with ``allow_empty``!! 
Args: value: Value to be verified Returns: boolean """ return value is None or value == {} or value == [{}] def to_dict(self): """ serialize ``self`` to dict Returns: dict of instance """ data = {} different_fields_mapping = self.get_original_different_fields_mapping() for field in self.get_all_fields(): value = getattr(self, field) if isinstance(value, list): """our value is a list! It may be a list of ZoopObject's. Let's try to get its serialized value!""" try: value = [item.to_dict() for item in value] except AttributeError: pass else: try: """our value is not a list! It may be a ZoopObject instance. Let's try to get its serialized value!""" value = value.to_dict() except AttributeError: pass if self.is_value_empty(value): continue if field in different_fields_mapping: original_field = different_fields_mapping.get(field) else: original_field = field data[original_field] = value return data def validate_fields(self, raise_exception=True, **kwargs): """ Valida na instância os campos retornados do conjunto :meth:`get_validation_fields`.\n Se :attr:`_allow_empty` é ``True`` não validar! Esse método deve chamar o :meth:`validate_custom_fields` para praticidade de extensão e especialização! Args: raise_exception: flag que dita se a exceção deve ser lançada ou não Raises: :class:`.ValidationError` se (algum campo ``obrigatório`` está faltando ou ocorreu algum erro no :meth:`validate_custom_fields`) e ``raise_exception==True`` # noqa """ if self._allow_empty: return errors = [] for validation_field in self.get_validation_fields(): value = getattr(self, validation_field, None) if value is None: errors.append( FieldError(validation_field, "campo obrigatório faltando!") ) errors.extend(self.validate_custom_fields(**kwargs)) error = ValidationError(self, errors) if errors and raise_exception: raise error # noinspection PyMethodMayBeStatic def validate_custom_fields(self, **kwargs): """ Método de validação a ser estendido para fazer uma validação especializada. Esse método originalmente retorna uma lista vazia pois ele serve para ser sobreescrito pelas calsses especializadas adicionando comportamento de validação! Returns: Lista de erros a serem levantados. """ return [] def get_validation_fields(self): """ Método para pegar os campos de validação!\n Isso é necessário para classes/instances com diferentes campos obrigatórios definidos por um tipo dinâmico!\n Tais como :class:`.Seller`, :class:`.BankAccount`, :class:`.Fine` e :class:`.Token`.\n O padrão é :meth:`get_required_fields`. Returns: ``set`` de campos para serem utilizados na validação """ return self.get_required_fields() def get_all_fields(self): """ Método para pegar todos os campos!\n Isso é necessário para classes/instances com diferentes campos obrigatórios definidos por um tipo dinâmico!\n Tais como :class:`.Seller`, :class:`.BankAccount`, :class:`.Fine` e :class:`.Token`.\n O padrão é :meth:`get_validation_fields` + :meth:`get_non_required_fields`. Returns: ``set`` de todos os campos """ fields = set() return fields.union( self.get_validation_fields(), self.get_non_required_fields() ) # noinspection PyMethodMayBeStatic def get_original_different_fields_mapping(self): """ Método de mapeamento de nomes diferentes de atributo => API zoop a ser estendido. 
Returns: Dicionário de nome_custom => nome_oringial """ return {} @classmethod def get_required_fields(cls): """ get ``set`` of ``required fields`` Returns: ``set`` of fields """ return set() @classmethod def get_non_required_fields(cls): """ get ``set`` of ``non required fields`` Returns: ``set`` of fields """ return set() class ResourceModel(ZoopObject): """ Represents a Model that is a ``resource``. Attributes: id: identifier string resource: type string uri: uri string created_at: date of creation updated_at: date of update metadata: dict with metadata """ RESOURCE = None @classmethod def get_non_required_fields(cls): fields = super().get_non_required_fields() return fields.union( {"id", "resource", "uri", "created_at", "updated_at", "metadata"} ) class MarketPlaceModel(ResourceModel): """ This class represents a :class:`.ResourceModel` which belongs to some ``marketplace`` from ``Zoop``. Attributes: marketplace_id: identifier string """ @classmethod def get_non_required_fields(cls): fields = super().get_non_required_fields() return fields.union({"marketplace_id"}) class Address(ZoopObject): """ Represents a physical ``address``. Attributes: line1: complete street name line2: number line3: complement neighborhood: neighborhood city: city state: Código ISO 3166-2 para o estado postal_code: postal code country_code: ISO 3166-1 alpha-2 - códigos de país de duas letras """ @classmethod def get_non_required_fields(cls): fields = super().get_non_required_fields() return fields.union( { "city", "country_code", "line1", "line2", "line3", "neighborhood", "postal_code", "state", } ) class Person(ZoopObject): """ Represents a ``person``. Attributes: address: Address model birthdate: birthdate email: email first_name: first name last_name: last name phone_number: phone number taxpayer_id: cpf válido """ def validate_custom_fields(self, **kwargs): """ O :attr:`taxpayer_id` precisa ser um CPF válido. Então verificamos isso. Args: raise_exception: Quando algum campo está faltando ou CPF é inválido **kwargs: """ errors = [] if self._allow_empty: return errors if not cpf.validate(self.taxpayer_id): errors.append(FieldError("taxpayer_id", "taxpayer_id inválido!")) return errors def init_custom_fields(self, address=None, **kwargs): """ Initialize :attr:`address` with :class:`.Address` Args: address: dict of data or :class:`.Address` **kwargs: """ setattr( self, "address", Address.from_dict_or_instance(address, allow_empty=True) ) @classmethod def get_required_fields(cls): fields = super().get_required_fields() return fields.union( { "address", "email", "first_name", "last_name", "phone_number", "taxpayer_id", } ) @classmethod def get_non_required_fields(cls): fields = super().get_non_required_fields() return fields.union({"birthdate"}) @property def full_name(self): """ get ``full name`` of the person Returns: string with the ``full name`` """ return f"{self.first_name} {self.last_name}" class SocialModel(ZoopObject): """ Have social sites uri's Attributes: facebook: facebook profile url? twitter: twitter profile url? """ @classmethod def get_non_required_fields(cls): fields = super().get_non_required_fields() return fields.union({"facebook", "twitter"}) class FinancialModel(ZoopObject): """ Have financial attributes. Attributes: status: pending or active string account_balance: amount of balance current_balance: curent amount of balance description: description delinquent: bolean of verification payment_methods: ? default_debit: ? default_credit: ? 
""" @classmethod def get_non_required_fields(cls): fields = super().get_non_required_fields() return fields.union( { "account_balance", "current_balance", "default_credit", "default_debit", "delinquent", "description", "payment_methods", "status", } ) class VerificationModel(ZoopObject): """ Have some verification attributes. Attributes: postal_code_check: boolean of verification address_line1_check: boolean of verification """ @classmethod def get_required_fields(cls): fields = super().get_required_fields() return fields.union({"postal_code_check", "address_line1_check"}) class PaymentMethod(ResourceModel): """ Have some payment method attributes Attributes: description: text description customer: uuid id address: Address Model """ def init_custom_fields(self, address=None, **kwargs): """ initialize :attr:`address` with :class:`.Address` Args: address: dict of data or :class:`.Address` **kwargs: dic of kwargs """ setattr( self, "address", Address.from_dict_or_instance(address, allow_empty=True) ) @classmethod def get_non_required_fields(cls): fields = super().get_non_required_fields() return fields.union({"description", "customer", "address"}) class BusinessOrIndividualModel(MarketPlaceModel): """ Represents a ``Business`` Or ``Individual`` Model\n It has ``dynamic types``!\n Can be ``Business`` or ``Individual``. Attributes: taxpayer_id: cpf válido para ``type`` :attr:`INDIVIDUAL_TYPE` ein: cnpj para ``type`` :attr:`BUSINESS_TYPE` """ BUSINESS_IDENTIFIER = "ein" BUSINESS_TYPE = "business" INDIVIDUAL_IDENTIFIER = "taxpayer_id" INDIVIDUAL_TYPE = "individual" URI = {BUSINESS_TYPE: "businesses", INDIVIDUAL_TYPE: "individuals"} def init_custom_fields(self, taxpayer_id=None, ein=None, **kwargs): """ Chama :meth:`set_identifier`. Args: taxpayer_id: cpf value ein: cnpj value **kwargs: dict of kwargs """ self.set_identifier(taxpayer_id, ein) @classmethod def validate_identifiers(cls, taxpayer_id, ein): """ Valida tupla de valores de identificação. Raises: :class:`.ValidationError` quando é passado os dois, ou nenhum, ou quando o identificador passado é inválido # noqa """ if (taxpayer_id is not None and ein is not None) or ( taxpayer_id is None and ein is None ): raise ValidationError( cls, FieldError( f"{BusinessOrIndividualModel.INDIVIDUAL_IDENTIFIER} " f"ou {BusinessOrIndividualModel.BUSINESS_IDENTIFIER}", "identificadores faltando!", ), ) elif taxpayer_id is not None and not cpf.validate(taxpayer_id): raise ValidationError( cls, FieldError("taxpayer_id", "taxpayer_id inválido!") ) elif ein is not None and not cnpj.validate(ein): raise ValidationError(cls, FieldError("ein", "ein inválido!")) def get_type(self): """ get the ``dynamic type`` from instance Returns: :attr:`BUSINESS_TYPE` or :attr:`INDIVIDUAL_TYPE` """ individual_identifier = getattr( self, BusinessOrIndividualModel.INDIVIDUAL_IDENTIFIER, None ) business_identifier = getattr( self, BusinessOrIndividualModel.BUSINESS_IDENTIFIER, None ) BusinessOrIndividualModel.validate_identifiers( individual_identifier, business_identifier ) if individual_identifier: return BusinessOrIndividualModel.INDIVIDUAL_TYPE else: return BusinessOrIndividualModel.BUSINESS_TYPE def get_type_uri(self): """ get the ``dynamic type uri`` for instance based on :meth:`get_type` Returns: uri string for type from :attr:`URI` """ return self.URI.get(self.get_type()) def set_identifier(self, taxpayer_id=None, ein=None, **kwargs): """ Declara os atributos :attr:`taxpayer_id` ou (ou exclusivo) :attr:`ein`. 
Exatamente um deles deve ser passado e válido, e não os dois.\n ``kwargs`` are there to be called from :meth:`.Seller.init_custom_fields` and :meth:`.BankAccount.init_custom_fields` without getting ``taxpayer_id`` or ``ein`` variables. Args: taxpayer_id: cpf ein: cnpj **kwargs: kwarg """ BusinessOrIndividualModel.validate_identifiers(taxpayer_id, ein) if taxpayer_id: setattr(self, BusinessOrIndividualModel.INDIVIDUAL_IDENTIFIER, taxpayer_id) else: setattr(self, BusinessOrIndividualModel.BUSINESS_IDENTIFIER, ein) def get_validation_fields(self): """ Get ``validation fields`` for instance. if ``type`` is :attr:`BUSINESS_TYPE` then call :meth:`get_business_required_fields` else ``type`` is :attr:`INDIVIDUAL_TYPE`! then call :meth:`get_individual_required_fields` Returns: ``set`` of fields to be used on validation """ if self.get_type() == self.BUSINESS_TYPE: return self.get_business_required_fields() else: return self.get_individual_required_fields() def get_all_fields(self): """ get ``all fields`` for instance. if ``type`` is :attr:`BUSINESS_TYPE` then call :meth:`get_business_required_fields` and :meth:`get_business_non_required_fields` else ``type`` is :attr:`INDIVIDUAL_TYPE`! then call :meth:`get_individual_required_fields` and :meth:`get_individual_non_required_fields` Returns: ``set`` of all fields """ fields = set() if self.get_type() == self.BUSINESS_TYPE: return fields.union( self.get_business_non_required_fields(), self.get_business_required_fields(), ) else: return fields.union( self.get_individual_non_required_fields(), self.get_individual_required_fields(), ) @classmethod def get_business_non_required_fields(cls): """ get ``set`` of ``non required fields`` for :attr:`BUSINESS_TYPE`. Returns: ``set`` of fields """ return cls.get_non_required_fields() @classmethod def get_business_required_fields(cls): """ get ``set`` of ``required fields`` for :attr:`BUSINESS_TYPE` Returns: ``set`` of fields """ fields = cls.get_required_fields() return fields.union({"ein"}) @classmethod def get_individual_non_required_fields(cls): """ get ``set`` of ``non required fields`` for :attr:`INDIVIDUAL_TYPE` Returns: ``set`` of fields """ return cls.get_non_required_fields() @classmethod def get_individual_required_fields(cls): """ get ``set`` of ``required fields`` for :attr:`INDIVIDUAL_TYPE` Returns: ``set`` of fields """ fields = cls.get_required_fields() return fields.union({"taxpayer_id"})
zoop-wrapper
/zoop_wrapper-0.5.2-py3-none-any.whl/zoop_wrapper/models/base.py
base.py
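A short sketch (assumed usage, not from the package docs) of the ZoopObject contract defined above: from_dict validation, the allow_empty escape hatch used for nested models, and to_dict serialization. The CPF below is deliberately invalid to show the error path.

from zoop_wrapper.exceptions import ValidationError
from zoop_wrapper.models.base import Person

data = {
    "first_name": "Ada",
    "last_name": "Lovelace",
    "email": "ada@example.com",
    "phone_number": "+55 84 99999-9999",
    "taxpayer_id": "12345678900",   # wrong check digit on purpose
    "address": {"city": "Natal", "state": "RN", "country_code": "BR"},
}

try:
    Person.from_dict(data)
except ValidationError as err:
    # validate_custom_fields flags the invalid taxpayer_id
    print(err)

# allow_empty skips required-field validation entirely; this is also how
# nested models such as Address are initialized by init_custom_fields.
draft = Person.from_dict(None, allow_empty=True)
print(draft.to_dict())   # empty values are dropped by is_value_empty -> {}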
from card_identifier.cardutils import validate_card from .base import ResourceModel, BusinessOrIndividualModel from .bank_account import BankAccount from .card import Card from ..utils import get_logger from ..exceptions import FieldError, ValidationError logger = get_logger("models") class Token(ResourceModel): """ Token is a resource used to link a :class:`.BankAccount` Or :class:`.Card` and a :class:`.Seller` or :class:`.Buyer`. https://docs.zoop.co/reference#token-1 The :attr:`RESOURCE` is used to identify this Model. Used to check against :attr:`.resource`! It has ``dynamic types``! It can be :attr:`CARD_TYPE` or :attr:`BANK_ACCOUNT_TYPE`. But before ``creation`` it won't have attribute ``type``. So we need to verify by ``other attributes``. After ``created`` on Zoop it will have ``type``. Attributes: token_type (str): value for identified token ``type`` type (str): optional :attr:`BANK_ACCOUNT_TYPE` or :attr:`CARD_TYPE`. It has collision with of :attr:`.BankAccount.type`. So we need the above :attr:`token_type`. used (bool): optional value of verification bank_account (:class:`.BankAccount`): optional value (for ``created`` token of 'bank_account' type) card (:class:`.Card`): optional value (for ``created`` token of 'card' type) holder_name (str): owner name (for both token of 'bank_account' and 'card' type) account_number (str): account number for :attr:`BANK_ACCOUNT_TYPE` taxpayer_id (str): identifier for :attr:`BANK_ACCOUNT_TYPE` of :attr:`.INDIVIDUAL_TYPE` ein (str): identifier for :attr:`BANK_ACCOUNT_TYPE` of :attr:`.BUSINESS_TYPE` bank_code (str): bank code for :attr:`BANK_ACCOUNT_TYPE` routing_number (str): agency code in BR for :attr:`BANK_ACCOUNT_TYPE` card_number (str): card number for :attr:`CARD_TYPE` expiration_month (str): month of expiration for :attr:`CARD_TYPE` expiration_year (str): year of expiration for :attr:`CARD_TYPE` security_code (str): security code for :attr:`CARD_TYPE` """ RESOURCE = "token" CARD_TYPE = "card" CARD_IDENTIFIER = "card_number" BANK_ACCOUNT_TYPE = "bank_account" BANK_ACCOUNT_IDENTIFIER = "bank_code" TYPES = {CARD_TYPE, BANK_ACCOUNT_TYPE} IDENTIFIERS = {CARD_IDENTIFIER, BANK_ACCOUNT_IDENTIFIER} def init_custom_fields(self, type=None, card=None, bank_account=None, **kwargs): """ if ``type`` is :attr:`BANK_ACCOUNT_TYPE` or :attr:`CARD_TYPE` token is ``created``!\n set :attr:`card` or :attr:`bank_account` attributes accordingly. else token is ``not created``!\n We must identify token type from attr's passed searching for :attr:`CARD_IDENTIFIER` or :attr:`BANK_ACCOUNT_IDENTIFIER`. After identifying ``type`` if it was :attr:`BANK_ACCOUNT_TYPE` set ``business`` or ``individual`` identifier from :class:`.BankAccount` method (which is from :class:`.BusinessOrIndividualModel`). Args: bank_account (dict or :class:`.BankAccount`): data card (dict or :class:`.Card`): data type (str): type for ``token`` or ``bank account`` **kwargs: kwargs """ if type in self.TYPES: token_type = type if token_type == self.CARD_TYPE: setattr( self, self.CARD_TYPE, Card.from_dict_or_instance(card, allow_empty=True), ) else: setattr( self, self.BANK_ACCOUNT_TYPE, BankAccount.from_dict_or_instance(bank_account, allow_empty=True), ) else: if self.CARD_IDENTIFIER in kwargs: token_type = self.CARD_TYPE elif self.BANK_ACCOUNT_IDENTIFIER in kwargs: token_type = self.BANK_ACCOUNT_TYPE BusinessOrIndividualModel.set_identifier(self, **kwargs) elif self._allow_empty: token_type = None else: raise ValidationError( self, FieldError( "token_type", f"Tipo de token não identificado! 
" f"Configure um desses atributos {self.IDENTIFIERS}", ), ) setattr(self, "token_type", token_type) def get_bank_account_type(self): """ Get ``bank account type`` for ``creation token`` of :class:`.BankAccount. Raises: TypeError: when called from a token not from 'bank_account' type Returns: value with bank_account type """ if self.token_type == self.BANK_ACCOUNT_TYPE: try: return self.bank_account.get_type() except AttributeError: return BankAccount.get_type(self) raise TypeError(f"Token is not of type {self.BANK_ACCOUNT_TYPE}") def get_validation_fields(self): """ Get ``validation fields`` for instance.\n if :attr:`token_type` is :attr:`CARD_TYPE` card return :meth:`get_card_required_fields`. else :attr:`token_type` is :attr:`BANK_ACCOUNT_TYPE`! ``fields`` is :meth:`get_bank_account_required_fields`.\n if ``bank account type`` is :attr:`.INDIVIDUAL_TYPE` return ``fields`` union :meth:`.get_individual_required_fields`.\n else ``bank account type`` is :attr:`.BUSINESS_TYPE` return ``fields`` union :meth:`.get_business_required_fields`. Returns: ``set`` of fields to be validated """ fields = set() if self.token_type == self.CARD_TYPE: return fields.union(self.get_card_required_fields()) elif self.token_type == self.BANK_ACCOUNT_TYPE: fields = fields.union(self.get_bank_account_required_fields()) if self.get_bank_account_type() == BankAccount.INDIVIDUAL_TYPE: return fields.union(BankAccount.get_individual_required_fields()) else: return fields.union(BankAccount.get_business_required_fields()) else: return fields def get_all_fields(self): """ Get ``all fields`` for instance. ``fields`` is :meth:`get_validation_fields` if :attr:`token_type` is :attr:`CARD_TYPE` return ``fields`` union :meth:`get_card_non_required_fields`. else :attr:`token_type` is :attr:`BANK_ACCOUNT_TYPE` return ``fields`` union :meth:`get_bank_account_non_required_fields`. Returns: ``set`` of all fields """ fields = self.get_validation_fields() if self.token_type == self.CARD_TYPE: return fields.union(self.get_card_non_required_fields()) elif self.token_type == self.BANK_ACCOUNT_TYPE: return fields.union(self.get_bank_account_non_required_fields()) else: return fields.union(self.get_non_required_fields()) def validate_custom_fields(self, **kwargs): """ Valida campos do token. Se for um token de cartão, valida o :attr:`.card_number`. Args: **kwargs: Returns: Lista com os erros ocorridos (se tiver algum!) """ errors = [] if self.token_type == self.CARD_TYPE: if not validate_card(self.card_number): errors.append( FieldError("card_number", "O número do cartão é inválido!") ) return errors @classmethod def get_non_required_fields(cls): fields = super().get_non_required_fields() return fields.union({"type", "used"}) @classmethod def get_card_non_required_fields(cls): """ Get ``set`` of ``non required fields`` for :attr:`CARD_TYPE`. Returns: ``set`` of fields """ fields = cls.get_non_required_fields() return fields.union({"card"}) @classmethod def get_card_required_fields(cls): """ Get ``set`` of ``required fields`` for :attr:`CARD_TYPE`. 
Returns: ``set`` of fields """ fields = cls.get_required_fields() return fields.union( Card.get_required_fields(), {"card_number", "security_code"} ) @classmethod def get_bank_account_non_required_fields(cls): """ Get ``set`` of ``non required fields`` for :attr:`BANK_ACCOUNT_TYPE` Returns: ``set`` of fields """ fields = cls.get_non_required_fields() return fields.union({"bank_account"}) @classmethod def get_bank_account_required_fields(cls): """ get ``set`` of ``required fields`` for :attr:`BANK_ACCOUNT_TYPE` Returns: ``set`` of fields """ fields = cls.get_required_fields() return fields.union({"account_number"})
zoop-wrapper
/zoop_wrapper-0.5.2-py3-none-any.whl/zoop_wrapper/models/token.py
token.py
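A hedged sketch of assembling a not-yet-created card Token: the type is inferred from the presence of card_number. The field names mirror the CardWrapper example later in this file set; the card number is a common test number and is assumed to pass card_identifier's validate_card.

from zoop_wrapper.models.token import Token

card_token = Token.from_dict_or_instance({
    "holder_name": "Ada Lovelace",
    "card_number": "4111111111111111",  # test number; must satisfy validate_card
    "expiration_month": "12",
    "expiration_year": "2030",
    "security_code": "123",
})

print(card_token.token_type)  # -> "card" (inferred from CARD_IDENTIFIER)
print(card_token.to_dict())

# A bank-account token would instead carry "bank_code" (BANK_ACCOUNT_IDENTIFIER)
# plus "account_number" and a valid "taxpayer_id" or "ein".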
from .base import ( BusinessOrIndividualModel, Person, SocialModel, Address, FinancialModel, ) class Seller(BusinessOrIndividualModel, Person, FinancialModel, SocialModel): """ Represent a seller. https://docs.zoop.co/reference#vendedor-1 The :attr:`RESOURCE` is used to identify this Model. Used to check against :attr:`.resource`! Attributes: decline_on_fail_security_code (bool): value of verification decline_on_fail_zipcode (bool): value of verification is_mobile (bool): value of verification mcc: ? merchant_code: ? show_profile_online: ? statement_descriptor: ? terminal_code: ? type (str): individual or business string taxpayer_id (str): Optional value website (str): Optional value business_address (:class:`.Address`): Optional value business_description (str): optional value business_email (str): optional value business_facebook (str): optional value business_name (str): optional value business_opening_date (str): optional value business_phone (str): optional value business_twitter (str): optional value business_website (str): optional value ein (str): optional value owner (:class:`.Person`): Optional value """ RESOURCE = "seller" def validate_custom_fields(self, **kwargs): """ Caso o vendedor seja :attr:`.BUSINESS_TYPE` precisamos validar os campos pelo :class:`.BusinessOrIndividualModel`. Caso o vendedor seja :attr:`.INDIVIDUAL_TYPE` precisamos validar os campos pelo :class:`.Person`. Args: **kwargs: """ if self.get_type() == self.INDIVIDUAL_TYPE: return Person.validate_custom_fields(self, **kwargs) return [] def init_custom_fields(self, business_address=None, owner=None, **kwargs): """ If ``dynamic type`` is :attr:`.BUSINESS_TYPE` then initialize :attr:`owner` with :class:`.Person` and initialize :attr:`business_address` with :class:`.Address`. Else ``dynamic type`` is :attr:`.INDIVIDUAL_TYPE`! Then initialize ``self`` with :class:`.Person`. 
Args: business_address (dict or :class:`.Address`): data owner (dict or :class:`.Person`): data **kwargs: kwargs """ self.set_identifier(**kwargs) if self.get_type() == self.BUSINESS_TYPE: setattr( self, "owner", Person.from_dict_or_instance(owner, allow_empty=True), ) setattr( self, "business_address", Address.from_dict_or_instance(business_address, allow_empty=True), ) else: Person.init_custom_fields(self, **kwargs) @classmethod def get_non_required_fields(cls): fields = set() return fields.union( BusinessOrIndividualModel.get_non_required_fields(), FinancialModel.get_non_required_fields(), { "decline_on_fail_security_code", "decline_on_fail_zipcode", "is_mobile", "show_profile_online", "statement_descriptor", "terminal_code", "type", "merchant_code", "mcc", }, ) @classmethod def get_required_fields(cls): fields = set() return fields.union( BusinessOrIndividualModel.get_required_fields(), FinancialModel.get_required_fields(), ) @classmethod def get_business_non_required_fields(cls): """ Get ``set`` of ``non required fields`` for :attr:`.BUSINESS_TYPE` Returns: ``set`` of fields """ fields = cls.get_non_required_fields() return fields.union( super().get_business_non_required_fields(), { "business_description", "business_website", "business_facebook", "business_twitter", "owner", }, ) @classmethod def get_business_required_fields(cls): """ Get ``set`` of ``required fields`` for :attr:`.BUSINESS_TYPE` Returns: ``set` `of fields """ fields = cls.get_required_fields() return fields.union( super().get_business_required_fields(), { "business_address", "business_email", "business_name", "business_opening_date", "business_phone", }, ) @classmethod def get_individual_non_required_fields(cls): """ Get ``set`` of ``non required fields`` for :attr:`.INDIVIDUAL_TYPE` Returns: ``set`` of fields """ fields = cls.get_non_required_fields() return fields.union( super().get_individual_non_required_fields(), SocialModel.get_non_required_fields(), Person.get_non_required_fields(), {"website"}, ) @classmethod def get_individual_required_fields(cls): """ Get ``set`` of ``required fields`` for :attr:`.INDIVIDUAL_TYPE` Returns: ``set`` of fields """ fields = cls.get_required_fields() return fields.union( super().get_individual_required_fields(), SocialModel.get_required_fields(), Person.get_required_fields(), ) @property def full_name(self): """ Get ``full name`` for the :class:`.Seller`. If ``dynamic type`` is :attr:`.BUSINESS_TYPE` it will have the owner attribute.\n Else `dynamic type`` is :attr:`.INDIVIDUAL_TYPE`. So we call the super() which will find the method on Person class.\n Returns: string with the ``full name`` """ owner = getattr(self, "owner", None) if owner is not None: return owner.full_name return super().full_name
zoop-wrapper
/zoop_wrapper-0.5.2-py3-none-any.whl/zoop_wrapper/models/seller.py
seller.py
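A minimal sketch of the dynamic typing above, assuming direct construction of the model. The CPF is a structurally valid test number; a business seller would instead pass a valid ein (CNPJ) plus the business_* required fields, and get_type() would return "business".

from zoop_wrapper.models.seller import Seller

seller = Seller(
    taxpayer_id="52998224725",        # structurally valid test CPF
    first_name="Ada",
    last_name="Lovelace",
    email="ada@example.com",
    phone_number="+55 84 99999-9999",
    address={"city": "Natal", "state": "RN", "country_code": "BR"},
)

print(seller.get_type())      # -> "individual"
print(seller.get_type_uri())  # -> "individuals" (used by SellerWrapper below)
print(seller.full_name)       # -> "Ada Lovelace"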
from .base import BaseZoopWrapper from ..models.bank_account import BankAccount from ..models.token import Token class BankAccountWrapper(BaseZoopWrapper): """ Possui os métodos do resource :class:`.BankAccount` .. warning:: Não importe isso diretamente! Essa classe precisa de métodos presentes em outro wrapper """ def list_bank_accounts_by_seller(self, identifier): """ Lista todas as :class:`.BankAccount`'s. Returns: response with instances of :class:`.BankAccount` """ url = self._construct_url( action="sellers", identifier=identifier, subaction="bank_accounts" ) return self._get(url) def retrieve_bank_account(self, identifier): """ Retorna uma :class:`.BankAccount`. Args: identifier: uuid id da :class:`.BankAccount` Returns: response with instance of :class:`.BankAccount` """ url = self._construct_url(action="bank_accounts", identifier=identifier) return self._get(url) def __add_bank_account_token(self, token: Token): """ Adiciona um :class:`.Token` para uma :class:`.BankAccount`. Args: token: :class:`.Token` para :class:`.BankAccount`. Returns: response with instance of :class:`.Token` """ url = self._construct_url(action="bank_accounts", subaction="tokens") return self._post_instance(url, instance=token) def add_bank_account(self, data: dict): """ Adiciona uma :class:`.BankAccount`. Examples: >>> data = { 'account_number': 'foo', 'bank_code': 'foo', 'holder_name': 'foo', 'routing_number': 'foo', 'taxpayer_id' or 'ein': 'foo', 'type': 'foo' } Args: data: dict of data Returns: response with instance of :class:`.BankAccount` """ instance = Token.from_dict_or_instance(data) bank_account_type = instance.get_bank_account_type() if bank_account_type == BankAccount.INDIVIDUAL_TYPE: seller_response = self.search_individual_seller( # type: ignore instance.taxpayer_id ) elif bank_account_type == BankAccount.BUSINESS_TYPE: seller_response = self.search_business_seller(instance.ein) # type: ignore else: raise TypeError("this is not supposed to happen!") seller_data = seller_response.data token_response = self.__add_bank_account_token(instance) created_token = token_response.data data = {"customer": seller_data["id"], "token": created_token["id"]} url = self._construct_url(action="bank_accounts") return self._post(url, data=data) def remove_bank_account(self, identifier: str): """ Remove todas as :class:`.BankAccount` de um :class:`.Seller` usando o `identifier` deste. Args: identifier: uuid id Returns: :class:`.ZoopResponse` """ url = self._construct_url(action="bank_accounts", identifier=identifier) return self._delete(url)
zoop-wrapper
/zoop_wrapper-0.5.2-py3-none-any.whl/zoop_wrapper/wrapper/bank_account.py
bank_account.py
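A hedged sketch of the bank-account flow. It assumes the package exposes a combined ZoopWrapper class at the top level (as the docstrings' own examples suggest); the bank data, the account "type" value and the credentials are placeholders, and a seller with the given CPF must already exist for the lookup step to succeed.

from zoop_wrapper import ZoopWrapper  # assumed top-level export of the combined wrapper

client = ZoopWrapper(marketplace_id="my_marketplace_id", key="zpk_test_key")

# add_bank_account tokenizes the data, resolves the seller by CPF/CNPJ and
# then links the created token to a new bank account.
response = client.add_bank_account({
    "holder_name": "Ada Lovelace",
    "bank_code": "001",
    "routing_number": "1234",        # agency
    "account_number": "67890",
    "type": "checking",              # assumed account-type value
    "taxpayer_id": "52998224725",    # or "ein" for a business account
})
print(response.data["id"])

print(client.list_bank_accounts_by_seller("seller_uuid").data)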
from typing import Union from .base import BaseZoopWrapper from ..models.transaction import Transaction from ..utils import convert_currency_float_value_to_cents from ..exceptions import ValidationError class TransactionWrapper(BaseZoopWrapper): """ Possui os métodos do resource :class:`.Transaction` """ def list_transactions(self): """ Lista todas as :class:`.Transaction`'s Returns: response """ url = self._construct_url(action="transactions") return self._get(url) def list_transactions_for_seller(self, identifier): """ Lista todas as :class:`.Transaction`'s de um :class:`.Seller` Args: identifier: uuid id do :class:`.Seller` offset: '' Returns: response """ url = self._construct_url( action="sellers", identifier=identifier, subaction="transactions", search="sort=time-descending", ) return self._get(url) def retrieve_transaction(self, identifier): """ Retorna uma :class:`.Transaction`. Args: identifier: uuid id da :class:`.Transaction` Returns: response """ url = self._construct_url(action="transactions", identifier=identifier) return self._get(url) def add_transaction(self, data: Union[dict, Transaction]): """ Adiciona uma :class:`.Transaction`. Examples: >>> data = { 'amount' : 'foo', 'currency' : 'BRL', 'customer': 'foo', 'description' : 'foo', 'on_behalf_of' : 'foo', 'payment_type' : 'foo', 'reference_id' : 'foo', 'payment_method' : { 'body_instructions' : instructions, 'expiration_date' : expiration_date, 'payment_limit_date' : payment_limit_date, 'billing_instructions' : { 'discount' : discount 'interest' : interest, 'late_fee' : late_fee, } } } >>> data = { 'amount': '1000', 'currency': 'BRL', 'customer': 'buyer_id', 'description': 'meu boleto gerado para teste', 'on_behalf_of': 'seller_id', 'payment_type': 'boleto', 'payment_method': { 'expiration_date': '2020-06-20', 'payment_limit_date': '2020-06-30', 'billing_instructions': { 'late_fee': { 'mode': 'FIXED', 'percentage': 30, 'start_date': '2020-06-20' }, 'interest': { 'mode': 'MONTHLY_PERCENTAGE', 'percentage': 30, 'start_date': '2020-06-20' }, 'discount': [{ 'amount': 300, 'limit_date': '2020-06-20' 'mode': 'FIXED', }] } } } Args: data: dict of data Returns: response with instance of Transaction """ instance = Transaction.from_dict_or_instance(data) url = self._construct_url(action="transactions") return self._post_instance(url, instance=instance) def _capture_or_void_transaction(self, identifier, sub_action, amount=None): """ Estorna ou captura uma :class:`.Transaction`. O :attr:`amount` é opcional, e deve ser um valor em centavos ou real. Caso ele não seja passado, o valor da transação é utilizado. Caso ele seja um valor menor do que a transação, é feita uma ação parcial no valor passado. .. warning:: o :attr:`amount` não pode ser maior do que o valor da quantia! Examples: >>> ZoopWrapper()._capture_or_void_transaction('1', 'void') >>> ZoopWrapper()._capture_or_void_transaction('1', 'void', '10.00') >>> ZoopWrapper()._capture_or_void_transaction('1', 'capture', '10,00') >>> ZoopWrapper()._capture_or_void_transaction('1', 'void', '1000') Args: identifier: uuid id da :class:`.Transaction` sub_action: string da ação a ser feita. 'void' ou 'capture' amount: quantia em centavos da ação a ser feita Returns: response """ SUB_ACTIONS = {"void", "capture"} if sub_action not in SUB_ACTIONS: raise ValidationError( self, f"Sub ação '{sub_action}' não identificada! 
" f"Deveria ser um dos valores {SUB_ACTIONS}", ) transaction_response = self.retrieve_transaction(identifier) transaction_data = transaction_response.data if amount is None: amount = transaction_data["amount"] else: amount = convert_currency_float_value_to_cents(amount) if amount > transaction_data["amount"]: raise ValidationError( self, f"A quantia {amount} é maior do que o " f"valor {transaction_data['amount']} da transação", ) data = { "amount": amount, "on_behalf_of": transaction_data["on_behalf_of"], } url = self._construct_url( action="transactions", identifier=identifier, subaction=sub_action ) return self._post(url, data=data) def cancel_transaction(self, identifier, amount=None): """ Estorna uma :class:`.Transaction`. O :attr:`amount` é opcional, e deve ser um valor em centavos ou real. Caso ele não seja passado, o valor da transação é utilizado. Caso ele seja um valor menor do que a transação, é feita uma ação parcial no valor passado. .. warning:: o :attr:`amount` não pode ser maior do que o valor da quantia! Examples: >>> ZoopWrapper().cancel_transaction('1', '10.00') >>> ZoopWrapper().cancel_transaction('1', '10,00') >>> ZoopWrapper().cancel_transaction('1', '1000') Args: identifier: uuid id da :class:`.Transaction` amount: quantia em centavos a ser estronada Returns: response """ return self._capture_or_void_transaction(identifier, "void", amount) def capture_transaction(self, identifier, amount=None): """ Captura uma :class:`.Transaction`. O :attr:`amount` é opcional, e deve ser um valor em centavos ou real. Caso ele não seja passado, o valor da transação é utilizado. Caso ele seja um valor menor do que a transação, é feita uma ação parcial no valor passado. .. warning:: o :attr:`amount` não pode ser maior do que o valor da quantia! Examples: >>> ZoopWrapper().capture_transaction('1', '10.00') >>> ZoopWrapper().capture_transaction('1', '10,00') >>> ZoopWrapper().capture_transaction('1', '1000') Args: identifier: uuid id da :class:`.Transaction` amount: quantia em centavos a ser capturada Returns: response """ return self._capture_or_void_transaction(identifier, "capture", amount)
zoop-wrapper
/zoop_wrapper-0.5.2-py3-none-any.whl/zoop_wrapper/wrapper/transaction.py
transaction.py
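A sketch of a boleto transaction built from the docstring example above, with the billing_instructions fields aligned to the Fine/Interest/Discount models (FIXED modes take "amount", percentage modes take "percentage"). The top-level ZoopWrapper import is an assumption; identifiers are placeholders.

from zoop_wrapper import ZoopWrapper  # assumed top-level export

client = ZoopWrapper()

response = client.add_transaction({
    "amount": "1000",            # in cents
    "currency": "BRL",
    "customer": "buyer_id",
    "on_behalf_of": "seller_id",
    "description": "boleto de teste",
    "payment_type": "boleto",
    "payment_method": {
        "expiration_date": "2020-06-20",
        "payment_limit_date": "2020-06-30",
        "billing_instructions": {
            "late_fee": {"mode": "FIXED", "amount": 300},
            "interest": {"mode": "MONTHLY_PERCENTAGE", "percentage": 2},
            "discount": [{"mode": "FIXED", "amount": 300, "limit_date": "2020-06-15"}],
        },
    },
})
transaction_id = response.data["id"]

# Capture and refund accept cents ("500") or reais ("5,00" / "5.00");
# omitting the amount uses the full transaction value.
client.capture_transaction(transaction_id)
client.cancel_transaction(transaction_id, "5,00")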
from typing import Union from requests import HTTPError from .base import BaseZoopWrapper from ..exceptions import ValidationError, FieldError from ..models.token import Token class CardWrapper(BaseZoopWrapper): """ Possui os métodos do resource :class:`.Card` .. warning:: Não importe isso diretamente! Essa classe precisa de métodos presentes em outro wrapper """ def retrieve_card(self, identifier): """ retrieve card Args: identifier: uuid id Returns: response without instance """ url = self._construct_url(action="cards", identifier=identifier) return self._get(url) def __add_card_token(self, card_token: Token): """ Cria um :class:`.Token` do tipo :class:`.Card` Args: card_token: instância do :class:`.Token` Returns: :class:`.ZoopResponse` com instância do :class:`.Token` """ url = self._construct_url(action="cards", subaction="tokens") return self._post_instance(url, instance=card_token) def add_card(self, data: Union[dict, Token], customer_identifier: str): """ Adiciona um cartão de crédito utilizando um Token de cartão de crédito Examples: >>> data = { "card_number": "foo", "expiration_month": "foo", "expiration_year": "foo", "holder_name": "foo", "security_code": "foo" } Args: data: dicionário de dados customer_identifier: uuid do consumidor (:class:`.Buyer` ou :class:`.Seller`) # noqa Returns: :class:`.ZoopResponse` com instância do :class:`.Card` """ token = Token.from_dict_or_instance(data) try: self.retrieve_buyer(customer_identifier) # type: ignore except HTTPError: try: self.retrieve_seller(customer_identifier) # type: ignore except HTTPError: raise ValidationError( self, FieldError( "customer_identifier", "Não existe Seller ou Buyer para esse identificador", ), ) token_response = self.__add_card_token(token) created_token = token_response.data data = {"customer": customer_identifier, "token": created_token["id"]} url = self._construct_url(action="cards") return self._post(url, data=data)
zoop-wrapper
/zoop_wrapper-0.5.2-py3-none-any.whl/zoop_wrapper/wrapper/card.py
card.py
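A short sketch of add_card: the data is tokenized first and the resulting token is linked to the buyer or seller. The ZoopWrapper import and the test card number are assumptions; the customer identifier is a placeholder.

from zoop_wrapper import ZoopWrapper  # assumed top-level export

client = ZoopWrapper()

response = client.add_card(
    {
        "holder_name": "Ada Lovelace",
        "card_number": "4111111111111111",  # test number
        "expiration_month": "12",
        "expiration_year": "2030",
        "security_code": "123",
    },
    customer_identifier="buyer_or_seller_uuid",
)
print(response.data["id"])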
import requests from ..constants import ZOOP_KEY, MARKETPLACE_ID from ..exceptions import ValidationError from ..models.base import ZoopObject from ..utils import get_logger from ..response import ZoopResponse logger = get_logger("wrapper") class RequestsWrapper: """ wrapper da lib requests Attributes: __base_url: Url base para construir os requests """ def __init__(self, base_url): self.__base_url = base_url @staticmethod def __process_response(response) -> ZoopResponse: """ Processa a resposta. Adiciona o :attr:`.data` carregado do :meth:`requests.Response.json`. Adiciona :attr:`.error` na resposta se tiver ocorrido erros Args: response (:class:`requests.Response`): resposta a ser processada Raises: HttpError: quando a resposta não foi ok (200 <= status <= 299)! Returns: 'objeto' (:class:`.ZoopResponse`) de resposta http """ response.data = response.json() if response.data.get("error"): error = response.data.get("error") response.reason = f"{error.get('message')}" if error.get("reasons"): response.reason += f" {error.get('reasons')}" if error.get("status_code"): response.status_code = error.get("status_code") response.raise_for_status() return response def _construct_url( self, action=None, identifier=None, subaction=None, search=None, sub_action_before_identifier=False, ): # noinspection PyProtectedMember """ Constrói a url para o request. Args: action: nome do resource identifier: identificador de detalhe (ID) search: query com url args para serem buscados sub_action_before_identifier: flag para inverter a posição do identifier e subaction subaction: subação do resource Examples: >>> rw = RequestsWrapper() >>> rw._construct_url(action='seller', identifier='1', subaction='bank_accounts', search='account_number=1') # noqa: 'rw.__base_url/seller/1/bank_accounts/search?account_number=1' Returns: url completa para o request """ url = f"{self.__base_url}/" if action: url += f"{action}/" if sub_action_before_identifier: if subaction: url += f"{subaction}/" if identifier: url += f"{identifier}/" else: if identifier: url += f"{identifier}/" if subaction: url += f"{subaction}/" if search: if isinstance(search, dict): url += "?" for key, value in search.items(): url += f"{key}={value}" else: url += f"?{search}" return url @property def _auth(self): """ Propriedade de autenticação Raises: NotImplementedError: É um método abstrato! 
""" raise NotImplementedError("Must implement auth function!") def _delete(self, url) -> ZoopResponse: """ http delete Args: url: url de requisição Returns: (:class:`.ZoopResponse`) """ response = requests.delete(url, auth=self._auth) # noinspection PyTypeChecker response = self.__process_response(response) return response def _get(self, url) -> ZoopResponse: """ http get Args: url: url de requisição Returns: (:class:`.ZoopResponse`) """ response = requests.get(url, auth=self._auth) # noinspection PyTypeChecker response = self.__process_response(response) return response def _post(self, url, data) -> ZoopResponse: """ http post Args: url: url de requisição data (dict): dados da requisição Returns: (:class:`.ZoopResponse`) """ response = requests.post(url, json=data, auth=self._auth) # noinspection PyTypeChecker response = self.__process_response(response) return response def _put(self, url, data) -> ZoopResponse: """ http put Args: url: url de requisição data (dict): dados da requisição Returns: (:class:`.ZoopResponse`) """ response = requests.put(url, json=data, auth=self._auth) # noinspection PyTypeChecker response = self.__process_response(response) return response class BaseZoopWrapper(RequestsWrapper): """ wrapper da Zoop API Attributes: __marketplace_id: marketplace id da zoop __key: chave de autenticação da zoop """ BASE_URL = "https://api.zoop.ws/v1/marketplaces/" def __init__(self, marketplace_id=None, key=None): if marketplace_id is None: marketplace_id = MARKETPLACE_ID if key is None: key = ZOOP_KEY self.__marketplace_id = marketplace_id self.__key = key super().__init__(base_url=f"{self.BASE_URL}{self.__marketplace_id}") @property def _auth(self): """ Propriedade de autenticação. :getter: Returns this direction's name Returns: tupla com :attr:`.ZoopKey` e "" """ return self.__key, "" def _post_instance(self, url, instance: ZoopObject): """ http post com instância de um :class:`.ZoopObject`. Args: url: url da requisição instance: instância a ser utilizada Raises: :class:`.ValidationError`: quando a instância passada não é um :class:`.ZoopObject`. # noqa Returns: (:class:`.ZoopResponse`) """ if not isinstance(instance, ZoopObject): raise ValidationError(self, "instance precisa ser um ZoopObject!") return self._post(url, data=instance.to_dict()) def _put_instance(self, url, instance: ZoopObject): """ http put com instância de um :class:`.ZoopObject`. Args: url: url da requisição instance: instância a ser utilizada Raises: :class:`.ValidationError`: quando a instância passada não é um :class:`.ZoopObject`. # noqa Returns: (:class:`.ZoopResponse`) """ if not isinstance(instance, ZoopObject): raise ValidationError(self, "instance precisa ser um ZoopObject!") return self._put(url, data=instance.to_dict())
zoop-wrapper
/zoop_wrapper-0.5.2-py3-none-any.whl/zoop_wrapper/wrapper/base.py
base.py
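A small sketch of how _construct_url assembles request URLs for the resource wrappers; the marketplace id and key are placeholders, and the expected strings follow directly from the code above.

from zoop_wrapper.wrapper.base import BaseZoopWrapper

client = BaseZoopWrapper(marketplace_id="mp_id", key="zpk_test_key")

print(client._construct_url(action="sellers", identifier="1", subaction="bank_accounts"))
# -> https://api.zoop.ws/v1/marketplaces/mp_id/sellers/1/bank_accounts/

print(client._construct_url(
    action="transactions", identifier="1",
    subaction="void", sub_action_before_identifier=True,
))
# -> https://api.zoop.ws/v1/marketplaces/mp_id/transactions/void/1/

print(client._construct_url(action="sellers/search", search={"taxpayer_id": "123"}))
# -> https://api.zoop.ws/v1/marketplaces/mp_id/sellers/search/?taxpayer_id=123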
from typing import Union from .base import BaseZoopWrapper from ..response import ZoopResponse from ..models.seller import Seller class SellerWrapper(BaseZoopWrapper): """ Possui os métodos do resource :class:`.Seller` """ def add_seller(self, data: Union[dict, Seller]) -> ZoopResponse: """ Adiciona um :class:`.Seller`. Examples: >>> data = { "birthdate": "1994-12-27", "email": "[email protected]", "first_name": "foo", "last_name": "foo", "phone_number": "+55 84 99999-9999", "taxpayer_id": "foo", "address": { "city": "Natal", "country_code": "BR" "line1": "foo", "line2": "123", "line3": "barbar", "neighborhood": "fooofoo", "postal_code": "59152250", "state": "RN", } } >>> data = { "business_email": "foo", "business_name": "foo", "business_opening_date": "foo", "business_phone": "foo", "business_website": "foo", "ein": "foo", "owner": { "birthdate": "foo", "email": "foo", "first_name": "foo", "last_name": "foo", "phone_number": "foo", "taxpayer_id": "foo", "address": { "city": "Natal", "country_code": "BR" "line1": "foo", "line2": "123", "line3": "barbar", "neighborhood": "fooofoo", "postal_code": "59152250", "state": "RN", } }, "business_address": { "city": "Natal", "country_code": "BR" "line1": "foo", "line2": "123", "line3": "barbar", "neighborhood": "fooofoo", "postal_code": "59152250", "state": "RN", } } Args: data: dados do :class:`.Seller` Returns: :class:`.ZoopResponse` """ instance = Seller.from_dict_or_instance(data) url = self._construct_url(action="sellers", subaction=instance.get_type_uri()) return self._post_instance(url, instance=instance) def list_sellers(self) -> ZoopResponse: """ lista :class:`.Seller`"s existentes na Zoop. Returns: :class:`.ZoopResponse` """ url = self._construct_url(action="sellers") return self._get(url) def list_seller_bank_accounts(self, identifier: str) -> ZoopResponse: """ Lista :class:`.BankAccount`"s de algum :class:`.Seller` Args: identifier: id do :class:`.Seller` Returns: :class:`.ZoopResponse` """ url = self._construct_url( action="sellers", identifier=identifier, subaction="bank_accounts" ) return self._get(url) def retrieve_seller(self, identifier: str) -> ZoopResponse: """ Pega um :class:`.Seller` Args: identifier: id do :class:`.Seller` Returns: :class:`.ZoopResponse` """ url = self._construct_url(action="sellers", identifier=identifier) return self._get(url) def remove_seller(self, identifier: str) -> ZoopResponse: """ Remove um :class:`.Seller`; Args: identifier: id do :class:`.Seller` Returns: :class:`.ZoopResponse` """ url = self._construct_url(action="sellers", identifier=identifier) return self._delete(url) def __search_seller(self, **kwargs) -> ZoopResponse: """ Busca um :class:`.Seller`. Args: kwargs: dicionário de valores a serem buscados Returns: :class:`.ZoopResponse` """ url = self._construct_url(action="sellers/search", search=kwargs) return self._get(url) def search_business_seller(self, identifier: str) -> ZoopResponse: """ search seller by CNPJ Args: identifier: ein (Employer Identification Number) is equivalent to CNPJ # noqa: Returns: response with instance of Seller """ return self.__search_seller(ein=identifier) def search_individual_seller(self, identifier: str) -> ZoopResponse: """ search seller by CPF Args: identifier: taxpayer_id is equivalent to CPF # noqa: Returns: response with instance of Seller """ return self.__search_seller(taxpayer_id=identifier) def update_seller(self, identifier: str, data: Union[dict, Seller]) -> ZoopResponse: """ Atualiza um :class:`.Seller`. 
Examples: >>> data = { "birthdate": "1994-12-27", "email": "[email protected]", "first_name": "foo", "last_name": "foo", "phone_number": "+55 84 99999-9999", "taxpayer_id": "foo", "address": { "city": "Natal", "country_code": "BR" "line1": "foo", "line2": "123", "line3": "barbar", "neighborhood": "fooofoo", "postal_code": "59152250", "state": "BR-RN", } } >>> data = { "business_email": "foo", "business_name": "foo", "business_opening_date": "foo", "business_phone": "foo", "business_website": "foo", "ein": "foo", "owner": { "birthdate": "foo", "email": "foo", "first_name": "foo", "last_name": "foo", "phone_number": "foo", "taxpayer_id": "foo", "address": { "city": "foo", "country_code": "foo" "line1": "foo", "line2": "foo", "line3": "foo", "neighborhood": "foo", "postal_code": "foo", "state": "foo", } } "business_address": { "city": "foo", "country_code": "foo" "line1": "foo", "line2": "foo", "line3": "foo", "neighborhood": "foo", "postal_code": "foo", "state": "foo", } } Args: identifier: id do :class:`.Seller` data: dados do :class:`.Seller` Returns: :class:`.ZoopResponse` """ instance = Seller.from_dict_or_instance(data) url = self._construct_url( action="sellers", identifier=identifier, subaction=instance.get_type_uri(), sub_action_before_identifier=True, ) return self._put_instance(url, instance=instance)
zoop-wrapper
/zoop_wrapper-0.5.2-py3-none-any.whl/zoop_wrapper/wrapper/seller.py
seller.py
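A sketch of the individual-seller flow using the payload shape from the add_seller docstring; the top-level ZoopWrapper import is assumed, the CPF is a structurally valid test number, and the remaining values are placeholders.

from zoop_wrapper import ZoopWrapper  # assumed top-level export

client = ZoopWrapper()

response = client.add_seller({
    "first_name": "Ada",
    "last_name": "Lovelace",
    "email": "ada@example.com",
    "phone_number": "+55 84 99999-9999",
    "birthdate": "1994-12-27",
    "taxpayer_id": "52998224725",   # valid test CPF; a business seller uses "ein"
    "address": {
        "line1": "Av. Principal",
        "line2": "123",
        "neighborhood": "Centro",
        "city": "Natal",
        "state": "RN",
        "postal_code": "59152250",
        "country_code": "BR",
    },
})
seller_id = response.data["id"]

client.search_individual_seller("52998224725")   # lookup by CPF
client.retrieve_seller(seller_id)
client.remove_seller(seller_id)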
from typing import Union from .base import BaseZoopWrapper from ..models.buyer import Buyer from ..response import ZoopResponse class BuyerWrapper(BaseZoopWrapper): """ Possui os métodos do resource :class:`.Buyer` """ def add_buyer(self, data: Union[dict, Buyer]) -> ZoopResponse: """ Adiciona um :class:`.Buyer` Examples: >>> data = { "birthdate": 'foo', "email": "foo", "first_name": "foo", "last_name": "foo", "phone_number": "foo", "taxpayer_id": "foo", "address": { "city": "foo", "country_code": "foo" "line1": "foo", "line2": "foo", "line3": "foo", "neighborhood": "foo", "postal_code": "foo", "state": "foo", } } Args: data (dict ou :class:`.Buyer`): dados do :class:`.Buyer` Returns: :class:`.ZoopResponse` """ instance = Buyer.from_dict_or_instance(data) url = self._construct_url(action="buyers") return self._post_instance(url, instance=instance) def list_buyers(self) -> ZoopResponse: """ Lista todos os :class:`.Buyer`'s Returns: :class:`.ZoopResponse` """ url = self._construct_url(action="buyers") return self._get(url) def retrieve_buyer(self, identifier: str) -> ZoopResponse: """ Pega um :class:`.Buyer` Args: identifier: uuid id Returns: :class:`.ZoopResponse` """ url = self._construct_url(action="buyers", identifier=identifier) return self._get(url) def remove_buyer(self, identifier: str) -> ZoopResponse: """ Remove um :class:`.Buyer` Args: identifier: uuid id Returns: :class:`.ZoopResponse` """ url = self._construct_url(action="buyers", identifier=identifier) return self._delete(url) def search_buyer(self, identifier: str) -> ZoopResponse: """ Buscar um :class:`.Buyer` pelo CPF ou CNPJ .. note:: Sim, o atributo é o :attr:`.taxpayer_id` para os dois. Veja o código para entender. Args: identifier: CPF ou CNPJ Returns: :class:`.ZoopResponse` """ url = self._construct_url( action="buyers/search", search=f"taxpayer_id={identifier}" ) return self._get(url) def update_buyer(self, identifier: str, data: Union[dict, Buyer]) -> ZoopResponse: """ Atualiza um :class:`.Buyer`. Examples: >>> data = { "birthdate": "1994-12-27", "email": "[email protected]", "first_name": "foo", "last_name": "foo", "phone_number": "+55 84 99999-9999", "taxpayer_id": "foo", "address": { "city": "Natal", "country_code": "BR" "line1": "foo", "line2": "123", "line3": "barbar", "neighborhood": "fooofoo", "postal_code": "59152250", "state": "BR-RN", } } Args: identifier: id do :class:`.Buyer` data: dados do :class:`.Buyer` Returns: :class:`.ZoopResponse` """ instance = Buyer.from_dict_or_instance(data) url = self._construct_url(action="buyers", identifier=identifier) return self._put_instance(url, instance=instance)
zoop-wrapper
/zoop_wrapper-0.5.2-py3-none-any.whl/zoop_wrapper/wrapper/buyer.py
buyer.py
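A similar sketch for buyers, based on the add_buyer docstring payload; the ZoopWrapper import is assumed and the values are placeholders (the CPF is a structurally valid test number).

from zoop_wrapper import ZoopWrapper  # assumed top-level export

client = ZoopWrapper()

response = client.add_buyer({
    "first_name": "Ada",
    "last_name": "Lovelace",
    "email": "ada@example.com",
    "phone_number": "+55 84 99999-9999",
    "taxpayer_id": "52998224725",   # CPF; a CNPJ also goes in taxpayer_id here
    "address": {"city": "Natal", "state": "RN", "country_code": "BR"},
})
buyer_id = response.data["id"]

client.search_buyer("52998224725")  # CPF or CNPJ, always via taxpayer_id
client.remove_buyer(buyer_id)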
Zoop - ZooKeeper for Python! ============================ `Zookeeper`_ is a highly reliable distributed coordination service. *Zoop* gives you a Pythonic API for accessing ZooKeeper instances, as well as implementations of some common ZooKeeper patterns. This leaves you free to concentrate on whatever it was you were originally doing:: >>> zk = zoop.ZooKeeper('localhost:2181') >>> zk.connect() >>> q = zk.Queue('/howdy') >>> def gotit(data): ... print "Gotit got data:", data >>> q.watch(gotit) >>> q.put("frist!") Gotit got data: frist! .. _Zookeeper: http://zookeeper.apache.org/ .. image:: https://secure.travis-ci.org/davidmiller/zoop.png?branch=master :alt: Build Status :target: https://secure.travis-ci.org/davidmiller/zoop Check out: `Documentation`_ `Source`_ `Issues`_ .. _Documentation: http://www.deadpansincerity.com/docs/zoop/ .. _Source: https://github.com/davidmiller/zoop .. _Issues: https://github.com/davidmiller/zoop/issues
zoop
/zoop-0.1.1.tar.gz/zoop-0.1.1/README.rst
README.rst
import json import logging import requests import http.client import uuid import os import time import hashlib from pathlib import Path from tabulate import tabulate from urllib.parse import urlencode from IPython.core.display import display, HTML from IPython.display import IFrame, Image from oauthlib.oauth2 import BackendApplicationClient from requests_oauthlib import OAuth2Session class Indeks: """ Updatable and queryable index. """ API_ID = "prod_query_api" def __init__(self, project_id, account_id='ecommerce', access_creds=None, host='100.24.23.205', port=5000, tag=None): self._host = host self._port = port self._account_id = account_id self._project_id = project_id self._tag = tag self._query_selector = None self._query_config = None self._access_creds = access_creds if self._access_creds is None: creds_file = str(Path.home()) + "/.zooper_creds.json" if not os.path.exists(creds_file): raise ValueError('Unable to find credentials file in the credential store.') with open(creds_file, 'r') as f: self._access_creds = json.load(f) self._token = None def query(self, selector, tag=None, config=None): """ Query API. Perform SQL-like queries against the index. """ self._tag = tag if tag else self._tag self._query_selector = selector self._query_config = config return self def plot(self, chart_type="table", format="html", config={}): """ Plot API. Convert non-visual information into visual insights. """ self.ensure_token() # TODO: switch to TLS url = "http://{}:{}/api/query/query".format(self._host, self._port) params = { 'account_name': self._account_id, 'project_name': self._project_id, 'tag': self._tag, 'query_text': self._query_selector, 'plot_type': chart_type, 'plot_format': format, 'plot_config': urlencode(config) } headers = { 'authorization': "Bearer {}".format(self._token.get('access_token')) } response = requests.get(url, params=params, headers=headers) if response.status_code != requests.codes.ok: print("Error: {}".format(response.json().get("message"))) return result = response.json().get("result") if format == "html": if chart_type == "table": display(HTML(result)) else: chart_file = "chart_{}.html".format(uuid.uuid4()) os.makedirs("plots", exist_ok=True) with open("plots/" + chart_file, 'w+') as f: f.write(result) display(IFrame("plots/" + chart_file, '100%', '600px')) elif format == "json": return json.loads(result) else: return result return self def get_versions(self, tag=None, last_n=None): """ Get available index versions. 
""" raise ValueError("Not yet implemented") def __repr__(self): return '' def ensure_token(self): now = time.time() if self._token == None: # Try to load an existing token from cache try: with open(self.get_token_cache_file(), 'r') as f: self._token = json.loads(f.read()) except: pass if self._token and 'expires_at' in self._token and int(self._token.get('expires_at')) > int(now): # Token is still good return self._token = None client = BackendApplicationClient(client_id=self._access_creds.get('key')) oauth = OAuth2Session(client=client) token = oauth.fetch_token(token_url='https://dev-q0m6ske4.auth0.com/oauth/token', client_id=self._access_creds.get('key'), client_secret=self._access_creds.get('secret_key'), audience=self.API_ID) os.makedirs("{}/.zooper_auth".format(str(Path.home())), exist_ok=True) with open(self.get_token_cache_file(), 'w+') as f: f.write(json.dumps(token)) self._token = token def get_token_cache_file(self): creds = "{}{}".format(self._access_creds.get('key'), self._access_creds.get('secret_key')) creds_hash = hashlib.sha256(creds.encode('utf-8')).hexdigest() return "{}/.zooper_auth/{}_{}.json".format(str(Path.home()), self._account_id, creds_hash)
zooper-client
/zooper_client-0.1.7-py3-none-any.whl/zooper_client/indeks.py
indeks.py
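A hedged sketch of the Indeks query/plot flow shown above. Project, account, tag and selector are placeholders; credentials may be passed explicitly as below or read from ~/.zooper_creds.json, and plot() is the call that actually issues the request.

from zooper_client.indeks import Indeks

index = Indeks(
    project_id="my_project",
    account_id="ecommerce",
    access_creds={"key": "client_id", "secret_key": "client_secret"},
)

# query() only stores the selector and tag; the HTTP call happens in plot().
rows = (
    index.query("SELECT sku, revenue FROM sales ORDER BY revenue DESC", tag="v1")
    .plot(chart_type="table", format="json")
)
print(rows)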
from __future__ import absolute_import from __future__ import division from __future__ import print_function import os from keras.models import Sequential, Input, Model, K from keras.layers import Dropout, Flatten, Dense, Activation from keras_applications import _KERAS_BACKEND, _KERAS_LAYERS, _KERAS_MODELS, _KERAS_UTILS from keras_applications import get_submodules_from_kwargs from keras_applications import imagenet_utils from keras_applications.imagenet_utils import decode_predictions from keras_applications.imagenet_utils import _obtain_input_shape BASE_WEIGHT_URL = ('https://github.com/fchollet/deep-learning-models/' 'releases/download/v0.7/') from keras import backend from keras import layers from keras import models from keras import utils def preprocess_input(x, **kwargs): """ Preprocesses a numpy array encoding a batch of images. Arguments x: a 4D numpy array consists of RGB values within [0, 255]. Returns Preprocessed array. """ return imagenet_utils.preprocess_input(x, mode='tf', **kwargs) def conv2d_bn(x, filters, kernel_size, strides=1, padding='same', activation='relu', use_bias=False, training=True, name=None): """ Utility function to apply conv + BN. Arguments x: input tensor. filters: filters in `Conv2D`. kernel_size: kernel size as in `Conv2D`. strides: strides in `Conv2D`. padding: padding mode in `Conv2D`. activation: activation in `Conv2D`. use_bias: whether to use a bias in `Conv2D`. name: name of the ops; will become `name + '_ac'` for the activation and `name + '_bn'` for the batch norm layer. Returns Output tensor after applying `Conv2D` and `BatchNormalization`. """ x = layers.Conv2D(filters, kernel_size, strides=strides, padding=padding, use_bias=use_bias, name=name)(x) if not use_bias: bn_axis = 1 if backend.image_data_format() == 'channels_first' else 3 bn_name = None if name is None else name + '_bn' x = layers.BatchNormalization(axis=bn_axis, scale=False, name=bn_name)(x) if activation is not None: ac_name = None if name is None else name + '_ac' x = layers.Activation(activation, name=ac_name)(x) return x def inception_resnet_block(x, scale, block_type, block_idx, activation='relu'): """ Adds a Inception-ResNet block. This function builds 3 types of Inception-ResNet blocks mentioned in the paper, controlled by the `block_type` argument (which is the block name used in the official TF-slim implementation): - Inception-ResNet-A: `block_type='block35'` - Inception-ResNet-B: `block_type='block17'` - Inception-ResNet-C: `block_type='block8'` Arguments x: input tensor. scale: scaling factor to scale the residuals (i.e., the output of passing `x` through an inception module) before adding them to the shortcut branch. Let `r` be the output from the residual branch, the output of this block will be `x + scale * r`. block_type: `'block35'`, `'block17'` or `'block8'`, determines the network structure in the residual branch. block_idx: an `int` used for generating layer names. The Inception-ResNet blocks are repeated many times in this network. We use `block_idx` to identify each of the repetitions. For example, the first Inception-ResNet-A block will have `block_type='block35', block_idx=0`, and the layer names will have a common prefix `'block35_0'`. activation: activation function to use at the end of the block (see [activations](../activations.md)). When `activation=None`, no activation is applied (i.e., "linear" activation: `a(x) = x`). Returns Output tensor for the block. Raises ValueError: if `block_type` is not one of `'block35'`, `'block17'` or `'block8'`. 
""" if block_type == 'block35': branch_0 = conv2d_bn(x, 32, 1) branch_1 = conv2d_bn(x, 32, 1) branch_1 = conv2d_bn(branch_1, 32, 3) branch_2 = conv2d_bn(x, 32, 1) branch_2 = conv2d_bn(branch_2, 48, 3) branch_2 = conv2d_bn(branch_2, 64, 3) branches = [branch_0, branch_1, branch_2] elif block_type == 'block17': branch_0 = conv2d_bn(x, 192, 1) branch_1 = conv2d_bn(x, 128, 1) branch_1 = conv2d_bn(branch_1, 160, [1, 7]) branch_1 = conv2d_bn(branch_1, 192, [7, 1]) branches = [branch_0, branch_1] elif block_type == 'block8': branch_0 = conv2d_bn(x, 192, 1) branch_1 = conv2d_bn(x, 192, 1) branch_1 = conv2d_bn(branch_1, 224, [1, 3]) branch_1 = conv2d_bn(branch_1, 256, [3, 1]) branches = [branch_0, branch_1] else: raise ValueError('Unknown Inception-ResNet block type. ' 'Expects "block35", "block17" or "block8", ' 'but got: ' + str(block_type)) block_name = block_type + '_' + str(block_idx) channel_axis = 1 if backend.image_data_format() == 'channels_first' else 3 mixed = layers.Concatenate( axis=channel_axis, name=block_name + '_mixed')(branches) up = conv2d_bn(mixed, backend.int_shape(x)[channel_axis], 1, activation=None, use_bias=True, name=block_name + '_conv') x = layers.Lambda(lambda inputs, scale: inputs[0] + inputs[1] * scale, output_shape=backend.int_shape(x)[1:], arguments={'scale': scale}, name=block_name)([x, up]) if activation is not None: x = layers.Activation(activation, name=block_name + '_ac')(x) return x def ZooperBaseInceptionResNetV2(include_top=True, weights='imagenet', input_tensor=None, input_shape=None, pooling=None, classes=1000, **kwargs): """ Instantiates the Inception-ResNet v2 architecture. Optionally loads weights pre-trained on ImageNet. Note that the data format convention used by the model is the one specified in your Keras config at `~/.keras/keras.json`. # Arguments include_top: whether to include the fully-connected layer at the top of the network. weights: one of `None` (random initialization), 'imagenet' (pre-training on ImageNet), or the path to the weights file to be loaded. input_tensor: optional Keras tensor (i.e. output of `layers.Input()`) to use as image input for the model. input_shape: optional shape tuple, only to be specified if `include_top` is `False` (otherwise the input shape has to be `(299, 299, 3)` (with `'channels_last'` data format) or `(3, 299, 299)` (with `'channels_first'` data format). It should have exactly 3 inputs channels, and width and height should be no smaller than 75. E.g. `(150, 150, 3)` would be one valid value. pooling: Optional pooling mode for feature extraction when `include_top` is `False`. - `None` means that the output of the model will be the 4D tensor output of the last convolutional block. - `'avg'` means that global average pooling will be applied to the output of the last convolutional block, and thus the output of the model will be a 2D tensor. - `'max'` means that global max pooling will be applied. classes: optional number of classes to classify images into, only to be specified if `include_top` is `True`, and if no `weights` argument is specified. # Returns A Keras `Model` instance. # Raises ValueError: in case of invalid argument for `weights`, or invalid input shape. 
""" global backend, layers, models, keras_utils backend, layers, models, keras_utils = get_submodules_from_kwargs(kwargs) if not (weights in {'imagenet', None} or os.path.exists(weights)): raise ValueError('The `weights` argument should be either ' '`None` (random initialization), `imagenet` ' '(pre-training on ImageNet), ' 'or the path to the weights file to be loaded.') if weights == 'imagenet' and include_top and classes != 1000: raise ValueError('If using `weights` as `"imagenet"` with `include_top`' ' as true, `classes` should be 1000') # Determine proper input shape input_shape = _obtain_input_shape( input_shape, default_size=299, min_size=75, data_format=backend.image_data_format(), require_flatten=include_top, weights=weights) if input_tensor is None: img_input = layers.Input(shape=input_shape) else: if not backend.is_keras_tensor(input_tensor): img_input = layers.Input(tensor=input_tensor, shape=input_shape) else: img_input = input_tensor # Stem block: 35 x 35 x 192 x = conv2d_bn(img_input, 32, 3, strides=2, padding='valid') x = conv2d_bn(x, 32, 3, padding='valid') x = conv2d_bn(x, 64, 3) x = layers.MaxPooling2D(3, strides=2)(x) x = conv2d_bn(x, 80, 1, padding='valid') x = conv2d_bn(x, 192, 3, padding='valid') x = layers.MaxPooling2D(3, strides=2)(x) # Mixed 5b (Inception-A block): 35 x 35 x 320 branch_0 = conv2d_bn(x, 96, 1) branch_1 = conv2d_bn(x, 48, 1) branch_1 = conv2d_bn(branch_1, 64, 5) branch_2 = conv2d_bn(x, 64, 1) branch_2 = conv2d_bn(branch_2, 96, 3) branch_2 = conv2d_bn(branch_2, 96, 3) branch_pool = layers.AveragePooling2D(3, strides=1, padding='same')(x) branch_pool = conv2d_bn(branch_pool, 64, 1) branches = [branch_0, branch_1, branch_2, branch_pool] channel_axis = 1 if backend.image_data_format() == 'channels_first' else 3 x = layers.Concatenate(axis=channel_axis, name='mixed_5b')(branches) # 10x block35 (Inception-ResNet-A block): 35 x 35 x 320 for block_idx in range(1, 11): x = inception_resnet_block(x, scale=0.17, block_type='block35', block_idx=block_idx) # Mixed 6a (Reduction-A block): 17 x 17 x 1088 branch_0 = conv2d_bn(x, 384, 3, strides=2, padding='valid') branch_1 = conv2d_bn(x, 256, 1) branch_1 = conv2d_bn(branch_1, 256, 3) branch_1 = conv2d_bn(branch_1, 384, 3, strides=2, padding='valid') branch_pool = layers.MaxPooling2D(3, strides=2, padding='valid')(x) branches = [branch_0, branch_1, branch_pool] x = layers.Concatenate(axis=channel_axis, name='mixed_6a')(branches) # 20x block17 (Inception-ResNet-B block): 17 x 17 x 1088 for block_idx in range(1, 21): x = inception_resnet_block(x, scale=0.1, block_type='block17', block_idx=block_idx) # Mixed 7a (Reduction-B block): 8 x 8 x 2080 branch_0 = conv2d_bn(x, 256, 1) branch_0 = conv2d_bn(branch_0, 384, 3, strides=2, padding='valid') branch_1 = conv2d_bn(x, 256, 1) branch_1 = conv2d_bn(branch_1, 288, 3, strides=2, padding='valid') branch_2 = conv2d_bn(x, 256, 1) branch_2 = conv2d_bn(branch_2, 288, 3) branch_2 = conv2d_bn(branch_2, 320, 3, strides=2, padding='valid') branch_pool = layers.MaxPooling2D(3, strides=2, padding='valid')(x) branches = [branch_0, branch_1, branch_2, branch_pool] x = layers.Concatenate(axis=channel_axis, name='mixed_7a')(branches) # 10x block8 (Inception-ResNet-C block): 8 x 8 x 2080 for block_idx in range(1, 10): x = inception_resnet_block(x, scale=0.2, block_type='block8', block_idx=block_idx) x = inception_resnet_block(x, scale=1., activation=None, block_type='block8', block_idx=10) # Final convolution block: 8 x 8 x 1536 x = conv2d_bn(x, 1536, 1, name='conv_7b') if include_top: # 
Classification block x = layers.GlobalAveragePooling2D(name='avg_pool')(x) x = layers.Dense(classes, activation='softmax', name='predictions')(x) else: if pooling == 'avg': x = layers.GlobalAveragePooling2D()(x) elif pooling == 'max': x = layers.GlobalMaxPooling2D()(x) # Ensure that the model takes into account # any potential predecessors of `input_tensor`. if input_tensor is not None: inputs = keras_utils.get_source_inputs(input_tensor) else: inputs = img_input # Create model. model = models.Model(inputs, x, name='inception_resnet_v2') # Load weights. if weights == 'imagenet': if include_top: fname = 'inception_resnet_v2_weights_tf_dim_ordering_tf_kernels.h5' weights_path = keras_utils.get_file( fname, BASE_WEIGHT_URL + fname, cache_subdir='models', file_hash='e693bd0210a403b3192acc6073ad2e96') else: fname = ('inception_resnet_v2_weights_' 'tf_dim_ordering_tf_kernels_notop.h5') weights_path = keras_utils.get_file( fname, BASE_WEIGHT_URL + fname, cache_subdir='models', file_hash='d19885ff4a710c122648d3b5c3b684e4') model.load_weights(weights_path) elif weights is not None: model.load_weights(weights) return model def keras_modules_injection(base_fun): def wrapper(*args, **kwargs): kwargs['backend'] = backend kwargs['layers'] = layers kwargs['models'] = models kwargs['utils'] = utils return base_fun(*args, **kwargs) return wrapper @keras_modules_injection def ZooperInceptionResNetV2(*args, **kwargs): """ A custom InceptionResNetV2 model built on top of Keras InceptionResNetV2. """ return ZooperBaseInceptionResNetV2(*args, **kwargs)
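# Minimal usage sketch: assumes keras and numpy are installed and the ImageNet
# weights can be downloaded; the image path 'example.jpg' is a hypothetical
# placeholder, not a file shipped with this module.
if __name__ == '__main__':
    import numpy as np
    from keras.preprocessing import image

    # Build the full classifier (299x299 RGB inputs, 1000 ImageNet classes).
    model = ZooperInceptionResNetV2(include_top=True, weights='imagenet')

    img = image.load_img('example.jpg', target_size=(299, 299))
    x = np.expand_dims(image.img_to_array(img), axis=0)
    x = preprocess_input(x)  # scales pixel values to [-1, 1] ('tf' mode)

    preds = model.predict(x)
    print('predicted class index:', int(np.argmax(preds[0])))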
zooper-common
/zooper_common-0.1.52-py3-none-any.whl/zooper_common/ai.py
ai.py
import json import os import time import boto3 import tensorflow as tf import math import numpy as np from datetime import datetime from os.path import join, basename, splitext, isdir, exists from functools import partial from keras.preprocessing.image import ImageDataGenerator, array_to_img, img_to_array, load_img from keras.models import Sequential, Input, Model, K from keras.layers import Dropout, Flatten, Dense, Activation, BatchNormalization from keras import applications, layers from keras import optimizers from keras import regularizers from zooper_common.ai import ZooperInceptionResNetV2 from zooper_common.s3 import S3Utility s3 = S3Utility() class ZooperVisionFineTuner: """ Zooper's vision fine-tuner class. Quickstart: =========== z = ZooperVisionFineTuner( base_model_name='ZooperInceptionResNetV2', dense_layer_size=512, num_classes=8, storage_bucket='premium-handbags', train_data_dir='/tmp/premium_handbags/train', validation_data_dir='/tmp/premium_handbags/validation' ) # Extract and save bottleneck features from the base model. z.extract_bottlebeck_features( bottleneck_feature_file_prefix='bottleneck_features') # Pretrain the top model. z.train_top_model( output_weights_path='201910261243.h5', epochs=14) """ train_data_dir = '/tmp/train' validation_data_dir = '/tmp/validation' nb_train_samples = 25840 nb_validation_samples = 6464 batch_size = 16 use_tensorboard = False storage_bucket = 'zooper_models' def __init__(self, base_model_name='ZooperInceptionResNetV2', storage_bucket=None, input_image_width=224, input_image_height=224, dense_layer_size=512, num_classes=8, train_data_dir='/tmp/train', validation_data_dir='/tmp/validation'): # Assuming the TF layout. self.base_model_name = base_model_name self.dense_layer_size = dense_layer_size self.num_classes = num_classes self.input_image_width = input_image_width self.input_image_height = input_image_height self.input_shape = (input_image_width, input_image_height, 3) self.storage_bucket = storage_bucket self.train_data_dir = train_data_dir self.validation_data_dir = validation_data_dir @staticmethod def get_data_statistics(root_dir): image_dirs = [d for d in os.listdir(root_dir) if isdir(join(root_dir, d))] print(image_dirs) cat = {} total_count = 0 for d in image_dirs: list_files = os.listdir(join(root_dir, d)) count = len(list_files) cat[d] = count total_count += count print(json.dumps(cat, indent=2)) print('total: {}'.format(total_count)) return cat def get_labels(self, data_dir, stats=None): """ Get labels from the dataset, by scanning the data_dir and doing one-hot-encoding. """ datagen = ImageDataGenerator(rescale=1. / 255) generator = datagen.flow_from_directory( data_dir, target_size=(self.input_image_height, self.input_image_width), batch_size=self.batch_size, class_mode='categorical', shuffle=False) print(generator.class_indices.keys()) label_index = [] prev_index = 0 if stats is not None: for k, v in generator.class_indices.items(): label_index.append(prev_index) prev_index += stats[k] print('{}:{}'.format(k, stats[k])) number_of_examples = len(generator.filenames) number_of_generator_calls = math.ceil(number_of_examples / (1.0 * self.batch_size)) # 1.0 above is to skip integer division labels = [] for i in range(0,int(number_of_generator_calls)): labels.extend(np.array(generator[i][1])) for idx in label_index: print(labels[idx]) return np.array(labels) def extract_bottlebeck_features(self, bottleneck_feature_file_prefix='bottleneck_features'): """ Extract and save the base_model output as bottleneck features. 
The output bottleneck features can be used to pretrained a Dense layer. At the end of the process the features are written to: - `bottleneck_feature_file_prefix` + _train.npy - `bottleneck_feature_file_prefix` + _validation.npy """ print('Extracting bottleneck features of {} model ...'.format(self.base_model_name)) train_datagen = ImageDataGenerator( rescale=1. / 255, shear_range=0.2, zoom_range=0.2, horizontal_flip=True, # rotation_range=30, # width_shift_range=0.2, # height_shift_range=0.2, # fill_mode='nearest', ) test_datagen = ImageDataGenerator(rescale=1. / 255) # Build the tower. model = self.create_base_model() generator = train_datagen.flow_from_directory( self.train_data_dir, target_size=(self.input_image_width, self.input_image_height), batch_size=self.batch_size, class_mode='categorical', shuffle=False, ) bottleneck_features_train = model.predict_generator( generator, self.nb_train_samples // self.batch_size) np.save(open(bottleneck_feature_file_prefix + '_train.npy', 'wb'), bottleneck_features_train) generator = test_datagen.flow_from_directory( self.validation_data_dir, target_size=(self.input_image_width, self.input_image_height), batch_size=self.batch_size, class_mode='categorical', shuffle=False, ) bottleneck_features_validation = model.predict_generator( generator, self.nb_validation_samples // self.batch_size) np.save(open(bottleneck_feature_file_prefix + '_validation.npy', 'wb'), bottleneck_features_validation) if self.storage_bucket is not None: s3.upload_folder( bottleneck_feature_file_prefix + '_train.npy', bucket=self.storage_bucket, prefix=self.base_model_name) s3.upload_folder( bottleneck_feature_file_prefix + '_validation.npy', bucket=self.storage_bucket, prefix=self.base_model_name) def train_top_model(self, output_weights_path, epochs=10, bottleneck_feature_file_prefix='bottleneck_features'): """ Training only the top Dense layers using the bottleneck features. """ train_data = np.load(open(bottleneck_feature_file_prefix + '_train.npy', 'rb')) print(train_data.shape) stats = ZooperVisionFineTuner.get_data_statistics(self.train_data_dir) train_labels = self.get_labels(self.train_data_dir, stats)[:self.nb_train_samples] print(train_labels.shape) validation_data = np.load(open(bottleneck_feature_file_prefix + '_validation.npy', 'rb')) print(validation_data.shape) stats = ZooperVisionFineTuner.get_data_statistics(self.validation_data_dir) validation_labels = self.get_labels(self.validation_data_dir, stats)[:self.nb_validation_samples] print(validation_labels.shape) model = self.create_top_model(train_data.shape[1:]) # Compile the model with a SGD/momentum optimizer # and a very slow learning rate. 
        sgd = optimizers.SGD(lr=1e-4, momentum=0.9)
        model.compile(loss='categorical_crossentropy',
                      optimizer=sgd,
                      metrics=['accuracy'])
        try:
            timestamp = datetime.now().strftime("%Y%m%d-%H%M%S")
            if self.use_tensorboard:
                # ``keras`` itself is never imported at module level, so the
                # original ``keras.callbacks.TensorBoard`` reference would raise
                # a NameError; import the callback explicitly instead.
                from keras.callbacks import TensorBoard
                logdir = "logs/scalars/" + timestamp
                tensorboard_callback = TensorBoard(log_dir=logdir)
                training_history = model.fit(
                    train_data, train_labels,
                    epochs=epochs,
                    batch_size=self.batch_size,
                    validation_data=(validation_data, validation_labels),
                    callbacks=[tensorboard_callback])
                print("Average test loss: ",
                      np.average(training_history.history['loss']))
            else:
                model.fit(
                    train_data, train_labels,
                    epochs=epochs,
                    batch_size=self.batch_size,
                    validation_data=(validation_data, validation_labels))
            model.save_weights(output_weights_path)
            if self.storage_bucket is not None:
                print('Saving output weights {} ...'.format(output_weights_path))
                s3.upload_folder(output_weights_path,
                                 bucket=self.storage_bucket,
                                 prefix=self.base_model_name)
            print('Completed training.')
        except Exception as e:
            print(e)

    def create_top_model(self, input_shape, enable_regularizer=True):
        """
        Create dense (top) layer as Sequential.
        """
        base_model = Sequential()
        base_model.add(Flatten(input_shape=input_shape))
        if enable_regularizer:
            base_model.add(Dense(self.dense_layer_size,
                                 name='fc_{}'.format(self.dense_layer_size),
                                 activation='relu',
                                 kernel_regularizer=regularizers.l2(0.005),
                                 bias_regularizer=regularizers.l2(0.005)))
            base_model.add(Dropout(0.5))
            base_model.add(Dense(self.num_classes,
                                 name='fc_{}'.format(self.num_classes),
                                 activation='softmax',
                                 kernel_regularizer=regularizers.l2(0.005),
                                 bias_regularizer=regularizers.l2(0.005)))
        else:
            base_model.add(Dense(self.dense_layer_size,
                                 name='fc_{}'.format(self.dense_layer_size),
                                 activation='relu'))
            base_model.add(Dropout(0.5))
            base_model.add(Dense(self.num_classes, activation='softmax',
                                 name='fc_{}'.format(self.num_classes)))
        return base_model

    def create_base_model(self, weights='imagenet'):
        """
        Create base tower model.
""" print('Creating `{}` model ...'.format(self.base_model_name)) if self.base_model_name == 'ZooperInceptionResNetV2': base_model = ZooperInceptionResNetV2( include_top=False, weights=weights, input_shape=self.input_shape, classes=self.num_classes, ) elif self.base_model_name == 'InceptionResNetV2': base_model = applications.inception_resnet_v2.InceptionResNetV2( include_top=False, weights=weights, input_shape=self.input_shape, classes=self.num_classes, ) elif self.base_model_name == 'VGG16': base_model = applications.VGG16( include_top=False, weights=weights, input_shape=self.input_shape, classes=self.num_classes, ) elif self.base_model_name == 'VGG19': base_model = applications.VGG19( include_top=False, weights=weights, input_shape=self.input_shape, classes=self.num_classes, ) elif self.base_model_name == 'ResNet50': base_model = applications.ResNet50( include_top=False, weights=weights, input_shape=self.input_shape, classes=self.num_classes, ) elif self.base_model_name == 'InceptionV3': base_model = applications.inception_v3.InceptionV3( include_top=False, weights=weights, input_shape=self.input_shape, classes=self.num_classes, ) elif self.base_model_name == 'DenseNet169': base_model = applications.densenet.DenseNet169( include_top=False, weights=weights, input_shape=self.input_shape, classes=self.num_classes, ) elif self.base_model_name == 'MobileNet': base_model = applications.mobilenet.MobileNet( include_top=False, weights=weights, input_shape=self.input_shape, classes=self.num_classes, ) elif self.base_model_name == 'MobileNetV2': base_model = applications.mobilenet_v2.MobileNetV2( include_top=False, weights=weights, input_shape=self.input_shape, classes=self.num_classes, ) else: raise ValueError('Not supported model {}.'.format(self.base_model_name)) return base_model def create_full_model(self, weights_config={ "top_layer": False, "path": None, }, mode='training', train_last_k_layers=3): """ Create full tower (base + dense). """ def _is_batch_normalization(layer): return (_layer.name.endswith('_bn') or _layer.name.startswith('batch_normalization') or isinstance(_layer, BatchNormalization)) st = time.time() input_tensor = Input(shape=self.input_shape) if mode == 'inference': print('[Inference mode]') K.set_learning_phase(0) # For inference, do not load 'imagenet' weights by default. base_model = self.create_base_model() elif mode == 'training': print('[Training mode]') base_model = self.create_base_model() else: raise ValueError('Invalid mode: {}'.format(mode)) # Build a classifier model to put on top of the convolutional model. # Note that it is necessary to start with a fully-trained # classifier, including the top classifier, # in order to successfully do fine-tuning. 
top_model = self.create_top_model(base_model.output_shape[1:]) pretrained_weights_path = weights_config['path'] if (weights_config['top_layer'] and pretrained_weights_path is not None): print('Loading top-level weights: {} ...'.format( pretrained_weights_path)) top_model.load_weights(pretrained_weights_path) model = Model(inputs=base_model.input, outputs=top_model(base_model.output)) model.base_model = base_model model.top_model = top_model if not weights_config['top_layer'] and pretrained_weights_path is not None: print('Loading full weights: {} ...'.format( pretrained_weights_path)) model.load_weights(pretrained_weights_path) print('Model loaded.') print('|Layers|:{}'.format(len(model.layers))) # Set the base_model layers (up to the last 12th) # to non-trainable (weights will not be updated) if mode == 'training': print('Fine tuning last-{} layers ...'.format(train_last_k_layers)) _bottom_layers = model.layers[:-train_last_k_layers] _top_layers = model.layers[-train_last_k_layers:] elif mode == 'inference': _bottom_layers = model.layers _top_layers = [] for _layer in _bottom_layers: _layer.trainable = False # This is important for model with BN to work with fine-tuning. # References: # - https://github.com/keras-team/keras/pull/9965#issuecomment-382801648 # - https://github.com/keras-team/keras/issues/9214#issuecomment-422490253 if (_is_batch_normalization(_layer)): print('Freezing BN layers ... {}'.format(_layer.name)) _layer.call = lambda inputs, _: BatchNormalization.call( inputs=inputs, training=False) for _layer in _top_layers: _layer.trainable = True if (_is_batch_normalization(_layer)): print('Unfreezing BN layers ... {}'.format(_layer.name)) _layer = BatchNormalization # Compile the model with a SGD/momentum optimizer # and a very slow learning rate. sgd = optimizers.SGD(lr=1e-4, momentum=0.9) model.compile( loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy']) print('{} model is loaded. Elapsed time: {} secs'.format( self.base_model_name, time.time() - st)) return model def tune(self, weights_config, epochs=10, train_last_k_layers=3, steps_per_epoch=10000, validation_steps=1000): """ Fine tuning a pre-trained model. """ timestamp = datetime.now().strftime("%Y%m%d-%H") pretrained_weights_path = weights_config['path'] model = self.create_full_model( weights_config=weights_config, train_last_k_layers=train_last_k_layers, mode='training') filename, ext = os.path.splitext(pretrained_weights_path) output_weights=filename + '_finetuned_lastklayer{}_ep{}_{}{}'.format( train_last_k_layers, epochs, timestamp, ext) # Prepare data augmentation configuration # TODO(zooper): make this configurable. train_datagen = ImageDataGenerator( rotation_range=15, width_shift_range=0.2, height_shift_range=0.2, rescale=1. / 255, shear_range=0.2, zoom_range=[0.5,1.0], brightness_range=[0.5, 1.5], horizontal_flip=True) test_datagen = ImageDataGenerator(rescale=1. / 255) print('Initializing train_generator ...') train_generator = train_datagen.flow_from_directory( self.train_data_dir, target_size=(self.input_image_height, self.input_image_width), batch_size=self.batch_size, class_mode='categorical', shuffle=False) print('Initializing validation_generator ...') validation_generator = test_datagen.flow_from_directory( self.validation_data_dir, target_size=(self.input_image_height, self.input_image_width), batch_size=self.batch_size, class_mode='categorical', shuffle=False) model.summary() model.top_model.summary() # Fine-tune the model. 
        model.fit_generator(
            train_generator,
            steps_per_epoch=steps_per_epoch,  # nb_train_samples // batch_size
            epochs=epochs,
            validation_data=validation_generator,
            validation_steps=validation_steps)  # nb_validation_samples // batch_size
        model.save_weights(output_weights)
        if self.storage_bucket is not None:
            s3.upload_folder(output_weights,
                             bucket=self.storage_bucket,
                             prefix=self.base_model_name)
        print('Fine tuning is completed.')
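# Minimal end-to-end sketch of the intended workflow; the directories and
# weight filenames below are hypothetical placeholders:
#   1) extract bottleneck features from the frozen base model,
#   2) pretrain the dense top layers on those features,
#   3) fine-tune the last few layers of the full model.
if __name__ == '__main__':
    tuner = ZooperVisionFineTuner(
        base_model_name='ZooperInceptionResNetV2',
        dense_layer_size=512,
        num_classes=8,
        storage_bucket=None,  # set a bucket name to enable S3 uploads
        train_data_dir='/tmp/train',
        validation_data_dir='/tmp/validation')

    tuner.extract_bottlebeck_features(
        bottleneck_feature_file_prefix='bottleneck_features')
    tuner.train_top_model(output_weights_path='top_model.h5', epochs=10)

    # Fine-tune the last 3 layers, starting from the pretrained top weights.
    tuner.tune(weights_config={'top_layer': True, 'path': 'top_model.h5'},
               epochs=10,
               train_last_k_layers=3)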
zooper-common
/zooper_common-0.1.52-py3-none-any.whl/zooper_common/ai_finetuner.py
ai_finetuner.py
from __future__ import (absolute_import, division, print_function, unicode_literals) from builtins import object import errno import os import boto3 class S3Utility(object): """ S3Utility class. Usage: s3 = S3Utility() s3.upload_folder(file_name, bucket='bucket_name', prefix='images') s3.download_file(bucket_name, key, target) """ ACCESS_KEY = 'AKIA3ZXWQQGBBEMIWCGP' SECRET_KEY = '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' def __init__(self): self.s3_client = boto3.client( 's3', aws_access_key_id=self.ACCESS_KEY, aws_secret_access_key=self.SECRET_KEY) session = boto3.Session( aws_access_key_id=self.ACCESS_KEY, aws_secret_access_key=self.SECRET_KEY ) self.s3_session_resource = session.resource('s3') self.s3_resource = boto3.resource( 's3', aws_access_key_id=self.ACCESS_KEY, aws_secret_access_key=self.SECRET_KEY ) def upload_folder(self, path, bucket=None, prefix=None): """ Upload a folder or file to target bucket/prefix """ files = [] key_suffix = None if os.path.isdir(path): for (dirpath, dirnames, filenames) in os.walk(path): for name in filenames: local_path = os.path.join(dirpath, name) s3_relative_prefix = '' if path == dirpath else os.path.relpath(dirpath, start=path) + '/' s3_key = '{}/{}{}'.format(prefix, s3_relative_prefix, name) files.append((local_path, s3_key)) else: _, name = os.path.split(path) if prefix is None or prefix is '': s3_key = '{}'.format(name) else: s3_key = '{}/{}'.format(prefix, name) files.append((path, s3_key)) key_suffix = name for local_path, s3_key in files: self.s3_session_resource.Object(bucket, s3_key).upload_file(local_path) s3_uri = ('s3://{}'.format(bucket) if prefix is None or prefix is '' else 's3://{}/{}'.format(bucket, prefix)) # If a specific file was used as input (instead of a directory), we return the full S3 key # of the uploaded object. This prevents unintentionally using other files under the same prefix # during training. if key_suffix: s3_uri = '{}/{}'.format(s3_uri, key_suffix) return s3_uri def download_file(self, bucket_name, key, target): """ Download a file. Usage: s3.download_file('mybucket', 'images', '/tmp/file.jpg') """ print('Downloading {}/{} to {} ...'.format(bucket_name, key, target)) self.s3_resource.Bucket(bucket_name).download_file(key, target) def download_folder(self, bucket_name, prefix, target): """ Download folder (bucket_name/prefix) to a target on local storage """ bucket = self.s3_session_resource.Bucket(bucket_name) prefix = prefix.lstrip('/') print('Downloading bucket {}/{} to {} ...'.format(bucket_name, prefix, target)) # there is a chance that the prefix points to a file and not a 'directory' if that is the case # we should just download it. objects = list(bucket.objects.filter(Prefix=prefix)) if len(objects) > 0 and objects[0].key == prefix and prefix[-1] != '/': self.s3_session_resource.Object(bucket_name, prefix).download_file(os.path.join(target, os.path.basename(prefix))) return # the prefix points to an s3 'directory' download the whole thing for obj_sum in bucket.objects.filter(Prefix=prefix): # if obj_sum is a folder object skip it. if obj_sum.key != '' and obj_sum.key[-1] == '/': continue obj = self.s3_session_resource.Object(obj_sum.bucket_name, obj_sum.key) s3_relative_path = obj_sum.key[len(prefix):].lstrip('/') file_path = os.path.join(target, s3_relative_path) try: os.makedirs(os.path.dirname(file_path)) except OSError as exc: # EXIST means the folder already exists, this is safe to skip # anything else will be raised. 
                if exc.errno != errno.EEXIST:
                    raise
                pass
            if not os.path.exists(file_path):
                print('Downloading {} ...'.format(file_path))
                obj.download_file(file_path)
            else:
                print('Skip file {}, already exists'.format(file_path))

    def get_presigned_url(self, bucket, key, expires_seconds=3600):
        # Generate the URL to get 'key-name' from 'bucket-name'
        url = self.s3_client.generate_presigned_url(
            ClientMethod='get_object',
            Params={
                'Bucket': bucket,
                'Key': key
            },
            ExpiresIn=expires_seconds
        )
        return url

    def sync(self, source, dest):
        command = "aws s3 sync {} {}".format(source, dest)
        os.system(command)

    def key_exist(self, bucket_name, key):
        bucket = self.s3_resource.Bucket(bucket_name)
        try:
            # The builtin next() works on both Python 2 and 3; the original
            # Python 2-only ``.next()`` call would fail under Python 3.
            next(iter(bucket.objects.filter(Prefix=key)))
            return True
        except StopIteration:
            return False
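# Minimal usage sketch; the bucket names, keys and local paths are hypothetical.
# Note that S3Utility as written authenticates with the credentials hard-coded
# in the class, so those must be valid for any of these calls to succeed.
if __name__ == '__main__':
    s3 = S3Utility()

    # Upload a single file (returns the full S3 URI of the uploaded object).
    uri = s3.upload_folder('/tmp/model.h5', bucket='my-bucket', prefix='models')
    print(uri)

    # Generate a temporary (1 hour) download link for the object just uploaded.
    print(s3.get_presigned_url('my-bucket', 'models/model.h5',
                               expires_seconds=3600))

    # Mirror an S3 prefix to local disk.
    s3.download_folder('my-bucket', 'models', '/tmp/models')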
zooper-common
/zooper_common-0.1.52-py3-none-any.whl/zooper_common/s3.py
s3.py
from selenium import webdriver from selenium.webdriver.chrome.options import Options from selenium.webdriver.common.keys import Keys from selenium.webdriver import Chrome from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait from webdriver_manager.chrome import ChromeDriverManager import time from typing import Optional from selenium.webdriver.support import expected_conditions as EC from selenium.common.exceptions import NoSuchElementException from selenium.common.exceptions import TimeoutException class Scraper: ''' This class is a scraper that can be used for browsing different websites Parameters ---------- url: str The link that we want to visit Attribute --------- driver: THis is the webdriver object ''' def __init__(self, url): self.driver = Chrome(ChromeDriverManager().install()) self.driver.get(url) def accept_cookies(self, xpath: str, iframe: Optional[str] = None): ''' This method looks for and click on the accept ccokies button Parameters ---------- xpath: str The xpath of the accept cookies button iframe: Optional[str] The id of the iframe in case there is one in front of the accept cookies button ''' try: time.sleep(2) self.driver.switch_to.frame(iframe) cookies_button = ( WebDriverWait(self.driver, 10) .until(EC.presence_of_element_located(( By.XPATH, xpath)) ) ) print(cookies_button) self.driver.find_element(By.XPATH, xpath).click() except TimeoutException: print('No cookies found') def look_for_search_bar(self, xpath: str): ''' Looks for the search bar given the xpat Parameters ---------- xpath: str The xpath of the search bar Returns ------- Optional[webdriver.element] ''' try: time.sleep(1) search_bar = ( WebDriverWait(self.driver, 5) .until(EC.presence_of_element_located( (By.XPATH, xpath) ) ) ) search_bar.click() return search_bar except TimeoutException: print('No search bar found') return None def send_keys_to_search_bar(self, text: str, xpath: str) -> None: ''' Write something on a search bar Parameters ---------- text: str The text we want to pass to the search bar xpath: str xpath of the search bar ''' search_bar = self.look_for_search_bar(xpath) if search_bar: search_bar.send_keys(text) else: raise Exception('No search bar found') def find_container(self, xpath: str) -> None: ''' Finds the container of items in a website ''' return self.driver.find_element(By.XPATH, xpath) class ScraperZoopla(Scraper): ''' Scraper that works only for the zoopla website It will extract information about the price, n_bedrooms, n_bathrooms, and sqft of the properties in a certain location Parameters ---------- location: str The location to look properties in Attributes ---------- prop_dict: dict Contains price, bedrooms, bathrooms, and sqft of each property ''' def __init__(self, location: str): super().__init__('https://www.zoopla.co.uk') self.prop_dict = { 'Price': [], 'Bedrooms': [], 'Bathrooms': [], 'Sqft': [], } self.location = location def scrape_properties(self): self.accept_cookies(xpath='//button[@id="save"]', iframe='gdpr-consent-notice') self.send_keys_to_search_bar( text=self.location, xpath='//input[@id="header-location"]') time.sleep(1) list_locations = self.driver.find_element(By.XPATH, '//ul[@data-testid="autosuggest-list"]') time.sleep(1) list_locations.find_element(By.XPATH, './li').click() time.sleep(1) self.driver.find_element(By.XPATH, '//button[@data-testid="search-button"]').click() # container = self.find_container(xpath='//div[@class="css-1anhqz4-ListingsContainer earci3d2"]') # container.find_elements(By.XPATH) 
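# Hypothetical sketch of reusing the generic Scraper on another site; the URL,
# iframe id and XPath expressions are placeholders and would need to match the
# real page being scraped.
def example_generic_usage():
    scraper = Scraper('https://www.example.com')
    scraper.accept_cookies(xpath='//button[@id="accept-cookies"]',
                           iframe='consent-frame')
    scraper.send_keys_to_search_bar(text='laptops',
                                    xpath='//input[@id="search"]')
    # Grab the element that wraps the search results for further parsing.
    container = scraper.find_container(xpath='//div[@id="results"]')
    return container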
if __name__ == '__main__':
    bot = ScraperZoopla('London')
    bot.scrape_properties()
zoopla-scraper-test
/zoopla_scraper_test-0.0.2.tar.gz/zoopla_scraper_test-0.0.2/scraper_package/scraper.py
scraper.py
zoopla
======

|Build Status| |codecov|

A python wrapper for the Zoopla API.

Zoopla has launched an open API to allow developers to create applications using hyper local data on 27m homes, over 1m sale and rental listings, and 15 years of sold price data.

`Register`_ for a user account and apply for an instant API key. Browse the `documentation`_ to understand how to use the API and the specifications for the individual APIs.

Installation
------------

::

    $ pip install zoopla

Tests
-----

Install the dev requirements:

.. code:: sh

    $ pip install -r requirements.txt

Run py.test with your developer key (otherwise you won’t be able to hit the live API upon which these tests depend).

.. code:: sh

    $ py.test --api_key=<your-api-key> tests/  # pytest under Python 3+

Examples
--------

Retrieve property listings for a given area.

.. code:: python

    from zoopla import Zoopla

    zoopla = Zoopla(api_key='your_api_key')

    search = zoopla.property_listings({
        'maximum_beds': 2,
        'page_size': 100,
        'listing_status': 'sale',
        'area': 'Blackley, Greater Manchester'
    })

    for result in search.listing:
        print(result.price)
        print(result.description)
        print(result.image_url)

Retrieve a list of house price estimates for the requested area.

.. code:: python

    zed_indices = zoopla.area_zed_indices({
        'area': 'Blackley, Greater Manchester',
        'output_type': 'area',
        'area_type': 'streets',
        'order': 'ascending',
        'page_number': 1,
        'page_size': 10
    })

    print(zed_indices.town)
    print(zed_indices.results_url)

Generate a graph of values for an outcode over the previous 3 months and return the URL to the generated image.

.. code:: python

    area_graphs = zoopla.area_value_graphs({'area': 'SW11'})

    print(area_graphs.average_values_graph_url)
    print(area_graphs.value_trend_graph_url)

Retrieve the average sale price for houses in a particular area.

.. code:: python

    average = zoopla.average_area_sold_price({'area': 'SW11'})

    print(average.average_sold_price_7year)
    print(average.average_sold_price_5year)

Submit a viewing request to an agent regarding a particular listing.

.. code:: python

    session_id = zoopla.get_session_id()

    arrange_viewing = zoopla.arrange_viewing({
        'session_id': session_id,
        'listing_id': 44863256,
        'name': 'Tester',
        'email': "[email protected]",
        'phone': '01010101',
        'phone_type': 'work',
        'best_time_to_call': 'anytime',
        'message': 'Hi, I saw your listing on zoopla.co.uk and I would love to arrange a viewing!'
    })

Contributing
------------

- Fork the project and clone locally.
- Create a new branch for what you're going to work on.
- Push to your origin repository.
- Include tests and update documentation if necessary.
- Create a new pull request in GitHub.

.. _Register: http://developer.zoopla.com/member/register/
.. _documentation: http://developer.zoopla.com/docs/

.. |Build Status| image:: https://travis-ci.org/AnthonyBloomer/zoopla.svg?branch=master
   :target: https://travis-ci.org/AnthonyBloomer/zoopla
.. |codecov| image:: https://codecov.io/gh/AnthonyBloomer/zoopla/branch/master/graph/badge.svg
   :target: https://codecov.io/gh/AnthonyBloomer/zoopla
zoopla
/zoopla-1.0.0.tar.gz/zoopla-1.0.0/README.rst
README.rst
from zoopt.dimension import Dimension import numpy as np class SetCover: """ set cover problem for discrete optimization this problem has some extra initialization tasks, thus we define this problem as a class """ def __init__(self): self.__weight = [0.8356, 0.5495, 0.4444, 0.7269, 0.9960, 0.6633, 0.5062, 0.8429, 0.1293, 0.7355, 0.7979, 0.2814, 0.7962, 0.1754, 0.0267, 0.9862, 0.1786, 0.5884, 0.6289, 0.3008] self.__subset = [] self.__subset.append([0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0]) self.__subset.append([0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0]) self.__subset.append([1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0]) self.__subset.append([0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0]) self.__subset.append([1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1]) self.__subset.append([0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0]) self.__subset.append([0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0]) self.__subset.append([0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0]) self.__subset.append([0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0]) self.__subset.append([0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1]) self.__subset.append([0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0]) self.__subset.append([0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1]) self.__subset.append([1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1]) self.__subset.append([1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1]) self.__subset.append([0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1]) self.__subset.append([1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0]) self.__subset.append([1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1]) self.__subset.append([0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1]) self.__subset.append([0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0]) self.__subset.append([0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1]) def fx(self, solution): """ Objective function. :param solution: a Solution object :return: the value of f(x) """ x = solution.get_x() allweight = 0 countw = 0 for i in range(len(self.__weight)): allweight += self.__weight[i] dims = [] for i in range(len(self.__subset[0])): dims.append(False) for i in range(len(self.__subset)): if x[i] == 1: countw += self.__weight[i] for j in range(len(self.__subset[i])): if self.__subset[i][j] == 1: dims[j] = True full = True for i in range(len(dims)): if dims[i] is False: full = False if full is False: countw += allweight return countw @property def dim(self): """ Dimension of set cover problem. 
:return: Dimension instance """ dim_size = 20 dim_regs = [[0, 1]] * dim_size dim_tys = [False] * dim_size return Dimension(dim_size, dim_regs, dim_tys) def sphere(solution): """ Sphere function for continuous optimization """ x = solution.get_x() value = sum([(i-0.2)*(i-0.2) for i in x]) return value def sphere_mixed(solution): """ Sphere function for mixed optimization """ x = solution.get_x() value = sum([i*i for i in x]) return value def sphere_discrete_order(solution): """ Sphere function for integer continuous optimization """ x = solution.get_x() value = sum([(i-2)*(i-2) for i in x]) return value def ackley(solution): """ Ackley function for continuous optimization """ x = solution.get_x() bias = 0.2 ave_seq = sum([(i - bias) * (i - bias) for i in x]) / len(x) ave_cos = sum([np.cos(2.0*np.pi*(i-bias)) for i in x]) / len(x) value = -20 * np.exp(-0.2 * np.sqrt(ave_seq)) - np.exp(ave_cos) + 20.0 + np.e return value def ackley_noise_creator(mu, sigma): """ Ackley function under noise """ return lambda solution: ackley(solution) + np.random.normal(mu, sigma, 1)
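# Minimal sketch of optimizing the SetCover problem above with ZOOpt, following
# the same Objective/Parameter/Opt pattern used elsewhere in these examples;
# the budget value is an arbitrary choice for illustration.
if __name__ == '__main__':
    from zoopt import Objective, Parameter, Opt

    problem = SetCover()
    objective = Objective(problem.fx, problem.dim)  # dim is a property, not a method
    parameter = Parameter(budget=10000)
    solution = Opt.min(objective, parameter)
    solution.print_solution()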
zoopt
/zoopt-0.4.2-py3-none-any.whl/example/simple_functions/simple_function.py
simple_function.py
import numpy as np from zoopt import Objective, Parameter, ExpOpt, Dimension, Opt from simple_function import ackley, sphere class StoppingCriterion: """ This class defines a stopping criterion, which is used as a parameter of the class Parameter, and should implement check(self, optcontent) member function. """ def __init__(self): self.__best_result = 0 self.__count = 0 self.__total_count = 0 self.__count_limit = 100 def check(self, optcontent): """ This function is invoked at each iteration of the optimization. Optimization will stop early when this function returns True, otherwise, it is not affected. In this example, optimization will be stopped if the best result remains unchanged for 100 iterations. :param optcontent: an instance of the class RacosCommon. Several functions can be invoked to get the contexts of the optimization, which are listed as follows, optcontent.get_best_solution(): get the current optimal solution optcontent.get_data(): get all the solutions contained in the current solution pool optcontent.get_positive_data(): get positive solutions contained in the current solution pool optcontent.get_negative_data(): get negative solutions contained in the current solution pool :return: bool object. """ self.__total_count += 1 content_best_value = optcontent.get_best_solution().get_value() if content_best_value == self.__best_result: self.__count += 1 else: self.__best_result = content_best_value self.__count = 0 if self.__count >= self.__count_limit: print("stopping criterion holds, total_count: %d" % self.__total_count) return True else: return False if __name__ == '__main__': dim_size = 100 # form up the objective function objective = Objective(sphere, Dimension(dim_size, [[-1, 1]] * dim_size, [True] * dim_size)) budget = 100 * dim_size # if intermediate_result is True, ZOOpt will output intermediate best solution every intermediate_freq budget parameter = Parameter(budget=budget, intermediate_result=True, intermediate_freq=10, stopping_criterion=StoppingCriterion()) sol = Opt.min(objective, parameter) sol.print_solution()
zoopt
/zoopt-0.4.2-py3-none-any.whl/example/simple_functions/opt_with_stopping_criterion.py
opt_with_stopping_criterion.py
import gym from gym.spaces.discrete import Discrete from nn_model import NNModel class GymTask: """ This class sets a gym runtime environment. """ def __init__(self, name): """ Init function. :param name: gym task name """ self.reset_task() self.__envir = gym.make(name) # gym environment self.__envir_name = name # environment name self.__obser_size = self.__envir.observation_space.shape[0] # the number of parameters in observation self.__obser_up_bound = [] # the upper bound of parameters in observation self.__obser_low_bound = [] # the lower bound of parameters in observation self.total_step = 0 # total s self.__action_size = None # the number of parameters in action self.__action_sca = [] # environment action space, specified by gym self.__action_type = [] # the type of action, false means discrete self.__action_low_bound = [] # action lower bound self.__action_up_bound = [] # action upper bound # policy model, it's a neural network in this example self.__policy_model = None self.__max_step = 0 # maximum stop step self.__stop_step = 0 # the stop step in recent trajectory for i in range(self.__obser_size): self.__obser_low_bound.append( self.__envir.observation_space.high[i]) self.__obser_up_bound.append(self.__envir.observation_space.low[i]) # if the dimension of action space is one if isinstance(self.__envir.action_space, Discrete): self.__action_size = 1 self.__action_sca = [] self.__action_type = [] self.__action_sca.append(self.__envir.action_space.n) self.__action_type.append(False) # if action object is Box else: self.__action_size = self.__envir.action_space.shape[0] self.__action_type = [] self.__action_low_bound = [] self.__action_up_bound = [] for i in range(self.__action_size): self.__action_type.append(True) self.__action_low_bound.append( self.__envir.action_space.low[i]) self.__action_up_bound.append( self.__envir.action_space.high[i]) def reset_task(self): """ Reset gym runtime environment. :return: no return value """ self.__envir = None self.__envir_name = None self.__obser_size = None self.__obser_low_bound = [] self.__obser_up_bound = [] self.__action_type = [] self.__policy_model = None self.__max_step = 0 # def transform_action(self, temp_act): """ Transform action from neural network into true action. 
:param temp_act: output of the neural network :return: action """ action = [] for i in range(self.__action_size): # if action is continue if self.__action_type[i]: tmp_act = (temp_act[i]+1)*((self.__action_up_bound[i] - self.__action_low_bound[i])/2.0)+self.__action_low_bound[i] action.append(tmp_act) else: sca = 2.0 / self.__action_sca[0] start = -1.0 now_value = start + sca true_act = 0 while now_value <= 1.0: if temp_act[i] <= now_value: break else: now_value += sca true_act += 1 if true_act >= self.__action_sca[i]: true_act = self.__action_sca[i] - 1 action.append(true_act) if self.__action_size == 1 and self.__action_type[0] is False: action = action[0] return action def new_nnmodel(self, layers): """ Generate a new model :param layers: layer information :return: no return """ # initialize NN model as policy self.__policy_model = NNModel() self.__policy_model.construct_nnmodel(layers) return def nn_policy_sample(self, observation): """ Generate action from observation using neuron network policy :param observation: observation is the output of gym task environment :return: action to choose """ output = self.__policy_model.cal_output(observation) action = self.transform_action(output) return action def sum_reward(self, solution): """ Objective function of racos by summation of reward in a trajectory :param solution: a data structure containing x and fx :return: value of fx """ x = solution.get_x() sum_re = 0 # reset stop step self.__stop_step = self.__max_step # reset nn model weight self.__policy_model.decode_w(x) # reset environment observation = self.__envir.reset() for i in range(self.__max_step): action = self.nn_policy_sample(observation) observation, reward, done, info = self.__envir.step(action) sum_re += reward if done: self.__stop_step = i break self.total_step += 1 value = sum_re name = self.__envir_name # turn the direction for minimization if name == 'CartPole-v0' or name == 'CartPole-v1' or name == 'MountainCar-v0' or name == 'Acrobot-v1' or name == 'HalfCheetah-v1' \ or name == 'Humanoid-v1' or name == 'Swimmer-v1' or name == 'Ant-v1' or name == 'Hopper-v1' \ or name == 'LunarLander-v2' or name == 'BipedalWalker-v2': value = -value return value def get_environment(self): return self.__envir def get_environment_name(self): return self.__envir_name def get_observation_size(self): return self.__obser_size def get_observation_low_bound(self, index): return self.__obser_low_bound[index] def get_observation_upbound(self, index): return self.__obser_up_bound[index] def get_action_size(self): return self.__action_size def get_action_type(self, index): return self.__action_type[index] def get_stop_step(self): return self.__stop_step def get_w_size(self): return self.__policy_model.get_w_size() def set_max_step(self, ms): self.__max_step = ms return
zoopt
/zoopt-0.4.2-py3-none-any.whl/example/direct_policy_search_for_gym/gym_task.py
gym_task.py
from gym_task import GymTask from zoopt import Dimension, Objective, Parameter, ExpOpt, Opt, Solution def run_test(task_name, layers, in_budget, max_step, repeat, terminal_value=None): """ example of running direct policy search for gym task. :param task_name: gym task name :param layers: layer information of the neural network e.g., [2, 5, 1] means input layer has 2 neurons, hidden layer(only one) has 5 and output layer has 1 :param in_budget: number of calls to the objective function :param max_step: max step in gym :param repeat: repeat number in a test :param terminal_value: early stop, algorithm should stop when such value is reached :return: no return value """ gym_task = GymTask(task_name) # choose a task by name gym_task.new_nnmodel(layers) # construct a neural network gym_task.set_max_step(max_step) # set max step in gym budget = in_budget # number of calls to the objective function rand_probability = 0.95 # the probability of sample in model # set dimension dim_size = gym_task.get_w_size() dim_regs = [[-10, 10]] * dim_size dim_tys = [True] * dim_size dim = Dimension(dim_size, dim_regs, dim_tys) # form up the objective function objective = Objective(gym_task.sum_reward, dim) parameter = Parameter(budget=budget, terminal_value=terminal_value) parameter.set_probability(rand_probability) solution_list = ExpOpt.min(objective, parameter, repeat=repeat, plot=True) def run_test_handlingnoise(task_name, layers, in_budget, max_step, repeat, terminal_value): """ example of running direct policy search for gym task with noise handling. :param task_name: gym task name :param layers: layer information of the neural network e.g., [2, 5, 1] means input layer has 2 neurons, hidden layer(only one) has 5 and output layer has 1 :param in_budget: number of calls to the objective function :param max_step: max step in gym :param repeat: number of repeatitions for noise handling :param terminal_value: early stop, algorithm should stop when such value is reached :return: no return value """ gym_task = GymTask(task_name) # choose a task by name gym_task.new_nnmodel(layers) # construct a neural network gym_task.set_max_step(max_step) # set max step in gym budget = in_budget # number of calls to the objective function rand_probability = 0.95 # the probability of sample in model # set dimension dim_size = gym_task.get_w_size() dim_regs = [[-10, 10]] * dim_size dim_tys = [True] * dim_size dim = Dimension(dim_size, dim_regs, dim_tys) # form up the objective function objective = Objective(gym_task.sum_reward, dim) # by default, the algorithm is sequential RACOS parameter = Parameter(budget=budget, autoset=True, suppression=True, terminal_value=terminal_value) parameter.set_resample_times(70) parameter.set_probability(rand_probability) solution_list = ExpOpt.min(objective, parameter, repeat=repeat) def test(task_name, layers, max_step, solution): gym_task = GymTask(task_name) # choose a task by name gym_task.new_nnmodel(layers) # construct a neural network gym_task.set_max_step(max_step) # set max step in gym reward = gym_task.sum_reward(solution) print(reward) return reward if __name__ == '__main__': CartPole_layers = [4, 5, 1] mountain_car_layers = [2, 5, 1] acrobot_layers = [6, 5, 3, 1] halfcheetah_layers = [17, 10, 6] humanoid_layers = [376, 25, 17] swimmer_layers = [8, 5, 3, 2] ant_layers = [111, 15, 8] hopper_layers = [11, 9, 5, 3] lunarlander_layers = [8, 5, 3, 1] # run_test('CartPole-v0', CartPole_layers, 2000, 500, 1) solution = Solution(x=[0.9572737226684644, 0.6786734362488325, 3.034275386199532, 
-1.465937683272493, -2.851881104646097, -1.4061455678150114, 5.406235543363033, -6.525666803912518, 5.509873865601744, -0.2641441560205742, 0.16264240578115619, 7.142612522126051, 7.401277183520886, -8.143118688085988, 1.3939130264981063, -7.288693746967178, 4.370406888883354, 6.996497964270439, -0.506503274716799, 2.7761417375401347, 0.23427516091347123, 7.707963832464561, 6.790387947114599, 1.6543213356897475, 8.549797968853504]) test("CartPole-v0", CartPole_layers, 500, solution) # run_test('MountainCar-v0', mountain_car_layers, 2000, 1000, 1) # run_test_handlingnoise('MountainCar-v0', mountain_car_layers, 1000, 1000, 5, terminal_value=-500) # run_test('Acrobot-v1', acrobot_layers, 2000, 500, 10) # If you want to run the following examples, you may need to install more libs(mujoco, Box2D). # run_test('HalfCheetah-v1', halfcheetah_layers, 2000, 10000, 10) # run_test('Humanoid-v1', humanoid_layers, 2000, 50000, 10) # run_test('Swimmer-v1', swimmer_layers, 2000, 10000, 10) # run_test('Ant-v1', ant_layers, 2000, 10000, 10) # run_test('Hopper-v1', hopper_layers, 2000, 10000, 10) # run_test('LunarLander-v2', lunarlander_layers, 2000, 10000, 10)
zoopt
/zoopt-0.4.2-py3-none-any.whl/example/direct_policy_search_for_gym/run.py
run.py
import numpy as np import math class ActivationFunction: """ This class defines activation functions in neural network. """ @staticmethod # sigmoid function def sigmoid(x): """ Sigmoid function. :param x: input of the sigmoid function :return: value of sigmoid(x) """ for i in range(len(x)): if -700 <= x[i] <= 700: x[i] = (2 / (1 + math.exp(-x[i]))) - 1 # sigmoid function else: if x[i] < -700: x[i] = -1 else: x[i] = 1 return x class Layer(object): """ This class defines a layer in neural network. """ def __init__(self, in_size, out_size, input_w=None, activation_function=None): """ Init function. :param in_size: input size of this layer :param out_size: output size of this layer :param input_w: initial weight matrix :param activation_function: activation function of this layer """ self.__row = in_size self.__column = out_size self.__w = [] self.decode_w(input_w) self.__activation_function = activation_function self.__wx_plus_b = 0 self.outputs = 0 def cal_output(self, inputs): """ Forward prop of one layer. In this example, we ignore bias. :param inputs: input of this layer :return: output of this layer """ self.__wx_plus_b = np.dot(inputs, self.__w) if self.__activation_function is None: self.outputs = self.__wx_plus_b else: self.outputs = self.__activation_function(self.__wx_plus_b) return self.outputs # def decode_w(self, w): """ The input x is a vector and this function decompose w into a matrix. :param w: input weight vector :return: weight matrix """ if w is None: return interval = self.__column begin = 0 output = [] step = int(len(w) / interval) for i in range(step): output.append(w[begin: begin + interval]) begin += interval self.__w = np.array(output) return def get_row(self): return self.__row def get_column(self): return self.__column class NNModel: """ This class defines neural network. """ def __init__(self): self.__layers = [] self.__layer_size = [] self.__w_size = 0 return def construct_nnmodel(self, layers): """ This function constructs a neural network from a list. :param layers: layers is a list, each element is the number of neurons in each layer len(layers) is at least 2, including input layer and output layer :return: no return value """ self.__layer_size = layers for i in range(len(layers) - 1): self.add_layer(layers[i], layers[i + 1], activation_function=ActivationFunction.sigmoid) self.__w_size += layers[i] * layers[i + 1] def add_layer(self, in_size, out_size, input_w=None, activation_function=None): """ Add one layer in neural network. :param in_size: input size of this layer :param out_size: output size of this layer :param input_w: initial weight matrix :param activation_function: activation function of this layer :return: no return value """ new_layer = Layer(in_size, out_size, input_w, activation_function) self.__layers.append(new_layer) return # def decode_w(self, w): """ This function decomposes a big vector into several vectors and assign them to weight matrices of the neural network. In the direct policy search example, big vector is the concatenation of all flattened weight matrices and small vectors are flattened weight matrices. :param w: concatenation of all flattened weight matrices :return: no return value """ begin = 0 for i in range(len(self.__layers)): length = self.__layers[i].get_row() * self.__layers[i].get_column() w_temp = w[begin: begin + length] self.__layers[i].decode_w(w_temp) begin += length return # output y from input x def cal_output(self, x): """ Forward prop of the neural network. 
:param x: input of the neural network :return: output of the network """ out = x for i in range(len(self.__layers)): out = self.__layers[i].cal_output(out) return out def get_w_size(self): return self.__w_size
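# Small sketch of how the classes above fit together: build a 2-5-1 network,
# load one flat weight vector, and run a single forward pass. The random
# weights are purely illustrative.
if __name__ == '__main__':
    model = NNModel()
    model.construct_nnmodel([2, 5, 1])  # input, hidden and output layer sizes

    # One flat vector covering all layers: 2*5 + 5*1 = 15 weights here.
    w = np.random.uniform(-1, 1, model.get_w_size())
    model.decode_w(w)

    observation = np.array([0.1, -0.3])
    print(model.cal_output(observation))  # sigmoid output in (-1, 1)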
zoopt
/zoopt-0.4.2-py3-none-any.whl/example/direct_policy_search_for_gym/nn_model.py
nn_model.py
import numpy as np from zoopt import Opt, Parameter, Objective, Dimension, ExpOpt import codecs import arff class SparseMSE: """ This class implements the Sparse MSE problem. """ _X = 0 _Y = 0 _C = 0 _b = 0 _size = 0 _k = 0 _best_solution = None def __init__(self, filename): """ Initialization. :param filename: filename """ data = self.read_data(filename) self._size = np.shape(data)[1] - 1 self._X = data[:, 0: self._size] self._Y = data[:, self._size] self._C = self._X.T * self._X self._b = self._X.T * self._Y def position(self, s): """ This function is to find the index of s where element is 1 return a list of positions :param s: :return: a list of index of s where element is 1 """ n = len(s) result = [] for i in range(n): if s[i] == 1: result.append(i) return result def constraint(self, solution): """ If the constraints are satisfied, the constraint function will return a zero or positive value. Otherwise a negative value will be returned. :param solution: a Solution object :return: a zero or positive value which means constraints are satisfied, otherwise a negative value """ x = solution.get_x() return self._k-sum(x) def set_sparsity(self, k): self._k = k def get_sparsity(self): return self._k def loss(self, solution): """ loss function for sparse regression :param solution: a Solution object """ x = solution.get_x() if sum(x) == 0.0 or sum(x) >= 2.0 * self._k: return float('inf') pos = self.position(x) alpha = (self._C[pos, :])[:, pos] alpha = alpha.I * self._b[pos, :] sub = self._Y - self._X[:, pos]*alpha mse = sub.T*sub / np.shape(self._Y)[0] return mse[0, 0] def get_dim(self): """ Construct a Dimension object of this problem. :return: a dimension object of sparse mse. """ dim_regs = [[0, 1]] * self._size dim_tys = [False] * self._size return Dimension(self._size, dim_regs, dim_tys) def read_data(self, filename): """ Read data from file. :param filename: filename :return: normalized data """ file_ = codecs.open(filename, 'rb', 'utf-8') decoder = arff.ArffDecoder() dataset = decoder.decode(file_.readlines(), encode_nominal=True) file_.close() data = dataset['data'] return self.normalize_data(np.mat(data)) @staticmethod def normalize_data(data_matrix): """ Normalize data to have mean 0 and variance 1 for each column :param data_matrix: matrix of all data :return: normalized data """ try: mat_size = np.shape(data_matrix) for i in range(0, mat_size[1]): the_column = data_matrix[:, i] column_mean = sum(the_column)/mat_size[0] minus_column = np.mat(the_column-column_mean) std = np.sqrt(np.transpose(minus_column)*minus_column/mat_size[0]) data_matrix[:, i] = (the_column-column_mean)/std return data_matrix except Exception as e: print(e) finally: pass def get_k(self): return self._k
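# Sketch of running sparse regression with ZOOpt's Pareto optimization (POSS)
# solver. It assumes ZOOpt's Objective(..., constraint=...) and
# Parameter(algorithm='poss') interface; 'sonar.arff' is a placeholder for
# whatever ARFF dataset is available locally, and the budget is illustrative.
if __name__ == '__main__':
    mse = SparseMSE('sonar.arff')
    mse.set_sparsity(8)

    objective = Objective(func=mse.loss, dim=mse.get_dim(),
                          constraint=mse.constraint)
    parameter = Parameter(algorithm='poss', budget=2000)
    solution = Opt.min(objective, parameter)
    solution.print_solution()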
zoopt
/zoopt-0.4.2-py3-none-any.whl/example/sparse_regression/sparse_mse.py
sparse_mse.py
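A hedged sketch of wiring the SparseMSE problem above into zoopt. The dataset path, sparsity level, and budget are placeholder values; the 'poss' algorithm name and the constraint keyword follow the package's own sparse-regression example, so verify them against the zoopt version in use.

from zoopt import Objective, Parameter, ExpOpt
from sparse_mse import SparseMSE   # assumption: the file above is importable as sparse_mse

mse = SparseMSE('sonar.arff')      # placeholder ARFF dataset path
mse.set_sparsity(8)                # allow at most 8 non-zero coefficients
objective = Objective(func=mse.loss, dim=mse.get_dim(), constraint=mse.constraint)
parameter = Parameter(algorithm='poss', budget=500)   # placeholder budget
solution_list = ExpOpt.min(objective, parameter, repeat=1)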
import arff, codecs from zoopt import Dimension, Objective, Parameter, ExpOpt class RampLoss: """ Define ramploss learning loss function. """ __data = None __test = None __ramploss_c = 10 __ramploss_s = -1 __dim_size = 0 def __init__(self, arfffile): self.read_data(arfffile) def read_data(self, filename): """ Read data from file. :param filename: Name of the file to read :return: no return """ file_ = codecs.open(filename, 'rb', 'utf-8') decoder = arff.ArffDecoder() dataset = decoder.decode(file_.readlines(), encode_nominal=True) file_.close() self.__data = dataset['data'] if self.__data is not None and self.__data[0] is not None: self.__dim_size = len(self.__data[0]) def get_dim_size(self): return self.__dim_size def calc_product(self, weight, j): """ Calculate product between the weights and the instance. :param weight: weight vector :param j: the index of the instance :return: product value """ temp_sum = 0 for i in range(len(weight) - 1): temp_sum += weight[i] * self.__data[j][i] temp_sum += weight[len(weight) - 1] return temp_sum def calc_h(self, ylfx, st): """ Calculate hinge loss. """ temp = st - ylfx if temp > 0: return temp else: return 0 def calc_regularization(self, weight): """ Calculate regularization """ temp_sum = 0 for i in range(len(weight)): temp_sum += weight[i] * weight[i] return temp_sum def trans_label(self, i): """ Transform label from 0/1 to -1/+1 """ if self.__data[i][self.__dim_size - 1] == 1: return 1 else: return -1 # def eval(self, solution): """ Objectve function to calculate the ramploss. """ weight = solution.get_x() H1 = 0 Hs = 0 for i in range(len(self.__data)): fx = self.calc_product(weight, i) H1 += self.calc_h(self.trans_label(i) * fx, 1) Hs += self.calc_h(self.trans_label(i) * fx, self.__ramploss_s) regularization = self.calc_regularization(weight) value = regularization / 2 + self.__ramploss_c * H1 - self.__ramploss_c * Hs return value # def training_error(self, best): """ Training error. """ wrong = 0.0 for i in range(len(self.__data)): fx = self.calc_product(best, i) if fx * self.trans_label(i) <= 0: wrong += 1 rate = wrong / len(self.__data) return rate def dim(self): """ Construct dimension of this problem. """ return Dimension(self.__dim_size, [[-10, 10]] * self.__dim_size, [True] * self.__dim_size) if __name__=='__main__': # read data loss = RampLoss('ionosphere.arff') objective = Objective(loss.eval, loss.dim()) budget = 100 * loss.get_dim_size() parameter = Parameter(budget=budget) solution_list = ExpOpt.min(objective, parameter, repeat=1, plot=True, plot_file="img/ramploss.png")
zoopt
/zoopt-0.4.2-py3-none-any.whl/example/linear_classifier_using_ramploss/ramploss.py
ramploss.py
=============================== Zoort =============================== .. image:: https://mejorando.la/static/images/logos/mejorandola.png :target: https://www.mejorando.la .. image:: https://badge.fury.io/py/zoort.png :target: http://badge.fury.io/py/zoort .. image:: https://pypip.in/d/zoort/badge.png :target: https://crate.io/packages/zoort?version=latest A Python script for automatic MongoDB backups. Features -------- * Back up a single database or all of your MongoDB databases. * Encrypt and decrypt the output dump file. * Upload the dump file to an S3 bucket. Requirements ------------ - Python 2.6 | 2.7 License ------- MIT licensed. See the bundled `LICENSE <https://github.com/yograterol/zoort/blob/master/LICENSE>`_ file for more details.
zoort
/zoort-0.1.8.tar.gz/zoort-0.1.8/README.rst
README.rst
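The README above lists the features; the tool itself reads its settings from a JSON config file (see the zoort.py module later in this listing). A hedged sketch of the keys that zoort's load_config expects in /etc/zoort/config.json, written as the Python dict it would be dumped from; every value here is a placeholder.

import json

config = {
    'admin_user': 'mongo-admin',             # MongoDB admin user (placeholder)
    'admin_password': 'secret',              # MongoDB admin password (placeholder)
    'password_file': 'aes-passphrase',       # passphrase used to AES-encrypt dumps (placeholder)
    'aws': {
        'aws_access_key': 'AKIA...',         # placeholder credentials
        'aws_secret_key': '...',
        'aws_bucket_name': 'my-backups',     # S3 bucket for dumps (placeholder)
        'aws_vault_name': 'my-vault',        # Glacier vault for dumps (placeholder)
        'aws_key_name': 'dump/',             # key prefix inside the bucket
    },
    'delete_backup': 1,                      # purge old backups
    'delete_weeks': 2,                       # backups older than this many weeks are deleted
}

with open('/etc/zoort/config.json', 'w') as fh:
    json.dump(config, fh)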
from __future__ import unicode_literals, print_function import json import os import datetime import time import dateutil.parser import boto import shutil import ftplib import dropbox from boto.s3.key import Key from docopt import docopt from swiftclient import Connection, ClientException, get_keystoneclient_2_0 from functools import wraps from fabric.api import local, hide from fabric.colors import blue, red, green from sqlalchemy import create_engine from sqlalchemy.ext.declarative import declarative_base from sqlalchemy import Column, Integer, String from sqlalchemy.orm import sessionmaker Base = declarative_base() try: input = raw_input except NameError: pass __version__ = '0.1.8' __author__ = 'Yohan Graterol' __license__ = 'MIT' ADMIN_USER = None ADMIN_PASSWORD = None AWS_ACCESS_KEY = None AWS_SECRET_KEY = None AWS_BUCKET_NAME = None AWS_VAULT_NAME = None AWS_KEY_NAME = None PASSWORD_FILE = None DELETE_BACKUP = None DELETE_WEEKS = None # Can be loaded from an import, but I put here # for simplicity. _error_codes = { 100: u'Error #00: Can\'t load config.', 101: u'Error #01: Database is not define.', 103: u'Error #03: Backup name is not defined.', 104: u'Error #04: Bucket name is not defined.', 105: u'Error #05: Path for dump is not dir.', 106: u'Error #06: Path is not file.', 107: u'Error #07: Storage provider is wrong!', 108: u'Error #08: Configure error!', 109: u'Error #09: Oh, you are not root user! :(', 110: u'Error #10: Path for sqlite database is not defined!.', 111: u'Error #11: Backup path is invalid.', 112: u'Error #12: Unable to connect to {0}: {1}', 113: u'Error #13: Can\'t create directory {0} in {1}: {2}', 114: u'Error #14: Can\'t change directory to {0}: {1}', 115: u'Error #15: Can\'t upload file {0} in {1}: {2}', 116: u'Error #16: Can\'t delete file {0}: {1}', 117: u'Error #17: Can\'t get file date {0}: {1}', 200: u'Warning #00: Field is requerid!', 201: u'Warning #01: Field Type is wrong!', 300: u'Success #00: Zoort is configure :)' } def factory_uploader(type_uploader, *args, **kwargs): def get_diff_date(creation_date): ''' Return the difference between backup's date and now ''' now = int(time.time()) format = '%m-%d-%Y %H:%M:%S' date_parser = dateutil.parser.parse(creation_date) # convert string date to '%m-%d-%Y %H:%M:%S' format cd_strf = date_parser.strftime(format) # convert '%m-%d-%Y %H:%M:%S' to time.struct_time cd_struct = time.strptime(cd_strf, format) # convert time.struct_time to seconds cd_time = int(time.mktime(cd_struct)) return now - cd_time class AWSS3(object): def __init__(self, *args, **kwargs): super(AWSS3, self).__init__() self.__dict__.update(kwargs) if not self.name_backup: raise SystemExit(_error_codes.get(103)) if not self.bucket_name: raise SystemExit(_error_codes.get(104)) # Connect to S3 self.conn = boto.connect_s3(AWS_ACCESS_KEY, AWS_SECRET_KEY) # Get the bucket self.bucket = self.conn.get_bucket(self.bucket_name) def upload(self): global AWS_KEY_NAME if not AWS_KEY_NAME: AWS_KEY_NAME = 'dump/' print(blue('Uploading file to S3...')) # Delete all backups of two weeks before self._delete(bucket=self.bucket) k = Key(self.bucket) s3_key = (normalize_path(AWS_KEY_NAME) + 'week-' + str(datetime.datetime.now().isocalendar()[1]) + '/' + self.name_backup.split('/')[-1]) print(blue('Uploading {0} to {1}.'.format(self.name_backup, s3_key))) k.key = s3_key k.set_contents_from_filename(self.name_backup) def _get_old_backups(self, bucket): ret = [] dif = DELETE_WEEKS * 7 * 24 * 60 for key in bucket.list(): if get_diff_date(key.last_modified) >= dif: 
ret.append(key) return ret def _delete(self, bucket): global DELETE_BACKUP if not DELETE_BACKUP: return for key in self._get_old_backups(bucket): key.delete() class AWSGlacier(object): class File(Base): __tablename__ = 'file' id = Column(Integer, primary_key=True) date_upload = Column(String) filename = Column(String) archive_id = Column(String) def __init__(self, date_upload, filename, archive_id): self.date_upload = date_upload self.filename = filename self.archive_id = archive_id def __repr__(self): return "<File('%s','%s', '%s')>" % (self.date_upload, self.filename, self.archive_id) File.__table__ File.__mapper__ def __init__(self, *args, **kwargs): super(AWSGlacier, self).__init__() self.__dict__.update(kwargs) self.path = kwargs.get('path', None) self.name_backup = kwargs.get('name_backup', None) self.glacier_connection = \ boto.connect_glacier(aws_access_key_id=AWS_ACCESS_KEY, aws_secret_access_key=AWS_SECRET_KEY) self.vault = self.glacier_connection.get_vault(AWS_VAULT_NAME) def connect_db(self): if not self.path: raise SystemExit(110) self.engine = create_engine('sqlite:///{0}'.format(self.path)) Base.metadata.create_all(self.engine) Session = sessionmaker(bind=self.engine) self.session = Session() def commit(self): self.session.dirty self.session.new self.session.commit() def add_archive_id(self, archiveID): archive_data = self.File(str(time.time()), self.name_backup, archiveID) self.session.add(archive_data) def get_file_from_time(self, time_old): return self.session.query( self.File).filter(self.File.date_upload <= time_old) def download_all_backups(self): jobs = self.vault.list_jobs(completed=True) for job in jobs: print(green('Downloading {0}'.format(job))) if job.description: job.download_to_file(job.description) def upload(self): self.connect_db() if not self.name_backup: raise SystemExit(111) print(green('Uploading file to Glacier...')) archive_id = self.vault.upload_archive( self.name_backup, description=self.name_backup.split('/')[-1]) retrieve_job = self.vault.retrieve_archive(archive_id) print(green('The job {0} is begin...'.format(retrieve_job))) self.add_archive_id(archive_id) self.delete() self.commit() def delete(self): print(green("Checking old backups...")) dif = time.time() - DELETE_WEEKS * 7 * 24 * 60 * 60 archive_id_set = self.get_file_from_time(dif) for archive in archive_id_set: print(red('Deleting {0}...'.format(archive[0]))) self.vault.delete_archive(archive[0]) self.session.delete(archive) self.session.flush() class FTP(object): def __init__(self, *args, **kwargs): super(FTP, self).__init__() self.__dict__.update(kwargs) config_data = get_config_json() self.host = kwargs.get('host', config_data.get('ftp').get('host')) self.user = kwargs.get('user', config_data.get('ftp').get('user')) self.passwd = kwargs.get('passwd', config_data.get('ftp').get('passwd')) self.path = normalize_path(kwargs.get('path', config_data.get('ftp').get('path'))) self.host = kwargs.get('host', config_data.get('ftp').get('host')) self.user = kwargs.get('user', config_data.get('ftp').get('user')) self.passwd = kwargs.get('passwd', config_data.get('ftp').get('passwd')) self.path = normalize_path(kwargs.get('path', config_data.get('ftp').get('path'))) self.name_backup = kwargs.get('name_backup', None) if not self.name_backup: raise SystemExit(_error_codes.get(103)) def connect(self): try: self.conn = ftplib.FTP(self.host, self.user, self.passwd) except Exception, e: raise SystemExit(_error_codes.get(12).format(self.host, e)) print('Connected to {0}'.format(self.host)) def 
disconnect(self): self.conn.quit() def mkdir(self, dirname): try: self.conn.mkd(dirname) except Exception, e: raise SystemExit(_error_codes.get(13).format( dirname, self.conn.pwd(), e)) def change_dir(self, dirname): try: self.conn.cwd(dirname) except Exception, e: raise SystemExit(_error_codes.get(14).format(dirname, e)) def send_file(self, filename): try: backup_file = open(filename, 'rb') self.conn.storbinary('STOR ' + filename, backup_file) except Exception, e: raise SystemExit(_error_codes.get(15). format(filename, self.path, e)) def delete_file(self, filename): try: self.conn.delete(filename) except Exception, e: raise SystemExit(_error_codes.get(16).format(filename, e)) def get_file_date(self, filename): try: mdtm = self.conn.sendcmd('MDTM ' + filename) except Exception, e: raise SystemExit(_error_codes.get(17).format(filename, e)) return mdtm[4:] def list_files(self): ''' Return all files in the actual directory without '.' and '..' ''' ret = [] for path in self.conn.nlst(): if path in ['.', '..']: continue ret.append(path) return ret def goto_path(self, path): ''' Change to 'path' directory or create if not exist ''' try: self.conn.cwd(self.folder) except: self.change_dir('/') for folder in path.split('/'): if not folder: continue if not folder in self.conn.nlst(): self.mkdir(folder) self.change_dir(folder) def upload(self): self.connect() path = (normalize_path(self.path) + 'week-' + str(datetime.datetime.now().isocalendar()[1])) self.goto_path(path) print('Uploading file to {0} in {1}'.format(self.host, self.conn.pwd())) name_backup = self.name_backup.split('/')[-1] self.send_file(name_backup) self.delete() self.disconnect() def delete(self): global DELETE_BACKUP if not DELETE_BACKUP: return for filename in self._get_old_backup(): self.delete_file(filename) def _get_old_backup(self): global DELETE_WEEKS ret = [] dif = DELETE_WEEKS * 7 * 24 * 60 self.goto_path(self.path) for path in self.list_files(): self.change_dir(path) for backup in self.list_files(): if get_diff_date(self.get_file_date(backup)) >= dif: # Add full path of backup ret.append(self.conn.pwd() + '/' + backup) self.change_dir('..') return ret class DropboxStorage(object): def __init__(self, *args, **kwargs): super(DropboxStorage, self).__init__() self.__dict__.update(kwargs) config_data = get_config_json() self.auth_token = config_data.get('dropbox').get('auth_token') self.path = config_data.get('dropbox').get('path') self.name_backup = kwargs.get('name_backup', None) self.client = dropbox.client.DropboxClient(self.auth_token) if not self.name_backup: raise SystemExit(_error_codes.get(103)) def send_file(self, filename): try: backup_file = open(filename, 'rb') response = self.client.put_file(filename, backup_file) print('Uploading file {0} to directory "{1}" on Dropbox'. format(filename, response.get('root'))) except Exception, e: raise SystemExit(_error_codes.get(115).format( filename, 'Dropbox', e)) def upload(self): name_backup = self.name_backup.split('/')[-1] self.send_file(name_backup) class SwiftStorage(object): def __init__(self, *args, **kwargs): super(SwiftStorage, self).__init__() self.__dict__.update(kwargs) config_data = get_config_json() # Load config variables. 
self.auth_url = config_data.get('swift').get('auth_url') self.access_key = config_data.get('swift').get('access_key') self.secret_key = config_data.get('swift').get('secret_key') self.auth_version = config_data.get('swift').get('auth_version') self.tenant_name = config_data.get('swift').get('tenant_name') self.insecure = True self.container = config_data.get('swift').get('container') self.name_backup = kwargs.get('name_backup', None) self.conn = Connection( authurl=self.auth_url, user=self.access_key, key=self.secret_key, auth_version=self.auth_version, tenant_name=self.tenant_name, insecure=self.insecure) try: self.conn.head_container(self.container) except: self.conn.put_container(self.container) if not self.name_backup: raise SystemExit(_error_codes.get(103)) def send_file(self, filename, **kwargs): try: backup_file = open(filename, 'rb') response = self.conn.put_object( self.container, filename, backup_file) print('Uploading file {0} to container "{1}" on swift'. format(filename, self.container)) except Exception, e: raise SystemExit(_error_codes.get(115).format( filename, 'Swift', e)) def upload(self): name_backup = self.name_backup.split('/')[-1] self.send_file(name_backup) uploaders = {'S3': AWSS3, 'Glacier': AWSGlacier, 'FTP': FTP, 'Dropbox': DropboxStorage, 'Swift': SwiftStorage} upload = uploaders.get(type_uploader)(*args, **kwargs) if not upload: raise SystemExit(_error_codes.get(107)) action = kwargs.get('action') if action == 'upload': upload.upload() elif action == 'download': upload.download_all_backups() def transform_type(value, typ=None): if not typ: return value try: return typ(value) except ValueError: print(red(_error_codes.get(201))) return def get_input(msg, is_password=False, verify_type=None): import getpass if is_password: inp = getpass.getpass else: inp = input in_user = transform_type(inp(msg), verify_type) while not in_user: print(red(_error_codes.get(200))) in_user = transform_type(inp(msg), verify_type) return in_user def get_config_json(): config = None try: config = open('/etc/zoort/config.json') except IOError: try: config = open( os.path.join( os.path.expanduser('~'), '.zoort/config.json')) except IOError: raise SystemExit(_error_codes.get(100)) config_data = json.load(config) return config_data def configure(service=None): print(''' Zoort v-{0} Please fill all fields for configure Zoort. '''.format(__version__)) # Check if is root user if os.geteuid() != 0: raise SystemExit(_error_codes.get(109)) config_dict = dict() config_dict['admin_user'] = get_input('MongoDB User Admin: ') config_dict['admin_password'] = \ get_input('MongoDB Password Admin (Is hidden): ', True) if 'aws' in service: # Define dict to aws key config_dict['aws'] = dict() try: if int(get_input('Do you want use Amazon Web Services S3? ' ' (1 - Yes / 0 - No): ', verify_type=int)): config_dict['aws']['aws_bucket_name'] = \ get_input('AWS Bucket S3 name: ') if int(get_input('Do you want use Amazon Web Services Glacier? ' ' (1 - Yes / 0 - No): ', verify_type=int)): config_dict['aws']['aws_vault_name'] = \ get_input('AWS Vault Glacier name: ') config_dict['aws']['aws_key_name'] = \ get_input('Key name for backups file: ') config_dict['password_file'] = \ get_input('Password for encrypt with AES (Is hidden): ', True) config_dict['delete_backup'] = \ int(get_input('Do you want delete old backups? ' ' (1 - Yes / 0 - No): ', verify_type=int)) if config_dict['delete_backup']: config_dict['delete_weeks'] = \ get_input('When weeks before of backups do you want delete? 
' '(Number please) ', verify_type=int) if 'dropbox' in service: # Define dict to dropbox key config_dict['dropbox'] = dict() # Dropbox Variables try: config_dict['dropbox']['app_key'] = \ get_input('Dropbox app key: ') config_dict['dropbox']['secret_key'] = \ get_input('Dropbox secret key: ') flow = dropbox.client.DropboxOAuth2FlowNoRedirect( config_dict['dropbox']['app_key'], config_dict['dropbox']['secret_key']) authorize_url = flow.start() config_dict['dropbox']['auth_code'] = \ get_input('Go to ' + authorize_url + ', allow,' + 'an put the code here: ') access_token, user_id = flow.finish( config_dict['dropbox']['auth_code']) config_dict['dropbox']['auth_token'] = \ get_input('This is your access token ' + access_token + ' Type the code: ') except ValueError: raise SystemExit(_error_codes.get(108)) if 'swift' in service: # Define dict to swift config_dict['swift'] = dict() # swift variables try: config_dict['swift']['auth_url'] = \ get_input('Swift Auth url: ') config_dict['swift']['access_key'] = \ get_input('Swift username: ') config_dict['swift']['secret_key'] = \ get_input('Swift password: ') config_dict['swift']['auth_version'] = \ get_input('Swift version auth used: ') config_dict['swift']['tenant_name'] = \ get_input('Swift tenant used: ') config_dict['swift']['container'] = \ get_input('Swift container used: ') except ValueError: raise SystemExit(_error_codes.get(108)) except ValueError: raise SystemExit(_error_codes.get(108)) with open('/etc/zoort/config.json', 'w') as config: json.dump(config_dict, config) print(green(_error_codes.get(300))) def load_config(func): ''' @Decorator Load config from JSON file. ''' @wraps(func) def wrapper(*args, **kwargs): config_data = get_config_json() try: global ADMIN_USER global ADMIN_PASSWORD global AWS_ACCESS_KEY global AWS_SECRET_KEY global AWS_BUCKET_NAME global AWS_VAULT_NAME global AWS_KEY_NAME global PASSWORD_FILE global DELETE_BACKUP global DELETE_WEEKS ADMIN_USER = config_data.get('admin_user') ADMIN_PASSWORD = config_data.get('admin_password') PASSWORD_FILE = config_data.get('password_file') AWS_ACCESS_KEY = config_data.get('aws').get('aws_access_key') AWS_SECRET_KEY = config_data.get('aws').get('aws_secret_key') AWS_BUCKET_NAME = config_data.get('aws').get('aws_bucket_name') AWS_VAULT_NAME = config_data.get('aws').get('aws_vault_name') AWS_KEY_NAME = config_data.get('aws').get('aws_key_name') DELETE_BACKUP = config_data.get('delete_backup') DELETE_WEEKS = config_data.get('delete_weeks') except ValueError, e: print(e) return func(*args, **kwargs) return wrapper def normalize_path(path): ''' Add slash to path end ''' if path[-1] != '/': return path + '/' return path def compress_folder_dump(path, target): ''' Compress folder dump to tar.gz file ''' import tarfile if not path or not os.path.isdir(path): raise SystemExit(_error_codes.get(105)) name_out_file = (target + 'dump-' + datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')) tar = tarfile.open(name_out_file + '.tar.gz', 'w:gz') tar.add(path, arcname='dump') tar.close() return (name_out_file, name_out_file + '.tar.gz') def encrypt_file(path, output, password=None): ''' Encrypt file with AES method and password. ''' if not password: password = PASSWORD_FILE query = 'openssl aes-128-cbc -salt -in {0} -out {1} -k {2}' with hide('output'): local(query.format(path, output, password)) os.remove(path) def decrypt_file(path, password=None): ''' Decrypt file with AES method and password. 
''' global PASSWORD_FILE if not password: password = PASSWORD_FILE if path and not os.path.isfile(path): raise SystemExit(_error_codes.get(106)) query = 'openssl aes-128-cbc -d -salt -in {0} -out {1} -k {2}' with hide('output'): local(query.format(path, path + '.tar.gz', PASSWORD_FILE)) def optional_actions(encrypt, path, compress_file, **kwargs): ''' Optional actions about of AWS S3 and encrypt file. ''' yes = ('y', 'Y') file_to_upload = normalize_path(path) + compress_file[1] if encrypt in yes: encrypt_file(compress_file[1], compress_file[0]) file_to_upload = compress_file[0] if kwargs.get('s3') in yes: factory_uploader('S3', name_backup=file_to_upload, bucket_name=AWS_BUCKET_NAME, action='upload') if kwargs.get('glacier') in yes: factory_uploader('Glacier', name_backup=file_to_upload, vault_name=AWS_VAULT_NAME, path=os.path.join(os.path.expanduser('~'), '.zoort.db'), action='upload') if kwargs.get('dropbox') in yes: factory_uploader('Dropbox', name_backup=file_to_upload, action='upload') if kwargs.get('swift') in yes: factory_uploader('Swift', name_backup=file_to_upload, action='upload') @load_config def main(): '''Main entry point for the mongo_backups CLI.''' args = docopt(__doc__, version=__version__) if args.get('backup'): backup_database(args) if args.get('backup_all'): backup_all(args) if args.get('decrypt'): decrypt_file(args.get('<path>')) if args.get('configure'): configure(service='all') if args.get('configure-aws'): configure(service='aws') if args.get('configure-dropbox'): configure(service='dropbox') if args.get('configure-swift'): configure(service='swift') if args.get('download_all'): download_all() def download_all(): factory_uploader('Glacier', action='download') def backup_database(args): ''' Backup one database from CLI ''' username = args.get('<user>') password = args.get('<password>') database = args['<database>'] host = args.get('<host>') or '127.0.0.1' path = args.get('--path') or os.getcwd() s3 = args.get('--upload_s3') glacier = args.get('--upload_glacier') dropbox = args.get('--upload_dropbox') swift = args.get('--upload_swift') encrypt = args.get('--encrypt') or 'Y' if not database: raise SystemExit(_error_codes.get(101)) if path and not os.path.isdir(path): raise SystemExit(_error_codes.get(105)) query = 'mongodump -d {database} --host {host} ' if username: query += '-u {username} ' if password: query += '-p {password} ' if path: query += '-o {path}/dump' local(query.format(username=username, password=password, database=database, host=host, path=path)) compress_file = compress_folder_dump( normalize_path(path) + 'dump', normalize_path(path)) shutil.rmtree(normalize_path(path) + 'dump') optional_actions(encrypt, path, compress_file, s3=s3, glacier=glacier, dropbox=dropbox, swift=swift) def backup_all(args): ''' Backup all databases with access user. 
''' username = None password = None auth = args.get('--auth') path = args.get('--path') s3 = args.get('--upload_s3') glacier = args.get('--upload_glacier') dropbox = args.get('--upload_dropbox') swift = args.get('--upload_swift') encrypt = args.get('--encrypt') or 'Y' if (ADMIN_USER and ADMIN_PASSWORD): username = ADMIN_USER password = ADMIN_PASSWORD if not path: path = os.getcwd() if path and not os.path.isdir(path): raise SystemExit(_error_codes.get(105)) if auth: query = 'mongodump -u {username} -p {password} ' else: query = 'mongodump ' if path: query += '-o {path}/dump' local(query.format(username=username, password=password, path=path)) compress_file = compress_folder_dump( normalize_path(path) + 'dump', normalize_path(path)) shutil.rmtree(normalize_path(path) + 'dump') optional_actions(encrypt, path, compress_file, s3=s3, glacier=glacier, dropbox=dropbox, swift=swift) if __name__ == '__main__': main()
zoort
/zoort-0.1.8.tar.gz/zoort-0.1.8/zoort.py
zoort.py
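A minimal sketch of calling factory_uploader directly, mirroring what optional_actions() in the module above does once a dump has been compressed and encrypted. The file path and bucket name are placeholders, the import name is an assumption, and the module globals (AWS keys, bucket, deletion policy) must already have been populated from /etc/zoort/config.json.

from zoort import factory_uploader   # assumption: the module above imports as zoort

factory_uploader('S3',
                 name_backup='/var/backups/dump-2014-01-01.tar.gz',   # placeholder dump path
                 bucket_name='my-backups',                            # placeholder bucket
                 action='upload')

# Previously archived Glacier backups can be fetched back with the download action,
# which is what the download_all() entry point does:
factory_uploader('Glacier', action='download')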
Zoosync ======= Zoosync is a simple service discovery tool using Zookeeper as the database backend. Usage ===== See `zoosync --help` for brief usage or the manual page for more detailed usage. The output is in the form of shell variable assignments. The tool can be used this way:: ZOO='zoo1.example.com,zoo2.example.com,zoo3.example.com' REQ_SERVICES='impala,hadoop-hdfs,test,test2,test3' zoosync --zookeeper ${ZOO} --services ${REQ_SERVICES} cleanup eval `zoosync --zookeeper ${ZOO} --services ${REQ_SERVICES} --wait 1800 wait` echo "active: ${SERVICES}" echo "missing: ${MISSING}" Deployment ========== :: # install pip install zoosync # configure (/etc/zoosynrc and startup scripts) zoosync -z zoo1,zoo2,zoo3 -s service1,service2 -u user -p password deploy Tests ===== Tests require a running Zookeeper and a proper zoosync configuration (see Usage). Launch:: python setup.py test
zoosync
/zoosync-3.0.2.tar.gz/zoosync-3.0.2/DESCRIPTION.rst
DESCRIPTION.rst
import requests import logging import time logger = logging.getLogger(__name__) try: # python3 compatibility import urllib.parse as urlparse except ImportError: import urlparse try: # i prefer ultrajson encoder import ujson as json except ImportError: import json from . import ZOOZ_SANDBOX from .utils import backoff_retry from .exceptions import ZoozException ZOOZ_API_VERSION = '1.0.4' ZOOZ_URLS = { 'production': 'https://app.zooz.com/', 'sandbox': 'https://sandbox.zooz.co/', } class ZoozRequestBase(object): """ Base client for the Zooz API """ def __init__(self, *args, **kwargs): self.requests = requests.Session() @property def get_url(self): global ZOOZ_SANDBOX global ZOOZ_URLS if ZOOZ_SANDBOX: return ZOOZ_URLS['sandbox'] else: return ZOOZ_URLS['production'] @backoff_retry(retries=5) def post(self, url, payload, headers): """ Add authentication headers to the request """ return self.requests.post(url, data=payload, headers=headers) def _parse_response_nvp(self, response): """ parse_qs will build a dictionary of {key: [list]}, this will return a plain dict """ response_dict = urlparse.parse_qs(response, keep_blank_values=True) return {k: v[0] for (k, v) in response_dict.items()} class ZoozRequest(ZoozRequestBase): """ Client for the ZooZ Server API Go to https://app.zooz.com/portal/PortalController?cmd=resources to see complete API documentation For authentication, some keys are needed: unique_id: as registered in the ZooZ developer portal app_key: as received upon registration By default, requests will be done to the 'production' ZooZ servers, so all transactions and payment will be real, to allow 'sandbox' mode just change the global variable ZOOZ_SANDBOX ZOOZ_SANDBOX = True """ def __init__(self, unique_id, app_key): self.unique_id = unique_id self.app_key = app_key super(ZoozRequest, self).__init__() @property def get_url(self): """ Returns the final URI needed to do requests to the secured servlet """ return super(ZoozRequest, self).get_url + 'mobile/SecuredWebServlet' def add_authentication(self): headers = { 'ZooZUniqueID': self.unique_id, 'ZooZAppKey': self.app_key, 'ZooZResponseType': 'NVP' } return headers def open_transaction(self, amount, currency_code, extra=None): """ Open a transaction using a secured channel to ZooZ. amount: The amount to pay. currency_code: ISO code of the currency used to pay. Optional parametres can be used, use extra and a dict for a list of parameters name, see ZooZ mobile web developer guide. :returns: Unique token used to identify the transaction. 
'statusCode': If equals to zero, request succeeded 'errorMessage': Will contain the error message 'token': Token generated :raises: ZoozException in case request fails """ payload = { 'cmd': 'openTrx', 'amount': amount, 'currencyCode': currency_code, } if extra: payload.update(extra) headers = self.add_authentication() logger.debug('[ZOOZ] open transaction: %s', payload) response = self._parse_response_nvp( self.post(self.get_url, payload, headers).text) if int(response['statusCode']) != 0: raise ZoozException( response['errorMessage'], response['statusCode']) return response class ZoozRequestExtended(ZoozRequestBase): """ Client for the ZooZ Extended Server API Go to https://app.zooz.com/portal/PortalController?cmd=resources to see complete API documentation developer_id: developer email used to log in ZooZ Developers portal app_key: Server API Key found in ZooZ portal -> My Account By default, requests will be done to the 'production' ZooZ servers, so all transactions and payment will be real, to allow 'sandbox' mode just change the global variable ZOOZ_SANDBOX ZOOZ_SANDBOX = True """ def __init__(self, developer_id, api_key): self.developer_id = developer_id self.api_key = api_key super(ZoozRequestExtended, self).__init__() @property def get_url(self): """ Returns the final URI needed to do requests to extended API """ return super( ZoozRequestExtended, self).get_url + 'mobile/ExtendedServerAPI' def add_authentication(self): headers = { 'ZooZDeveloperId': self.developer_id, 'ZooZServerAPIKey': self.api_key, } return headers def get_transaction(self, transaction_id): """ Get the info about a transaction using its ID :returns: a dict with two keys: 'ResponseStatus': 0 if all is correct 'ResponseObject': transaction info, see ZooZExtendedAPI doc. :raises: ZoozException in case request fails """ assert transaction_id payload = { 'cmd': 'getTransactionDetails', 'ver': ZOOZ_API_VERSION, 'transactionID': transaction_id, } headers = self.add_authentication() logger.debug('[ZOOZ] get transaction with payload: %s', payload) response = self.post(self.get_url, payload, headers).json() if int(response['ResponseStatus']) != 0: raise ZoozException( response['ResponseObject']['errorMessage'], response['ResponseStatus']) return response def get_transactions(self, user_email, from_date=None, to_date=None): """ Get the list of transaction generated by an user. Allows filtering by date, date should be in the format: YYYY-mm-dd :returns: a dict with two keys: 'ResponseStatus': 0 if all is correct. 'ResponseObject': transaction info, see ZooZExtendedAPI doc. :raises: ZoozException in case request fails """ assert user_email payload = { 'cmd': 'getTransactionDetailsByPayerEmail', 'ver': ZOOZ_API_VERSION, 'email': user_email, 'fromDate': from_date, 'toDate': to_date, } headers = self.add_authentication() logger.debug('[ZOOZ] get transactions for user: %s', payload) response = self.post(self.get_url, payload, headers).json() if int(response['ResponseStatus']) != 0: raise ZoozException( response['ResponseObject']['errorMessage'], response['ResponseStatus']) return response
zooz-python
/zooz-python-0.4.tar.gz/zooz-python-0.4/zooz/client.py
client.py
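A hedged usage sketch for the ZoozRequest client above. The credentials are placeholders, the import path assumes the package installs as zooz, and sandbox mode is toggled through the module-level ZOOZ_SANDBOX flag as the class docstring describes.

import zooz.client as zooz_client   # assumption: the file above is zooz/client.py

zooz_client.ZOOZ_SANDBOX = True     # point requests at the sandbox servers
client = zooz_client.ZoozRequest(unique_id='my-app-unique-id', app_key='my-app-key')
response = client.open_transaction(amount='12.50', currency_code='USD')
token = response['token']           # unique token identifying the opened transaction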
📦 setup.py (for humans) ======================= This repo exists to provide [an example setup.py] file, that can be used to bootstrap your next Python project. It includes some advanced patterns and best practices for `setup.py`, as well as some commented–out nice–to–haves. For example, this `setup.py` provides a `$ python setup.py upload` command, which creates a *universal wheel* (and *sdist*) and uploads your package to [PyPi] using [Twine], without the need for an annoying `setup.cfg` file. It also creates/uploads a new git tag, automatically. In short, `setup.py` files can be daunting to approach, when first starting out — even Guido has been heard saying, "everyone cargo cults thems". It's true — so, I want this repo to be the best place to copy–paste from :) [Check out the example!][an example setup.py] Installation ----- ```bash cd your_project # Download the setup.py file: # download with wget wget https://raw.githubusercontent.com/navdeep-G/setup.py/master/setup.py -O setup.py # download with curl curl -O https://raw.githubusercontent.com/navdeep-G/setup.py/master/setup.py ``` To Do ----- - Tests via `$ setup.py test` (if it's concise). Pull requests are encouraged! More Resources -------------- - [What is setup.py?] on Stack Overflow - [Official Python Packaging User Guide](https://packaging.python.org) - [The Hitchhiker's Guide to Packaging] - [Cookiecutter template for a Python package] License ------- This is free and unencumbered software released into the public domain. Anyone is free to copy, modify, publish, use, compile, sell, or distribute this software, either in source code form or as a compiled binary, for any purpose, commercial or non-commercial, and by any means. [an example setup.py]: https://github.com/navdeep-G/setup.py/blob/master/setup.py [PyPi]: https://docs.python.org/3/distutils/packageindex.html [Twine]: https://pypi.python.org/pypi/twine [image]: https://farm1.staticflickr.com/628/33173824932_58add34581_k_d.jpg [What is setup.py?]: https://stackoverflow.com/questions/1471994/what-is-setup-py [The Hitchhiker's Guide to Packaging]: https://the-hitchhikers-guide-to-packaging.readthedocs.io/en/latest/creation.html [Cookiecutter template for a Python package]: https://github.com/audreyr/cookiecutter-pypackage
zop-cms
/zop-cms-0.1.0.tar.gz/zop-cms-0.1.0/README.md
README.md
📦 setup.py (for humans) ======================= This repo exists to provide [an example setup.py] file, that can be used to bootstrap your next Python project. It includes some advanced patterns and best practices for `setup.py`, as well as some commented–out nice–to–haves. For example, this `setup.py` provides a `$ python setup.py upload` command, which creates a *universal wheel* (and *sdist*) and uploads your package to [PyPi] using [Twine], without the need for an annoying `setup.cfg` file. It also creates/uploads a new git tag, automatically. In short, `setup.py` files can be daunting to approach, when first starting out — even Guido has been heard saying, "everyone cargo cults thems". It's true — so, I want this repo to be the best place to copy–paste from :) [Check out the example!][an example setup.py] Installation ----- ```bash cd your_project # Download the setup.py file: # download with wget wget https://raw.githubusercontent.com/navdeep-G/setup.py/master/setup.py -O setup.py # download with curl curl -O https://raw.githubusercontent.com/navdeep-G/setup.py/master/setup.py ``` To Do ----- - Tests via `$ setup.py test` (if it's concise). Pull requests are encouraged! More Resources -------------- - [What is setup.py?] on Stack Overflow - [Official Python Packaging User Guide](https://packaging.python.org) - [The Hitchhiker's Guide to Packaging] - [Cookiecutter template for a Python package] License ------- This is free and unencumbered software released into the public domain. Anyone is free to copy, modify, publish, use, compile, sell, or distribute this software, either in source code form or as a compiled binary, for any purpose, commercial or non-commercial, and by any means. [an example setup.py]: https://github.com/navdeep-G/setup.py/blob/master/setup.py [PyPi]: https://docs.python.org/3/distutils/packageindex.html [Twine]: https://pypi.python.org/pypi/twine [image]: https://farm1.staticflickr.com/628/33173824932_58add34581_k_d.jpg [What is setup.py?]: https://stackoverflow.com/questions/1471994/what-is-setup-py [The Hitchhiker's Guide to Packaging]: https://the-hitchhikers-guide-to-packaging.readthedocs.io/en/latest/creation.html [Cookiecutter template for a Python package]: https://github.com/audreyr/cookiecutter-pypackage
zop
/zop-0.1.0.tar.gz/zop-0.1.0/README.md
README.md
# Copyright 2014 Jean-Francois Paris # # This library is free software: you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation, either # version 3 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library. If not, see <http://www.gnu.org/licenses/>. from __future__ import unicode_literals, absolute_import, division import requests from lxml import html from decimal import Decimal from re import sub import time import random from collections import namedtuple from .log import logger zopa_url = "https://secure2.zopa.com/login" provision_fund_url = "https://www.zopa.com/lending/peer-to-peer-experts" user_agent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:29.0) Gecko/20100101 Firefox/29.0" SafeGuardFund = namedtuple('SafeGuardFund', 'amount, estimated_default, coverage') def convert_to_decimal(num): """ convert strings to decimal.Decimal() objects, taking into account zopa formatting conventions :param num: a number as per formatted by rate setter website :return: decimal.Decimal() representation of num """ return Decimal(sub(r'[^\d\-.]', '', num.strip('£ \n\r'))) class ZopaException(Exception): pass class ZopaExceptionSiteChanged(ZopaException): pass class ZopaClient(object): def __init__(self, email, password, security_questions, natural=True): """Initialise the Zopa client The init method takes a dict that contains the answers to the security questions used by Zopa during the authentication process. The key of the dictionary are as follow _security_questions = { "FIRST_SCHOOL": "xxx", "LAST_SCHOOL": "xxx", "PLACE_OF_BIRTH": "xxx" } :param str email: email address for the account :param str password: password for the account :param dict security_questions: dict mapping the three expected security questions to their answers :param boolean natural: when true, the object behave naturally and pauses between requests """ self._email = email self._password = password self._security_questions = security_questions self._natural = natural self._connected = False self._dashboard_url = None # create an http session self._init_session() # if in natural mode, we initiate the random number generator if self._natural: random.seed() logger.debug("Created client for Zopa") def _get_http_helper(self): """Returns a helper function that allows lxml form processor to post using requests""" def helper(method, url, value): if not url: logger.error("Cannot submit request. 
No URL provided") raise ValueError("cannot submit, no URL provided") if method == 'GET': logger.debug("GET request URL: %s, Value: %s", url, value) return self._session.get(url, value) else: logger.debug("POST request URL: %s, Value: %s", url, value) return self._session.post(url, value) return helper def _sleep_if_needed(self): """Sleep for a random amount of time between 2 and 10 seconds This method is used to make our behaviour look more human and avoid overloading Zopa's server """ if self._natural: #if in natural mode we sleep for some time time.sleep(random.randint(2, 10)) def _extract_url(self, tree): """Extract and save the main urls This method shall be called once after connection in order to avoid having to seek for the URL at a later stage """ # noinspection PyAttributeOutsideInit self._exit_url = tree.cssselect(".signout a")[0].get("href") self._loanbook_url = tree.cssselect("#lending_my_loan_book a")[0].get("href") self._account_url = tree.cssselect("#lending_account a")[0].get("href") self._statement_url = "https://secure2.zopa.com/lending/statements" def _init_session(self): """Create a new http client """ # initiate the browser self._session = requests.Session() self._session.headers = {'User-agent': user_agent} self._session.verify = True def connect(self): """Connect the client from Zopa""" # create a new http session each time we attempt a new connection self._init_session() # pull zopaclient signup page logger.debug("GET request URL: %s", zopa_url) page = self._session.get(zopa_url) self._sleep_if_needed() # fill the signup form tree = html.fromstring(page.text, base_url=page.url) form = tree.forms[0] form.fields["email"] = self._email form.fields["password"] = self._password logger.debug("Submit form") page = html.submit_form(form, open_http=self._get_http_helper()) self._sleep_if_needed() # check if we have landed on the secret verification page url = page.url if not "login/confirm" in url: raise ZopaExceptionSiteChanged("Unexpected page") # fill the idea verification form tree = html.fromstring(page.text, base_url=page.url) form = tree.forms[0] form.fields["answer"] = self._security_questions[form.fields["question"]] logger.debug("Submit form") page = html.submit_form(form, open_http=self._get_http_helper()) self._sleep_if_needed() # check if we have landed on the dashboard page url = page.url if not "/dashboard" in url: raise ZopaExceptionSiteChanged("Unexpected page")() self._connected = True self._dashboard_url = url tree = html.fromstring(page.text, base_url=page.url) tree.make_links_absolute(page.url) self._extract_url(tree) def disconnect(self): """Disconnect the client from Zopa""" # call the logout url logger.debug("GET request URL: %s", self._exit_url) page = self._session.get(self._exit_url) url = page.url if not "/signed_out" in url: raise Exception("Failed to sign out") self._connected = False def get_loan_book(self): """Download and return the full loan book :return: loan book in csv format :rtype: str """ logger.debug("GET request URL: %s", self._loanbook_url) page = self._session.get(self._loanbook_url) self._sleep_if_needed() tree = html.fromstring(page.text, base_url=page.url) form = tree.forms[0] # submit the two following values through the extra_values parameters # as they are not part of the initial form values = {"_template$MainControl$Content$MyLoanBookControl$btnDownloadCSV.x": "132", "_template$MainControl$Content$MyLoanBookControl$btnDownloadCSV.y": "7"} logger.debug("Submit form") page = html.submit_form(form, extra_values=values, 
open_http=self._get_http_helper()) self._sleep_if_needed() return page.text def get_statement(self, year, month): """Download and return the monthly statement for a given period :param int year: year for which the statement is required :param int month: month within the year for which the statement is required :return: statement in csv format :rtype: str """ logger.debug("GET request URL: %s", self._statement_url) page = self._session.get(self._statement_url) self._sleep_if_needed() tree = html.fromstring(page.text, base_url=page.url) form = tree.forms[0] form.fields["date[month]"] = str(month) if type(month) == int else month form.fields["date[year]"] = str(year) if type(year) == int else year logger.debug("Submit form") page = html.submit_form(form, open_http=self._get_http_helper()) self._sleep_if_needed() return page.text def get_account_summary(self): """Get the account summary :return: summary of current account :rtype: dict """ logger.debug("GET request URL: %s", self._account_url) page = self._session.get(self._account_url) self._sleep_if_needed() results = {} tree = html.fromstring(page.text, base_url=page.url) summary_items = tree.cssselect(".result .important strong") ZopaAccount = namedtuple('ZopaAccount', """total_earnings, zopa_total, total_paid_in, total_paid_out, not_offered, fees_not_deducted offered, processing, processing_nb_loans, lent_out, lent_out_nb_loans, late_payment, late_payment_nb_loans, bad_debt, bad_debt_nb_loans, all_time_borrower_interest, all_time_holding_account_interest, all_time_bonuses, all_time_tell_a_friend, all_time_rapid_return_interest, all_time_rate_promise, all_time_total_lender_fees, all_time_bad_debt, all_time_lent_out, all_time_capital_returned""") total_earnings = convert_to_decimal(summary_items[0].text) zopa_total = convert_to_decimal(summary_items[1].text) total_paid_in = convert_to_decimal(summary_items[2].text) total_paid_out = convert_to_decimal(summary_items[3].text) summary_items = tree.cssselect(".lending-offers-summary td.number") not_offered = convert_to_decimal(summary_items[0].text) fees_not_deducted = convert_to_decimal(summary_items[2].text) offered = convert_to_decimal(summary_items[4].text) processing = convert_to_decimal(summary_items[6].text) processing_nb_loans = convert_to_decimal(summary_items[7].text) lent_out = convert_to_decimal(summary_items[8].text) lent_out_nb_loans = convert_to_decimal(summary_items[9].text) late_payment = convert_to_decimal(summary_items[10].text) late_payment_nb_loans= convert_to_decimal(summary_items[11].text) bad_debt = convert_to_decimal(summary_items[14].text) bad_debt_nb_loans = convert_to_decimal(summary_items[15].text) summary_items = tree.cssselect(".lending-offers-all-time-summary td.number") all_time_borrower_interest = convert_to_decimal(summary_items[0].text) all_time_holding_account_interest = convert_to_decimal(summary_items[1].text) all_time_bonuses = convert_to_decimal(summary_items[2].text) all_time_tell_a_friend = convert_to_decimal(summary_items[3].text) all_time_rapid_return_interest = convert_to_decimal(summary_items[4].text) all_time_rate_promise = convert_to_decimal(summary_items[5].text) all_time_total_lender_fees = convert_to_decimal(summary_items[6].text) all_time_bad_debt = convert_to_decimal(summary_items[7].text) all_time_lent_out = convert_to_decimal(summary_items[8].text) all_time_capital_returned = convert_to_decimal(summary_items[9].text) return ZopaAccount(total_earnings=total_earnings, zopa_total=zopa_total, total_paid_in=total_paid_in, 
total_paid_out=total_paid_out, not_offered=not_offered, fees_not_deducted=fees_not_deducted, offered=offered, processing=processing, processing_nb_loans=processing_nb_loans, lent_out=lent_out, lent_out_nb_loans=lent_out_nb_loans, late_payment=late_payment, late_payment_nb_loans=late_payment_nb_loans, bad_debt=bad_debt, bad_debt_nb_loans=bad_debt_nb_loans, all_time_borrower_interest=all_time_borrower_interest, all_time_holding_account_interest=all_time_holding_account_interest, all_time_bonuses=all_time_bonuses, all_time_tell_a_friend = all_time_tell_a_friend, all_time_rapid_return_interest=all_time_rapid_return_interest, all_time_rate_promise=all_time_rate_promise, all_time_total_lender_fees=all_time_total_lender_fees, all_time_bad_debt=all_time_bad_debt, all_time_lent_out=all_time_lent_out, all_time_capital_returned=all_time_capital_returned) def get_safeguard_fund(self): """ Get a summary of the amount in the safeguard fund This method does not require that the client is connected before you invoke it :return: namedtuple with the following keys: * amount: total amount in the safeguard fund * estimated_default: estimated default from outstanding loans * coverage: coverage ratio """ logger.debug("GET request URL: %s", provision_fund_url) page = self._session.get(provision_fund_url) self._sleep_if_needed() tree = html.fromstring(page.text, base_url=page.url) td = tree.xpath('.//div[@id = "reducing-risk"]/descendant::td[@class = "number"]') amount = convert_to_decimal(td[0].text) estimated_default = convert_to_decimal(td[1].text) coverage = amount / estimated_default return SafeGuardFund(amount = amount, estimated_default = estimated_default, coverage = coverage)
zopaClient
/zopaClient-2.0.2.tar.gz/zopaClient-2.0.2/zopaclient/api.py
api.py
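A hedged usage sketch for the ZopaClient above. The credentials and answers are placeholders; the security-question keys are the ones documented in the __init__ docstring, and the import path follows the package layout shown above.

from zopaclient.api import ZopaClient   # module path as packaged above

questions = {
    "FIRST_SCHOOL": "placeholder",
    "LAST_SCHOOL": "placeholder",
    "PLACE_OF_BIRTH": "placeholder",
}
client = ZopaClient("me@example.com", "password", questions, natural=True)
client.connect()
loan_book_csv = client.get_loan_book()    # full loan book as CSV text
summary = client.get_account_summary()    # ZopaAccount namedtuple of balances
client.disconnect()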
import os, shutil, sys, tempfile from optparse import OptionParser tmpeggs = tempfile.mkdtemp() usage = '''\ [DESIRED PYTHON FOR BUILDOUT] bootstrap.py [options] Bootstraps a buildout-based project. Simply run this script in a directory containing a buildout.cfg, using the Python that you want bin/buildout to use. Note that by using --setup-source and --download-base to point to local resources, you can keep this script from going over the network. ''' parser = OptionParser(usage=usage) parser.add_option("-v", "--version", help="use a specific zc.buildout version") parser.add_option("-t", "--accept-buildout-test-releases", dest='accept_buildout_test_releases', action="store_true", default=False, help=("Normally, if you do not specify a --version, the " "bootstrap script and buildout gets the newest " "*final* versions of zc.buildout and its recipes and " "extensions for you. If you use this flag, " "bootstrap and buildout will get the newest releases " "even if they are alphas or betas.")) parser.add_option("-c", "--config-file", help=("Specify the path to the buildout configuration " "file to be used.")) parser.add_option("-f", "--find-links", help=("Specify a URL to search for buildout releases")) options, args = parser.parse_args() ###################################################################### # load/install distribute to_reload = False try: import pkg_resources, setuptools if not hasattr(pkg_resources, '_distribute'): to_reload = True raise ImportError except ImportError: ez = {} try: from urllib.request import urlopen except ImportError: from urllib2 import urlopen exec(urlopen('http://python-distribute.org/distribute_setup.py').read(), ez) setup_args = dict(to_dir=tmpeggs, download_delay=0, no_fake=True) ez['use_setuptools'](**setup_args) if to_reload: reload(pkg_resources) import pkg_resources # This does not (always?) update the default working set. We will # do it. for path in sys.path: if path not in pkg_resources.working_set.entries: pkg_resources.working_set.add_entry(path) ###################################################################### # Install buildout ws = pkg_resources.working_set cmd = [sys.executable, '-c', 'from setuptools.command.easy_install import main; main()', '-mZqNxd', tmpeggs] find_links = os.environ.get( 'bootstrap-testing-find-links', options.find_links or ('http://downloads.buildout.org/' if options.accept_buildout_test_releases else None) ) if find_links: cmd.extend(['-f', find_links]) distribute_path = ws.find( pkg_resources.Requirement.parse('distribute')).location requirement = 'zc.buildout' version = options.version if version is None and not options.accept_buildout_test_releases: # Figure out the most recent final version of zc.buildout. 
import setuptools.package_index _final_parts = '*final-', '*final' def _final_version(parsed_version): for part in parsed_version: if (part[:1] == '*') and (part not in _final_parts): return False return True index = setuptools.package_index.PackageIndex( search_path=[distribute_path]) if find_links: index.add_find_links((find_links,)) req = pkg_resources.Requirement.parse(requirement) if index.obtain(req) is not None: best = [] bestv = None for dist in index[req.project_name]: distv = dist.parsed_version if _final_version(distv): if bestv is None or distv > bestv: best = [dist] bestv = distv elif distv == bestv: best.append(dist) if best: best.sort() version = best[-1].version if version: requirement = '=='.join((requirement, version)) cmd.append(requirement) import subprocess if subprocess.call(cmd, env=dict(os.environ, PYTHONPATH=distribute_path)) != 0: raise Exception( "Failed to execute command:\n%s", repr(cmd)[1:-1]) ###################################################################### # Import and run buildout ws.add_entry(tmpeggs) ws.require(requirement) import zc.buildout.buildout if not [a for a in args if '=' not in a]: args.append('bootstrap') # if -c was provided, we push it back into args for buildout' main function if options.config_file is not None: args[0:0] = ['-c', options.config_file] zc.buildout.buildout.main(args) shutil.rmtree(tmpeggs)
zopache.pagetemplate
/zopache.pagetemplate-0.1.0.tar.gz/zopache.pagetemplate-0.1.0/bootstrap.py
bootstrap.py
"""TTW PageTemplate """ from persistent import Persistent from zope.security.proxy import ProxyFactory from zope.interface import implements from zope.pagetemplate.pagetemplate import PageTemplate from zope.size.interfaces import ISized from zope.publisher.browser import BrowserView from zope.traversing.api import getPath from zope.pagetemplate.engine import AppPT from zope.container.contained import Contained from zope.app.publication.interfaces import IFileContent from zopache.pagetemplate.interfaces import \ IZopachePageTemplate, IRenderZopachePageTemplate class ZopachePageTemplate(AppPT, PageTemplate, Persistent, Contained): implements(IZopachePageTemplate, IRenderZopachePageTemplate, IFileContent) expand = False evaluateInlineCode = False def getSource(self, request=None): return self.read(request) def setSource(self, text, content_type='text/html'): if not isinstance(text, unicode): raise TypeError("source text must be Unicode" , text) self.pt_edit(text, content_type) source = property(getSource, setSource) def pt_getEngineContext(self, namespace): context = self.pt_getEngine().getContext(namespace) context.evaluateInlineCode = self.evaluateInlineCode return context def pt_getContext(self, instance, request, **_kw): # instance is a View component namespace = super(ZopachePageTemplate, self).pt_getContext(**_kw) namespace['template'] = self namespace['request'] = request namespace['container'] = namespace['context'] = instance return namespace def pt_source_file(self): try: return getPath(self) except TypeError: return None def render(self, request, *args, **keywords): instance = self.__parent__ debug_flags = request.debug request = ProxyFactory(request) instance = ProxyFactory(instance) if args: args = ProxyFactory(args) kw = ProxyFactory(keywords) namespace = self.pt_getContext(instance, request, args=args, options=kw) return self.pt_render(namespace, showtal=debug_flags.showTAL, sourceAnnotations=debug_flags.sourceAnnotations) class Sized(object): implements(ISized) def __init__(self, page): self.num_lines = len(page.getSource().splitlines()) def sizeForSorting(self): 'See ISized' return ('line', self.num_lines) def sizeForDisplay(self): 'See ISized' if self.num_lines == 1: return u'1 line' return u'%s lines' % str(self.num_lines)
zopache.pagetemplate
/zopache.pagetemplate-0.1.0.tar.gz/zopache.pagetemplate-0.1.0/src/zopache/pagetemplate/pagetemplate.py
pagetemplate.py
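A small sketch (Python 2, matching the package) of the source round-trip on the ZopachePageTemplate above. render() needs a real publisher request and a container, so only the source API and the ISized adapter are shown; the template text is a placeholder.

from zopache.pagetemplate.pagetemplate import ZopachePageTemplate, Sized

pt = ZopachePageTemplate()
pt.setSource(u'<p tal:content="request/URL">url</p>')   # source must be unicode
source = pt.getSource()
size_label = Sized(pt).sizeForDisplay()                  # e.g. u'1 line'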
import os, shutil, sys, tempfile from optparse import OptionParser tmpeggs = tempfile.mkdtemp() usage = '''\ [DESIRED PYTHON FOR BUILDOUT] bootstrap.py [options] Bootstraps a buildout-based project. Simply run this script in a directory containing a buildout.cfg, using the Python that you want bin/buildout to use. Note that by using --setup-source and --download-base to point to local resources, you can keep this script from going over the network. ''' parser = OptionParser(usage=usage) parser.add_option("-v", "--version", help="use a specific zc.buildout version") parser.add_option("-t", "--accept-buildout-test-releases", dest='accept_buildout_test_releases', action="store_true", default=False, help=("Normally, if you do not specify a --version, the " "bootstrap script and buildout gets the newest " "*final* versions of zc.buildout and its recipes and " "extensions for you. If you use this flag, " "bootstrap and buildout will get the newest releases " "even if they are alphas or betas.")) parser.add_option("-c", "--config-file", help=("Specify the path to the buildout configuration " "file to be used.")) parser.add_option("-f", "--find-links", help=("Specify a URL to search for buildout releases")) options, args = parser.parse_args() ###################################################################### # load/install distribute to_reload = False try: import pkg_resources, setuptools if not hasattr(pkg_resources, '_distribute'): to_reload = True raise ImportError except ImportError: ez = {} try: from urllib.request import urlopen except ImportError: from urllib2 import urlopen exec(urlopen('http://python-distribute.org/distribute_setup.py').read(), ez) setup_args = dict(to_dir=tmpeggs, download_delay=0, no_fake=True) ez['use_setuptools'](**setup_args) if to_reload: reload(pkg_resources) import pkg_resources # This does not (always?) update the default working set. We will # do it. for path in sys.path: if path not in pkg_resources.working_set.entries: pkg_resources.working_set.add_entry(path) ###################################################################### # Install buildout ws = pkg_resources.working_set cmd = [sys.executable, '-c', 'from setuptools.command.easy_install import main; main()', '-mZqNxd', tmpeggs] find_links = os.environ.get( 'bootstrap-testing-find-links', options.find_links or ('http://downloads.buildout.org/' if options.accept_buildout_test_releases else None) ) if find_links: cmd.extend(['-f', find_links]) distribute_path = ws.find( pkg_resources.Requirement.parse('distribute')).location requirement = 'zc.buildout' version = options.version if version is None and not options.accept_buildout_test_releases: # Figure out the most recent final version of zc.buildout. 
import setuptools.package_index _final_parts = '*final-', '*final' def _final_version(parsed_version): for part in parsed_version: if (part[:1] == '*') and (part not in _final_parts): return False return True index = setuptools.package_index.PackageIndex( search_path=[distribute_path]) if find_links: index.add_find_links((find_links,)) req = pkg_resources.Requirement.parse(requirement) if index.obtain(req) is not None: best = [] bestv = None for dist in index[req.project_name]: distv = dist.parsed_version if _final_version(distv): if bestv is None or distv > bestv: best = [dist] bestv = distv elif distv == bestv: best.append(dist) if best: best.sort() version = best[-1].version if version: requirement = '=='.join((requirement, version)) cmd.append(requirement) import subprocess if subprocess.call(cmd, env=dict(os.environ, PYTHONPATH=distribute_path)) != 0: raise Exception( "Failed to execute command:\n%s", repr(cmd)[1:-1]) ###################################################################### # Import and run buildout ws.add_entry(tmpeggs) ws.require(requirement) import zc.buildout.buildout if not [a for a in args if '=' not in a]: args.append('bootstrap') # if -c was provided, we push it back into args for buildout' main function if options.config_file is not None: args[0:0] = ['-c', options.config_file] zc.buildout.buildout.main(args) shutil.rmtree(tmpeggs)
zopache.pythonscript
/zopache.pythonscript-0.1.0.zip/zopache.pythonscript-0.1.0/bootstrap.py
bootstrap.py
"""TTW Python Script """ import types from persistent import Persistent from cStringIO import StringIO from zope.interface import implements from zope.schema.fieldproperty import FieldProperty from zope.security.untrustedpython import interpreter from zope.size.interfaces import ISized from zope.container.contained import Contained from zope.app.publication.interfaces import IFileContent from zopache.pythonscript.interfaces import IPythonScript SOURCE_TEMPLATE = '''\ def %s(%s): %s ''' def evalModule(source): """Evaluate module and return a pair: collected symbols and execution output. """ module = types.ModuleType('pythonscript', "TTW Python Script") if not source: return module prog = interpreter.CompiledProgram(source) f = StringIO() # Collect a few more builtins that the untrusted Python interpreter does # not declare. ns = {'sum': sum} prog.exec_(ns, output=f) module.__dict__.update(ns) return module class PythonScript(Persistent, Contained): implements(IPythonScript, IFileContent) _v_module = None _v_sourceHash = None signature = FieldProperty(IPythonScript['signature']) source = FieldProperty(IPythonScript['source']) def getModule(self): source = SOURCE_TEMPLATE %( self.__name__, self.signature, self.source.replace('\n', '\n ')) if self._v_module is None or self._v_sourceHash != hash(source): self._v_module = evalModule(source) self._v_sourceHash = hash(source) return self._v_module def __call__(self, *args, **kw): module = self.getModule() return getattr(module, self.__name__)(*args, **kw) class Sized(object): implements(ISized) def __init__(self, script): self.num_lines = len(script.source.splitlines()) def sizeForSorting(self): 'See ISized' return ('line', self.num_lines) def sizeForDisplay(self): 'See ISized' if self.num_lines == 1: return u'1 line' return u'%s lines' % str(self.num_lines)
zopache.pythonscript
/zopache.pythonscript-0.1.0.zip/zopache.pythonscript-0.1.0/src/zopache/pythonscript/pythonscript.py
pythonscript.py
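Since PythonScript assembles its module from ``__name__``, ``signature`` and ``source``, the call path can be sketched outside the ZMI. This is a hedged illustration only: it assumes Python 2, an importable zopache.pythonscript with its zope.security.untrustedpython dependency, and that the IPythonScript schema accepts the plain text assigned below.

from zopache.pythonscript.pythonscript import PythonScript

script = PythonScript()
script.__name__ = 'add'            # becomes the name of the generated function
script.signature = u'a, b'         # filled into "def add(a, b):"
script.source = u'return a + b'    # body text; getModule() re-indents it

# __call__ compiles the assembled source with the restricted interpreter,
# caches the module keyed on a hash of the source, and dispatches to the
# generated function.
print(script(2, 3))                # -> 5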
========= Changes ========= 5.0 (2023-03-27) ================ - Add support for Python 3.11. - Drop support for Python 2.7, 3.5, 3.6. 4.8 (2022-09-06) ================ - Add support for Python 3.8, 3.9, 3.10. - Drop support for Python 3.4. 4.7.0 (2018-10-16) ================== - Add support for Python 3.7 and drop support for Python 3.3. - Fix a DeprecationWarning from ``zope.annotation.attribute``. See `issue 16 <https://github.com/zopefoundation/zope.annotation/issues/16>`_. 4.6.0 (2017-09-22) ================== - Make ``AttributeAnnotations`` have a ``__parent__``. The ``__parent__`` is the object that it stores ``__annotations__`` on. This is a convenience for upwards traversal as used by things like ``zope.keyreference``. See https://github.com/zopefoundation/zope.annotation/issues/11 4.5 (2017-06-03) ================ - Drop support for Python 2.6. - Claim support for Python 3.5 and 3.6. - Reach 100% test coverage. - ``AttributeAnnotations`` is now always a ``collections.MutableMapping``. Previously on Python 2 it was a ``UserDict.DictMixin``. 4.4.1 (2015-01-09) ================== - Convert doctests to Sphinx documentation. Doctest snippets are still tested via ``tox -e docs``. 4.4.0 (2015-01-09) ================== - LP #98462: add additional "iterable mapping" methods to ``IAnnotations``. - LP #878265: - Make ``persistent`` (used only for doctests) a soft dependency, installable via the ``zope.annotation[btree]`` extra. - Make ``BTrees`` (used for attribute storage) a soft dependency, installable via the ``zope.annotation[btree]`` extra. Fall back to using ``dict`` for attribute storage if ``BTrees`` is not importable. 4.3.0 (2014-12-26) ================== - Add support for Python 3.4. 4.2.0 (2013-03-18) ================== - Don't make AttributeAnnotations available as a view. 4.1.0 (2013-02-24) ================== - Add ``__bool__`` method to ``IAnnotations`` API for Python 3 compatibility. 4.0.1 (2013-02-11) ================== - Add `tox.ini`. 4.0.0 (2013-02-11) ================== - Add support for Python 3.3 and PyPy. - Replace deprecated ``zope.component.adapts`` usage with equivalent ``zope.component.adapter`` decorator. - Replace deprecated ``zope.interface.implements`` usage with equivalent ``zope.interface.implementer`` decorator. - Drop support for Python 2.4 and 2.5. - Include zcml dependencies in configure.zcml, require the necessary packages via a zcml extra, added tests for zcml. 3.5.0 (2009-09-07) ================== - Add ZODB3 to install_requires, because it's a true requirement of this package, not just a testing requirement, as BTrees are in use. - Fix one test that was inactive because it's function was overriden by a mistake. 3.4.2 (2009-03-09) ================== - Clean up package description and documentation a bit. - Change mailing list address to zope-dev at zope.org, as zope3-dev at zope.org is now retired. - Remove old zpkg-related files. 3.4.1 (2008-08-26) ================== - Annotation factories take care not to store proxies in the database, so adapting an object wrapped in a ``LocationProxy`` works correctly. Fixes https://bugs.launchpad.net/zope3/+bug/261620 3.4.0 (2007-08-29) ================== - Annotation factories are no longer containing the factored object. Instead the objects are located using ``zope.location``. This removes a dependency to ``zope.app.container``.
zope.annotation
/zope.annotation-5.0.tar.gz/zope.annotation-5.0/CHANGES.rst
CHANGES.rst
``zope.annotation`` README ========================== .. image:: https://img.shields.io/pypi/v/zope.annotation.svg :target: https://pypi.python.org/pypi/zope.annotation/ :alt: Latest Version .. image:: https://github.com/zopefoundation/zope.annotation/actions/workflows/tests.yml/badge.svg :target: https://github.com/zopefoundation/zope.annotation/actions/workflows/tests.yml .. image:: https://readthedocs.org/projects/zopeannotation/badge/?version=latest :target: http://zopeannotation.readthedocs.org/en/latest/ :alt: Documentation Status This package provides a mechanism to store additional information about objects without the need to modify the object's class.
zope.annotation
/zope.annotation-5.0.tar.gz/zope.annotation-5.0/README.rst
README.rst
__docformat__ = 'restructuredtext' from zope.interface import Interface class IAnnotatable(Interface): """Marker interface for objects that support storing annotations. This interface says "There exists an adapter to an IAnnotations for an object that implements `IAnnotatable`". Classes should not directly declare that they implement this interface. Instead they should implement an interface derived from this one, which details how the annotations are to be stored, such as `IAttributeAnnotatable`. """ class IAnnotations(IAnnotatable): """Stores arbitrary application data under package-unique keys. By "package-unique keys", we mean keys that are unique by virtue of including the dotted name of a package as a prefix. A package name is used to limit the authority for picking names for a package to the people using that package. For example, when implementing annotations for storing Zope Dublin-Core meta-data, we use the key:: "zope.app.dublincore.ZopeDublinCore" """ def __bool__(): """Test whether there are any annotations.""" def __getitem__(key): """Return the annotation stored under key. Raises KeyError if key not found. """ def get(key, default=None): """Return the annotation stored under key, or default if not found. """ def __setitem__(key, value): """Store annotation under key. In order to avoid key collisions, users of this interface must use their dotted package name as part of the key name. """ def __delitem__(key): """Removes the annotation stored under key. Raises a KeyError if the key is not found. """ def __iter__(): """Return an iterator for the keys in the container. """ def __contains__(key): """Return True if 'key' is in the container, else False. """ def items(): """Return '(key, value)' pairs for the keys in the container. """ class IAttributeAnnotatable(IAnnotatable): """Marker indicating that annotations can be stored on an attribute. This is a marker interface giving permission for an `IAnnotations` adapter to store data in an attribute named `__annotations__`. """
zope.annotation
/zope.annotation-5.0.tar.gz/zope.annotation-5.0/src/zope/annotation/interfaces.py
interfaces.py
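The mapping protocol spelled out by IAnnotations is easiest to see against the stock attribute adapter. The following is a small sketch assuming zope.annotation and zope.component are available; the interface, class, and keys are invented for illustration.

from zope.component import provideAdapter
from zope.interface import implementer
from zope.annotation.interfaces import IAnnotations, IAttributeAnnotatable
from zope.annotation.attribute import AttributeAnnotations

# Register the default adapter so IAnnotations(obj) works.
provideAdapter(AttributeAnnotations)

@implementer(IAttributeAnnotatable)
class Page(object):
    pass

page = Page()
ann = IAnnotations(page)

key = 'example.myapp.dublincore'             # dotted package name avoids clashes
ann[key] = {'title': 'About us'}
print(key in ann)                            # True
print(ann.get('example.other.key', 'n/a'))   # missing keys fall back to default
del ann[key]
print(bool(ann))                             # False once nothing is stored

Real applications normally only touch the IAnnotations adapter, so the storage strategy (attribute, external mapping, ...) can be swapped without changing client code.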
"""Attribute Annotations implementation""" import logging from collections.abc import MutableMapping as DictMixin try: from BTrees.OOBTree import OOBTree as _STORAGE except ImportError: # pragma: no cover logging.getLogger(__name__).warning( 'BTrees not available: falling back to dict for attribute storage') _STORAGE = dict from zope import component from zope import interface from zope.annotation import interfaces _EMPTY_STORAGE = _STORAGE() @interface.implementer(interfaces.IAnnotations) @component.adapter(interfaces.IAttributeAnnotatable) class AttributeAnnotations(DictMixin): """Store annotations on an object Store annotations in the `__annotations__` attribute on a `IAttributeAnnotatable` object. """ # Yes, there's a lot of repetition of the `getattr` call, # but that turns out to be the most efficient for the ways # instances are typically used without sacrificing any semantics. # See https://github.com/zopefoundation/zope.annotation/issues/8 # for a discussion of alternatives (which included functools.partial, # a closure, capturing the annotations in __init__, and versions # with getattr and exceptions). def __init__(self, obj, context=None): self.obj = obj @property def __parent__(self): return self.obj def __bool__(self): return bool(getattr(self.obj, '__annotations__', 0)) def get(self, key, default=None): """See zope.annotation.interfaces.IAnnotations""" annotations = getattr(self.obj, '__annotations__', _EMPTY_STORAGE) return annotations.get(key, default) def __getitem__(self, key): annotations = getattr(self.obj, '__annotations__', _EMPTY_STORAGE) return annotations[key] def keys(self): annotations = getattr(self.obj, '__annotations__', _EMPTY_STORAGE) return annotations.keys() def __iter__(self): annotations = getattr(self.obj, '__annotations__', _EMPTY_STORAGE) return iter(annotations) def __len__(self): annotations = getattr(self.obj, '__annotations__', _EMPTY_STORAGE) return len(annotations) def __setitem__(self, key, value): """See zope.annotation.interfaces.IAnnotations""" try: annotations = self.obj.__annotations__ except AttributeError: annotations = self.obj.__annotations__ = _STORAGE() annotations[key] = value def __delitem__(self, key): """See zope.app.interfaces.annotation.IAnnotations""" try: annotation = self.obj.__annotations__ except AttributeError: raise KeyError(key) del annotation[key]
zope.annotation
/zope.annotation-5.0.tar.gz/zope.annotation-5.0/src/zope/annotation/attribute.py
attribute.py
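A brief sketch of AttributeAnnotations' storage behaviour when used directly, without component registration; class and key names are made up, and zope.annotation is assumed to be importable (with or without the BTrees extra).

from zope.interface import implementer
from zope.annotation.interfaces import IAttributeAnnotatable
from zope.annotation.attribute import AttributeAnnotations

@implementer(IAttributeAnnotatable)
class Thing(object):
    pass

thing = Thing()
ann = AttributeAnnotations(thing)

print(hasattr(thing, '__annotations__'))      # False: storage is created lazily
print(bool(ann))                              # False while nothing is stored

ann['example.app.flag'] = True                # first write creates the mapping
print(type(thing.__annotations__).__name__)   # OOBTree, or dict without BTrees
print(ann.__parent__ is thing)                # True: supports upward traversal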
:mod:`zope.annotation` API ========================== Framework Interfaces -------------------- These interfaces define the source and targets for adaptation under the :mod:`zope.annotation` framework: .. automodule:: zope.annotation.interfaces Attribute-Based Annotations --------------------------- The default adapter implementation uses a special attribute, ``__annotations__``, on the annotated object: .. autoclass:: zope.annotation.attribute.AttributeAnnotations Because setting an attribute is somewhat intrusive (as opposed to storing annotations elsewhere), this adapter requires that its context implement :class:`zope.annotation.interfaces.IAttributeAnnotatable` to signal that this attribute can be used. Factories --------- .. automodule:: zope.annotation.factory
zope.annotation
/zope.annotation-5.0.tar.gz/zope.annotation-5.0/docs/api.rst
api.rst
Hacking on :mod:`zope.annotation` ================================= Getting the Code ################ The main repository for :mod:`zope.annotation` is in the Zope Foundation Github repository: https://github.com/zopefoundation/zope.annotation You can get a read-only checkout from there: .. code-block:: sh $ git clone https://github.com/zopefoundation/zope.annotation.git or fork it and get a writeable checkout of your fork: .. code-block:: sh $ git clone [email protected]/jrandom/zope.annotation.git The project also mirrors the trunk from the Github repository as a Bazaar branch on Launchpad: https://code.launchpad.net/zope.annotation You can branch the trunk from there using Bazaar: .. code-block:: sh $ bzr branch lp:zope.annotation Working in a ``virtualenv`` ########################### Installing ---------- If you use the ``virtualenv`` package to create lightweight Python development environments, you can work with your checkout using a virtualenv. First, create the virtualenv: .. code-block:: sh $ /path/to/virtualenv --no-site-packages /tmp/hack-zope.annotation Next, get this package registered as a "development egg" in the environment: .. code-block:: sh $ /tmp/hack-zope.annotation/bin/python setup.py develop Running the tests ----------------- Run the tests using the build-in ``setuptools`` testrunner: .. code-block:: sh $ /tmp/hack-zope.annotation/bin/python setup.py -q test -q running test ............ ---------------------------------------------------------------------- Ran 11 tests in 0.000s OK The ``dev`` command alias downloads and installs extra tools, like the :mod:`nose` testrunner and the :mod:`coverage` coverage analyzer: .. code-block:: sh $ /tmp/hack-zope.annotation/bin/python setup.py dev $ /tmp/hack-zope.annotation/bin/nosetests running nosetests .......... ---------------------------------------------------------------------- Ran 12 tests in 0.000s OK If you have the :mod:`coverage` pacakge installed in the virtualenv, you can see how well the tests cover the code: .. code-block:: sh $ /tmp/hack-zope.annotation/bin/nosetests --with coverage ............ Name Stmts Miss Cover Missing ---------------------------------------------------------- zope.annotation 4 0 100% zope.annotation.attribute 59 0 100% zope.annotation.factory 28 0 100% zope.annotation.interfaces 15 0 100% ---------------------------------------------------------- TOTAL 106 35 67% ---------------------------------------------------------------------- Ran 12 tests in 2.166s OK Building the documentation -------------------------- :mod:`zope.annotation` uses the nifty :mod:`Sphinx` documentation system for building its docs. Using the same virtualenv you set up to run the tests, you can build the docs: The ``docs`` command alias downloads and installs Sphinx and its dependencies: .. code-block:: sh $ /tmp/hack-zope.annotation/bin/python setup.py docs ... $ /tmp/hack-zope.annotation/bin/sphinx-build -b html -d docs/_build/doctrees docs docs/_build/html ... build succeeded. You can also test the code snippets in the documentation: .. code-block:: sh $ /tmp/hack-zope.annotation/bin/sphinx-build -b doctest -d docs/_build/doctrees docs docs/_build/doctest ... running tests... Document: narrative ------------------- 1 items passed all tests: 54 tests in default 54 tests in 1 items. 54 passed and 0 failed. Test passed. Doctest summary =============== 54 tests 0 failures in tests 0 failures in setup code build succeeded. 
Using :mod:`zc.buildout` ######################## Setting up the buildout ----------------------- :mod:`zope.annotation` ships with its own :file:`buildout.cfg` file and :file:`bootstrap.py` for setting up a development buildout: .. code-block:: sh $ /path/to/python2.7 bootstrap.py ... Generated script '.../bin/buildout' $ bin/buildout Develop: '/path/to/annotation/.' ... Generated script '.../bin/sphinx-quickstart'. Generated script '.../bin/sphinx-build'. Running the tests ----------------- You can now run the tests: .. code-block:: sh $ bin/test --all Running zope.testing.testrunner.layer.UnitTests tests: Set up zope.testing.testrunner.layer.UnitTests in 0.000 seconds. Ran 11 tests with 0 failures and 0 errors in 0.000 seconds. Tearing down left over layers: Tear down zope.testing.testrunner.layer.UnitTests in 0.000 seconds. Building the documentation -------------------------- The :mod:`zope.annotation` buildout installs the Sphinx scripts required to build the documentation, including testing its code snippets: .. code-block:: sh $ cd docs $ PATH=../bin:$PATH make doctest html sphinx-build -b doctest -d _build/doctrees . _build/doctest running tests... Document: narrative ------------------- 1 items passed all tests: 54 tests in default 54 tests in 1 items. 54 passed and 0 failed. Test passed. Doctest summary =============== 54 tests 0 failures in tests 0 failures in setup code build succeeded. Testing of doctests in the sources finished, look at the results in _build/doctest/output.txt. sphinx-build -b html -d _build/doctrees . _build/html ... build succeeded. Build finished. The HTML pages are in docs/_build/html. Using :mod:`tox` ################ Running Tests on Multiple Python Versions ----------------------------------------- `tox <http://tox.testrun.org/latest/>`_ is a Python-based test automation tool designed to run tests against multiple Python versions. It creates a ``virtualenv`` for each configured version, installs the current package and configured dependencies into each ``virtualenv``, and then runs the configured commands. :mod:`zope.annotation` configures the following :mod:`tox` environments via its ``tox.ini`` file: - The ``py26``, ``py27``, ``py33``, ``py34``, and ``pypy`` environments builds a ``virtualenv`` with ``pypy``, installs :mod:`zope.annotation` and dependencies, and runs the tests via ``python setup.py test -q``. - The ``nobtree`` environment builds a ``virtualenv`` with Python 2.7, installs :mod:`zope.annotation` and its minimal dependencies (no ``persistent`` or ``BTrees``), and runs the tests via ``python setup.py test -q``. - The ``coverage`` environment builds a ``virtualenv`` with ``python2.7``, installs :mod:`zope.annotation` and dependencies, installs :mod:`nose` and :mod:`coverage`, and runs ``nosetests`` with statement coverage. - The ``docs`` environment builds a virtualenv with ``python2.7``, installs :mod:`zope.annotation` and dependencies, installs ``Sphinx`` and dependencies, and then builds the docs and exercises the doctest snippets. This example requires that you have a working ``python2.6`` on your path, as well as installing ``tox``: .. code-block:: sh $ tox -e py26 GLOB sdist-make: .../zope.annotation/setup.py py26 sdist-reinst: .../zope.annotation/.tox/dist/zope.annotation-4.x.ydev.zip py26 runtests: commands[0] ... 
---------------------------------------------------------------------- Ran 11 tests in 0.000s OK ___________________________________ summary ____________________________________ py26: commands succeeded congratulations :) Running ``tox`` with no arguments runs all the configured environments, including building the docs and testing their snippets: .. code-block:: sh $ tox GLOB sdist-make: .../zope.annotation/setup.py py26 sdist-reinst: .../zope.annotation/.tox/dist/zope.annotation-4.0.2dev.zip py26 runtests: commands[0] ... Doctest summary =============== 54 tests 0 failures in tests 0 failures in setup code 0 failures in cleanup code build succeeded. ___________________________________ summary ____________________________________ py26: commands succeeded py27: commands succeeded py33: commands succeeded py34: commands succeeded pypy: commands succeeded nobtree: commands succeeded coverage: commands succeeded docs: commands succeeded congratulations :) Contributing to :mod:`zope.annotation` ###################################### Submitting a Bug Report ----------------------- :mod:`zope.annotation` tracks its bugs on Github: https://github.com/zopefoundation/zope.annotation/issues Please submit bug reports and feature requests there. Sharing Your Changes -------------------- .. note:: Please ensure that all tests are passing before you submit your code. If possible, your submission should include new tests for new features or bug fixes, although it is possible that you may have tested your new code by updating existing tests. If you have made a change you would like to share, the best route is to fork the Github repository, check out your fork, make your changes on a branch in your fork, and push it. You can then submit a pull request from your branch: https://github.com/zopefoundation/zope.annotation/pulls If you branched the code from Launchpad using Bazaar, you have another option: you can "push" your branch to Launchpad: .. code-block:: sh $ bzr push lp:~tseaver/zope.annotation/cool_feature After pushing your branch, you can link it to a bug report on Github, or request that the maintainers merge your branch using the Launchpad "merge request" feature.
zope.annotation
/zope.annotation-5.0.tar.gz/zope.annotation-5.0/docs/hacking.rst
hacking.rst
Object Annotations ================== Annotation factories -------------------- There is more to document about annotations, but we'll just sketch out a scenario on how to use the annotation factory for now. This is one of the easiest ways to use annotations -- basically you can see them as persistent, writable adapters. .. testsetup:: from zope.testing import cleanup from zope.component import provideAdapter from zope.annotation.attribute import AttributeAnnotations cleanup.setUp() provideAdapter(AttributeAnnotations) First, let's make a persistent object we can create annotations for: .. doctest:: >>> from zope.interface import Interface >>> from zope.interface import implementer >>> class IFoo(Interface): ... pass >>> from zope.annotation.interfaces import IAttributeAnnotatable >>> @implementer(IFoo, IAttributeAnnotatable) ... class Foo(object): ... pass We directly say that :class:`Foo` implements :class:`~zope.annotation.interfacesIAttributeAnnotatable` here. In practice this is often done in ZCML, using the ``implements`` subdirective of the ``content`` or ``class`` directive. Now let's create an annotation for this: .. doctest:: >>> from zope.component import adapts >>> from zope.interface import Attribute >>> class IBar(Interface): ... a = Attribute('A') ... b = Attribute('B') >>> from zope import component >>> @implementer(IBar) ... class Bar(object): ... adapts(IFoo) ... def __init__(self): ... self.a = 1 ... self.b = 2 Note that the annotation implementation does not expect any arguments to its ``__init__``. Otherwise it's basically an adapter. Now, we'll register the annotation as an adapter. To do this we use the :func:`~.factory` function provided by ``zope.annotation``: .. doctest:: >>> from zope.component import provideAdapter >>> from zope.annotation import factory >>> provideAdapter(factory(Bar)) >>> from zope.component import provideAdapter >>> from zope.annotation.attribute import AttributeAnnotations >>> provideAdapter(AttributeAnnotations) Note that we do not need to specify what the adapter provides or what it adapts - we already do this on the annotation class itself. Now let's make an instance of ``Foo``, and make an annotation for it. .. doctest:: >>> foo = Foo() >>> bar = IBar(foo) >>> bar.a 1 >>> bar.b 2 We'll change ``a`` and get the annotation again. Our change is still there: .. doctest:: >>> bar.a = 3 >>> IBar(foo).a 3 Of course it's still different for another instance of ``Foo``: .. doctest:: >>> foo2 = Foo() >>> IBar(foo2).a 1 What if our annotation does not provide what it adapts with ``adapts``? It will complain: .. doctest:: >>> class IQux(Interface): ... pass >>> @implementer(IQux) ... class Qux(object): ... pass >>> provideAdapter(factory(Qux)) # doctest: +ELLIPSIS Traceback (most recent call last): ... TypeError: Missing 'zope.component.adapts' on annotation It's possible to provide an annotation with an explicit key. (If the key is not supplied, the key is deduced from the annotation's dotted name, provided it is a class.) .. doctest:: >>> class IHoi(Interface): ... pass >>> @implementer(IHoi) ... class Hoi(object): ... adapts(IFoo) >>> provideAdapter(factory(Hoi, 'my.unique.key')) >>> isinstance(IHoi(foo), Hoi) True Location -------- Annotation factories are put into the location hierarchy with their parent pointing to the annotated object and the name to the dotted name of the annotation's class (or the name the adapter was registered under): .. 
doctest:: >>> foo3 = Foo() >>> new_hoi = IHoi(foo3) >>> new_hoi.__parent__ <Foo object at 0x...> >>> new_hoi.__name__ 'my.unique.key' >>> import zope.location.interfaces >>> zope.location.interfaces.ILocation.providedBy(new_hoi) True Please notice, that our Hoi object does not implement ILocation, so a location proxy will be used. This has to be re-established every time we retrieve the object (Guard against former bug: proxy wasn't established when the annotation existed already.) .. doctest:: >>> old_hoi = IHoi(foo3) >>> old_hoi.__parent__ <Foo object at 0x...> >>> old_hoi.__name__ 'my.unique.key' >>> zope.location.interfaces.ILocation.providedBy(old_hoi) True LocationProxies --------------- Suppose your annotation proxy provides ILocation. .. doctest:: >>> class IPolloi(Interface): ... pass >>> @implementer(IPolloi, zope.location.interfaces.ILocation) ... class Polloi(object): ... adapts(IFoo) ... __name__ = __parent__ = 0 >>> provideAdapter(factory(Polloi, 'my.other.key')) Sometimes you're adapting an object wrapped in a LocationProxy. .. doctest:: >>> foo4 = Foo() >>> import zope.location.location >>> wrapped_foo4 = zope.location.location.LocationProxy(foo4, None, 'foo4') >>> located_polloi = IPolloi(wrapped_foo4) At first glance it looks as if located_polloi is located under wrapped_foo4. .. doctest:: >>> located_polloi.__parent__ is wrapped_foo4 True >>> located_polloi.__name__ 'my.other.key' but that's because we received a LocationProxy .. doctest:: >>> type(located_polloi).__name__ 'LocationProxy' If we unwrap located_polloi and look at it directly, we'll see it stores a reference to the real Foo object .. doctest:: >>> from zope.proxy import removeAllProxies >>> removeAllProxies(located_polloi).__parent__ == foo4 True >>> removeAllProxies(located_polloi).__name__ 'my.other.key' .. testcleanup:: from zope.testing import cleanup cleanup.tearDown()
zope.annotation
/zope.annotation-5.0.tar.gz/zope.annotation-5.0/docs/narrative.rst
narrative.rst
import os, shutil, sys, tempfile from optparse import OptionParser tmpeggs = tempfile.mkdtemp() usage = '''\ [DESIRED PYTHON FOR BUILDOUT] bootstrap.py [options] Bootstraps a buildout-based project. Simply run this script in a directory containing a buildout.cfg, using the Python that you want bin/buildout to use. Note that by using --setup-source and --download-base to point to local resources, you can keep this script from going over the network. ''' parser = OptionParser(usage=usage) parser.add_option("-v", "--version", help="use a specific zc.buildout version") parser.add_option("-t", "--accept-buildout-test-releases", dest='accept_buildout_test_releases', action="store_true", default=False, help=("Normally, if you do not specify a --version, the " "bootstrap script and buildout gets the newest " "*final* versions of zc.buildout and its recipes and " "extensions for you. If you use this flag, " "bootstrap and buildout will get the newest releases " "even if they are alphas or betas.")) parser.add_option("-c", "--config-file", help=("Specify the path to the buildout configuration " "file to be used.")) parser.add_option("-f", "--find-links", help=("Specify a URL to search for buildout releases")) options, args = parser.parse_args() ###################################################################### # load/install distribute to_reload = False try: import pkg_resources, setuptools if not hasattr(pkg_resources, '_distribute'): to_reload = True raise ImportError except ImportError: ez = {} try: from urllib.request import urlopen except ImportError: from urllib2 import urlopen exec(urlopen('http://python-distribute.org/distribute_setup.py').read(), ez) setup_args = dict(to_dir=tmpeggs, download_delay=0, no_fake=True) ez['use_setuptools'](**setup_args) if to_reload: reload(pkg_resources) import pkg_resources # This does not (always?) update the default working set. We will # do it. for path in sys.path: if path not in pkg_resources.working_set.entries: pkg_resources.working_set.add_entry(path) ###################################################################### # Install buildout ws = pkg_resources.working_set cmd = [sys.executable, '-c', 'from setuptools.command.easy_install import main; main()', '-mZqNxd', tmpeggs] find_links = os.environ.get( 'bootstrap-testing-find-links', options.find_links or ('http://downloads.buildout.org/' if options.accept_buildout_test_releases else None) ) if find_links: cmd.extend(['-f', find_links]) distribute_path = ws.find( pkg_resources.Requirement.parse('distribute')).location requirement = 'zc.buildout' version = options.version if version is None and not options.accept_buildout_test_releases: # Figure out the most recent final version of zc.buildout. 
import setuptools.package_index _final_parts = '*final-', '*final' def _final_version(parsed_version): for part in parsed_version: if (part[:1] == '*') and (part not in _final_parts): return False return True index = setuptools.package_index.PackageIndex( search_path=[distribute_path]) if find_links: index.add_find_links((find_links,)) req = pkg_resources.Requirement.parse(requirement) if index.obtain(req) is not None: best = [] bestv = None for dist in index[req.project_name]: distv = dist.parsed_version if _final_version(distv): if bestv is None or distv > bestv: best = [dist] bestv = distv elif distv == bestv: best.append(dist) if best: best.sort() version = best[-1].version if version: requirement = '=='.join((requirement, version)) cmd.append(requirement) import subprocess if subprocess.call(cmd, env=dict(os.environ, PYTHONPATH=distribute_path)) != 0: raise Exception( "Failed to execute command:\n%s", repr(cmd)[1:-1]) ###################################################################### # Import and run buildout ws.add_entry(tmpeggs) ws.require(requirement) import zc.buildout.buildout if not [a for a in args if '=' not in a]: args.append('bootstrap') # if -c was provided, we push it back into args for buildout' main function if options.config_file is not None: args[0:0] = ['-c', options.config_file] zc.buildout.buildout.main(args) shutil.rmtree(tmpeggs)
zope.apidoc
/zope.apidoc-2.0.0a1.zip/zope.apidoc-2.0.0a1/bootstrap.py
bootstrap.py
"""Component Inspection Utilities """ import base64 import six import types import zope.interface.declarations from zope.component import getGlobalSiteManager from zope.component.interfaces import IFactory from zope.component.registry import ( AdapterRegistration, HandlerRegistration, SubscriptionRegistration, UtilityRegistration) from zope.i18nmessageid import ZopeMessageFactory as _ from zope.interface import Interface from zope.interface.interface import InterfaceClass from zope.publisher.interfaces import IRequest from zope.apidoc._compat import unicode from zope.apidoc.classregistry import classRegistry from zope.apidoc.utilities import relativizePath, truncateSysPath from zope.apidoc.utilities import getPythonPath, isReferencable, renderText SPECIFIC_INTERFACE_LEVEL = 1 EXTENDED_INTERFACE_LEVEL = 2 GENERIC_INTERFACE_LEVEL = 4 def encodeUtilityName(name): return base64.urlsafe_b64encode(name.encode('utf-8')).decode() def _adapterishRegistrations(registry): for r in registry.registeredAdapters(): yield r for r in registry.registeredSubscriptionAdapters(): yield r for r in registry.registeredHandlers(): yield r def getRequiredAdapters(iface, withViews=False): """Get adapter registrations where the specified interface is required.""" gsm = getGlobalSiteManager() for reg in _adapterishRegistrations(gsm): # Ignore adapters that have no required interfaces if len(reg.required) == 0: continue # Ignore views if not withViews and reg.required[-1].isOrExtends(IRequest): continue # Only get the adapters for which this interface is required for required_iface in reg.required: if iface.isOrExtends(required_iface): yield reg def getProvidedAdapters(iface, withViews=False): """Get adapter registrations where this interface is provided.""" gsm = getGlobalSiteManager() for reg in _adapterishRegistrations(gsm): # Only get adapters # Ignore adapters that have no required interfaces if len(reg.required) == 0: continue # Ignore views if not withViews and reg.required[-1] and \ reg.required[-1].isOrExtends(IRequest): continue # Only get adapters for which this interface is provided if reg.provided is None or not reg.provided.isOrExtends(iface): continue yield reg def filterAdapterRegistrations(regs, iface, level=SPECIFIC_INTERFACE_LEVEL): """Return only those registrations that match the specifed level""" for reg in regs: if level & GENERIC_INTERFACE_LEVEL: for required_iface in reg.required: if required_iface in (Interface, None): yield reg continue if level & EXTENDED_INTERFACE_LEVEL: for required_iface in reg.required: if required_iface is not Interface and \ iface.extends(required_iface): yield reg continue if level & SPECIFIC_INTERFACE_LEVEL: for required_iface in reg.required: if required_iface is iface: yield reg continue def getClasses(iface): """Get the classes that implement this interface.""" return classRegistry.getClassesThatImplement(iface) def getFactories(iface): """Return the factory registrations, who will return objects providing this interface.""" gsm = getGlobalSiteManager() for reg in gsm.registeredUtilities(): if reg.provided is not IFactory: continue interfaces = reg.component.getInterfaces() try: if interfaces.isOrExtends(iface): yield reg except AttributeError: for interface in interfaces: if interface.isOrExtends(iface): yield reg break def getUtilities(iface): """Return all utility registrations that provide the interface.""" gsm = getGlobalSiteManager() for reg in gsm.registeredUtilities(): if reg.provided.isOrExtends(iface): yield reg def getRealFactory(factory): """Get the 
real factory. Sometimes the original factory is masked by functions. If the function keeps track of the original factory, use it. """ # Remove all wrappers until none are found anymore. while hasattr(factory, 'factory'): factory = factory.factory # If we have an instance, return its class if not hasattr(factory, '__name__'): return factory.__class__ return factory def getParserInfoInfoDictionary(info): """Return a PT-friendly info dictionary for a parser info object.""" return {'file': relativizePath(info.file), 'url': truncateSysPath(info.file).replace('\\', '/'), 'line': info.line, 'eline': info.eline, 'column': info.column, 'ecolumn': info.ecolumn} def getInterfaceInfoDictionary(iface): """Return a PT-friendly info dictionary for an interface.""" if isinstance(iface, zope.interface.declarations.Implements): iface = iface.inherit if iface is None: return None return {'module': getattr(iface, '__module__', _('<unknown>')), 'name': getattr(iface, '__name__', _('<unknown>'))} def getInterfaceInfoDictionary(iface): """Return a PT-friendly info dictionary for an interface.""" if isinstance(iface, zope.interface.declarations.Implements): iface = iface.inherit if iface is None: return None return {'module': getattr(iface, '__module__', _('<unknown>')), 'name': getattr(iface, '__name__', _('<unknown>'))} def getTypeInfoDictionary(type): """Return a PT-friendly info dictionary for a type.""" path = getPythonPath(type) return {'name': type.__name__, 'module': type.__module__, 'url': isReferencable(path) and path.replace('.', '/') or None} def getSpecificationInfoDictionary(spec): """Return an info dictionary for one specification.""" info = {'isInterface': False, 'isType': False} if zope.interface.interfaces.IInterface.providedBy(spec): info.update(getInterfaceInfoDictionary(spec)) info['isInterface'] = True else: info.update(getTypeInfoDictionary(spec.inherit)) info['isType'] = True return info def getAdapterInfoDictionary(reg): """Return a PT-friendly info dictionary for an adapter registration.""" factory = getRealFactory(reg.factory) path = getPythonPath(factory) url = None if isReferencable(path): url = path.replace('.', '/') if isinstance(reg.info, (str, unicode)): doc = reg.info zcml = None else: doc = None zcml = getParserInfoInfoDictionary(reg.info) return { 'provided': getInterfaceInfoDictionary(reg.provided), 'required': [getSpecificationInfoDictionary(iface) for iface in reg.required if iface is not None], 'name': unicode(getattr(reg, 'name', u'')), 'factory': path, 'factory_url': url, 'doc': doc, 'zcml': zcml} def getFactoryInfoDictionary(reg): """Return a PT-friendly info dictionary for a factory.""" factory = reg.component callable = factory # Usually only zope.component.factory.Factory instances have this attribute if IFactory.providedBy(factory) and hasattr(factory, '_callable'): callable = factory._callable elif hasattr(callable, '__class__'): callable = callable.__class__ path = getPythonPath(callable) return {'name': unicode(reg.name) or _('<i>no name</i>'), 'title': getattr(factory, 'title', u''), 'description': renderText(getattr(factory, 'description', u''), module=callable.__module__), 'url': isReferencable(path) and path.replace('.', '/') or None} def getUtilityInfoDictionary(reg): """Return a PT-friendly info dictionary for a factory.""" component = reg.component # Check whether we have an instance of some custom type or not # Unfortunately, a lot of utilities have a `__name__` attribute, so we # cannot simply check for its absence # TODO: Once we support passive display 
of instances, this insanity can go # away. if not isinstance(component, (types.MethodType, types.FunctionType, InterfaceClass)+six.class_types): component = getattr(component, '__class__', component) path = getPythonPath(component) # provided interface id iface_id = '%s.%s' % (reg.provided.__module__, reg.provided.getName()) # Determine the URL if isinstance(component, InterfaceClass): url = 'Interface/%s' % path else: url = None if isReferencable(path): url = 'Code/%s' % path.replace('.', '/') return {'name': unicode(reg.name) or _('<i>no name</i>'), 'url_name': encodeUtilityName(reg.name or '__noname__'), 'iface_id': iface_id, 'path': path, 'url': url}
zope.apidoc
/zope.apidoc-2.0.0a1.zip/zope.apidoc-2.0.0a1/src/zope/apidoc/component.py
component.py
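A sketch of how the registry helpers above might be driven. It registers one throwaway utility so there is something to find; the interface and names are invented, and zope.apidoc with its dependencies is assumed to be importable in the (Python 2 era) environment it targets.

from zope.component import provideUtility
from zope.interface import Interface, implementer
from zope.apidoc.component import getUtilities, getRequiredAdapters

class IGreeter(Interface):
    """Marker for things that can greet."""

@implementer(IGreeter)
class Greeter(object):
    pass

provideUtility(Greeter(), IGreeter, name='default')

# Every utility registration whose provided interface is (or extends) IGreeter.
for reg in getUtilities(IGreeter):
    print(reg.name, reg.provided)

# Adapter registrations that *require* IGreeter; views are skipped by default.
print(list(getRequiredAdapters(IGreeter)))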
======================== Zope 3 API Documentation ======================== This Zope 3 package provides fully dynamic API documentation of Zope 3 and registered add-on components. The package is very extensible and can be easily extended by implementing new modules. Besides being an application, the API doctool also provides several public APIs to extract information from various objects used by Zope 3. * utilities -- Miscellaneous classes and functions that aid all documentation modules. They are broadly usable. * interface -- This module contains functions to inspect interfaces and schemas. * component -- This module provides utility functions to look up components given an interface. * presentation -- Presentation components are generally more complex than others, so a separate utilities module is provided to inspect views. * classregistry -- Here a simple dictionary-based registry for all known classes is provided. It allows us to search in classes.
zope.apidoc
/zope.apidoc-2.0.0a1.zip/zope.apidoc-2.0.0a1/src/zope/apidoc/README.txt
README.txt
"""Interface Inspection Utilities """ import inspect from zope.interface import Interface, providedBy from zope.interface.interfaces import IInterface, ISpecification from zope.interface.interfaces import IElement, IAttribute, IMethod from zope.schema.interfaces import IField from zope.apidoc.utilities import getPythonPath, renderText, getDocFormat def getElements(iface, type=IElement): """Return a dictionary containing the elements in an interface. The type specifies whether we are looking for attributes or methods.""" items = {} for name in iface: attr = iface[name] if type.providedBy(attr): items[name] = attr return items def getFieldsInOrder(iface, _itemkey=lambda x: x[1].order): """Return a list of (name, field) tuples in native interface order.""" return sorted(getElements(iface, IField).items(), key=_itemkey) def getAttributes(iface): """Returns a list of attributes specified in the interface.""" return [(name, attr) for name, attr in getElements(iface, IAttribute).items() if not (IField.providedBy(attr) or IMethod.providedBy(attr))] def getMethods(iface): """Returns a list of methods specified in the interface.""" return getElements(iface, IMethod).items() def getFields(iface): """Returns a list of fields specified in the interface.""" return getFieldsInOrder(iface) def getInterfaceTypes(iface): """Return a list of interface types that are specified for this interface. Note that you should only expect one type at a time. """ types = list(providedBy(iface).flattened()) # Remove interfaces provided by every interface instance types.remove(ISpecification) types.remove(IElement) types.remove(Interface) # Remove interface provided by every interface type types.remove(IInterface) return types def getFieldInterface(field): """Return the interface representing the field.""" name = field.__class__.__name__ field_iface = None ifaces = tuple(providedBy(field).flattened()) for iface in ifaces: # All field interfaces implement `IField`. In case the name match # below does not work, use the first `IField`-based interface found if field_iface is None and iface.extends(IField): field_iface = iface # Usually fields have interfaces with the same name (with an 'I') if iface.getName() == 'I' + name: return iface # If not even a `IField`-based interface was found, return the first # interface of the implemented interfaces list. 
return field_iface or ifaces[0] def _getDocFormat(attr): module = inspect.getmodule(attr.interface) return getDocFormat(module) def getAttributeInfoDictionary(attr, format=None): """Return a page-template-friendly information dictionary.""" format = format or _getDocFormat(attr) return {'name': attr.getName(), 'doc': renderText(attr.getDoc() or u'', format=format)} def getMethodInfoDictionary(method, format=None): """Return a page-template-friendly information dictionary.""" format = format or _getDocFormat(method) return {'name': method.getName(), 'signature': method.getSignatureString(), 'doc': renderText(method.getDoc() or u'', format=format)} def getFieldInfoDictionary(field, format=None): """Return a page-template-friendly information dictionary.""" format = format or _getDocFormat(field) info = {'name': field.getName(), 'required': field.required, 'required_string': field.required and u'required' or u'optional', 'default': repr(field.default), 'title': field.title} # Determine the interface of the field iface = getFieldInterface(field) info['iface'] = {'name': iface.getName(), 'id': getPythonPath(iface)} # Determine the field class class_ = field.__class__ info['class'] = {'name': class_.__name__, 'path': getPythonPath(class_).replace('.', '/')} # Render the field description info['description'] = renderText(field.description or u'', format=format) return info
zope.apidoc
/zope.apidoc-2.0.0a1.zip/zope.apidoc-2.0.0a1/src/zope/apidoc/interface.py
interface.py
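A small sketch of the interface inspection helpers, using an invented IBook interface. Schema fields are also attributes, which is why getAttributes() excludes IField and IMethod providers; zope.schema is assumed to be available.

from zope.interface import Interface, Attribute
from zope.schema import TextLine
from zope.apidoc.interface import getAttributes, getMethods, getFieldsInOrder

class IBook(Interface):
    title = TextLine(title=u'Title')        # a schema field
    pages = Attribute('Number of pages')    # a plain attribute

    def open(page):
        """Open the book at the given page."""

print([name for name, attr in getAttributes(IBook)])       # ['pages']
print([name for name, meth in getMethods(IBook)])          # ['open']
print([name for name, field in getFieldsInOrder(IBook)])   # ['title']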
"""Utilties to make the life of Documentation Modules easier. """ __docformat__ = 'restructuredtext' import re import sys import types import inspect from os.path import dirname from zope.component import createObject, getMultiAdapter from zope.interface import implements, implementedBy from zope.publisher.browser import TestRequest from zope.security.checker import getCheckerForInstancesOf, Global from zope.security.interfaces import INameBasedChecker from zope.security.proxy import isinstance, removeSecurityProxy import zope.i18nmessageid from zope.container.interfaces import IReadContainer from zope.apidoc._compat import unicode, MethodType, PY3 from zope.apidoc.classregistry import safe_import, IGNORE_MODULES _ = zope.i18nmessageid.MessageFactory("zope") _remove_html_overhead = re.compile( r'(?sm)^<html.*<body.*?>\n(.*)</body>\n</html>\n') space_re = re.compile('\n^( *)\S', re.M) _marker = object() BASEDIR = dirname(dirname(dirname(zope.__file__))) def relativizePath(path): return path.replace(BASEDIR, 'Zope3') def truncateSysPath(path): """Remove the system path prefix from the path.""" for syspath in sys.path: if path.startswith(syspath): return path.replace(syspath, '')[1:] return path def getPythonPath(obj): """Return the path of the object in standard Python notation. This method should try very hard to return a string, even if it is not a valid Python path. """ if obj is None: return None # Even for methods `im_class` and `__module__` is not allowed to be # accessed (which is probably not a bad idea). So, we remove the security # proxies for this check. naked = removeSecurityProxy(obj) name = naked.__name__ if hasattr(naked, "im_class"): naked = naked.im_class name = naked.__name__ # Py3 version: if PY3 and isinstance(naked, types.FunctionType): name = naked.__qualname__.split('.')[0] module = getattr(naked, '__module__', _marker) if module is _marker: return name return '%s.%s' %(module, name) def isReferencable(path): """Return whether the Python path is referencable.""" # Sometimes no path exists, so make a simple check first; example: None if path is None: return False # There are certain paths that we do not want to reference, most often # because they are outside the scope of this documentation for exclude_name in IGNORE_MODULES: if path.startswith(exclude_name): return False split_path = path.rsplit('.', 1) if len(split_path) == 2: module_name, obj_name = split_path else: module_name, obj_name = split_path[0], None # Do not allow private attributes to be accessible if (obj_name is not None and obj_name.startswith('_') and not (obj_name.startswith('__') and obj_name.endswith('__'))): return False module = safe_import(module_name) if module is None: return False # If the module imported correctly and no name is provided, then we are # all good. 
if obj_name is None: return True obj = getattr(module, obj_name, _marker) if obj is _marker: return False # Detect singeltons; those are not referencable in apidoc (yet) if hasattr(obj, '__class__') and getPythonPath(obj.__class__) == path: return False return True def _evalId(id): if isinstance(id, Global): id = id.__name__ if id == 'CheckerPublic': id = 'zope.Public' return id def getPermissionIds(name, checker=_marker, klass=_marker): """Get the permissions of an attribute.""" assert (klass is _marker) != (checker is _marker) entry = {} if klass is not _marker: checker = getCheckerForInstancesOf(klass) if checker is not None and INameBasedChecker.providedBy(checker): entry['read_perm'] = _evalId(checker.permission_id(name)) \ or _('n/a') entry['write_perm'] = _evalId(checker.setattr_permission_id(name)) \ or _('n/a') else: entry['read_perm'] = entry['write_perm'] = None return entry def getFunctionSignature(func): """Return the signature of a function or method.""" if not isinstance(func, (types.FunctionType, types.MethodType)): raise TypeError("func must be a function or method") args, varargs, varkw, defaults = inspect.getargspec(func) placeholder = object() sig = '(' # By filling up the default tuple, we now have equal indeces for args and # default. if defaults is not None: defaults = (placeholder,)*(len(args)-len(defaults)) + defaults else: defaults = (placeholder,)*len(args) str_args = [] for name, default in zip(args, defaults): # Neglect self, since it is always there and not part of the signature. # This way the implementation and interface signatures should match. if name == 'self' and type(func) == MethodType: continue # Make sure the name is a string if isinstance(name, (tuple, list)): name = '(' + ', '.join(name) + ')' elif not isinstance(name, str): name = repr(name) if default is placeholder: str_args.append(name) else: str_args.append(name + '=' + repr(default)) if varargs: str_args.append('*'+varargs) if varkw: str_args.append('**'+varkw) sig += ', '.join(str_args) return sig + ')' def getPublicAttributes(obj): """Return a list of public attribute names.""" attrs = [] for attr in dir(obj): if attr.startswith('_'): continue try: getattr(obj, attr) except AttributeError: continue attrs.append(attr) return attrs def getInterfaceForAttribute(name, interfaces=_marker, klass=_marker, asPath=True): """Determine the interface in which an attribute is defined.""" if (interfaces is _marker) and (klass is _marker): raise ValueError("need to specify interfaces or klass") if (interfaces is not _marker) and (klass is not _marker): raise ValueError("must specify only one of interfaces and klass") if interfaces is _marker: direct_interfaces = list(implementedBy(klass)) interfaces = {} for interface in direct_interfaces: interfaces[interface] = 1 for base in interface.getBases(): interfaces[base] = 1 interfaces = interfaces.keys() for interface in interfaces: if name in interface.names(): if asPath: return getPythonPath(interface) return interface return None def columnize(entries, columns=3): """Place a list of entries into columns.""" if len(entries) % columns == 0: per_col = len(entries) // columns last_full_col = columns else: per_col = len(entries) // columns + 1 last_full_col = len(entries) % columns columns = [] col = [] in_col = 0 for entry in entries: if in_col < per_col - int(len(columns)+1 > last_full_col): col.append(entry) in_col += 1 else: columns.append(col) col = [entry] in_col = 1 if col: columns.append(col) return columns _format_dict = { 'plaintext': 
'zope.source.plaintext', 'structuredtext': 'zope.source.stx', 'restructuredtext': 'zope.source.rest' } def getDocFormat(module): """Convert a module's __docformat__ specification to a renderer source id""" format = getattr(module, '__docformat__', 'structuredtext').lower() # The format can also contain the language, so just get the first part format = format.split(' ')[0] return _format_dict.get(format, 'zope.source.stx') def dedentString(text): """Dedent the docstring, so that docutils can correctly render it.""" dedent = min([len(match) for match in space_re.findall(text)] or [0]) return re.compile('\n {%i}' % dedent, re.M).sub('\n', text) def renderText(text, module=None, format=None, dedent=True): if not text: return u'' if module is not None: if isinstance(module, (str, unicode)): module = sys.modules.get(module, None) if format is None: format = getDocFormat(module) if format is None: format = 'zope.source.rest' assert format in _format_dict.values() text = dedentString(text) if not isinstance(text, unicode): text = text.decode('latin-1', 'replace') source = createObject(format, text) renderer = getMultiAdapter((source, TestRequest())) return renderer.render()
zope.apidoc
/zope.apidoc-2.0.0a1.zip/zope.apidoc-2.0.0a1/src/zope/apidoc/utilities.py
utilities.py
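Two of the helpers in action, as a hedged sketch with an invented function. getFunctionSignature() relies on inspect.getargspec, so it only understands classic positional and keyword arguments.

from zope.apidoc.utilities import getPythonPath, getFunctionSignature

def fetch(url, timeout=30, *args, **kw):
    """Toy function used only to exercise the helpers."""

print(getPythonPath(fetch))          # e.g. '__main__.fetch'
print(getFunctionSignature(fetch))   # '(url, timeout=30, *args, **kw)'
print(getPythonPath(None))           # None is handled explicitly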
"""Views/Presentation Utilities """ import six from zope.browserresource.icon import IconViewFactory from zope.component import getGlobalSiteManager from zope.component.registry import AdapterRegistration from zope.i18nmessageid import ZopeMessageFactory as _ from zope.interface import Interface from zope.publisher.interfaces import IRequest from zope.publisher.interfaces.browser import IBrowserRequest from zope.publisher.interfaces.xmlrpc import IXMLRPCRequest from zope.publisher.interfaces.http import IHTTPRequest from zope.publisher.interfaces.ftp import IFTPRequest from zope.apidoc._compat import unicode from zope.apidoc.utilities import getPythonPath, relativizePath from zope.apidoc.utilities import getPermissionIds from zope.apidoc.component import getParserInfoInfoDictionary from zope.apidoc.component import getInterfaceInfoDictionary SPECIFIC_INTERFACE_LEVEL = 1 EXTENDED_INTERFACE_LEVEL = 2 GENERIC_INTERFACE_LEVEL = 4 BROWSER_DIRECTIVES_MODULE = 'zope.browserpage.viewmeta' XMLRPC_DIRECTIVES_MODULE = 'zope.app.publisher.xmlrpc.metaconfigure' JSONRPC_DIRECTIVES_MODULE = 'jsonserver.metaconfigure' def getViewFactoryData(factory): """Squeeze some useful information out of the view factory""" info = {'path': None, 'url': None, 'template': None, 'resource': None, 'referencable': True} # Always determine the most basic factory # Commonly, factories are wrapped to provide security or location, for # example. If those wrappers play nice, then they provide a `factory` # attribute, that points to the original factory. while hasattr(factory, 'factory'): factory = factory.factory if hasattr(factory, '__name__') and \ factory.__name__.startswith('SimpleViewClass'): # In the case of a SimpleView, the base is really what we are # interested in. Usually the first listed class is the interesting one. base = factory.__bases__[0] info['path'] = base.__module__ + '.' + base.__name__ info['template'] = relativizePath(factory.index.filename) info['template_obj'] = factory.index # Basic Type is a factory elif isinstance(factory, (bytes, unicode, float, int, list, tuple)): info['referencable'] = False elif factory.__module__ is not None and \ factory.__module__.startswith(BROWSER_DIRECTIVES_MODULE): info['path'] = getPythonPath(factory.__bases__[0]) # XML-RPC view factory, generated during registration elif factory.__module__ is not None and \ factory.__module__.startswith(XMLRPC_DIRECTIVES_MODULE): # Those factories are method publisher and security wrapped info['path'] = getPythonPath(factory.__bases__[0].__bases__[0]) # JSON-RPC view factory, generated during registration # This is needed for the 3rd party jsonserver implementation # TODO: See issue http://www.zope.org/Collectors/Zope3-dev/504, ri elif factory.__module__ is not None and \ factory.__module__.startswith(JSONRPC_DIRECTIVES_MODULE): # Those factories are method publisher and security wrapped info['path'] = getPythonPath(factory.__bases__[0].__bases__[0]) # A factory that is a class instance; since we cannot reference instances, # reference the class. elif not hasattr(factory, '__name__'): info['path'] = getPythonPath(factory.__class__) # A simple class-based factory elif isinstance(factory, six.class_types): info['path'] = getPythonPath(factory) # We have tried our best; just get the Python path as good as you can. 
else: info['path'] = getPythonPath(factory) if info['referencable']: info['url'] = info['path'].replace('.', '/') if isinstance(factory, IconViewFactory): info['resource'] = factory.rname return info def getPresentationType(iface): """Get the presentation type from a layer interface.""" # Note that the order of the requests matters here, since we want to # inspect the most specific one first. For example, IBrowserRequest is also # an IHTTPRequest. for type in [IBrowserRequest, IXMLRPCRequest, IHTTPRequest, IFTPRequest]: if iface.isOrExtends(type): return type return iface def getViews(iface, type=IRequest): """Get all view registrations for a particular interface.""" gsm = getGlobalSiteManager() for reg in gsm.registeredAdapters(): if (len(reg.required) > 0 and reg.required[-1] is not None and reg.required[-1].isOrExtends(type)): for required_iface in reg.required[:-1]: if required_iface is None or iface.isOrExtends(required_iface): yield reg def filterViewRegistrations(regs, iface, level=SPECIFIC_INTERFACE_LEVEL): """Return only those registrations that match the specifed level""" for reg in regs: if level & GENERIC_INTERFACE_LEVEL: for required_iface in reg.required[:-1]: if required_iface in (Interface, None): yield reg continue if level & EXTENDED_INTERFACE_LEVEL: for required_iface in reg.required[:-1]: if required_iface is not Interface and \ iface.extends(required_iface): yield reg continue if level & SPECIFIC_INTERFACE_LEVEL: for required_iface in reg.required[:-1]: if required_iface is iface: yield reg continue def getViewInfoDictionary(reg): """Build up an information dictionary for a view registration.""" # get configuration info if isinstance(reg.info, (str, unicode)): doc = reg.info zcml = None else: doc = None zcml = getParserInfoInfoDictionary(reg.info) info = {'name' : unicode(reg.name) or _('<i>no name</i>'), 'type' : getPythonPath(getPresentationType(reg.required[-1])), 'factory' : getViewFactoryData(reg.factory), 'required': [getInterfaceInfoDictionary(iface) for iface in reg.required], 'provided' : getInterfaceInfoDictionary(reg.provided), 'doc': doc, 'zcml': zcml } # Educated guess of the attribute name info.update(getPermissionIds('publishTraverse', klass=reg.factory)) return info
zope.apidoc
/zope.apidoc-2.0.0a1.zip/zope.apidoc-2.0.0a1/src/zope/apidoc/presentation.py
presentation.py
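A hedged end-to-end sketch of how the helpers in presentation.py above fit together, run against the global component registry. ``IDocument``, ``DocumentView`` and the ``index.html`` name are made up for illustration, and the snippet assumes ``zope.apidoc`` and its dependencies are importable::

    from zope import component
    from zope.interface import Interface
    from zope.publisher.interfaces.browser import IBrowserRequest

    from zope.apidoc.presentation import (
        getViews, filterViewRegistrations, getViewInfoDictionary,
        SPECIFIC_INTERFACE_LEVEL, GENERIC_INTERFACE_LEVEL)

    class IDocument(Interface):
        """Hypothetical content interface."""

    class DocumentView(object):
        """Hypothetical browser view factory."""
        def __init__(self, context, request):
            self.context = context
            self.request = request

    # Register a browser view for IDocument under the name 'index.html'.
    component.provideAdapter(DocumentView, (IDocument, IBrowserRequest),
                             Interface, name='index.html')

    # Collect all views for IDocument, keep only those registered directly
    # for it (or generically for Interface), and build the info dictionaries
    # that the apidoc page templates consume.
    regs = getViews(IDocument, IBrowserRequest)
    level = SPECIFIC_INTERFACE_LEVEL | GENERIC_INTERFACE_LEVEL
    for reg in filterViewRegistrations(regs, IDocument, level=level):
        info = getViewInfoDictionary(reg)
        print(info['name'], info['type'], info['factory']['path'])

Because the three interface levels are disjoint bit flags, they can be combined with a bitwise OR, as above, to widen the filter.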
========== Overview ========== .. currentmodule:: zope.app.apidoc.apidoc This Zope 3 package provides fully dynamic API documentation of Zope 3 and registered add-on components. The package is very extensible and can be easily extended by implementing new modules. Besides being an application, the API doctool also provides several public APIs to extract information from various objects used by Zope 3. * utilities -- Miscellaneous classes and functions that aid all documentation modules. They are broadly usable. * interface -- This module contains functions to inspect interfaces and schemas. * component -- This module provides utility functions to look up components given an interface. * presentation -- Presentation components are generally more complex than others, so a separate utilities module is provided to inspect views. * classregistry -- Here a simple dictionary-based registry for all known classes is provided. It allows us to search in classes. Using the API Documentation =========================== The :class:`APIDocumentation` class provides access to all available documentation modules. Documentation modules are utilities providing :class:`~zope.app.apidoc.interfaces.IDocumentationModule`: >>> from zope import component as ztapi >>> from zope.app.apidoc.interfaces import IDocumentationModule >>> from zope.app.apidoc.ifacemodule.ifacemodule import InterfaceModule >>> from zope.app.apidoc.zcmlmodule import ZCMLModule >>> ztapi.provideUtility(InterfaceModule(), IDocumentationModule, ... 'Interface') >>> ztapi.provideUtility(ZCMLModule(), IDocumentationModule, 'ZCML') Now we can instantiate the class (which is usually done when traversing '++apidoc++') and get a list of available modules: >>> from zope.app.apidoc.apidoc import APIDocumentation >>> doc = APIDocumentation(None, '++apidoc++') >>> modules = sorted(doc.keys()) >>> modules ['Interface', 'ZCML'] >>> doc['ZCML'] <zope.app.apidoc.zcmlmodule.ZCMLModule 'ZCML' at ...> Developing a Module =================== 1. Implement a class that realizes the :class:`~zope.app.apidoc.interfaces.IDocumentationModule` interface. 2. Register this class as a utility using something like this:: <utility provides="zope.app.apidoc.interfaces.IDocumentationModule" factory=".examplemodule.ExampleModule" name="Example" /> 3. Take care of security by allowing at least :class:`~zope.app.apidoc.interfaces.IDocumentationModule`:: <class class=".ExampleModule"> <allow interface="zope.app.apidoc.interfaces.IDocumentationModule" /> </class> 4. Provide a browser view called ``menu.html``. 5. Provide another view, usually ``index.html``, that can show the details for the various menu items. Note: There are several modules that come with the product. Just look in them for some guidance; a small Python sketch of steps 1 and 2 also follows this file's entry below. New Static APIDOC-Version ========================= An alternative APIDOC version is available through ``++apidoc++/static.html``. Find and Tree are implemented in JavaScript, so it should be possible to build an offline version of APIDOC with "wget". In fact, this package comes with a somewhat smarter version of "wget" that can load a Zope configuration and export the documentation. For more information, see :doc:`static`.
zope.app.apidoc
/zope.app.apidoc-5.0-py3-none-any.whl/zope/app/apidoc/README.rst
README.rst
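A minimal Python sketch of steps 1 and 2 from the "Developing a Module" list above. It assumes the conventional ``title``/``description`` attributes and the read-container style used by the bundled modules; consult :class:`~zope.app.apidoc.interfaces.IDocumentationModule` for the authoritative contract::

    from zope import component
    from zope.interface import implementer
    from zope.app.apidoc.interfaces import IDocumentationModule
    from zope.app.apidoc.utilities import ReadContainerBase

    @implementer(IDocumentationModule)
    class ExampleModule(ReadContainerBase):
        """A do-nothing documentation module."""

        title = 'Example'
        description = 'Demonstrates the apidoc plug-in points.'

        def get(self, key, default=None):
            # Look up and return the documentation item named ``key`` here.
            return default

        def items(self):
            # Return the (name, item) pairs that populate the module menu.
            return []

    # The Python equivalent of the <utility /> directive shown above.
    component.provideUtility(ExampleModule(), IDocumentationModule, 'Example')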
========================= Miscellaneous Utilities ========================= .. currentmodule:: zope.app.apidoc.utilities The utilities module provides some useful helper functions and classes that make the work of the API doctool and inspection code easier. >>> from zope.app.apidoc import utilities :func:`relativizePath` ====================== When dealing with files, such as page templates and text files, and not with Python paths, it is necessary to keep track of the absolute path of the file. However, for presentation purposes, the absolute path is inappropriate and we are commonly interested in the path starting at the Zope 3 root directory. This function attempts to remove the absolute path to the root directory and replaces it with "Zope3". >>> import os >>> path = os.path.join(os.path.dirname(utilities.__file__), 'README.txt') >>> path = utilities.relativizePath(path) >>> path.replace('\\', '/') # Be kind to Windows users 'Zope3/zope/app/apidoc/README.txt' If the base path is not found in a particular path, the original path is returned: >>> otherpath = 'foo/bar/blah.txt' >>> utilities.relativizePath(otherpath) 'foo/bar/blah.txt' :func:`truncateSysPath` ========================= In some cases it is useful to know just the part of a path that follows the system path of a module. For example, you have the path of a file in a module. To look up the module, the simplest thing to do is to retrieve the module path and look into the system's modules list. >>> import sys >>> sysBase = sys.path[0] >>> utilities.truncateSysPath(sysBase + '/some/module/path') 'some/module/path' If there is no matching system path, then the whole path is returned: >>> utilities.truncateSysPath('some/other/path') 'some/other/path' :class:`ReadContainerBase` ========================== This class serves as a base class for :class:`zope.container.interfaces.IReadContainer` objects that minimizes the implementation of an ``IReadContainer`` to two methods, ``get()`` and ``items()``, since the other methods can be implemented using these two. Note that this implementation might be very expensive for certain containers, especially if collecting the items is costly. However, there are many scenarios when one has a complete mapping already and simply wants to present it as an ``IReadContainer``. Let's start by making a simple ``IReadContainer`` implementation using the class: >>> class Container(utilities.ReadContainerBase): ... def get(self, key, default=None): ... return {'a': 1, 'b': 2}.get(key, default) ... def items(self): ... return [('a', 1), ('b', 2)] >>> container = Container() Now we can use the methods. First ``get()`` >>> container.get('a') 1 >>> container.get('c') is None True >>> container['b'] 2 and then ``items()`` >>> container.items() [('a', 1), ('b', 2)] >>> container.keys() ['a', 'b'] >>> container.values() [1, 2] Then naturally, all the other methods work as well: * ``__getitem__(key)`` >>> container['a'] 1 >>> container['c'] Traceback (most recent call last): ... KeyError: 'c' * ``__contains__(key)`` >>> 'a' in container True >>> 'c' in container False * ``keys()`` >>> container.keys() ['a', 'b'] * ``__iter__()`` >>> iterator = iter(container) >>> next(iterator) 1 >>> next(iterator) 2 >>> next(iterator) Traceback (most recent call last): ... StopIteration * ``values()`` >>> container.values() [1, 2] * ``__len__()`` >>> len(container) 2 :func:`getPythonPath` ===================== Return the path of the object in standard Python dot-notation. 
This function makes only sense for objects that provide a name, since we cannot determine the path otherwise. Instances, for example, do not have a ``__name__`` attribute, so we would expect them to fail. For interfaces we simply get >>> from zope.interface import Interface >>> class ISample(Interface): ... pass >>> utilities.getPythonPath(ISample) 'zope.app.apidoc.doctest.ISample' and for classes we get the name of the class >>> class Sample(object): ... def sample(self): ... pass >>> utilities.getPythonPath(Sample) 'zope.app.apidoc.doctest.Sample' If a method is passed in, its class path is returned: >>> utilities.getPythonPath(Sample().sample) 'zope.app.apidoc.doctest.Sample' >>> utilities.getPythonPath(Sample.sample) 'zope.app.apidoc.doctest.Sample' Plain functions are also supported: >>> def sample(): ... pass >>> utilities.getPythonPath(sample) 'zope.app.apidoc.doctest.sample' Modules are another kind of objects that can return a python path: >>> utilities.getPythonPath(utilities) 'zope.app.apidoc.utilities' Passing in ``None`` returns ``None``: >>> utilities.getPythonPath(None) Clearly, instance lookups should fail: >>> utilities.getPythonPath(Sample()) Traceback (most recent call last): ... AttributeError: 'Sample' object has no attribute '__name__'... :func:`isReferencable` ====================== Determine whether a path can be referenced in the API doc, usually by the code browser module. Initially you might think that all objects that have paths can be referenced somehow. But that's not true, partially by design of apidoc, but also due to limitations of the Python language itself. First, here are some cases that work: >>> utilities.isReferencable('zope') True >>> utilities.isReferencable('zope.app') True >>> utilities.isReferencable('zope.app.apidoc.apidoc.APIDocumentation') True >>> utilities.isReferencable('zope.app.apidoc.apidoc.handleNamespace') True The first case is ``None``. When you ask for the python path of ``None``, you get ``None``, so that result should not be referencable: >>> utilities.isReferencable(None) False By design we also do not document any private classes and functions: >>> utilities.isReferencable('some.path.to._Private') False >>> utilities.isReferencable('some.path.to.__Protected') False >>> utilities.isReferencable('zope.app.apidoc.__doc__') True Some objects might fake their module name, so that it does not exist: >>> utilities.isReferencable('foo.bar') False On the other hand, you might have a valid module, but non-existent attribute: >>> utilities.isReferencable('zope.app.apidoc.MyClass') False Note that this case is also used for types that are generated using the ``type()`` function: >>> mytype = type('MyType', (object,), {}) >>> path = utilities.getPythonPath(mytype) >>> path 'zope.app.apidoc.doctest.MyType' >>> utilities.isReferencable(path) False Next, since API doc does not allow the documentation of instances yet, it is not possible to document singletons, so they are not referencable: >>> class Singelton(object): ... pass >>> utilities.isReferencable('zope.app.apidoc.doctest.Singelton') True >>> Singelton = Singelton() >>> utilities.isReferencable('zope.app.apidoc.doctest.Singelton') False Finally, the global ``IGNORE_MODULES`` list from the class registry is also used to give a negative answer. If a module is listed in ``IGNORE_MODULES``, then ``False`` is returned. 
>>> from zope.app.apidoc import classregistry >>> classregistry.IGNORE_MODULES.append('zope.app.apidoc') >>> utilities.isReferencable('zope.app') True >>> utilities.isReferencable('zope.app.apidoc') False >>> utilities.isReferencable('zope.app.apidoc.apidoc.APIDocumentation') False >>> classregistry.IGNORE_MODULES.pop() 'zope.app.apidoc' >>> utilities.isReferencable('zope.app.apidoc') True :func:`getPermissionIds` ======================== Get the permissions of a class attribute. The attribute is specified by name. Either the ``klass`` or the ``checker`` argument must be specified. If the class is specified, then the checker for it is looked up. Furthermore, this function only works with ``INameBasedChecker`` checkers. If another checker is found, ``None`` is returned for the permissions. We start out by defining the class and then the checker for it: >>> from zope.security.checker import Checker, defineChecker >>> from zope.security.checker import CheckerPublic >>> class Sample(object): ... attr = 'value' ... attr3 = 'value3' >>> class Sample2(object): ... pass >>> checker = Checker({'attr': 'zope.Read', 'attr3': CheckerPublic}, ... {'attr': 'zope.Write', 'attr3': CheckerPublic}) >>> defineChecker(Sample, checker) Now let's see how this function works: >>> entries = utilities.getPermissionIds('attr', klass=Sample) >>> entries['read_perm'] 'zope.Read' >>> entries['write_perm'] 'zope.Write' >>> from zope.security.checker import getCheckerForInstancesOf >>> entries = utilities.getPermissionIds('attr', ... getCheckerForInstancesOf(Sample)) >>> entries['read_perm'] 'zope.Read' >>> entries['write_perm'] 'zope.Write' The ``Sample`` class does not know about the ``attr2`` attribute: >>> entries = utilities.getPermissionIds('attr2', klass=Sample) >>> print(entries['read_perm']) n/a >>> print(entries['write_perm']) n/a The ``Sample2`` class does not have a checker: >>> entries = utilities.getPermissionIds('attr', klass=Sample2) >>> entries['read_perm'] is None True >>> entries['write_perm'] is None True Finally, the ``Sample`` class' ``attr3`` attribute is public: >>> entries = utilities.getPermissionIds('attr3', klass=Sample) >>> print(entries['read_perm']) zope.Public >>> print(entries['write_perm']) zope.Public :func:`getFunctionSignature` ============================ Return the signature of a function or method. The ``func`` argument *must* be a generic function or a method of a class. First, we get the signature of a function that has a specific positional and keyword argument: >>> def func(attr, attr2=None): ... pass >>> utilities.getFunctionSignature(func) '(attr, attr2=None)' Here is a function that has an unspecified amount of keyword arguments: >>> def func(attr, **kw): ... pass >>> utilities.getFunctionSignature(func) '(attr, **kw)' And here we mix specified and unspecified keyword arguments: >>> def func(attr, attr2=None, **kw): ... pass >>> utilities.getFunctionSignature(func) '(attr, attr2=None, **kw)' In the next example we have unspecified positional and keyword arguments: >>> def func(*args, **kw): ... pass >>> utilities.getFunctionSignature(func) '(*args, **kw)' And finally an example, where we have on unspecified keyword arguments without any positional arguments: >>> def func(**kw): ... pass >>> utilities.getFunctionSignature(func) '(**kw)' Next we test whether the signature is correctly determined for class methods. 
Note that the ``self`` argument is removed from the signature, since it is not essential for documentation: We start out with a simple positional argument: >>> class Klass(object): ... def func(self, attr): ... pass >>> utilities.getFunctionSignature(Klass.func, ignore_self=True) '(attr)' >>> utilities.getFunctionSignature(Klass().func) '(attr)' Next we have specific and unspecified positional arguments as well as unspecified keyword arguments: >>> class Klass(object): ... def func(self, attr, *args, **kw): ... pass >>> utilities.getFunctionSignature(Klass().func, ignore_self=True) '(attr, *args, **kw)' >>> utilities.getFunctionSignature(Klass().func) '(attr, *args, **kw)' If you do not pass a function or method to the function, it will fail: >>> utilities.getFunctionSignature('func') Traceback (most recent call last): ... TypeError: func must be a function or method not a ... However, lists of this type are not allowed inside the argument list:: >>> def func([arg1, arg2]): ... pass Traceback (most recent call last): ... SyntaxError: invalid syntax... Internal assignment is also not legal:: >>> def func((arg1, arg2=1)): ... pass Traceback (most recent call last): ... SyntaxError: invalid syntax... :func:`getPublicAttributes` =========================== Return a list of public attribute names for a given object. This excludes any attribute starting with '_', which includes attributes of the form ``__attr__``, which are commonly considered public, but they are so special that they are excluded. The ``obj`` argument can be either a classic class, type or instance of the previous two. Note that the term "attributes" here includes methods and properties. First we need to create a class with some attributes, properties and methods: >>> class Nonattr(object): ... def __get__(*a): ... raise AttributeError('nonattr') >>> class Sample(object): ... attr = None ... def __str__(self): ... return '' ... def func(self): ... pass ... def _getAttr(self): ... return self.attr ... attr2 = property(_getAttr) ... ... nonattr = Nonattr() # Should not show up in public attrs We can simply pass in the class and get the public attributes: >>> attrs = utilities.getPublicAttributes(Sample) >>> attrs.sort() >>> attrs ['attr', 'attr2', 'func'] Note that we exclude attributes that would raise attribute errors, like our silly Nonattr. But an instance of that class will work as well. >>> attrs = utilities.getPublicAttributes(Sample()) >>> attrs.sort() >>> attrs ['attr', 'attr2', 'func'] The function will also take inheritance into account and return all inherited attributes as well: >>> class Sample2(Sample): ... attr3 = None >>> attrs = utilities.getPublicAttributes(Sample2) >>> attrs.sort() >>> attrs ['attr', 'attr2', 'attr3', 'func'] :func:`getInterfaceForAttribute` ================================ Determine the interface in which an attribute is defined. This function is nice, if you have an attribute name which you retrieved from a class and want to know which interface requires it to be there. Either the ``interfaces`` or ``klass`` argument must be specified. If ``interfaces`` is not specified, the ``klass`` is used to retrieve a list of interfaces. ``interfaces`` must be iterable. ``asPath`` specifies whether the dotted name of the interface or the interface object is returned. First, we need to create some interfaces and a class that implements them: >>> from zope.interface import Interface, Attribute, implementer >>> class I1(Interface): ... attr = Attribute('attr') >>> class I2(I1): ... def getAttr(): ... 
'''get attr''' >>> @implementer(I2) ... class Sample(object): ... pass First we check whether an attribute can be found in a list of interfaces: >>> utilities.getInterfaceForAttribute('attr', (I1, I2), asPath=False) <InterfaceClass zope.app.apidoc.doctest.I1> >>> utilities.getInterfaceForAttribute('getAttr', (I1, I2), asPath=False) <InterfaceClass zope.app.apidoc.doctest.I2> Now we repeat the same lookup, but using the class instead of a list of interfaces: >>> utilities.getInterfaceForAttribute('attr', klass=Sample, asPath=False) <InterfaceClass zope.app.apidoc.doctest.I1> >>> utilities.getInterfaceForAttribute('getAttr', klass=Sample, asPath=False) <InterfaceClass zope.app.apidoc.doctest.I2> By default, ``asPath`` is ``True``, which means the path of the interface is returned: >>> utilities.getInterfaceForAttribute('attr', (I1, I2)) 'zope.app.apidoc.doctest.I1' If no match is found, ``None`` is returned. >>> utilities.getInterfaceForAttribute('attr2', (I1, I2)) is None True >>> utilities.getInterfaceForAttribute('attr2', klass=Sample) is None True If both the ``interfaces`` and ``klass`` arguments are missing, an error is raised: >>> utilities.getInterfaceForAttribute('getAttr') Traceback (most recent call last): ... ValueError: need to specify interfaces or klass Similarly, it does not make sense if both are specified: >>> utilities.getInterfaceForAttribute('getAttr', interfaces=(I1,I2), ... klass=Sample) Traceback (most recent call last): ... ValueError: must specify only one of interfaces and klass :func:`columnize` ================= This function places a list of entries into columns. Here are some examples: >>> utilities.columnize([1], 3) [[1]] >>> utilities.columnize([1, 2], 3) [[1], [2]] >>> utilities.columnize([1, 2, 3], 3) [[1], [2], [3]] >>> utilities.columnize([1, 2, 3, 4], 3) [[1, 2], [3], [4]] >>> utilities.columnize([1], 2) [[1]] >>> utilities.columnize([1, 2], 2) [[1], [2]] >>> utilities.columnize([1, 2, 3], 2) [[1, 2], [3]] >>> utilities.columnize([1, 2, 3, 4], 2) [[1, 2], [3, 4]] :func:`getDocFormat` ==================== This function inspects a module to determine the supported documentation format. The function returns a valid renderer source factory id. If the ``__docformat__`` module attribute is specified, its value will be used to look up the factory id: >>> from zope.app.apidoc import apidoc >>> utilities.getDocFormat(apidoc) 'zope.source.rest' By default restructured text is returned: >>> utilities.getDocFormat(object()) 'zope.source.rest' This is a sensible default since much documentation is now written with Sphinx in mind (which of course defaults to rendering restructured text). As long as docutils' error reporting level is set sufficiently high (``severe``), unknown Sphinx directives and slightly malformed markup do not produce error messages, either on the console or in the generated HTML. The ``__docformat__`` attribute can also optionally specify a language field. We simply ignore it: >>> class Module(object): ... pass >>> module = Module() >>> module.__docformat__ = 'structuredtext en' >>> utilities.getDocFormat(module) 'zope.source.stx' :func:`dedentString` ==================== Before doc strings can be processed using STX or ReST they must be dedented, since otherwise the output will be incorrect. Let's have a look at some docstrings and see how they are correctly dedented. Let's start with a simple one liner. Nothing should happen: >>> def func(): ... 
'''One line documentation string''' >>> utilities.dedentString(func.__doc__) 'One line documentation string' Now what about one line docstrings that start on the second line? While this format is discouraged, it is frequently used: >>> def func(): ... ''' ... One line documentation string ... ''' >>> utilities.dedentString(func.__doc__) '\nOne line documentation string\n' We can see that the leading whitespace on the string is removed, but not the newline character. Let's now try a simple multi-line docstring: >>> def func(): ... '''Short description ... ... Lengthy description, giving some more background information and ... discuss some edge cases. ... ''' >>> print(utilities.dedentString(func.__doc__)) Short description <BLANKLINE> Lengthy description, giving some more background information and discuss some edge cases. <BLANKLINE> Again, the whitespace was removed only after the first line. Also note that the function determines the indentation level correctly. So what happens if there are multiple indentation levels? The smallest amount of indentation is chosen: >>> def func(): ... '''Short description ... ... Root Level ... ... Second Level ... ''' >>> print(utilities.dedentString(func.__doc__)) Short description <BLANKLINE> Root Level <BLANKLINE> Second Level <BLANKLINE> >>> def func(): ... '''Short description ... ... $$$ print 'example' ... example ... ... And now the description. ... ''' >>> print(utilities.dedentString(func.__doc__)) Short description <BLANKLINE> $$$ print 'example' example <BLANKLINE> And now the description. <BLANKLINE> :func:`renderText` ================== A function that quickly renders the given text using the specified format. If the ``module`` argument is specified, the function will try to determine the format using the module. If the ``format`` argument is given, it is simply used. Clearly, you cannot specify both, the ``module`` and ``format`` argument. You specify the format as follows: >>> utilities.renderText(u'Hello!\n', format='zope.source.rest') '<p>Hello!</p>\n' Note that the format string must be a valid source factory id; if the factory id is not given, 'zope.source.stx' is used. Thus, specifying the module is often safer (if available): >>> utilities.renderText(u'Hello!\n', module=apidoc) '<p>Hello!</p>\n' Byte input is accepted, so long as it can be decoded: >>> utilities.renderText(b'Hello!\n', module=apidoc) '<p>Hello!</p>\n'
zope.app.apidoc
/zope.app.apidoc-5.0-py3-none-any.whl/zope/app/apidoc/utilities.rst
utilities.rst
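The ``columnize()`` outputs shown in utilities.rst above follow a simple pattern that is easy to miss: each column greedily takes ``ceil(remaining / columns_left)`` entries, so earlier columns are never shorter than later ones. The following is only a sketch that reproduces the documented results, not necessarily the actual implementation::

    import math

    def columnize_sketch(entries, columns):
        """Distribute entries over at most ``columns`` columns, front-loaded."""
        result = []
        while entries:
            per_column = int(math.ceil(len(entries) / float(columns)))
            result.append(entries[:per_column])
            entries = entries[per_column:]
            columns -= 1
        return result

    print(columnize_sketch([1, 2, 3, 4], 3))   # [[1, 2], [3], [4]]
    print(columnize_sketch([1, 2, 3, 4], 2))   # [[1, 2], [3, 4]]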
"""Class Registry """ import operator import sys from zope.testing.cleanup import addCleanUp __docformat__ = 'restructuredtext' __import_unknown_modules__ = False # List of modules that should never be imported. # TODO: List hard-coded for now. IGNORE_MODULES = ['twisted'] _pathgetter = operator.itemgetter(0) class ClassRegistry(dict): """A simple registry for classes.""" # This is not a WeakValueDictionary; the classes in here # are kept alive almost certainly by the codemodule.class_.Class object, # which in turn is kept alive by a codemodule.module.Module chain going # all the way back to the APIDocumentation object registered with the # global site manager. So they can't go away without clearing all that, # which happens (usually only) with test tear downs. def getClassesThatImplement(self, iface): """Return all class items that implement iface. Methods returns a sorted list of 2-tuples of the form (path, class). """ return sorted(((path, klass) for path, klass in self.items() if iface.implementedBy(klass)), key=_pathgetter) def getSubclassesOf(self, klass): """Return all class items that are proper subclasses of klass. Methods returns a sorted list of 2-tuples of the form (path, class). """ return sorted(((path, klass2) for path, klass2 in self.items() if issubclass(klass2, klass) and klass2 is not klass), key=_pathgetter) #: The global class registry object. Cleaned up #: in tests by :mod:`zope.testing.cleanup`. classRegistry = ClassRegistry() def cleanUp(): classRegistry.clear() addCleanUp(cleanUp) def safe_import(path, default=None): """Import a given path as efficiently as possible and without failure.""" module = sys.modules.get(path, default) for exclude_name in IGNORE_MODULES: if path.startswith(exclude_name): return default if module is default and __import_unknown_modules__: try: module = __import__(path, {}, {}, ('*',)) # Some software, we cannot control, might raise all sorts of errors; # thus catch all exceptions and return the default. except Exception: return default return module
zope.app.apidoc
/zope.app.apidoc-5.0-py3-none-any.whl/zope/app/apidoc/classregistry.py
classregistry.py
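A hedged usage sketch for the registry defined above. The dotted paths and the ``IContent``/``Document``/``File`` names are invented for illustration; in a running apidoc the global ``classRegistry`` is normally populated by the code-browser machinery rather than by hand::

    from zope.interface import Interface, implementer
    from zope.app.apidoc.classregistry import ClassRegistry

    class IContent(Interface):
        pass

    @implementer(IContent)
    class Document(object):
        pass

    class File(Document):
        pass

    registry = ClassRegistry()
    registry['example.content.Document'] = Document
    registry['example.content.File'] = File

    # Both classes implement IContent (File by inheritance) ...
    print([path for path, _ in registry.getClassesThatImplement(IContent)])
    # ['example.content.Document', 'example.content.File']

    # ... but only File is a proper subclass of Document.
    print([path for path, _ in registry.getSubclassesOf(Document)])
    # ['example.content.File']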
__docformat__ = 'restructuredtext' import types import zope.interface.declarations from zope.component import getGlobalSiteManager from zope.component.interfaces import IFactory from zope.i18nmessageid import ZopeMessageFactory as _ from zope.interface import Interface from zope.interface.interface import InterfaceClass from zope.publisher.interfaces import IRequest from zope.app.apidoc.classregistry import classRegistry from zope.app.apidoc.utilities import getPythonPath from zope.app.apidoc.utilities import isReferencable from zope.app.apidoc.utilities import relativizePath from zope.app.apidoc.utilities import renderText from zope.app.apidoc.utilities import truncateSysPath from zope.app.apidoc.utilitymodule import utilitymodule SPECIFIC_INTERFACE_LEVEL = 1 EXTENDED_INTERFACE_LEVEL = 2 GENERIC_INTERFACE_LEVEL = 4 def _adapterishRegistrations(registry): for registrations in (registry.registeredAdapters, registry.registeredSubscriptionAdapters, registry.registeredHandlers): yield from registrations() def _ignore_adapter(reg, withViews=False): return ( # Ignore adapters that have no required interfaces not reg.required # Ignore views or (not withViews and reg.required[-1].isOrExtends(IRequest))) def getRequiredAdapters(iface, withViews=False): """Get global adapter registrations where the specified interface is required.""" gsm = getGlobalSiteManager() for reg in _adapterishRegistrations(gsm): if _ignore_adapter(reg, withViews): continue # Only get the adapters for which this interface is required for required_iface in reg.required: if iface.isOrExtends(required_iface): yield reg def getProvidedAdapters(iface, withViews=False): """Get global adapter registrations where this interface is provided.""" gsm = getGlobalSiteManager() for reg in _adapterishRegistrations(gsm): if _ignore_adapter(reg, withViews): continue # Only get adapters for which this interface is provided if reg.provided is None or not reg.provided.isOrExtends(iface): continue yield reg def filterAdapterRegistrations(regs, iface, level=SPECIFIC_INTERFACE_LEVEL): """Return only those registrations that match the specifed level""" for reg in regs: if level & GENERIC_INTERFACE_LEVEL: for required_iface in reg.required: if required_iface in (Interface, None): yield reg continue if level & EXTENDED_INTERFACE_LEVEL: for required_iface in reg.required: if required_iface is not Interface and \ iface.extends(required_iface): yield reg continue if level & SPECIFIC_INTERFACE_LEVEL: for required_iface in reg.required: if required_iface is iface: yield reg continue def getClasses(iface): """Get the classes that implement this interface.""" return classRegistry.getClassesThatImplement(iface) def getFactories(iface): """Return the global factory registrations, who will return objects providing this interface.""" gsm = getGlobalSiteManager() for reg in gsm.registeredUtilities(): if reg.provided is not IFactory: continue interfaces = reg.component.getInterfaces() if hasattr(interfaces, 'isOrExtends'): # Single interface interfaces = (interfaces,) for interface in interfaces: if interface.isOrExtends(iface): yield reg break def getUtilities(iface): """Return all global utility registrations that provide the interface.""" gsm = getGlobalSiteManager() for reg in gsm.registeredUtilities(): if reg.provided.isOrExtends(iface): yield reg def getRealFactory(factory): """Get the real factory. Sometimes the original factory is masked by functions. If the function keeps track of the original factory, use it. 
""" # Remove all wrappers until none are found anymore. while hasattr(factory, 'factory'): factory = factory.factory # If we have an instance, return its class if not hasattr(factory, '__name__'): return factory.__class__ return factory def getParserInfoInfoDictionary(info): """Return a PT-friendly info dictionary for a parser info object.""" return {'file': relativizePath(info.file), 'url': truncateSysPath(info.file).replace('\\', '/'), 'line': info.line, 'eline': info.eline, 'column': info.column, 'ecolumn': info.ecolumn} def getInterfaceInfoDictionary(iface): """Return a PT-friendly info dictionary for an interface.""" if isinstance(iface, zope.interface.declarations.Implements): iface = iface.inherit if iface is None: return None return {'module': getattr(iface, '__module__', _('<unknown>')), 'name': getattr(iface, '__name__', _('<unknown>'))} def getTypeInfoDictionary(type): """Return a PT-friendly info dictionary for a type.""" path = getPythonPath(type) return {'name': type.__name__, 'module': type.__module__, 'url': isReferencable(path) and path.replace('.', '/') or None} def getSpecificationInfoDictionary(spec): """Return an info dictionary for one specification.""" info = {'isInterface': False, 'isType': False} if zope.interface.interfaces.IInterface.providedBy(spec): info.update(getInterfaceInfoDictionary(spec)) info['isInterface'] = True else: info.update(getTypeInfoDictionary(spec.inherit)) info['isType'] = True return info def getAdapterInfoDictionary(reg): """Return a PT-friendly info dictionary for an adapter registration.""" factory = getRealFactory(reg.factory) path = getPythonPath(factory) url = None if isReferencable(path): url = path.replace('.', '/') if isinstance(reg.info, str): doc = reg.info zcml = None else: doc = None zcml = getParserInfoInfoDictionary(reg.info) name = getattr(reg, 'name', '') name = name.decode('utf-8') if isinstance(name, bytes) else name return { 'provided': getInterfaceInfoDictionary(reg.provided), 'required': [getSpecificationInfoDictionary(iface) for iface in reg.required if iface is not None], 'name': name, 'factory': path, 'factory_url': url, 'doc': doc, 'zcml': zcml } def getFactoryInfoDictionary(reg): """Return a PT-friendly info dictionary for a factory.""" factory = reg.component callable = factory # Usually only zope.component.factory.Factory instances have this attribute if IFactory.providedBy(factory) and hasattr(factory, '_callable'): callable = factory._callable elif hasattr(callable, '__class__'): callable = callable.__class__ path = getPythonPath(callable) return {'name': str(reg.name) or _('<i>no name</i>'), 'title': getattr(factory, 'title', ''), 'description': renderText(getattr(factory, 'description', ''), module=callable.__module__), 'url': isReferencable(path) and path.replace('.', '/') or None} def getUtilityInfoDictionary(reg): """Return a PT-friendly info dictionary for a factory.""" component = reg.component # Check whether we have an instance of some custom type or not # Unfortunately, a lot of utilities have a `__name__` attribute, so we # cannot simply check for its absence # TODO: Once we support passive display of instances, this insanity can go # away. 
if not isinstance(component, (types.MethodType, types.FunctionType, type, InterfaceClass)): component = getattr(component, '__class__', component) path = getPythonPath(component) # provided interface id iface_id = '{}.{}'.format(reg.provided.__module__, reg.provided.getName()) # Determine the URL if isinstance(component, InterfaceClass): url = 'Interface/%s' % path else: url = None if isReferencable(path): url = 'Code/%s' % path.replace('.', '/') return {'name': str(reg.name) or _('<i>no name</i>'), 'url_name': utilitymodule.encodeName(reg.name or '__noname__'), 'iface_id': iface_id, 'path': path, 'url': url}
zope.app.apidoc
/zope.app.apidoc-5.0-py3-none-any.whl/zope/app/apidoc/component.py
component.py
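A hedged sketch of the adapter-introspection helpers defined in component.py above. ``ISource``, ``ITarget`` and ``SourceToTarget`` are hypothetical names, registered here only so the queries have something to find::

    from zope import component
    from zope.interface import Interface, implementer
    from zope.app.apidoc.component import (getRequiredAdapters,
                                           getProvidedAdapters,
                                           getAdapterInfoDictionary)

    class ISource(Interface):
        pass

    class ITarget(Interface):
        pass

    @implementer(ITarget)
    class SourceToTarget(object):
        def __init__(self, context):
            self.context = context

    # Register a plain (non-view) adapter from ISource to ITarget.
    component.provideAdapter(SourceToTarget, (ISource,), ITarget)

    # The registration shows up where ISource is required ...
    for reg in getRequiredAdapters(ISource):
        print(getAdapterInfoDictionary(reg)['factory'])

    # ... and where ITarget is provided.
    for reg in getProvidedAdapters(ITarget):
        print(getAdapterInfoDictionary(reg)['provided']['name'])

Note that view registrations are skipped by default; pass ``withViews=True`` to include registrations whose last required interface extends ``IRequest``.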
================================ Interface Inspection Utilities ================================ .. currentmodule:: zope.app.apidoc.interface This document is a presentation of the utility functions provided by this module: >>> from zope.app.apidoc import interface For the following demonstrations, we need a nice interface that we can inspect: >>> from zope.interface import Interface, Attribute >>> from zope.schema import Field, TextLine >>> class IFoo(Interface): ... foo = Field(title=u"Foo") ... ... bar = TextLine(title=u"Bar", ... description=u"The Bar", ... required=True, ... default=u"My Bar") ... ... baz = Attribute('baz', ... 'This is the baz attribute') ... ... def blah(one, two, three=None, *args, **kwargs): ... """This is the `blah` method.""" :func:`getElements` =================== Return a dictionary containing all elements in an interface. The type specifies whether we are looking for attributes, fields or methods. So let's look at an example. First, let's get the methods of an interface: >>> from zope.interface.interfaces import IMethod >>> sorted(interface.getElements(IFoo, type=IMethod).keys()) ['blah'] and now the fields: >>> from zope.schema.interfaces import IField >>> names = sorted(interface.getElements(IFoo, type=IField).keys()) >>> names ['bar', 'foo'] We can also get all attributes of course. >>> from zope.interface.interfaces import IAttribute >>> names = sorted(interface.getElements(IFoo, type=IAttribute).keys()) >>> names ['bar', 'baz', 'blah', 'foo'] You might be surprised by the above result, since the fields and methods are again included. However, fields and methods are just attributes and thus extend the simple attribute implementation. If you want to get a list of attributes that does not include fields and methods, see the :func:`getAttributes` function. The default type is :class:`zope.interface.interfaces.IElement` which will simply return all elements of the interface: >>> names = sorted(interface.getElements(IFoo).keys()) >>> names ['bar', 'baz', 'blah', 'foo'] Note: The interface you pass to this function *cannot* be proxied! Presentation code often likes to wrap interfaces in security proxies, and apidoc even uses location proxies for interfaces. :func:`getFieldsInOrder` ======================== For presentation purposes we often want fields to have a certain order, most commonly the order they have in the interface. This function returns a list of (name, field) tuples in a specified order. The ``_itemkey`` argument provides the function that is used to extract the key on which to order the fields. The default function, which uses the fields' ``order`` attribute, should be the correct one for 99% of your needs. Reusing the interface created above, we check the output: >>> [n for n, a in interface.getFieldsInOrder(IFoo)] ['foo', 'bar'] By changing the sort method to sort by names, we get: >>> [n for n, a in interface.getFieldsInOrder( ... IFoo, _itemkey=lambda x: x[0])] ['bar', 'foo'] :func:`getAttributes` ===================== This function returns a (name, attr) tuple for every attribute in the interface. Note that this function will only return pure attributes; it ignores methods and fields. >>> attrs = interface.getAttributes(IFoo) >>> attrs.sort() >>> attrs [('baz', <zope.interface.interface.Attribute object at ...>)] :func:`getMethods` ================== This function returns a (name, method) tuple for every declared method in the interface. 
>>> methods = sorted(interface.getMethods(IFoo)) >>> methods [('blah', <zope.interface.interface.Method object at ...>)] :func:`getFields` ================= This function returns a (name, field) tuple for every declared field in the interface. >>> sorted(interface.getFields(IFoo)) [('bar', <zope.schema._bootstrapfields.TextLine object at ...>), ('foo', <zope.schema._bootstrapfields.Field object at ...>)] Note that this returns the same result as ``getFieldsInOrder()`` with the fields sorted by their ``order`` attribute, except that you cannot specify the sort function here. This function was mainly provided for symmetry with the other functions. :func:`getInterfaceTypes` ========================= Interfaces can be categorized/grouped by using interface types. Interface types simply extend :class:`zope.interface.interfaces.IInterface`, which are basically meta-interfaces. The interface types are then provided by particular interfaces. The :func:`getInterfaceTypes` function returns a list of interface types that are provided for the specified interface. Note that you commonly expect only one type per interface, though. Before we assign any type to our ``IFoo`` interface, there are no types declared. >>> interface.getInterfaceTypes(IFoo) [] Now we define a new type called ``IContentType`` >>> from zope.interface.interfaces import IInterface >>> class IContentType(IInterface): ... pass and have our interface provide it: >>> from zope.interface import directlyProvides >>> directlyProvides(IFoo, IContentType) Note that ZCML has some more convenient methods of doing this. Now let's get the interface types again: >>> interface.getInterfaceTypes(IFoo) [<InterfaceClass zope.app.apidoc.doctest.IContentType>] Again note that the interface passed to this function *cannot* be proxied, otherwise this method will pick up the proxy's interfaces as well. :func:`getFieldInterface` ========================= This function tries pretty hard to determine the best-matching interface that represents the field. Commonly the field class has the same name as the field interface (minus an "I"). So this is our first choice: >>> from zope.schema import Text, Int >>> interface.getFieldInterface(Text()) <InterfaceClass zope.schema.interfaces.IText> >>> interface.getFieldInterface(Int()) <InterfaceClass zope.schema.interfaces.IInt> If the name matching method fails, it picks the first interface that extends :class:`~.IField`: >>> from zope.schema.interfaces import IField >>> class ISpecialField(IField): ... pass >>> class ISomething(Interface): ... pass >>> from zope.interface import implementer >>> @implementer(ISomething, ISpecialField) ... class MyField: ... 
pass >>> interface.getFieldInterface(MyField()) <InterfaceClass zope.app.apidoc.doctest.ISpecialField> :func:`getAttributeInfoDictionary` ================================== This function returns a page-template-friendly dictionary for a simple attribute: >>> from pprint import pprint >>> pprint(interface.getAttributeInfoDictionary(IFoo['baz'])) {'doc': '<p>This is the baz attribute</p>\n', 'name': 'baz'} :func:`getMethodInfoDictionary` =============================== This function returns a page-template-friendly dictionary for a method: >>> pprint(interface.getMethodInfoDictionary(IFoo['blah'])) {'doc': '<p>This is the <cite>blah</cite> method.</p>\n', 'name': 'blah', 'signature': '(one, two, three=None, *args, **kwargs)'} :func:`getFieldInfoDictionary` ============================== This function returns a page-template-friendly dictionary for a field: >>> pprint(interface.getFieldInfoDictionary(IFoo['bar']), width=50) {'class': {'name': 'TextLine', 'path': 'zope/schema/_bootstrapfields/TextLine'}, 'default': "'My Bar'", 'description': '<p>The Bar</p>\n', 'iface': {'id': 'zope.schema.interfaces.ITextLine', 'name': 'ITextLine'}, 'name': 'bar', 'required': True, 'required_string': 'required', 'title': 'Bar'}
zope.app.apidoc
/zope.app.apidoc-5.0-py3-none-any.whl/zope/app/apidoc/interface.rst
interface.rst
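The name-matching heuristic that interface.rst describes for :func:`getFieldInterface` is roughly the following. This is only a sketch of the documented behaviour (class name prefixed with "I", then the first provided interface extending ``IField``), not the actual implementation::

    from zope.interface import providedBy
    from zope.schema import Text
    from zope.schema.interfaces import IField

    def guess_field_interface(field):
        """Best-effort guess of the interface that represents ``field``."""
        wanted = 'I' + field.__class__.__name__
        provided = list(providedBy(field))
        for iface in provided:
            if iface.getName() == wanted:
                return iface
        for iface in provided:
            if iface.extends(IField):
                return iface
        return None

    print(guess_field_interface(Text()))
    # <InterfaceClass zope.schema.interfaces.IText>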
=================================== Presentation Inspection Utilities =================================== .. currentmodule:: zope.app.apidoc.presentation The ``presentation`` module provides some nice utilities to inspect presentation registrations. >>> from zope.app.apidoc import presentation :func:`getViewFactoryData` ========================== This function tries really hard to determine the correct information about a view factory. For example, when you create a page, a new type is dynamically generated upon registration. Let's look at a couple examples. First, let's inspect a case where a simple browser page was configured without a special view class. In these cases the factory is a :class:`~.SimpleViewClass`: >>> from zope.browserpage.simpleviewclass import SimpleViewClass >>> view = SimpleViewClass('browser/index.pt') >>> info = presentation.getViewFactoryData(view) Before we can check the result, we have to make sure that all Windows paths are converted to Unix-like paths. We also clip off instance-specific parts of the template path: >>> info['template'] = info['template'].replace('\\', '/')[-32:] >>> from pprint import pprint >>> pprint(info) {'path': 'zope.browserpage.simpleviewclass.simple', 'referencable': True, 'resource': None, 'template': 'zope/app/apidoc/browser/index.pt', 'template_obj': <BoundPageTemplateFile of None>, 'url': 'zope/browserpage/simpleviewclass/simple'} So in the result above we see what the function returns. It is a dictionary (converted to a list for test purposes) that contains the Python path of the view class, a flag that specifies whether the factory can be referenced and thus be viewed by the class browser, the (page) template used for the view and the URL under which the factory will be found in the class browser. Some views, like icons, also use resources to provide their data. In these cases the name of the resource will be provided. Of course, not in all cases all values will be available. Empty values are marked with ``None``. Believe it or not, in some cases the factory is just a simple type. In these cases we cannot retrieve any useful information: >>> info = presentation.getViewFactoryData(3) >>> pprint(info) {'path': 'builtins.int', 'referencable': False, 'resource': None, 'template': None, 'url': None} In some cases factories are callable class instances, where we cannot directly have a referencable name, so we lookup the class and use its name: >>> class Factory(object): ... pass >>> info = presentation.getViewFactoryData(Factory()) >>> pprint(info) {'path': 'zope.app.apidoc.doctest.Factory', 'referencable': True, 'resource': None, 'template': None, 'url': 'zope/app/apidoc/doctest/Factory'} One of the more common cases, however, is that the factory is a class or type. In this case we can just retrieve the reference directly: >>> info = presentation.getViewFactoryData(Factory) >>> pprint(info) {'path': 'zope.app.apidoc.doctest.Factory', 'referencable': True, 'resource': None, 'template': None, 'url': 'zope/app/apidoc/doctest/Factory'} When factories are created by a directive, they can also be functions. In those cases we just simply return the function path: >>> def factory(): ... 
pass >>> factory.__module__ = 'zope.app.apidoc.doctest' # The testing framework does not set the __module__ correctly >>> info = presentation.getViewFactoryData(factory) >>> pprint(info) {'path': 'zope.app.apidoc.doctest.factory', 'referencable': True, 'resource': None, 'template': None, 'url': 'zope/app/apidoc/doctest/factory'} However, the function is rather unhelpful, since it will be the same for all views that use that code path. For this reason the function keeps track of the original factory component in a function attribute called ``factory``: >>> factory.factory = Factory >>> info = presentation.getViewFactoryData(factory) >>> pprint(info) {'path': 'zope.app.apidoc.doctest.Factory', 'referencable': True, 'resource': None, 'template': None, 'url': 'zope/app/apidoc/doctest/Factory'} Let's now have a look at some extremly specific cases. If a view is registered using the ``zope:view`` directive and a permission is specified, a ``ProxyView`` class instance is created that references its original factory: >>> class ProxyView(object): ... ... def __init__(self, factory): ... self.factory = factory >>> proxyView = ProxyView(Factory) >>> info = presentation.getViewFactoryData(proxyView) >>> pprint(info) {'path': 'zope.app.apidoc.doctest.Factory', 'referencable': True, 'resource': None, 'template': None, 'url': 'zope/app/apidoc/doctest/Factory'} Another use case is when a new type is created by the ``browser:page`` or ``browser:view`` directive. In those cases the true/original factory is really the first base class. Those cases are detected by inspecting the ``__module__`` string of the type: >>> new_class = type(Factory.__name__, (Factory,), {}) >>> new_class.__module__ = 'zope.app.publisher.browser.viewmeta' >>> info = presentation.getViewFactoryData(new_class) >>> pprint(info) {'path': 'zope.app.apidoc.doctest.Factory', 'referencable': True, 'resource': None, 'template': None, 'url': 'zope/app/apidoc/doctest/Factory'} The same sort of thing happens for XML-RPC views, except that those are wrapped twice: >>> new_class = type(Factory.__name__, (Factory,), {}) >>> new_class.__module__ = 'zope.app.publisher.xmlrpc.metaconfigure' >>> new_class2 = type(Factory.__name__, (new_class,), {}) >>> new_class2.__module__ = 'zope.app.publisher.xmlrpc.metaconfigure' >>> info = presentation.getViewFactoryData(new_class2) >>> pprint(info) {'path': 'zope.app.apidoc.doctest.Factory', 'referencable': True, 'resource': None, 'template': None, 'url': 'zope/app/apidoc/doctest/Factory'} Finally, it sometimes happens that a factory is wrapped and the wrapper is wrapped in return: >>> def wrapper1(*args): ... return Factory(*args) >>> def wrapper2(*args): ... return wrapper1(*args) Initially, the documentation is not very helpful: >>> info = presentation.getViewFactoryData(wrapper2) >>> pprint(info) {'path': 'zope.app.apidoc.doctest.wrapper2', 'referencable': True, 'resource': None, 'template': None, 'url': 'zope/app/apidoc/doctest/wrapper2'} However, if those wrappers play nicely, they provide a factory attribute each step of the way ... >>> wrapper1.factory = Factory >>> wrapper2.factory = wrapper1 and the result is finally our original factory: >>> info = presentation.getViewFactoryData(wrapper2) >>> pprint(info) {'path': 'zope.app.apidoc.doctest.Factory', 'referencable': True, 'resource': None, 'template': None, 'url': 'zope/app/apidoc/doctest/Factory'} :func:`getPresentationType` =========================== In Zope 3, presentation types (i.e. browser, ftp, ...) 
are defined through their special request interface, such as :class:`~.IBrowserRequest` or :class:`~.IFTPRequest`. To complicate matters further, layer interfaces are used in browser presentations to allow skinning. Layers extend any request type, but most commonly ``IBrowserRequest``. This function inspects the request interface of any presentation multi-adapter and determines its type, which is returned in form of an interface. >>> from zope.app.apidoc.presentation import getPresentationType >>> from zope.publisher.interfaces.http import IHTTPRequest >>> from zope.publisher.interfaces.browser import IBrowserRequest >>> class ILayer1(IBrowserRequest): ... pass >>> presentation.getPresentationType(ILayer1) <InterfaceClass zope.publisher.interfaces.browser.IBrowserRequest> >>> class ILayer2(IHTTPRequest): ... pass >>> presentation.getPresentationType(ILayer2) <InterfaceClass zope.publisher.interfaces.http.IHTTPRequest> If the function cannot determine the presentation type, the interface itself is returned: >>> from zope.interface import Interface >>> class ILayer3(Interface): ... pass >>> presentation.getPresentationType(ILayer3) <InterfaceClass zope.app.apidoc.doctest.ILayer3> Note that more specific presentation types are considered first. For example, :class:`zope.publisher.interfaces.browser.IBrowserRequest` extends :class:`zope.publisher.interfaces.http.IHTTPRequest`, but it will always determine the presentation type to be an :class:`~zope.publisher.interfaces.browser.IBrowserRequest`. :func:`getViews` ================ This function retrieves all available view registrations for a given interface and presentation type. The default argument for the presentation type is :class:`zope.publisher.interfaces.IRequest`, which will effectively return all views for the specified interface. To see how this works, we first have to register some views: >>> class IFoo(Interface): ... pass >>> from zope import component as ztapi >>> ztapi.provideAdapter(adapts=(IFoo, IHTTPRequest), provides=Interface, factory=None, name='foo') >>> ztapi.provideAdapter(adapts=(Interface, IHTTPRequest), provides=Interface, factory=None, ... name='bar') >>> ztapi.provideAdapter(adapts=(IFoo, IBrowserRequest), provides=Interface, factory=None, ... name='blah') Now let's see what we've got. If we do not specify a type, all registrations should be returned: >>> regs = list(presentation.getViews(IFoo)) >>> regs.sort() >>> regs [AdapterRegistration(<BaseGlobalComponents base>, [IFoo, IBrowserRequest], Interface, 'blah', None, ''), AdapterRegistration(<BaseGlobalComponents base>, [IFoo, IHTTPRequest], Interface, 'foo', None, ''), AdapterRegistration(<BaseGlobalComponents base>, [Interface, IHTTPRequest], Interface, 'bar', None, '')] >>> regs = list(presentation.getViews(Interface, IHTTPRequest)) >>> regs.sort() >>> regs [AdapterRegistration(<BaseGlobalComponents base>, [Interface, IHTTPRequest], Interface, 'bar', None, '')] :func:`filterViewRegistrations` =============================== Oftentimes the amount of views that are being returned for a particular interface are too much to show at once. It is then good to split the view into categories. The ``filterViewRegistrations()`` function allows you to filter the views on how specific they are to the interface. Here are the three levels you can select from: * SPECIFC_INTERFACE_LEVEL -- Only return registrations that require the specified interface directly. 
* EXTENDED_INTERFACE_LEVEL -- Only return registrations that require an interface that the specified interface extends. * GENERIC_INTERFACE_LEVEL -- Only return registrations that explicitely require the ``Interface`` interface. So, let's see how this is done. We first need to create a couple of interfaces and register some views: >>> class IContent(Interface): ... pass >>> class IFile(IContent): ... pass Clear out the registries first, so we know what we have. >>> from zope.testing.cleanup import cleanUp >>> cleanUp() >>> ztapi.provideAdapter(adapts=(IContent, IHTTPRequest), provides=Interface, ... factory=None, name='view.html') >>> ztapi.provideAdapter(adapts=(IContent, IHTTPRequest), provides=Interface, ... factory=None, name='edit.html') >>> ztapi.provideAdapter(adapts=(IFile, IHTTPRequest), provides=Interface, ... factory=None, name='view.html') >>> ztapi.provideAdapter(adapts=(Interface, IHTTPRequest), provides=Interface, ... factory=None, name='view.html') Now we get all the registrations: >>> regs = list(presentation.getViews(IFile, IHTTPRequest)) Let's now filter those registrations: >>> result = list(presentation.filterViewRegistrations( ... regs, IFile, level=presentation.SPECIFIC_INTERFACE_LEVEL)) >>> result.sort() >>> result [AdapterRegistration(<BaseGlobalComponents base>, [IFile, IHTTPRequest], Interface, 'view.html', None, '')] >>> result = list(presentation.filterViewRegistrations( ... regs, IFile, level=presentation.EXTENDED_INTERFACE_LEVEL)) >>> result.sort() >>> result [AdapterRegistration(<BaseGlobalComponents base>, [IContent, IHTTPRequest], Interface, 'edit.html', None, ''), AdapterRegistration(<BaseGlobalComponents base>, [IContent, IHTTPRequest], Interface, 'view.html', None, '')] >>> result = list(presentation.filterViewRegistrations( ... regs, IFile, level=presentation.GENERIC_INTERFACE_LEVEL)) >>> result.sort() >>> result [AdapterRegistration(<BaseGlobalComponents base>, [Interface, IHTTPRequest], Interface, 'view.html', None, '')] You can also specify multiple levels at once using the Boolean OR operator, since all three levels are mutually exclusive. >>> result = list(presentation.filterViewRegistrations( ... regs, IFile, level=presentation.SPECIFIC_INTERFACE_LEVEL | ... presentation.EXTENDED_INTERFACE_LEVEL)) >>> result.sort() >>> result [AdapterRegistration(<BaseGlobalComponents base>, [IContent, IHTTPRequest], Interface, 'edit.html', None, ''), AdapterRegistration(<BaseGlobalComponents base>, [IContent, IHTTPRequest], Interface, 'view.html', None, ''), AdapterRegistration(<BaseGlobalComponents base>, [IFile, IHTTPRequest], Interface, 'view.html', None, '')] >>> result = list(presentation.filterViewRegistrations( ... regs, IFile, level=presentation.SPECIFIC_INTERFACE_LEVEL | ... presentation.GENERIC_INTERFACE_LEVEL)) >>> result.sort() >>> result [AdapterRegistration(<BaseGlobalComponents base>, [IFile, IHTTPRequest], Interface, 'view.html', None, ''), AdapterRegistration(<BaseGlobalComponents base>, [Interface, IHTTPRequest], Interface, 'view.html', None, '')] :func:`getViewInfoDictionary` ============================= Now that we have all these utilities to select the registrations, we need to prepare the them for output. For page templates the best data structures are dictionaries and tuples/lists. This utility will generate an informational dictionary for the specified registration. Let's first create a registration: >>> from zope.interface.registry import AdapterRegistration >>> reg = AdapterRegistration(None, (IFile, Interface, IHTTPRequest), ... 
Interface, 'view.html', Factory, 'reg info') >>> pprint(presentation.getViewInfoDictionary(reg), width=50) {'doc': 'reg info', 'factory': {'path': 'zope.app.apidoc.doctest.Factory', 'referencable': True, 'resource': None, 'template': None, 'url': 'zope/app/apidoc/doctest/Factory'}, 'name': 'view.html', 'provided': {'module': 'zope.interface', 'name': 'Interface'}, 'read_perm': None, 'required': [{'module': 'zope.app.apidoc.doctest', 'name': 'IFile'}, {'module': 'zope.interface', 'name': 'Interface'}, {'module': 'zope.publisher.interfaces.http', 'name': 'IHTTPRequest'}], 'type': 'zope.publisher.interfaces.http.IHTTPRequest', 'write_perm': None, 'zcml': None}
zope.app.apidoc
/zope.app.apidoc-5.0-py3-none-any.whl/zope/app/apidoc/presentation.rst
presentation.rst