Columns: metadata (dict) · text (string, length 60 – 3.49M)
{ "source": "0x6d736c/MacrosCalculator", "score": 2 }
#### File: MacrosCalculator/test/ui_test.py

```python
import unittest
import os, sys, inspect

currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)

from MacroEstimator import Person, Diet
from PyQt4.QtGui import QApplication
from PyQt4.QtTest import QTest
from PyQt4.QtCore import Qt


class TestUi(unittest.TestCase):
    """Test UI class"""

    def test_defaults(self):
        """Test the GUI in its default state"""
        pass


if __name__ == '__main__':
    unittest.main()
```
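The test body above is a stub. A minimal sketch of what a filled-in PyQt4/QTest case could look like, using a bare `QPushButton` rather than the app's real window (which this file doesn't show):

```python
# Hedged sketch: simulating user input with QTest against a standalone widget.
import sys
import unittest
from PyQt4.QtGui import QApplication, QPushButton
from PyQt4.QtTest import QTest
from PyQt4.QtCore import Qt

app = QApplication(sys.argv)  # QTest requires a live QApplication


class TestButton(unittest.TestCase):
    def test_click_emits_signal(self):
        button = QPushButton("Calculate")
        clicks = []
        button.clicked.connect(lambda: clicks.append(1))
        QTest.mouseClick(button, Qt.LeftButton)  # simulate a user click
        self.assertEqual(len(clicks), 1)


if __name__ == '__main__':
    unittest.main()
```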
{ "source": "0x6f736f646f/automating-home-lights", "score": 3 }
#### File: homeAutomation/server/main.py

```python
from flask import Flask, request
import paho.mqtt.client as mqtt
from dotenv import load_dotenv
import os

load_dotenv(dotenv_path="broker.env")
broker_url = os.getenv("MQTT_HOST")
broker_port = int(os.getenv("MQTT_PORT"))
mqtt_topic = os.getenv("MQTT_TOPIC")

client = mqtt.Client()
client.connect(broker_url, broker_port)

app = Flask(__name__)


@app.route('/', methods=['POST', 'GET'])
def publish():
    if request.method == "GET":
        return "Server is up"
    global client
    command = request.get_json()['queryResult']['queryText']
    if command.__contains__("on"):
        client.publish(topic=mqtt_topic, payload="ON", qos=0, retain=True)
        return {
            "payload": {
                "google": {
                    "expectUserResponse": True,
                    "richResponse": {
                        "items": [
                            {"simpleResponse": {"textToSpeech": "The lights are on"}}
                        ]
                    }
                }
            }
        }
    elif command.__contains__("off"):
        client.publish(topic=mqtt_topic, payload="OFF", qos=0, retain=True)
        return {
            "payload": {
                "google": {
                    "expectUserResponse": True,
                    "richResponse": {
                        "items": [
                            {"simpleResponse": {"textToSpeech": "The lights are off"}}
                        ]
                    }
                }
            }
        }
    else:
        return {
            "payload": {
                "google": {
                    "expectUserResponse": True,
                    "richResponse": {
                        "items": [
                            {"simpleResponse": {"textToSpeech": "Do you want to turn on or off the lights"}}
                        ]
                    }
                }
            }
        }


if __name__ == "__main__":
    app.run(port=5000)
```
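The device-side subscriber is not part of this file; a minimal companion sketch that would receive the `ON`/`OFF` payloads published above (reusing the same `broker.env` values):

```python
# Hedged sketch of a subscriber for the topic published by the Flask webhook.
import os
import paho.mqtt.client as mqtt
from dotenv import load_dotenv

load_dotenv(dotenv_path="broker.env")


def on_message(client, userdata, message):
    # Payload is b"ON" or b"OFF", as published by the server above.
    print("lights ->", message.payload.decode())


client = mqtt.Client()
client.on_message = on_message
client.connect(os.getenv("MQTT_HOST"), int(os.getenv("MQTT_PORT")))
client.subscribe(os.getenv("MQTT_TOPIC"))
client.loop_forever()
```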
{ "source": "0x6f736f646f/backend-blog-application", "score": 3 }
#### File: backend-blog-application/src/manage.py

```python
import os
import coverage
import unittest

from api import app, db
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager

COV = coverage.coverage(
    branch=True,
    include="api/*",
    omit=[
        'tests/*',
        'config.py',
        '*/__init__.py'
    ]
)
COV.start()

migrate = Migrate(app, db)
manager = Manager(app)

# migrations
manager.add_command('db', MigrateCommand)


@manager.command
def test():
    """Runs the unit tests without test coverage"""
    tests = unittest.TestLoader().discover('tests', pattern="test*.py")
    result = unittest.TextTestRunner(verbosity=2).run(tests)
    if result.wasSuccessful():
        return 0
    return 1


@manager.command
def cov():
    """Runs the unit tests with coverage"""
    tests = unittest.TestLoader().discover('tests')
    result = unittest.TextTestRunner(verbosity=2).run(tests)
    if result.wasSuccessful():
        COV.stop()
        COV.save()
        print("Coverage summary: ")
        COV.report()
        basedir = os.path.abspath(os.path.dirname(__file__))
        covdir = os.path.join(basedir, 'coverage')
        COV.html_report(directory=covdir)
        COV.erase()
        return 0
    return 1


@manager.command
def coveralls():
    """Runs the unit tests with coverage."""
    tests = unittest.TestLoader().discover('tests')
    unittest.TextTestRunner(verbosity=2).run(tests)
    COV.stop()
    COV.save()
    COV.report()
    basedir = os.path.abspath(os.path.dirname(__file__))
    covdir = os.path.join(basedir, 'tmp/coverage')
    COV.html_report(directory=covdir)
    COV.erase()


@manager.command
def create_database():
    """Creates the database tables"""
    db.create_all()


@manager.command
def drop_database():
    """Drops the database tables"""
    db.drop_all()


if __name__ == "__main__":
    manager.run()
```
{ "source": "0x6f736f646f/Big4Trendanalysis", "score": 3 }
#### File: 0x6f736f646f/Big4Trendanalysis/api.py

```python
from flask import Flask, jsonify
import Sentiment as s

api = Flask(__name__)


@api.route("/")
def index():
    return jsonify({"message": "Success"})


@api.route("/data/<message>", methods=["GET", "PUT"])
def data(message):
    a = s.sentiment(str(message))
    if a[0] == 1:
        return jsonify({
            "message": message,
            "response": "The message is positive with a confidence of {}".format(a[1] * 100)
        })
    elif a[0] == 0:
        return jsonify({
            "message": message,
            "response": "The message is negative with a confidence of {}".format(a[1] * 100)
        })


if __name__ == '__main__':
    api.run("0.0.0.0", port=5000)
```
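A quick client-side check of the endpoint above, assuming the API is running locally on port 5000:

```python
# Calling the /data/<message> route; requests percent-encodes the spaces.
import requests

r = requests.get("http://localhost:5000/data/I love this product")
print(r.json())  # e.g. {"message": ..., "response": "The message is positive ..."}
```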
{ "source": "0x6f736f646f/DarajaInFlask", "score": 2 }
#### File: 0x6f736f646f/DarajaInFlask/func.py

```python
import requests
from requests.auth import HTTPBasicAuth
import json
from datetime import datetime
import base64
import os
from dotenv import load_dotenv

dotenv_path = os.path.join(os.path.dirname(__file__), '.env')
load_dotenv(dotenv_path)


def authentication():
    url = "https://sandbox.safaricom.co.ke/oauth/v1/generate?grant_type=client_credentials"
    consumer_key = os.getenv("consumer_key")
    consumer_secret = os.getenv("consumer_secret")
    response = requests.get(url=url, auth=HTTPBasicAuth(consumer_key, consumer_secret))
    access_token = json.loads(response.text)
    mpesa_access_token = access_token['access_token']
    return mpesa_access_token


def lipaNaMpesa(mpesa_access_token):
    url = "https://sandbox.safaricom.co.ke/mpesa/stkpush/v1/processrequest"
    passkey = "<KEY>"  # redacted in the source
    code = "174379"
    lipa_time = datetime.now().strftime("%Y%m%d%H%M%S")
    data = code + passkey + lipa_time
    # The next two lines were redacted in the source; this is the standard
    # Daraja password derivation: base64(shortcode + passkey + timestamp).
    password = base64.b64encode(data.encode())
    password = password.decode('utf-8')
    request_data = {
        "BusinessShortCode": code,
        "Password": password,
        "Timestamp": lipa_time,
        "TransactionType": "CustomerPayBillOnline",
        "Amount": 1,
        "PartyA": 254720136609,
        "PartyB": code,
        "PhoneNumber": 254720136609,
        "CallBackURL": "https://sandbox.safaricom.co.ke/mpesa/",
        "AccountReference": "Rodney",
        "TransactionDesc": "Testing stk push"
    }
    headers = {"Authorization": "Bearer %s" % mpesa_access_token}
    response = requests.post(url=url, json=request_data, headers=headers)
    return response.content


def lipaNaMpesaQuery(mpesa_access_token):
    url = "https://sandbox.safaricom.co.ke/mpesa/stkpushquery/v1/query"
    code = "174379"
    lipa_time = datetime.now().strftime("%Y%m%d%H%M%S")
    passkey = "<KEY>"  # redacted in the source
    data = code + passkey + lipa_time
    # Redacted in the source; same password derivation as in lipaNaMpesa().
    password = base64.b64encode(data.encode())
    password = password.decode('utf-8')
    request_data = {
        "BusinessShortCode": code,
        "Password": password,
        "Timestamp": lipa_time,
        "CheckoutRequestID": "ws_CO_DMZ_401817453_22092019181533246"
    }
    headers = {"Authorization": "Bearer %s" % mpesa_access_token}
    response = requests.post(url=url, json=request_data, headers=headers)
    return response.content


def accountbalance():
    # NOTE: in the original, this function builds the request but never sends it.
    url = "https://sandbox.safaricom.co.ke/mpesa/accountbalance/v1/query"
    code = "174379"
    request_data = {
        "Initiator": "",
        "SecurityCredential": "",
        "CommandID": "AccountBalance",
        "PartyA": code,
        "IdentifierType": "4",
        "Remarks": "Hello",
        "QueueTimeOutURL": "",
        "ResultURL": ""
    }


def c2b(mpesa_access_token):
    url = "https://sandbox.safaricom.co.ke/mpesa/c2b/v1/simulate"
    code = "600000"
    request_data = {
        "ShortCode": code,
        "CommandID": "CustomerPayBillOnline",
        "Amount": "1",
        "Msisdn": 254720136609,
        "BillRefNumber": "Rodney"
    }
    headers = {"Authorization": "Bearer %s" % mpesa_access_token}
    response = requests.post(url=url, json=request_data, headers=headers)
    return response.content


if __name__ == "__main__":
    lipaNaMpesa(authentication())
```
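A standalone worked example of the password derivation reconstructed above, with a dummy passkey (the real one is redacted in the source):

```python
# Daraja STK-push password: base64(shortcode + passkey + timestamp).
import base64
from datetime import datetime

code = "174379"
passkey = "dummy_passkey"  # placeholder, not the real key
lipa_time = datetime.now().strftime("%Y%m%d%H%M%S")
password = base64.b64encode((code + passkey + lipa_time).encode()).decode('utf-8')
print(password)
```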
{ "source": "0x6f736f646f/Emojidetection", "score": 3 }
#### File: 0x6f736f646f/Emojidetection/videostream.py

```python
import cv2
import os
import pickle

BASE_DIR = os.path.dirname(os.path.abspath(__file__))
face_cascade = cv2.CascadeClassifier("Models/haarcascade_frontalface_alt2.xml")
recognizer = cv2.face.LBPHFaceRecognizer_create()
recognizer.read("Models/trainer.yml")

labels = {}
with open("Models/labels.pkl", "rb") as f:
    og_labels = pickle.load(f)
    labels = {v: k for k, v in og_labels.items()}


class VideoStream():
    def __init__(self):
        # Constructor that returns a video camera input.
        self.video = cv2.VideoCapture(0)

    def __del__(self):
        # Class destructor.
        self.video.release()

    def get_frame_grs(self):
        # Camera input for processing. Returns a grayscale image.
        success, image = self.video.read()
        image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
        ret, jpeg = cv2.imencode('.jpg', image)
        return jpeg.tobytes()

    def get_frame_col(self):
        # Camera input for processing. Returns a color image.
        success, image = self.video.read()
        ret, jpeg = cv2.imencode('.jpg', image)
        return jpeg.tobytes()

    def detect_faces(self):
        # Camera input for processing. Returns a color image with face detection.
        ret, frame = self.video.read()
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        faces = face_cascade.detectMultiScale(gray, scaleFactor=1.5, minNeighbors=5)
        for (x, y, w, h) in faces:
            roi_gray = gray[y:y + h, x:x + w]
            roi_color = frame[y:y + h, x:x + w]
            id_, conf = recognizer.predict(roi_gray)
            if conf >= 45 and conf <= 85:
                font = cv2.FONT_HERSHEY_SIMPLEX
                name = labels[id_]
                color = (180, 255, 10)  # BGR
                stroke = 2  # Thickness
                end_cord_x = x + w
                end_cord_y = y + h
                cv2.rectangle(frame, (x, y), (end_cord_x, end_cord_y), color, stroke)
                cv2.putText(frame, name, (x, y), font, 1, color, stroke, cv2.LINE_AA)
        ret1, jpeg = cv2.imencode('.jpg', frame)
        return jpeg.tobytes()
```
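The methods above return JPEG bytes (they are clearly meant for HTTP streaming), so displaying frames locally means decoding first. A hedged usage sketch, assuming `VideoStream` from the module above is importable:

```python
# Decode the JPEG bytes back into a frame and show it in a window.
import cv2
import numpy as np
# from videostream import VideoStream  # adjust the import to your layout

stream = VideoStream()
while True:
    jpeg_bytes = stream.detect_faces()
    frame = cv2.imdecode(np.frombuffer(jpeg_bytes, dtype=np.uint8), cv2.IMREAD_COLOR)
    cv2.imshow("faces", frame)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
cv2.destroyAllWindows()
```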
{ "source": "0x6f736f646f/Investing.comData", "score": 3 }
#### File: Investing.comData/Utilis/webscrapper.py

```python
import requests
from bs4 import BeautifulSoup
import pandas as pd
import os
import sys

sys.path.append("..")
from config import SQLALCHEMY_DATABASE_URI
from Utilis import seeder


def get_data(url, header, data):
    print("Getting data ......")
    response = requests.post(url, headers=header, data=data)
    if response.status_code != 200:
        raise Exception("Response code is {}".format(response.status_code))
    else:
        return response.content


def parse_data(content):
    print("Parsing data ......")
    soup = BeautifulSoup(content, 'html.parser')
    table = soup.find('table', {"id": "curr_table"})
    # Generate lists
    A = []
    B = []
    C = []
    D = []
    E = []
    F = []
    G = []
    for row in table.findAll("tr"):
        cells = row.findAll('td')
        if len(cells) == 7:  # Only extract table body not heading
            A.append(cells[0].find(text=True))
            B.append(cells[1].find(text=True))
            C.append(cells[2].find(text=True))
            D.append(cells[3].find(text=True))
            E.append(cells[4].find(text=True))
            F.append(cells[5].find(text=True))
            G.append(cells[6].find(text=True))
    return A, B, C, D, E, F, G


def generate_csv(A, B, C, D, E, F, G, header):
    titles = ['Date', 'Price', 'Open', 'High', 'Low', 'Vol']
    print("Generating csv ......")
    df = pd.DataFrame(G, columns=['Date'])
    df[str(titles[0])] = A
    df[str(titles[1])] = B
    df[str(titles[2])] = C
    df[str(titles[3])] = D
    df[str(titles[4])] = E
    df[str(titles[5])] = F
    title = str(header['Referer'][35:]) + '.csv'
    os.chdir("../")
    df.to_csv("Data/{}".format(title))
    print("Generated csv")


def push_to_db(A, B, C, D, E, F, G, header):
    print(str(SQLALCHEMY_DATABASE_URI))
    c, conn = seeder.create_connection(SQLALCHEMY_DATABASE_URI)
    title = str(header['Referer'][35:].replace("-historical-data", "").replace("-", ""))
    seeder.sql_insert_company(name=title)
    length = len(A)
    for i in range(0, length, 1):
        seeder.sql_insert_data(date=G[i], price=A[i], open_price=B[i], high=C[i],
                               low=D[i], vol=E[i], name=title, c=c, connection=conn)
```
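A hedged sketch of how the helpers above chain together. The URL, Referer, and form fields below are assumptions mimicking investing.com's historical-data endpoint (the code implies the Referer's slice `[35:]` becomes the CSV name); they are not taken from this repository:

```python
# Illustrative only: endpoint and form-field names are assumptions.
url = "https://www.investing.com/instruments/HistoricalDataAjax"
header = {
    "Referer": "https://www.investing.com/equities/safaricom-historical-data",
    "X-Requested-With": "XMLHttpRequest",
    "User-Agent": "Mozilla/5.0",
}
data = {"curr_id": "101810", "st_date": "01/01/2020", "end_date": "01/31/2020"}

content = get_data(url, header, data)
A, B, C, D, E, F, G = parse_data(content)
generate_csv(A, B, C, D, E, F, G, header)
```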
{ "source": "0x6f736f646f/sdk-py", "score": 3 }
#### File: sdk-py/tests/test_things.py

```python
from lib import sdk
import json, requests_mock

s = sdk.SDK()

thing = {"thing_name": "thing"}
thing_id = "123-456-789"
thing_id1 = "123-223-333"
channel_id = "654-654-654"
channel_id1 = "654-654-654"
token = "<PASSWORD>"  # redacted in the source
url = "http://localhost"
params = None


def test_create_thing(requests_mock):
    requests_mock.register_uri("POST", url + "/things",
                               headers={"location": "/things/" + thing_id},
                               status_code=201)
    r = s.things.create(thing, token)
    assert r.error.status == 0
    assert thing_id == r.value


def test_create_existing_thing(requests_mock):
    requests_mock.register_uri("POST", url + "/things",
                               headers={"location": "/things/" + thing_id},
                               status_code=409)
    r = s.things.create(thing, token)
    assert r.error.status == 1
    assert r.error.message == "Entity already exist."


def test_create_bulk_things(requests_mock):
    requests_mock.register_uri("POST", url + "/things/bulk",
                               json=[thing_id, thing_id1],
                               headers={"location": "/things/" + thing_id},
                               status_code=201)
    r = s.things.create_bulk(thing_id, token)
    assert r.error.status == 0
    assert [thing_id, thing_id1] == r.value


def test_create_bulk_things_missing_token(requests_mock):
    requests_mock.register_uri("POST", url + "/things/bulk",
                               json=[thing_id, thing_id1],
                               headers={"location": "/things/" + thing_id},
                               status_code=401)
    r = s.things.create_bulk(thing_id, token)
    assert r.error.status == 1
    assert r.error.message == "Missing or invalid access token provided."


def test_get_thing(requests_mock):
    requests_mock.register_uri("GET", url + "/things/" + thing_id,
                               json=thing, status_code=200)
    r = s.things.get(thing_id, token)
    assert r.error.status == 0
    assert thing == r.value


def test_get_thing_malformed_query(requests_mock):
    requests_mock.register_uri("GET", url + "/things/" + thing_id,
                               json=thing, status_code=400)
    r = s.things.get(thing_id, token)
    assert r.error.status == 1
    assert r.error.message == "Failed due to malformed query parameters."


def test_get_all_things(requests_mock):
    requests_mock.register_uri("GET", url + "/things",
                               json=[thing_id, thing_id1], status_code=200)
    r = s.things.get_all(token)
    assert r.error.status == 0
    assert [thing_id, thing_id1] == r.value


def test_get_all_thing_does_not_exist(requests_mock):
    requests_mock.register_uri("GET", url + "/things",
                               json=[thing_id, thing_id1], status_code=404)
    r = s.things.get_all(token)
    assert r.error.status == 1
    assert r.error.message == "Thing does not exist."


def test_get_by_channel(requests_mock):
    requests_mock.register_uri("GET", url + "/channels/" + channel_id + "/things",
                               json=channel_id,
                               headers={"Authorization": "/channels/" + channel_id + "/things"},
                               status_code=200)
    r = s.things.get_by_channel(channel_id, params, token)
    assert r.error.status == 0
    assert channel_id == r.value


def test_get_by_channel_missing_token(requests_mock):
    requests_mock.register_uri("GET", url + "/channels/" + channel_id + "/things",
                               json=channel_id,
                               headers={"Authorization": "/channels/" + channel_id + "/things"},
                               status_code=401)
    r = s.things.get_by_channel(channel_id, params, token)
    assert r.error.status == 1
    assert r.error.message == "Missing or invalid access token provided."


def test_update_thing(requests_mock):
    requests_mock.register_uri("PUT", url + "/things/" + thing_id,
                               json=json.dumps(thing), status_code=200)
    r = s.things.update(thing_id, token, thing)
    assert r.error.status == 0


def test_update_thing_bad_json(requests_mock):
    requests_mock.register_uri("PUT", url + "/things/" + thing_id,
                               json=json.dumps(thing), status_code=400)
    r = s.things.update(thing_id, token, thing)
    assert r.error.status == 1
    assert r.error.message == "Failed due to malformed JSON."


def test_delete_thing(requests_mock):
    requests_mock.register_uri("DELETE", url + "/things/" + thing_id,
                               status_code=204)
    r = s.things.delete(thing_id, token)
    assert r.error.status == 0


def test_delete_bad_thing_id(requests_mock):
    requests_mock.register_uri("DELETE", url + "/things/" + thing_id,
                               status_code=400)
    r = s.things.delete(thing_id, token)
    assert r.error.status == 1
    assert r.error.message == "Failed due to malformed thing's ID."


def test_connect_thing(requests_mock):
    requests_mock.register_uri("POST", url + "/connect",
                               json=[channel_id, thing_id], status_code=201)
    r = s.things.connect(channel_id, thing_id, token)
    assert r.error.status == 0
    assert [channel_id, thing_id] == r.value


def test_connect_non_existing_entity(requests_mock):
    requests_mock.register_uri("POST", url + "/connect",
                               json=[channel_id, thing_id], status_code=404)
    r = s.things.connect(channel_id, thing_id, token)
    assert r.error.status == 1
    assert r.error.message == "A non-existent entity request."


def test_disconnect_thing(requests_mock):
    requests_mock.register_uri("DELETE",
                               url + "/channels/" + channel_id + "/things/" + thing_id,
                               status_code=204)
    r = s.things.disconnect(channel_id, thing_id, token)
    assert r.error.status == 0


def test_disconnect_thing_or_channel_does_not_exist(requests_mock):
    requests_mock.register_uri("DELETE",
                               url + "/channels/" + channel_id + "/things/" + thing_id,
                               status_code=404)
    r = s.things.disconnect(channel_id, thing_id, token)
    assert r.error.status == 1
    assert r.error.message == "Channel or thing does not exist."


def test_disconnect_things(requests_mock):
    requests_mock.register_uri("PUT", url + "/disconnect/", status_code=200)
    r = s.things.disconnect_things([channel_id], [thing_id, thing_id1], token)
    assert r.error.status == 1


def test_disconnect_things_bad_json(requests_mock):
    requests_mock.register_uri("PUT", url + "/disconnect/", status_code=400)
    r = s.things.disconnect_things([channel_id], [thing_id, thing_id1], token)
    assert r.error.status == 1
    assert r.error.message == "Failed due to malformed thing's ID."
```

#### File: sdk-py/tests/test_version.py

```python
from lib import sdk
import json, requests_mock

s = sdk.SDK()


def test_version(requests_mock):
    requests_mock.get("http://localhost/version", json='{"version":"0.15.0"}')
    v = s.version()
    assert "0.15.0" == json.loads(v)["version"]
```
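For context, these tests rely on requests-mock's pytest plugin, which injects a `requests_mock` fixture that intercepts HTTP at the transport level. A self-contained illustration of the mechanism:

```python
# The fixture registers fake responses; real requests never leave the process.
import requests


def test_fixture_intercepts_http(requests_mock):
    requests_mock.get("http://localhost/version", json={"version": "0.15.0"})
    r = requests.get("http://localhost/version")
    assert r.json()["version"] == "0.15.0"
```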
{ "source": "0x6f736f646f/serverless_pipelines", "score": 3 }
#### File: serverless_pipelines/kiva_org/loans_bigquery.py

```python
from __future__ import absolute_import

import argparse
import csv
import logging

import apache_beam as beam
from apache_beam.metrics.metric import Metrics
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.options.pipeline_options import GoogleCloudOptions


class ParseLoanRecordFn(beam.DoFn):
    """Parses the raw loan record into a Python tuple.

    Each record has the following format:
    loan_id,loan_name,original_language,description,funded_amount,loan_amount,status,image_id,video_id,activity_name,sector_name,loan_use ...
    """
    def __init__(self):
        super(ParseLoanRecordFn, self).__init__()
        self.num_parse_errors = Metrics.counter(self.__class__, 'num_parse_errors')

    def process(self, elem):
        try:
            row = list(csv.reader([elem]))[0]
            yield row[0], float(row[5]), float(row[6]), row[7]
        except:
            # Log and count parse errors
            self.num_parse_errors.inc()
            logging.error('Parse error on "%s"', elem[:40])


class LoanRecordsDict(beam.DoFn):
    """Format the loan data to a dictionary of BigQuery columns with their values.

    Receives a tuple of the sanitized loans and formats it to a dictionary in
    the format {'bigquery_column': value}
    """
    def process(self, loan_record):
        (loan, amount, funded, status) = loan_record
        yield {
            'loan_id': loan,
            'funded_amount': amount,
            'loan_amount': funded,
            'status': status
        }


class WriteToBigQuery(beam.PTransform):
    """Generate, format and write BigQuery table row."""
    def __init__(self, table_name, dataset, schema, project):
        """Initializes the transform.

        Args:
            table_name: Name of the BigQuery table to use.
            dataset: Name of dataset.
            schema: Dictionary in the format {'column_name': 'bigquery_type'}
            project: Name of the GCP project to use.
        """
        super(WriteToBigQuery, self).__init__()
        self.table_name = table_name
        self.dataset = dataset
        self.schema = schema
        self.project = project

    def get_schema(self):
        """Build the output table schema."""
        return ', '.join('%s:%s' % (col, self.schema[col]) for col in self.schema)

    def expand(self, pcoll):
        return (
            pcoll
            | 'ConvertToRow' >> beam.Map(
                lambda elem: {col: elem[col] for col in self.schema})
            | 'WriteToBQ' >> beam.io.WriteToBigQuery(
                self.table_name, self.dataset, self.project, self.get_schema())
        )


class SanitizeLoanData(beam.PTransform):
    def expand(self, pcoll):
        return (
            pcoll
            | 'ParseLoanRecordFn' >> beam.ParDo(ParseLoanRecordFn())
        )


def run(argv=None):
    """Defines and runs the loans pipeline."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--input',
                        type=str,
                        default='/Users/ke-fvf2362hv2h/ownspace/workshops/data/kiva/loans_mini.csv',
                        help='Path to the data file(s) containing loan data.')
    parser.add_argument('--output',
                        type=str,
                        default='tmp/loans_bq',
                        help='Path to the output file(s).')
    parser.add_argument('--dataset',
                        type=str,
                        required=True,
                        help='BigQuery dataset')
    parser.add_argument('--table_name',
                        type=str,
                        required=True,
                        help='The table to be used')
    args, pipeline_args = parser.parse_known_args(argv)
    options = PipelineOptions(pipeline_args)

    # We use the save_main_session option because one or more DoFn's in this
    # workflow rely on global context (e.g., a module imported at module level).
    options.view_as(SetupOptions).save_main_session = True

    with beam.Pipeline(options=options) as p:
        (p
         | 'ReadInputText' >> beam.io.ReadFromText(file_pattern=args.input,
                                                   skip_header_lines=1)
         | 'SanitizeLoanData' >> SanitizeLoanData()
         | 'FormDict' >> beam.ParDo(LoanRecordsDict())
         | 'WriteToBigQuery' >> WriteToBigQuery(
             args.table_name,
             args.dataset,
             {
                 'loan_id': 'STRING',
                 'funded_amount': 'FLOAT',
                 'loan_amount': 'FLOAT',
                 'status': 'STRING',
             },
             options.view_as(GoogleCloudOptions).project)
         )


if __name__ == '__main__':
    logging.getLogger().setLevel(logging.INFO)
    run()
```
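A small sketch of unit-testing the parsing step above in isolation with Beam's test utilities, assuming `ParseLoanRecordFn` from the module above is importable (the row contents are made up to satisfy the indices the DoFn reads):

```python
# Run one record through ParseLoanRecordFn on the local DirectRunner.
import apache_beam as beam
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import assert_that, equal_to

row = '123,some loan,English,desc,extra,100.0,250.0,funded,img,vid,act,sector,use'

with TestPipeline() as p:
    out = (p
           | beam.Create([row])
           | beam.ParDo(ParseLoanRecordFn()))
    # row[0], float(row[5]), float(row[6]), row[7]
    assert_that(out, equal_to([('123', 100.0, 250.0, 'funded')]))
```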
{ "source": "0x6f736f646f/variational-quantum-classifier-on-heartattack", "score": 2 }
#### File: Src/Scripts/Benchmarking.py

```python
from qiskit import QuantumCircuit
from qiskit.aqua.components.optimizers import COBYLA, ADAM, SPSA
from qiskit.circuit.library import ZZFeatureMap, RealAmplitudes, ZFeatureMap, PauliFeatureMap
from qiskit.quantum_info import Statevector
import numpy as np
import pandas as pd
from sklearn.preprocessing import MinMaxScaler
from sklearn.model_selection import train_test_split
from sklearn.utils import shuffle
import csv
import warnings

warnings.filterwarnings("ignore")


class Benchmark:
    """
    Benchmarking different optimizers, featuremaps and depth of variational circuits
    """
    def __init__(self, optimizer, variational_depth, feature_map,
                 X_train, X_test, Y_train, Y_test):
        """
        Initial function
        :param optimizer: The optimizer to benchmark
        :param variational_depth: The depth of the variational circuit
        :param feature_map: The featuremap that encodes data
        :param X_train: The x data for training
        :param X_test: The x data for testing
        :param Y_train: The y data for training
        :param Y_test: The y data for testing
        """
        self.optimizer = optimizer
        self.variational_depth = variational_depth
        self.feature_map = feature_map
        self.no_qubit = 4
        self.random_state = 42
        self.class_labels = ['yes', 'no']
        self.circuit = None
        self.var_form = RealAmplitudes(self.no_qubit, reps=self.variational_depth)
        self.sv = Statevector.from_label('0' * self.no_qubit)
        self.X_train, self.X_test, self.Y_train, self.Y_test = X_train, X_test, Y_train, Y_test
        self.cost_list = []

    def prepare_circuit(self):
        """
        Prepares the circuit. Combines an encoding circuit (the feature map)
        with a variational circuit (RealAmplitudes).
        """
        self.circuit = self.feature_map.combine(self.var_form)
        # circuit.draw(output='mpl')

    def get_data_dict(self, params, x):
        """
        Assign the params to the variational circuit and the data to the featuremap
        :param params: Parameter for training the variational circuit
        :param x: The data
        :return parameters:
        """
        parameters = {}
        for i, p in enumerate(self.feature_map.ordered_parameters):
            parameters[p] = x[i]
        for i, p in enumerate(self.var_form.ordered_parameters):
            parameters[p] = params[i]
        return parameters

    def assign_label(self, bit_string):
        """
        Based on the measurement output, assign 'no' for odd parity and
        'yes' for even parity.
        :param bit_string: The bit string, e.g. 00100
        :return class_label: yes or no
        """
        hamming_weight = sum([int(k) for k in list(bit_string)])
        is_odd_parity = hamming_weight & 1
        if is_odd_parity:
            return self.class_labels[1]
        else:
            return self.class_labels[0]

    def return_probabilities(self, counts):
        """
        Calculates the probabilities of the class labels after assigning the
        label from the bit string measured as output
        :type counts: dict
        :param counts: The counts from the measurement of the quantum circuit
        :return result: The probability of each class
        """
        shots = sum(counts.values())
        result = {self.class_labels[0]: 0, self.class_labels[1]: 0}
        for key, item in counts.items():
            label = self.assign_label(key)
            result[label] += counts[key] / shots
        return result

    def classify(self, x_list, params):
        """
        Assigns the x and params to the quantum circuit, then runs a
        measurement to return the probabilities of each class
        :type params: List
        :type x_list: List
        :param x_list: The x data
        :param params: Parameters for optimizing the variational circuit
        :return probs: The probabilities
        """
        qc_list = []
        for x in x_list:
            circ_ = self.circuit.assign_parameters(self.get_data_dict(params, x))
            qc = self.sv.evolve(circ_)
            qc_list += [qc]
        probs = []
        for qc in qc_list:
            counts = qc.to_counts()
            prob = self.return_probabilities(counts)
            probs += [prob]
        return probs

    @staticmethod
    def mse_cost(probs, expected_label):
        """
        Calculates the mean squared error between the expected values and
        the calculated values
        :type expected_label: List
        :type probs: List
        :param probs: The expected values
        :param expected_label: The real values
        :return mse: The mean squared error
        """
        p = probs.get(expected_label)
        actual, pred = np.array(1), np.array(p)
        mse = np.square(np.subtract(actual, pred)).mean()
        return mse

    def cost_function(self, X, Y, params, print_value=False):
        """
        This is the cost function and returns cost for optimization
        :type print_value: Boolean
        :type params: List
        :type Y: List
        :type X: List
        :param X: The x data
        :param Y: The label
        :param params: The parameters
        :param print_value: If you want values to be printed
        :return cost:
        """
        # map training input to list of labels and list of samples
        cost = 0
        training_labels = []
        training_samples = []
        for sample in X:
            training_samples += [sample]
        for label in Y:
            if label == 0:
                training_labels += [self.class_labels[0]]
            elif label == 1:
                training_labels += [self.class_labels[1]]
        probs = self.classify(training_samples, params)
        # evaluate costs for all classified samples
        for i, prob in enumerate(probs):
            cost += self.mse_cost(prob, training_labels[i])
        cost /= len(training_samples)
        # print resulting objective function
        if print_value:
            print('%.4f' % cost)
        # return objective value
        self.cost_list.append(cost)
        return cost

    def test_model(self, X, Y, params):
        """
        Test the model based on x test and y test
        :type params: List
        :type Y: List
        :type X: List
        :param X: The x test set
        :param Y: The y test set
        :param params: The parameters
        :return:
        """
        accuracy = 0
        training_samples = []
        for sample in X:
            training_samples += [sample]
        probs = self.classify(training_samples, params)
        for i, prob in enumerate(probs):
            if (prob.get('yes') >= prob.get('no')) and (Y[i] == 0):
                accuracy += 1
            elif (prob.get('no') >= prob.get('yes')) and (Y[i] == 1):
                accuracy += 1
        accuracy /= len(Y)
        print("Test accuracy: {}".format(accuracy))

    def run(self):
        """
        Runs the whole code:
        1. Prepares the circuit
        2. Defines the objective function
        3. Initializes the parameters
        4. Optimizes the parameters by training the classifier
        :return:
        """
        self.prepare_circuit()
        # define objective function for training
        objective_function = lambda params: self.cost_function(
            self.X_train, self.Y_train, params, print_value=False)
        # randomly initialize the parameters
        np.random.seed(self.random_state)
        init_params = 2 * np.pi * np.random.rand(
            self.no_qubit * self.variational_depth * 2)
        # train classifier
        opt_params, value, _ = self.optimizer.optimize(
            len(init_params), objective_function, initial_point=init_params)
        # print results
        # print()
        # print('opt_params:', opt_params)
        # print('opt_value: ', value)
        self.test_model(self.X_test, self.Y_test, opt_params)

    def get_cost_list(self):
        """
        Return the cost list
        :return cost list:
        """
        return self.cost_list


def normalize_data(dataPath="../../Data/Processed/heartdata.csv"):
    """
    Normalizes the data
    :return X_train, X_test, Y_train, Y_test:
    """
    # Reads the data
    data = pd.read_csv(dataPath)
    data = shuffle(data, random_state=42)
    X, Y = data[['sex', 'cp', 'exang', 'oldpeak']].values, data['num'].values
    # normalize the data
    scaler = MinMaxScaler(feature_range=(-2 * np.pi, 2 * np.pi))
    X = scaler.fit_transform(X)
    X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.3,
                                                        random_state=42)
    return X_train, X_test, Y_train, Y_test


def main():
    data = {}
    feature_maps = ['ZZFeatureMap(4, reps=1)', 'ZZFeatureMap(4, reps=2)',
                    'ZZFeatureMap(4, reps=4)', 'ZFeatureMap(4, reps=1)',
                    'ZFeatureMap(4, reps=2)', 'ZFeatureMap(4, reps=4)',
                    'PauliFeatureMap(4, reps=1)', 'PauliFeatureMap(4, reps=2)',
                    'PauliFeatureMap(4, reps=4)']
    optimizers = ["COBYLA(maxiter=50)", "SPSA(max_trials=50)", "ADAM(maxiter=50)"]
    x_train, x_test, y_train, y_test = normalize_data()
    for fe in feature_maps:
        for i in [1, 3, 5]:
            for opt in optimizers:
                print("FE: {}\tDepth: {}\tOpt: {}".format(fe, i, opt))
                test_benchmark = Benchmark(optimizer=eval(opt),
                                           variational_depth=i,
                                           feature_map=eval(fe),
                                           X_train=x_train, X_test=x_test,
                                           Y_train=y_train, Y_test=y_test)
                test_benchmark.run()
                data_list = "{} {} vdepth {}".format(fe, opt, i)
                data[data_list] = test_benchmark.get_cost_list()
    w = csv.writer(open("../../Data/Processed/heartcosts.csv", "w"))
    for key, val in data.items():
        w.writerow([key, val])


if __name__ == "__main__":
    main()
```
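The bit-string-to-class-label rule used by `assign_label()` above, shown standalone: even Hamming weight (parity) maps to `class_labels[0]` ('yes'), odd maps to `class_labels[1]` ('no'):

```python
# Parity-based labeling, extracted for illustration.
def parity_label(bit_string, class_labels=('yes', 'no')):
    hamming_weight = sum(int(b) for b in bit_string)
    return class_labels[hamming_weight & 1]

assert parity_label('0110') == 'yes'  # weight 2 -> even parity
assert parity_label('0111') == 'no'   # weight 3 -> odd parity
```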
{ "source": "0x7067/imagevision-bot", "score": 3 }
#### File: 0x7067/imagevision-bot/translator.py

```python
from yandex_translate import YandexTranslate

# Your Yandex API credentials here
translator = YandexTranslate('')


def translate_en_pt(message):
    phrase_translated = translator.translate(message, 'pt')
    # join the returned list of translated segments into one string
    return "".join(phrase_translated.get('text'))


def translate_en_persian(message):
    phrase_translated = translator.translate(message, 'fa')
    return "".join(phrase_translated.get('text'))
```
{ "source": "0x715C/jak-project", "score": 3 }
#### File: decompiler/gui/decompiler_gui.py

```python
from PyQt5.QtWidgets import (QApplication, QLabel, QMainWindow, QTreeView,
                             QVBoxLayout, QWidget, QPlainTextEdit, QLineEdit,
                             QListView, QDialog, QSplitter, QSizePolicy)
from PyQt5.Qt import QStandardItemModel, QStandardItem, QFont, QModelIndex
import json
import re
import os


def get_monospaced_font():
    """Get a monospaced font. Should work on both windows and linux."""
    font = QFont("monospace")
    font.setStyleHint(QFont.TypeWriter)
    return font


def get_jak_path():
    """Get a path to jak-project/"""
    return os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "..")


def segment_id_to_name(sid):
    """GOAL segment ID to name string"""
    if sid == 0:
        return "main"
    elif sid == 1:
        return "debug"
    elif sid == 2:
        return "top-level"
    else:
        return "INVALID-SEGMENT"


# Hold all the metadata for an object file
class ObjFile():
    def __init__(self, obj):
        """Convert the json data in obj.txt. If the format changes or we add
        new fields, they should be added here."""
        self.unique_name = obj[0]  # The unique name that's used by the decompiler.
        self.name_in_dgo = obj[1]  # Name in the game.
        self.version = obj[2]      # GOAL object file format version

    def get_description(self):
        return "Name: {}\n Version: {}\n Name in game: {}".format(
            self.unique_name, self.version, self.name_in_dgo)


# Hold all of the object files in a dgo.
class DgoFile():
    def __init__(self):
        self.obj_files = dict()

    def add_obj(self, obj):
        self.obj_files[obj[0]] = ObjFile(obj)


# Hold all DGOs/Object files.
class FileMap():
    def __init__(self):
        self.dgo_files = dict()
        self.all_objs = dict()

    def add_obj_to_dgo(self, dgo, obj):
        if not(dgo in self.dgo_files):
            self.dgo_files[dgo] = DgoFile()
        self.dgo_files[dgo].add_obj(obj)
        self.all_objs[obj[0]] = ObjFile(obj)

    def get_objs_matching_regex(self, regex):
        """Get a list of object files with a name that matches the given regex."""
        try:
            r = re.compile(regex)
        except:
            return []
        return list(filter(r.match, self.all_objs.keys()))


def load_obj_map_file(file_path):
    """Load the obj.txt file generated by the decompiler. Return a FileMap."""
    file_map = FileMap()
    with open(file_path) as f:
        json_data = json.loads(f.read())
        for obj_file in json_data:
            for dgo in obj_file[3]:
                file_map.add_obj_to_dgo(dgo, obj_file)
    return file_map


class ObjectFileView(QDialog):
    def __init__(self, name):
        super().__init__()
        self.setWindowTitle(name)
        with open(os.path.join(get_jak_path(), "decompiler_out",
                               "{}_asm.json".format(name))) as f:
            self.asm_data = json.loads(f.read())
        main_layout = QVBoxLayout()
        monospaced_font = get_monospaced_font()

        self.header_label = QLabel()
        main_layout.addWidget(self.header_label)

        function_splitter = QSplitter()
        function_splitter.setSizePolicy(
            QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding))

        self.function_list = QTreeView()
        self.function_list_model = QStandardItemModel()
        self.functions_by_name = dict()
        root = self.function_list_model.invisibleRootItem()
        seg_roots = []
        for i in range(3):
            seg_entry = QStandardItem(segment_id_to_name(i))
            seg_entry.setFont(monospaced_font)
            seg_entry.setEditable(False)
            root.appendRow(seg_entry)
            seg_roots.append(seg_entry)
        for f in self.asm_data["functions"]:
            function_entry = QStandardItem(f["name"])
            function_entry.setFont(monospaced_font)
            function_entry.setEditable(False)
            seg_roots[f["segment"]].appendRow(function_entry)
            self.functions_by_name[f["name"]] = f
        self.header_label.setText("Object File {} Functions ({} total):".format(
            name, len(self.asm_data["functions"])))
        self.function_list.setModel(self.function_list_model)
        self.function_list.clicked.connect(self.display_function)
        function_splitter.addWidget(self.function_list)

        layout = QVBoxLayout()
        self.function_header_label = QLabel("No function selected")
        self.function_header_label.setFont(monospaced_font)
        self.header_label.setSizePolicy(
            QSizePolicy(QSizePolicy.Minimum, QSizePolicy.Minimum))
        layout.addWidget(self.function_header_label)

        self.op_asm_split_view = QSplitter()
        self.op_asm_split_view.setSizePolicy(
            QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding))
        self.basic_op_pane = QListView()
        self.basic_op_pane.clicked.connect(self.basic_op_clicked)
        #layout.addWidget(self.basic_op_pane)
        self.op_asm_split_view.addWidget(self.basic_op_pane)

        self.asm_pane = QListView()
        self.op_asm_split_view.addWidget(self.asm_pane)
        layout.addWidget(self.op_asm_split_view)

        self.asm_display = QPlainTextEdit()
        self.asm_display.setMaximumHeight(80)
        layout.addWidget(self.asm_display)

        self.warnings_label = QLabel()
        layout.addWidget(self.warnings_label)

        widget = QWidget()
        widget.setLayout(layout)
        function_splitter.addWidget(widget)
        main_layout.addWidget(function_splitter)

        # add it to the window!
        self.setLayout(main_layout)

    def display_function(self, item):
        name = item.data()
        monospaced_font = get_monospaced_font()
        func = self.functions_by_name[name]
        basic_op_model = QStandardItemModel()
        basic_op_root = basic_op_model.invisibleRootItem()
        asm_model = QStandardItemModel()
        asm_root = asm_model.invisibleRootItem()
        self.basic_id_to_asm = []
        self.current_function = name
        op_idx = 0
        basic_idx = 0
        for op in func["asm"]:
            if "label" in op:
                asm_item = QStandardItem(op["label"] + "\n  " + op["asm_op"])
            else:
                asm_item = QStandardItem("  " + op["asm_op"])
            asm_item.setFont(monospaced_font)
            asm_item.setEditable(False)
            asm_root.appendRow(asm_item)
            if "basic_op" in op:
                if "label" in op:
                    basic_item = QStandardItem(op["label"] + "\n  " + op["basic_op"])
                else:
                    basic_item = QStandardItem("  " + op["basic_op"])
                basic_item.setFont(monospaced_font)
                basic_item.setEditable(False)
                basic_op_root.appendRow(basic_item)
                self.basic_id_to_asm.append(op_idx)
                basic_idx = basic_idx + 1
            op_idx = op_idx + 1
        self.basic_id_to_asm.append(op_idx)
        self.basic_op_pane.setModel(basic_op_model)
        self.asm_pane.setModel(asm_model)
        self.warnings_label.setText(func["warnings"])
        self.asm_display.setPlainText("")
        self.function_header_label.setText("{}, type: {}\nfunc: {} obj: {}".format(
            name, func["type"], func["name"], func["parent_object"]))

    def basic_op_clicked(self, item):
        text = ""
        added_reg = 0
        asm_idx = self.basic_id_to_asm[item.row()]
        asm_op = self.functions_by_name[self.current_function]["asm"][asm_idx]
        if "type_map" in asm_op:
            for reg, type_name in asm_op["type_map"].items():
                text += "{}: {} ".format(reg, type_name)
                added_reg += 1
                if added_reg >= 4:
                    text += "\n"
                    added_reg = 0
            text += "\n"
        for i in range(asm_idx, self.basic_id_to_asm[item.row() + 1]):
            text += self.functions_by_name[self.current_function]["asm"][i]["asm_op"] + "\n"
        op = self.functions_by_name[self.current_function]["asm"][asm_idx]
        if "referenced_string" in op:
            text += op["referenced_string"]
        self.asm_display.setPlainText(text)
        self.asm_display.setFont(get_monospaced_font())
        self.asm_pane.setCurrentIndex(self.asm_pane.model().index(asm_idx, 0))


# A window for browsing all the object files.
# Doesn't actually know anything about what's in the files, it's just used to
# select a file.
class ObjectFileBrowser(QMainWindow):
    def __init__(self, obj_map):
        self.obj_map = obj_map
        super().__init__()
        self.setWindowTitle("Object File Browser")
        self.childen_windows = []
        layout = QVBoxLayout()
        monospaced_font = get_monospaced_font()
        layout.addWidget(QLabel("Browse object files by dgo..."))

        # Set up the tree view
        self.tree = QTreeView()
        self.tree_model = QStandardItemModel()
        self.tree_root = self.tree_model.invisibleRootItem()
        for dgo_name, dgo in obj_map.dgo_files.items():
            dgo_entry = QStandardItem(dgo_name)
            dgo_entry.setFont(monospaced_font)
            dgo_entry.setEditable(False)
            for obj_name, obj in dgo.obj_files.items():
                obj_entry = QStandardItem(obj_name)
                obj_entry.setFont(monospaced_font)
                obj_entry.setEditable(False)
                dgo_entry.appendRow(obj_entry)
            self.tree_root.appendRow(dgo_entry)
        self.tree.setModel(self.tree_model)
        self.tree.clicked.connect(self.handle_tree_click)
        self.tree.doubleClicked.connect(self.handle_tree_double_click)
        layout.addWidget(self.tree)

        # Set up the Search Box
        layout.addWidget(QLabel("Or search for object (regex):"))
        self.search_box = QLineEdit()
        self.search_box.textChanged.connect(self.handle_search_change)
        layout.addWidget(self.search_box)

        # Set up Search Results
        self.search_result = QListView()
        layout.addWidget(self.search_result)
        self.search_result.clicked.connect(self.handle_search_result_click)
        self.search_result.doubleClicked.connect(self.handle_search_result_double_click)
        self.search_result.setMaximumHeight(200)

        # Set up the info box at the bottom
        self.text_box = QPlainTextEdit()
        self.text_box.setReadOnly(True)
        self.text_box.setFont(monospaced_font)
        layout.addWidget(self.text_box)
        self.text_box.setMaximumHeight(100)
        self.text_box.setPlainText("Select an object file to see details. Double click to open.")

        # add it to the window!
        widget = QWidget()
        widget.setLayout(layout)
        self.setCentralWidget(widget)

    def handle_tree_click(self, val):
        if not(val.parent().isValid()):
            return
        dgo = val.parent().data()
        obj = val.data()
        obj_info = self.obj_map.dgo_files[dgo].obj_files[obj]
        self.text_box.setPlainText("{}\n DGO: {}".format(obj_info.get_description(), dgo))

    def handle_search_change(self, text):
        objs = self.obj_map.get_objs_matching_regex(text)
        model = QStandardItemModel()
        root = model.invisibleRootItem()
        monospaced_font = get_monospaced_font()
        for x in objs:
            entry = QStandardItem(x)
            entry.setFont(monospaced_font)
            entry.setEditable(False)
            root.appendRow(entry)
        self.search_result.setModel(model)

    def handle_search_result_click(self, val):
        obj = val.data()
        obj_info = self.obj_map.all_objs[obj]
        self.text_box.setPlainText(obj_info.get_description())

    def handle_search_result_double_click(self, val):
        obj = val.data()
        window = ObjectFileView(obj)
        window.show()
        # prevents window from being GC'd and closed.
        self.childen_windows.append(window)

    def handle_tree_double_click(self, val):
        if not(val.parent().isValid()):
            return
        obj = val.data()
        window = ObjectFileView(obj)
        window.show()
        # prevents window from being GC'd and closed.
        self.childen_windows.append(window)


map_file = load_obj_map_file(os.path.join(get_jak_path(), "decompiler_out", "obj.txt"))
app = QApplication([])
app.setStyle('Windows')
window = ObjectFileBrowser(map_file)
window.show()
app.exec_()
```

#### File: decompiler/scripts/create_dgo_name_list.py

```python
import argparse
import glob
import os


# Create a dgo_names = ["...."] json config entry text for a folder of DGOs.
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(dest='folder', help='folder containing dgos')
    args = parser.parse_args()
    files = sorted([os.path.basename(x)
                    for x in glob.glob(os.path.join(args.folder, "*.*GO"))])
    dgo_names = "\"dgo_names\":["
    count = 0
    for file in files:
        dgo_names += "\"" + file + "\", "
        count += 1
        if count == 8:
            count = 0
            dgo_names += "\n  "
    dgo_names = dgo_names[:-2]  # remove last ", "
    dgo_names += "]\n"
    print(dgo_names)


if __name__ == "__main__":
    main()
```
{ "source": "0x776b7364/kali-scripts", "score": 2 }
#### File: 0x776b7364/kali-scripts/my-certstream-watchdog.py

```python
import certstream

keywords = ['domain1', 'domain2']


def extract_domains(domains):
    res = []
    for domain in domains:
        for keyword in keywords:
            if keyword in domain:
                res.append(domain)
    return res


def print_callback(message, context):
    domains = message['data']['leaf_cert']['all_domains']
    res = extract_domains(domains)
    if len(res) > 0:
        for result in res:
            print(result)


def on_open(instance):
    # Instance is the CertStreamClient instance that was opened
    print("Connection successfully established!")


def on_error(instance, exception):
    # Instance is the CertStreamClient instance that barfed
    print("Exception in CertStreamClient! -> {}".format(exception))


certstream.listen_for_events(print_callback, on_open=on_open, on_error=on_error)
```
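Given the module above, the keyword filter behaves like this on a synthetic certificate entry (the domains here are made up):

```python
# Substring match against each configured keyword.
sample = ['login.domain1.com', 'mail.example.org', 'domain2-payments.net']
print(extract_domains(sample))
# -> ['login.domain1.com', 'domain2-payments.net']
```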
{ "source": "0x78f1935/flask-smorest", "score": 2 }
#### File: flask-smorest/tests/test_spec.py

```python
from collections import OrderedDict
import json
import http

import pytest

from flask_smorest import Api, Blueprint
from flask_smorest import etag as fs_etag

from .conftest import AppConfig
from .utils import get_responses, get_headers, get_parameters, build_ref


class TestAPISpec:
    """Test APISpec class"""

    @pytest.mark.parametrize('openapi_version', ['2.0', '3.0.2'])
    def test_apispec_sets_produces_consumes(self, app, openapi_version):
        app.config['OPENAPI_VERSION'] = openapi_version
        api = Api(app)
        spec = api.spec.to_dict()

        if openapi_version == '2.0':
            assert spec['produces'] == ['application/json', ]
            assert spec['consumes'] == ['application/json', ]
        else:
            assert 'produces' not in spec
            assert 'consumes' not in spec

    @pytest.mark.parametrize('openapi_version', ['2.0', '3.0.2'])
    def test_api_lazy_registers_error_responses(self, app, openapi_version):
        """Test error responses are registered"""
        app.config['OPENAPI_VERSION'] = openapi_version
        api = Api(app)

        # Declare a dummy response to ensure get_response doesn't fail
        response_1 = {"description": "Reponse 1"}
        api.spec.components.response("Response_1", response_1)

        # No route registered -> default errors not registered
        responses = get_responses(api.spec)
        for status in http.HTTPStatus:
            assert status.name not in responses

        # Register routes with all error responses
        blp = Blueprint('test', 'test', url_prefix='/test')

        for status in http.HTTPStatus:
            @blp.route(f"/{status.name}")
            @blp.alt_response(400, status.name)
            def test(val):
                pass

        api.register_blueprint(blp)

        # Errors are now registered
        for status in http.HTTPStatus:
            if openapi_version == '2.0':
                assert responses[status.name] == {
                    'description': status.phrase,
                    'schema': build_ref(api.spec, 'schema', 'Error'),
                }
            else:
                assert responses[status.name] == {
                    'description': status.phrase,
                    'content': {
                        'application/json': {
                            'schema': build_ref(api.spec, 'schema', 'Error')
                        }
                    }
                }

    @pytest.mark.parametrize('openapi_version', ['2.0', '3.0.2'])
    def test_api_lazy_registers_etag_headers(self, app, openapi_version):
        """Test etag headers are registered"""
        app.config['OPENAPI_VERSION'] = openapi_version
        api = Api(app)

        # Declare dummy components to ensure get_* don't fail
        if openapi_version == "3.0.2":
            header_1 = {"description": "Header 1"}
            api.spec.components.header("Header_1", header_1)
            parameter_1 = {"description": "Parameter 1"}
            api.spec.components.parameter("Parameter_1", "header", parameter_1)

        # No route registered -> etag headers not registered
        if openapi_version == "3.0.2":
            headers = get_headers(api.spec)
            assert headers == {"Header_1": header_1}
            parameters = get_parameters(api.spec)
            assert parameters == {
                "Parameter_1": {
                    **parameter_1,
                    "in": "header",
                    "name": "Parameter_1"
                }
            }

        # Register routes with etag
        blp = Blueprint('test', 'test', url_prefix='/test')

        @blp.route("/etag_get", methods=["GET"])
        @blp.etag
        @blp.response(200)
        def test_get(val):
            pass

        @blp.route("/etag_pet", methods=["PUT"])
        @blp.etag
        @blp.response(200)
        def test_put(val):
            pass

        api.register_blueprint(blp)

        if openapi_version == "3.0.2":
            headers = get_headers(api.spec)
            assert headers["ETAG"] == fs_etag.ETAG_HEADER
            parameters = get_parameters(api.spec)
            assert parameters["IF_NONE_MATCH"] == fs_etag.IF_NONE_MATCH_HEADER
            assert parameters["IF_MATCH"] == fs_etag.IF_MATCH_HEADER

    def test_api_lazy_registers_pagination_header(self, app):
        """Test pagination header is registered"""
        api = Api(app)

        # Declare dummy header to ensure get_headers doesn't fail
        header_1 = {"description": "Header 1"}
        api.spec.components.header("Header_1", header_1)

        # No route registered -> parameter header not registered
        headers = get_headers(api.spec)
        assert headers == {"Header_1": header_1}

        # Register routes with pagination
        blp = Blueprint('test', 'test', url_prefix='/test')

        @blp.route("/")
        @blp.response(200)
        @blp.paginate()
        def test_get(val):
            pass

        api.register_blueprint(blp)

        headers = get_headers(api.spec)
        assert headers["PAGINATION"] == {
            'description': 'Pagination metadata',
            'schema': {'$ref': '#/components/schemas/PaginationMetadata'},
        }

    def test_apispec_print_openapi_doc(self, app):
        api = Api(app)
        result = app.test_cli_runner().invoke(args=('openapi', 'print'))
        assert result.exit_code == 0
        assert json.loads(result.output) == api.spec.to_dict()

    def test_apispec_write_openapi_doc(self, app, tmp_path):
        output_file = tmp_path / 'openapi.json'
        api = Api(app)
        result = app.test_cli_runner().invoke(
            args=('openapi', 'write', str(output_file))
        )
        assert result.exit_code == 0
        with open(output_file) as output:
            assert json.loads(output.read()) == api.spec.to_dict()


class TestAPISpecServeDocs:
    """Test APISpec class doc-serving features"""

    @pytest.mark.parametrize(
        'prefix',
        (
            None,
            'docs_url_prefix',
            '/docs_url_prefix',
            'docs_url_prefix/',
            '/docs_url_prefix/'
        )
    )
    def test_apispec_serve_spec_prefix(self, app, prefix):
        """Test url prefix default value and leading/trailing slashes issues"""
        class NewAppConfig(AppConfig):
            if prefix is not None:
                OPENAPI_URL_PREFIX = prefix

        app.config.from_object(NewAppConfig)
        Api(app)
        client = app.test_client()
        resp_json_docs = client.get('/docs_url_prefix/openapi.json')
        if app.config.get('OPENAPI_URL_PREFIX') is None:
            assert resp_json_docs.status_code == 404
        else:
            assert resp_json_docs.json['info'] == {
                'version': '1', 'title': 'API Test'}

    @pytest.mark.parametrize('prefix', (None, 'docs_url_prefix'))
    @pytest.mark.parametrize('json_path', (None, 'spec.json'))
    def test_apispec_serve_spec_json_path(self, app, prefix, json_path):
        class NewAppConfig(AppConfig):
            if prefix is not None:
                OPENAPI_URL_PREFIX = prefix
            if json_path is not None:
                OPENAPI_JSON_PATH = json_path

        app.config.from_object(NewAppConfig)
        Api(app)
        client = app.test_client()
        resp_json_docs_default = client.get('/docs_url_prefix/openapi.json')
        resp_json_docs_custom = client.get('/docs_url_prefix/spec.json')
        if app.config.get('OPENAPI_URL_PREFIX') is None:
            assert resp_json_docs_default.status_code == 404
            assert resp_json_docs_custom.status_code == 404
        else:
            if json_path is None:
                assert resp_json_docs_default.json['info'] == (
                    {'version': '1', 'title': 'API Test'}
                )
                assert resp_json_docs_custom.status_code == 404
            else:
                assert resp_json_docs_custom.json['info'] == (
                    {'version': '1', 'title': 'API Test'}
                )
                assert resp_json_docs_default.status_code == 404

    @pytest.mark.parametrize('prefix', (None, 'docs_url_prefix'))
    @pytest.mark.parametrize('redoc_path', (None, 'redoc'))
    @pytest.mark.parametrize('redoc_url', (None, 'https://my-redoc/'))
    def test_apispec_serve_spec_redoc(
            self, app, prefix, redoc_path, redoc_url
    ):
        class NewAppConfig(AppConfig):
            if prefix is not None:
                OPENAPI_URL_PREFIX = prefix
            if redoc_path is not None:
                OPENAPI_REDOC_PATH = redoc_path
            if redoc_url is not None:
                OPENAPI_REDOC_URL = redoc_url

        title_tag = '<title>API Test</title>'
        app.config.from_object(NewAppConfig)
        Api(app)
        client = app.test_client()
        response_redoc = client.get('/docs_url_prefix/redoc')
        if app.config.get('OPENAPI_URL_PREFIX') is None:
            assert response_redoc.status_code == 404
        else:
            if (
                    app.config.get('OPENAPI_REDOC_PATH') is None or
                    app.config.get('OPENAPI_REDOC_URL') is None
            ):
                assert response_redoc.status_code == 404
            else:
                assert response_redoc.status_code == 200
                assert (response_redoc.headers['Content-Type'] ==
                        'text/html; charset=utf-8')
                assert title_tag in response_redoc.get_data(True)

    @pytest.mark.parametrize('prefix', (None, 'docs_url_prefix'))
    @pytest.mark.parametrize('swagger_ui_path', (None, 'swagger-ui'))
    @pytest.mark.parametrize('swagger_ui_url', (None, 'https://my-swagger/'))
    def test_apispec_serve_spec_swagger_ui(
            self, app, prefix, swagger_ui_path, swagger_ui_url
    ):
        class NewAppConfig(AppConfig):
            if prefix is not None:
                OPENAPI_URL_PREFIX = prefix
            if swagger_ui_path is not None:
                OPENAPI_SWAGGER_UI_PATH = swagger_ui_path
            if swagger_ui_url is not None:
                OPENAPI_SWAGGER_UI_URL = swagger_ui_url

        title_tag = '<title>API Test</title>'
        app.config.from_object(NewAppConfig)
        Api(app)
        client = app.test_client()
        response_swagger_ui = client.get('/docs_url_prefix/swagger-ui')
        if app.config.get('OPENAPI_URL_PREFIX') is None:
            assert response_swagger_ui.status_code == 404
        else:
            if (
                    app.config.get('OPENAPI_SWAGGER_UI_PATH') is None or
                    app.config.get('OPENAPI_SWAGGER_UI_URL') is None
            ):
                assert response_swagger_ui.status_code == 404
            else:
                assert response_swagger_ui.status_code == 200
                assert (response_swagger_ui.headers['Content-Type'] ==
                        'text/html; charset=utf-8')
                assert title_tag in response_swagger_ui.get_data(True)

    def test_apispec_serve_spec_swagger_ui_config(self, app):
        class NewAppConfig(AppConfig):
            OPENAPI_URL_PREFIX = "/"
            OPENAPI_SWAGGER_UI_PATH = "/"
            OPENAPI_SWAGGER_UI_URL = "https://domain.tld/swagger-ui"
            OPENAPI_SWAGGER_UI_CONFIG = {
                "supportedSubmitMethods": ["get", "put", "post", "delete"],
            }

        app.config.from_object(NewAppConfig)
        Api(app)
        client = app.test_client()
        response_swagger_ui = client.get("/")
        assert (
            'var override_config = {'
            '"supportedSubmitMethods": ["get", "put", "post", "delete"]'
            '};'
        ) in response_swagger_ui.get_data(True)

    @pytest.mark.parametrize('prefix', (None, 'docs_url_prefix'))
    @pytest.mark.parametrize('rapidoc_path', (None, 'rapidoc'))
    @pytest.mark.parametrize('rapidoc_url', (None, 'https://my-rapidoc/'))
    def test_apispec_serve_spec_rapidoc(
            self, app, prefix, rapidoc_path, rapidoc_url
    ):
        class NewAppConfig(AppConfig):
            if prefix is not None:
                OPENAPI_URL_PREFIX = prefix
            if rapidoc_path is not None:
                OPENAPI_RAPIDOC_PATH = rapidoc_path
            if rapidoc_url is not None:
                OPENAPI_RAPIDOC_URL = rapidoc_url

        title_tag = '<title>API Test</title>'
        app.config.from_object(NewAppConfig)
        Api(app)
        client = app.test_client()
        response_rapidoc = client.get('/docs_url_prefix/rapidoc')
        if app.config.get('OPENAPI_URL_PREFIX') is None:
            assert response_rapidoc.status_code == 404
        else:
            if (
                    app.config.get('OPENAPI_RAPIDOC_PATH') is None or
                    app.config.get('OPENAPI_RAPIDOC_URL') is None
            ):
                assert response_rapidoc.status_code == 404
            else:
                assert response_rapidoc.status_code == 200
                assert (response_rapidoc.headers['Content-Type'] ==
                        'text/html; charset=utf-8')
                assert title_tag in response_rapidoc.get_data(True)

    def test_apispec_serve_spec_rapidoc_config(self, app):
        class NewAppConfig(AppConfig):
            OPENAPI_URL_PREFIX = "/"
            OPENAPI_RAPIDOC_PATH = "/"
            OPENAPI_RAPIDOC_URL = "https://domain.tld/rapidoc"
            OPENAPI_RAPIDOC_CONFIG = {"theme": "dark"}

        app.config.from_object(NewAppConfig)
        Api(app)
        client = app.test_client()
        response_rapidoc = client.get("/")
        assert 'theme = "dark"' in response_rapidoc.get_data(True)

    @pytest.mark.parametrize('prefix', ('', '/'))
    @pytest.mark.parametrize('path', ('', '/'))
    @pytest.mark.parametrize(
        'tested', ('json', 'redoc', 'swagger-ui', 'rapidoc')
    )
    def test_apispec_serve_spec_empty_path(self, app, prefix, path, tested):
        """Test empty string or (equivalently) single slash as paths

        Documentation can be served at root of application.
        """
        class NewAppConfig(AppConfig):
            OPENAPI_URL_PREFIX = prefix
            OPENAPI_REDOC_URL = "https://domain.tld/redoc"
            OPENAPI_SWAGGER_UI_URL = "https://domain.tld/swagger-ui"
            OPENAPI_RAPIDOC_URL = "https://domain.tld/rapidoc"

        mapping = {
            'json': 'OPENAPI_JSON_PATH',
            'redoc': 'OPENAPI_REDOC_PATH',
            'swagger-ui': 'OPENAPI_SWAGGER_UI_PATH',
            'rapidoc': 'OPENAPI_RAPIDOC_PATH',
        }
        setattr(NewAppConfig, mapping[tested], path)

        app.config.from_object(NewAppConfig)
        Api(app)
        client = app.test_client()
        if tested == 'json':
            response_json_docs = client.get('/')
        else:
            response_json_docs = client.get('openapi.json')
            response_doc_page = client.get('/')
            assert response_doc_page.status_code == 200
            assert (response_doc_page.headers['Content-Type'] ==
                    'text/html; charset=utf-8')
        assert response_json_docs.json['info'] == {
            'version': '1', 'title': 'API Test'}

    def test_apispec_serve_spec_preserve_order(self, app):
        app.config['OPENAPI_URL_PREFIX'] = '/api-docs'
        api = Api(app)
        client = app.test_client()

        # Add ordered stuff. This is invalid, but it will do for the test.
        paths = OrderedDict(
            [('/path_{}'.format(i), str(i)) for i in range(20)])
        api.spec._paths = paths

        response_json_docs = client.get('/api-docs/openapi.json')
        assert response_json_docs.status_code == 200
        assert response_json_docs.json['paths'] == paths
```
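For orientation, a minimal flask-smorest app of the kind these tests exercise, based on the library's documented API (the `Pet` resource is an invented example, not from the test suite):

```python
# Minimal flask-smorest app; the spec is then served under OPENAPI_URL_PREFIX.
import marshmallow as ma
from flask import Flask
from flask_smorest import Api, Blueprint

app = Flask(__name__)
app.config["API_TITLE"] = "API Test"
app.config["API_VERSION"] = "1"
app.config["OPENAPI_VERSION"] = "3.0.2"
app.config["OPENAPI_URL_PREFIX"] = "/api-docs"

api = Api(app)
blp = Blueprint("pets", __name__, url_prefix="/pets")


class PetSchema(ma.Schema):
    id = ma.fields.Int(dump_only=True)
    name = ma.fields.String()


@blp.route("/")
@blp.response(200, PetSchema(many=True))
def list_pets():
    return [{"id": 1, "name": "Rex"}]


api.register_blueprint(blp)
# GET /api-docs/openapi.json now returns the generated spec,
# as the serve-docs tests above assert.
```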
{ "source": "0x7c2/cpme2", "score": 2 }
#### File: 0x7c2/cpme2/cpme.py
```python
import menu
import content
import sys
import func

#
# set debug level,
#   0 : disabled debug
#   [...]
#   5 : max debug output
#
debug = 0

mycontent = content.content(debugLevel = debug)
mymenu = menu.mymenu(mycontent, debugLevel = debug)


def loopme():
    while True:
        mymenu.show_menu()


if len(sys.argv) < 2:
    func.info()
    loopme()
else:
    if sys.argv[1] == "--version":
        func.info()
        func.info_version()
    elif sys.argv[1] == "--update":
        func.info()
        func.self_update()
    else:
        func.info()
        func.usage()
```
#### File: 0x7c2/cpme2/gaia.py
```python
from templates import check
import func


class check_gaia_hwinfo(check):
    page = "GAiA.0verview"
    category = "Appliance"
    title = ""
    isFirewall = True
    isManagement = True
    minVersion = 8020
    command = "cpstat -f hw_info os"
    isCommand = True

    def run_check(self):
        for line in self.commandOut:
            if ":" in line:
                data = line.split(':')
                a_field = data[0].strip()
                if len(data) > 1:
                    a_val = data[1].strip()
                else:
                    a_val = ""
                self.add_result(a_field, "INFO", a_val)


class check_gaia_scheduled_backup(check):
    page = "GAiA.0verview"
    category = "GAiA Settings"
    title = "Scheduled Backup Config"
    isFirewall = True
    isManagement = True
    minVersion = 8020
    command = "func.gaia_get_value('backup-scheduled')"
    isCommand = False

    def run_check(self):
        if self.commandOut:
            self.add_result(self.title, 'PASS', '')
        else:
            self.add_result(self.title, 'WARN', 'not configured')


class check_gaia_check_snapshots(check):
    page = "GAiA.0verview"
    category = "Environment"
    title = "Existing GAiA Snapshots"
    isFirewall = True
    isManagement = True
    minVersion = 8020
    command = "lvs | grep -v 'wi-ao' | tail -n +2"
    isCommand = True

    def run_check(self):
        found = False
        for o in self.commandOut:
            temp = ' '.join(o.split())
            cols = temp.split(' ')
            if len(cols) > 1:
                found = True
                name = cols[0].strip(' ').strip('\n')
                vg = cols[1].strip(' ').strip('\n')
                attr = cols[2].strip(' ').strip('\n')
                size = cols[3].strip(' ').strip('\n')
                detail = vg + " / " + name + " (" + size + ")"
                if "hwdiag" in name or "fcd_GAIA" in name:
                    self.add_result(self.title, 'INFO', detail)
                else:
                    self.add_result(self.title, 'WARN', detail)
        if not found:
            self.add_result(self.title, 'INFO', '')


class check_gaia_check_cpuse_agent_version(check):
    page = "GAiA.CPUSE"
    category = "Agent"
    title = "Deployment Agent Version"
    isFirewall = True
    isManagement = True
    minVersion = 8020
    command = "$DADIR/bin/da_cli da_status"
    isCommand = True

    def run_check(self):
        found = False
        for o in self.commandOut:
            if 'up to date' in o:
                found = True
                self.add_result(self.title, 'PASS', '')
        if not found:
            self.add_result(self.title, 'WARN', 'new version available')


class check_gaia_check_cpuse_agent_pending_reboot(check):
    page = "GAiA.CPUSE"
    category = "Agent"
    title = "Deployment Agent Pending Reboot"
    isFirewall = True
    isManagement = True
    minVersion = 8020
    command = "$DADIR/bin/da_cli is_pending_reboot"
    isCommand = True

    def run_check(self):
        found = False
        for o in self.commandOut:
            if 'no reboot' in o:
                found = True
                self.add_result(self.title, 'PASS', '')
        if not found:
            self.add_result(self.title, 'WARN', 'Reboot pending!')


class check_gaia_check_cpuse_agent_packages(check):
    page = "GAiA.CPUSE"
    category = "Packages"
    title = "Packages available for install"
    isFirewall = True
    isManagement = True
    minVersion = 8020
    command = "$DADIR/bin/da_cli packages_info status=available"
    isCommand = True

    def run_check(self):
        found = False
        for o in self.commandOut:
            if 'filename' in o:
                tmp = o.split(':')[1].replace('"', '').replace(',', '')
                self.add_result(self.title, 'WARN', tmp)
                found = True
        if not found:
            self.add_result(self.title, 'PASS', '')


class check_gaia_check_proxy_settings(check):
    page = "GAiA.0verview"
    category = "GAiA Settings"
    title = "Proxy Configuration"
    isFirewall = True
    isManagement = True
    minVersion = 8020
    command = "func.gaia_get_value('proxy:ip-address')"
    isCommand = False

    def run_check(self):
        if self.commandOut:
            proxy_port = func.gaia_get_value('proxy:port')
            self.add_result(self.title, 'INFO', self.commandOut + ':' + proxy_port)
        else:
            self.add_result(self.title, 'INFO', 'direct')


class check_gaia_ntp(check):
    page = "GAiA.0verview"
    category = "GAiA Settings"
    title = "NTP - Time and Date"
    isFirewall = True
    isManagement = True
    minVersion = 8020
    command = "ntpstat"
    isCommand = True

    def run_check(self):
        found = False
        for o in self.commandOut:
            if 'synchronised to' in o:
                self.add_result(self.title, "PASS", "")
                found = True
        if not found:
            self.add_result(self.title, "FAIL", "")


class check_gaia_dns_external_checkpoint(check):
    page = "GAiA.Connectivity"
    category = "DNS Resolver"
    title = "DNS Lookup [checkpoint.com]"
    isFirewall = True
    isManagement = True
    minVersion = 8020
    command = "nslookup checkpoint.com | awk 'NR>3 { print $0 }'"
    isCommand = True

    def run_check(self):
        passme = False
        detail = ""
        for line in self.commandOut:
            if 'Address:' in line:
                if '209' in line:
                    passme = True
                    detail = line.strip()
        if passme:
            self.add_result(self.title, 'PASS', detail)
        else:
            self.add_result(self.title, 'FAIL', detail)


class check_gaia_dns_external_heise(check):
    page = "GAiA.Connectivity"
    category = "DNS Resolver"
    title = "DNS Lookup [heise.de]"
    isFirewall = True
    isManagement = True
    minVersion = 8020
    command = "nslookup heise.de | awk 'NR>3 { print $0 }'"
    isCommand = True

    def run_check(self):
        passme = False
        detail = ""
        for line in self.commandOut:
            if 'Address:' in line:
                if '193' in line:
                    passme = True
                    detail = line.strip()
        if passme:
            self.add_result(self.title, 'PASS', detail)
        else:
            self.add_result(self.title, 'FAIL', detail)


class check_gaia_z_check_connectivity(check):
    page = "GAiA.Connectivity"
    category = "Check Point Services"
    title = "Connection"
    isFirewall = True
    isManagement = True
    minVersion = 8020
    command = "ls"
    isCommand = True
    runOnStartup = False

    def run_check(self):
        proxy = ""
        urls = []
        urls.append(['http://cws.checkpoint.com/APPI/SystemStatus/type/short', 'Social Media Widget Detection'])
        urls.append(['http://cws.checkpoint.com/URLF/SystemStatus/type/short', 'URL Filtering Cloud Categorization'])
        urls.append(['http://cws.checkpoint.com/AntiVirus/SystemStatus/type/short', 'Virus Detection'])
        urls.append(['http://cws.checkpoint.com/Malware/SystemStatus/type/short', 'Bot Detection'])
        urls.append(['https://updates.checkpoint.com/', 'IPS Updates'])
        urls.append(['http://dl3.checkpoint.com', 'Download Service Updates'])
        urls.append(['https://usercenter.checkpoint.com/usercenter/services/ProductCoverageService', 'Contract Entitlement'])
        urls.append(['https://usercenter.checkpoint.com/usercenter/services/BladesManagerService', 'Software Blades Manager Service'])
        urls.append(['http://resolver1.chkp.ctmail.com', 'Suspicious Mail Outbreaks'])
        urls.append(['http://download.ctmail.com', 'Anti-Spam'])
        urls.append(['http://te.checkpoint.com', 'Threat Emulation'])
        urls.append(['http://teadv.checkpoint.com', 'Threat Emulation Advanced'])
        urls.append(['http://kav8.zonealarm.com/version.txt', 'Deep inspection'])
        urls.append(['http://kav8.checkpoint.com', 'Traditional Anti-Virus'])
        urls.append(['http://avupdates.checkpoint.com/UrlList.txt', 'Traditional Anti-Virus, Legacy URL Filtering'])
        urls.append(['http://sigcheck.checkpoint.com/Siglist2.txt', 'Download of signature updates'])
        urls.append(['http://secureupdates.checkpoint.com', 'Manage Security Gateways'])
        urls.append(['https://productcoverage.checkpoint.com/ProductCoverageService', 'Makes sure the machines contracts are up-to-date'])
        urls.append(['https://sc1.checkpoint.com/sc/images/checkmark.gif', 'Download of icons and screenshots from Check Point media storage servers'])
        urls.append(['https://sc1.checkpoint.com/za/images/facetime/large_png/60342479_lrg.png', 'Download of icons and screenshots from Check Point media storage servers'])
        urls.append(['https://sc1.checkpoint.com/za/images/facetime/large_png/60096017_lrg.png', 'Download of icons and screenshots from Check Point media storage servers'])
        urls.append(['https://push.checkpoint.com', 'Push Notifications'])
        urls.append(['http://downloads.checkpoint.com', 'Download of Endpoint Compliance Updates'])
        for url in urls:
            if self.runOnStartup:
                out, err = func.execute_command('curl_cli -Lisk ' + proxy + url[0] + ' | head -n1')
                data = out.read().strip('\n').strip(' ')
                if "OK" in data or "Found" in data or "Moved" in data or "Connection established" in data:
                    state = "PASS"
                    detail = ""
                else:
                    state = "FAIL"
                    detail = data
                self.add_result(self.title + " [" + url[1] + "]", state, detail)
            else:
                self.add_result(self.title + " [" + url[1] + "]", 'WAIT', '')

    def set_command(self):
        self.runOnStartup = True


class check_gaia_interface_bonds(check):
    page = "GAiA.Networking"
    category = "Bonding"
    title = "Bond"
    isFirewall = True
    isManagement = True
    minVersion = 8020
    command = "ifconfig | grep -c bond"
    isCommand = True

    def run_check(self):
        if int(self.commandOut[0]) > 0:
            cmd = "cphaprob show_bond"
            b_out, b_err = func.execute_command(cmd)
            for data in b_out:
                if "|" in data and "bond" in data:
                    cols = data.split("|")
                    b_name = cols[0].strip()
                    b_mode = cols[1].strip()
                    b_stat = cols[2].strip()
                    b_cfg = cols[3].strip()
                    b_up = cols[4].strip()
                    b_req = cols[5].strip()
                    state = "PASS"
                    if b_stat != "UP":
                        state = "WARN"
                    self.add_result(self.title + " [" + b_name + ", " + b_mode + "]", state, b_up + "/" + b_cfg + " , Required: " + b_req)
        else:
            self.add_result("No bonding found", "PASS", "")


class check_gaia_interface_buffers(check):
    page = "GAiA.Networking"
    category = "Ring Buffer"
    title = "Buffer Size"
    isFirewall = True
    isManagement = True
    isClusterXL = False
    minVersion = 8020
    command = "ifconfig | grep HWaddr"
    isCommand = True

    def run_check(self):
        for line in self.commandOut:
            b_rx = ""
            b_tx = ""
            state = "PASS"
            nic = line.split()[0].strip()
            b_out, b_err = func.execute_command('ethtool -g ' + nic)
            for data in b_out:
                if "RX:" in data:
                    b_rx = data.split()[1].strip()
                if "TX:" in data:
                    b_tx = data.split()[1].strip()
            if b_rx != "256":
                state = "WARN"
            if b_tx != "1024":
                state = "WARN"
            detail = "RX: " + b_rx + ", TX: " + b_tx
            if not "." in nic:
                self.add_result(self.title + " [" + nic + "]", state, detail)


class check_gaia_interface_stats(check):
    page = "GAiA.Networking"
    category = "Statistics"
    title = "Interface statistics"
    isFirewall = True
    isManagement = True
    isClusterXL = False
    minVersion = 8020
    command = "ls"
    isCommand = True

    def run_check(self):
        values_rx = ["rx_dropped", "rx_crc_errors", "rx_errors", "rx_fifo_errors",
                     "rx_frame_errors", "rx_length_errors", "rx_missed_errors", "rx_over_errors"]
        values_tx = ["tx_aborted_errors", "tx_carrier_errors", "tx_dropped", "tx_errors",
                     "tx_fifo_errors", "tx_heartbeat_errors", "tx_window_errors"]
        out, err = func.execute_command('ls -1 /sys/class/net | grep -vE "(lo|bond|vpn|sit|\.)"')
        for line in out:
            interface = line.strip('\n')
            i = 0
            error = False
            while i < len(values_rx):
                read, err = func.execute_command('cat /sys/class/net/' + interface + '/statistics/' + values_rx[i])
                val = read.read().strip('\n')
                state = "PASS"
                detail = ""
                if val != "0":
                    state = "FAIL"
                    detail = val
                    error = True
                self.add_result(self.title + " (" + interface + " - " + values_rx[i] + ")", state, detail)
                i = i + 1
            if not error:
                for t in values_rx:
                    self.results.pop()
                self.add_result(self.title + " (" + interface + " - rx/all" + ")", "PASS", "")
            i = 0
            error = False
            while i < len(values_tx):
                read, err = func.execute_command('cat /sys/class/net/' + interface + '/statistics/' + values_tx[i])
                val = read.read().strip('\n')
                state = "PASS"
                detail = ""
                if val != "0":
                    state = "FAIL"
                    detail = val
                    error = True
                self.add_result(self.title + " (" + interface + " - " + values_tx[i] + ")", state, detail)
                i = i + 1
            if not error:
                for t in values_tx:
                    self.results.pop()
                self.add_result(self.title + " (" + interface + " - tx/all" + ")", "PASS", "")


class check_gaia_disk_space(check):
    page = "GAiA.0verview"
    category = "Harddisk"
    title = "Disk Space"
    isFirewall = True
    isManagement = True
    isClusterXL = False
    minVersion = 8020
    command = "df -h | sed s/\ \ */\;/g | cut -d ';' -f 6,4 | awk 'NR>1 {print $1}'"
    isCommand = True

    def run_check(self):
        for line in self.commandOut:
            state = "FAIL"
            data = str(line).strip('\n').split(";")
            if len(data) < 2:
                continue
            if "M" in data[0]:
                state = "WARN"
            if "G" in data[0]:
                state = "PASS"
            if data[1] == "/boot" or data[1] == "/dev/shm":
                state = "PASS"
            self.add_result(self.title + " (" + data[1] + ")", state, data[0])


class check_gaia_cpu_smt(check):
    page = "GAiA.0verview"
    category = "CPU"
    title = "Hyperthreading/SMT"
    isFirewall = True
    isManagement = False
    minVersion = 8020
    command = 'if [ ! -f "/proc/smt_status" ] ; then echo "Not available" ; else cat /proc/smt_status ; fi'
    isCommand = True

    def run_check(self):
        data = self.commandOut[0].strip()
        if "Unsupported" in data:
            self.add_result(self.title, "INFO", "Disabled")
        else:
            self.add_result(self.title, "INFO", data)


class check_gaia_cpu_usage(check):
    page = "GAiA.0verview"
    category = "CPU"
    title = "CPU Usage"
    isFirewall = True
    isManagement = True
    isClusterXL = False
    minVersion = 8020
    command = "ls"
    isCommand = True

    def run_check(self):
        if func.isFirewall():
            out, err = func.execute_command("fw ctl affinity -l")
            affinity = out.read()
        else:
            affinity = ""
        dbcur = func.execute_sqlite_query("select name_of_cpu,max(cpu_usage) from UM_STAT_UM_CPU_UM_CPU_ORDERED_TABLE group by name_of_cpu;")
        for row in dbcur:
            worker = ""
            nic = ""
            daemon = ""
            cpu = row[0]
            for line in affinity.split('\n'):
                if "CPU " + str(cpu) + '#' in line + '#':
                    if "Kernel" in line:
                        if worker != "":
                            worker = worker + ", "
                        worker = worker + line.split(":")[0].replace("Kernel ", "")
                    elif "Daemon" in line:
                        daemon = "Daemon(s), "
                    else:
                        if nic != "":
                            nic = nic + ", "
                        nic = nic + line.split(":")[0]
            load = str(row[1]).split(".")[0]
            state = "PASS"
            if int(load) > 85 and nic != "":
                state = "FAIL"
            elif int(load) > 85 and nic == "":
                state = "WARN"
            if nic != "":
                nic = nic + ", "
            self.add_result(self.title + " (peak - CPU " + str(cpu) + "): " + daemon + nic + worker, state, load + "%")
        dbcur = func.execute_sqlite_query("select name_of_cpu,avg(cpu_usage) from UM_STAT_UM_CPU_UM_CPU_ORDERED_TABLE group by name_of_cpu;")
        for row in dbcur:
            worker = ""
            nic = ""
            daemon = ""
            cpu = row[0]
            for line in affinity.split('\n'):
                if "CPU " + str(cpu) + '#' in line + '#':
                    if "Kernel" in line:
                        if worker != "":
                            worker = worker + ", "
                        worker = worker + line.split(":")[0].replace("Kernel ", "")
                    elif "Daemon" in line:
                        daemon = "Daemon(s), "
                    else:
                        if nic != "":
                            nic = nic + ", "
                        nic = nic + line.split(":")[0]
            load = str(row[1]).split(".")[0]
            state = "PASS"
            if int(load) > 50:
                state = "WARN"
            if int(load) > 50 and nic != "":
                state = "FAIL"
            if int(load) > 85 and worker != "":
                state = "FAIL"
            if nic != "":
                nic = nic + ", "
            self.add_result(self.title + " (avg - CPU " + str(cpu) + "): " + daemon + nic + worker, state, load + "%")
        dbcur.close()


class check_gaia_memory_usage(check):
    page = "GAiA.0verview"
    category = "Memory"
    title = "Memory Usage"
    isFirewall = True
    isManagement = True
    isClusterXL = False
    minVersion = 8020
    command = "ls"
    isCommand = True

    def run_check(self):
        mem_total = 0
        mem_avg = 0
        mem_peak = 0
        dbcur = func.execute_sqlite_query("select max(real_total) from UM_STAT_UM_MEMORY;")
        for row in dbcur:
            mem_total = row[0]
        dbcur = func.execute_sqlite_query("select avg(real_used) from UM_STAT_UM_MEMORY;")
        for row in dbcur:
            mem_avg = row[0]
        dbcur = func.execute_sqlite_query("select max(real_used) from UM_STAT_UM_MEMORY;")
        for row in dbcur:
            mem_peak = row[0]
        dbcur.close()
        mem_avg_used = int(str(mem_avg / mem_total * 100).split(".")[0])
        mem_peak_used = int(str(mem_peak / mem_total * 100).split(".")[0])
        state = "PASS"
        if mem_avg_used > 70:
            state = "WARN"
        if mem_avg_used > 90:
            state = "FAIL"
        self.add_result(self.title + " (average)", state, str(mem_avg_used) + "%")
        state = "PASS"
        if mem_peak_used > 80:
            state = "WARN"
        self.add_result(self.title + " (peak)", state, str(mem_peak_used) + "%")
        out, err = func.execute_command("free -g | grep -i swap | awk '{print $3,$4}'")
        data = out.read().strip('\n').split(" ")
        used = data[0]
        avail = data[1]
        percent = str(int(used) / int(avail) * 100).split(".")[0]
        state = "WARN"
        if percent == "0":
            state = "PASS"
        self.add_result(self.title + " (swap)", state, percent + "%")
```
#### File: 0x7c2/cpme2/management.py
```python
from templates import check
import func
import datetime
import json


def mgmt_fetch_uid_firewall_properties():
    if func.isManagement():
        out, err = func.execute_command('mgmt_cli show-generic-objects name "firewall_properties" -r true -f json --unsafe true')
        data = json.load(out)
        return data['objects'][0]['uid']
    else:
        return "-1"


class check_mgmt_global_properties_general(check):
    page = "Management.Global Properties"
    category = "General"
    title = "Global/General"
    isFirewall = False
    isManagement = True
    minVersion = 8020
    command = "mgmt_cli show generic-object uid '" + mgmt_fetch_uid_firewall_properties() + "' -r true -f json --unsafe true"
    isCommand = True
    check = []
    check.append([ "fwDropOutOfStateIcmp",        True, "WARN", "Drop Out of State: ICMP"       ])
    check.append([ "fwDropOutOfStateUdp",         True, "WARN", "Drop Out of State: UDP"        ])
    check.append([ "fwAllowOutOfStateTcp",        0,    "WARN", "Drop Out of State: TCP"        ])
    check.append([ "fwDropOutOfStateSctp",        True, "WARN", "Drop Out of State: SCTP"       ])
    check.append([ "logImpliedRules",             True, "INFO", "Log implied Rules"             ])
    check.append([ "natAutomaticArp",             True, "INFO", "Automatic NAT - ARP"           ])
    check.append([ "natAutomaticRulesMerge",      True, "INFO", "Merge manual NAT Rules"        ])
    check.append([ "natDstClientSide",            True, "FAIL", "Translate DST on Client Side"  ])
    check.append([ "udpreply",                    True, "WARN", "Accept stateful UDP replies"   ])
    check.append([ "allowDownloadContent",        True, "INFO", "Allow Download Content"        ])
    check.append([ "allowUploadContent",          False, "INFO", "Improve product experience.." ])
    check.append([ "fw1enable",                   True, "WARN", "Imp.Rules: Control Connections"])
    check.append([ "raccessenable",               True, "WARN", "Imp.Rules: RAS Connections"    ])
    check.append([ "outgoing",                    True, "WARN", "Imp.Rules: Outgoing from GW"   ])
    check.append([ "acceptOutgoingToCpServices",  True, "WARN", "Imp.Rules: Gateway to CP"      ])

    def run_check(self):
        config = json.loads('\n'.join(self.commandOut))
        for c in self.check:
            if config[c[0]] == c[1]:
                state = "PASS"
                detail = str(c[1])
            else:
                state = c[2]
                detail = str(config[c[0]])
            self.add_result(self.title + " [" + c[3] + "]", state, detail)


class check_mgmt_global_properties_si(check_mgmt_global_properties_general):
    category = "Stateful Inspection"
    title = "Global/Stateful"
    check = []
    check.append([ "tcpstarttimeout",   25,   "WARN", "TCP start timeout"                  ])
    check.append([ "tcptimeout",        3600, "WARN", "TCP session timeout"                ])
    check.append([ "tcpendtimeout",     20,   "WARN", "TCP end timeout"                    ])
    check.append([ "tcpendtimeoutCern", 5,    "WARN", "TCP end timeout, R80.20 and higher" ])
    check.append([ "udptimeout",        40,   "WARN", "UDP virtual session timeout"        ])
    check.append([ "icmptimeout",       30,   "WARN", "ICMP virtual session timeout"       ])
    check.append([ "othertimeout",      60,   "WARN", "Other Protocol session timeout"     ])
    check.append([ "sctpstarttimeout",  30,   "WARN", "SCTP start timeout"                 ])
    check.append([ "sctptimeout",       3600, "WARN", "SCTP session timeout"               ])
    check.append([ "sctpendtimeout",    20,   "WARN", "SCTP end timeout"                   ])
```
#### File: 0x7c2/cpme2/troubleshooting.py
```python
from templates import diag
import os, time
import func


class diag_troubleshooting_f2f_worker(diag):
    page = "Troubleshooting.F2F Worker"
    title = "Debug F2F Worker Connections"
    isFirewall = True
    isManagement = False
    isClusterXL = False
    minVersion = 8020
    content = ['This command enables:',
               'echo 1 > /proc/cpkstats/fw_worker_[workerid]_stats',
               '',
               'and prints the results:',
               'cat /proc/cpkstats/fw_worker_[workerid]_stats',
               '',
               'last but no least, it will disable debug:',
               'echo 0 > /proc/cpkstats/fw_worker_[workerid]_stats']
    isTable = False
    workers = []

    def change_f2f_stats(self, worker_id, val):
        self.debug(3, "echo " + str(val) + " > /proc/cpkstats/fw_worker_" + str(worker_id) + "_stats")
        os.system("echo " + str(val) + " > /proc/cpkstats/fw_worker_" + str(worker_id) + "_stats")

    def getall_f2f_worker(self):
        workers = []
        for filename in os.listdir("/proc/cpkstats/"):
            if "fw_worker_" in filename and "_stats" in filename and not "raw" in filename:
                workers.append(int(filename.replace("fw_worker_", "").replace("_stats", "")))
        return workers

    def enable_disable(self, action=0):
        self.workers = self.getall_f2f_worker()
        for worker in self.workers:
            self.change_f2f_stats(worker, action)

    def set_enable(self):
        self.isTable = True
        self.enable_disable(1)

    def set_disable(self):
        self.enable_disable(0)

    def run_loop(self):
        self.content = []
        stats = []
        stats_sort = []
        self.content.append(["Worker", "Type", "Cycles", "Time ago", "Proto", "Source", "SPORT", "Destination", "DPORT"])
        for worker in self.workers:
            for line in func.tail_and_head('/proc/cpkstats/fw_worker_' + str(worker) + '_stats', 18, 16):
                raw = str(line).replace('\t', '').replace('\n', '')
                raw = raw.split()
                s_worker = worker
                s_type = raw[0].replace(':', '')
                s_cycles = int(raw[1])
                s_timeago = int(raw[2])
                raw = raw[3:]
                s_data = ' '.join(raw)
                new = {'worker': s_worker, 'type': s_type, 'cycles': s_cycles, 'timeago': s_timeago, 'data': s_data}
                stats.append(new)
        stats_sort = sorted(stats, key=lambda k: k['cycles'], reverse=True)
        for s in stats_sort:
            if "," in s["data"]:
                data = s["data"].replace("<", "").replace(">", "").split(",")
                if len(data) > 4:
                    proto = str(data[5]).strip()
                    if proto == "1":
                        proto = "ICMP"
                    if proto == "6":
                        proto = "TCP"
                    if proto == "17":
                        proto = "UDP"
                    src = data[1].strip()
                    src_p = data[2].strip()
                    dst = data[3].strip()
                    dst_p = data[4].strip()
                    self.content.append([str(s["worker"]), str(s["type"]), str(s["cycles"]), str(s["timeago"]), proto, src, src_p, dst, dst_p])


class diag_troubleshooting_clusterxl_state(diag):
    page = "Troubleshooting.ClusterXL State"
    title = "Show ClusterXL State"
    isFirewall = True
    isManagement = False
    isClusterXL = True
    minVersion = 8020
    content = ['Starting Output...']
    isDebugCommand = False
    isTable = False

    def run_loop(self):
        out, err = func.execute_command('cphaprob state ; echo ; cphaprob -a if')
        self.content = out.read().split('\n')


class diag_troubleshooting_throughput(diag):
    page = "Troubleshooting.Throughput"
    title = "Show throughput"
    isFirewall = True
    isManagement = True
    minVersion = 8020
    content = ["Please wait, while starting Output..."]
    isDebugCommand = False
    isTable = False
    last_rx_bytes = {}
    last_tx_bytes = {}
    rx_bytes = {}
    tx_bytes = {}
    rx_sum = {}
    tx_sum = {}
    ipaddr = {}
    nics = []

    def run_loop(self):
        showme = True
        # grab all active nics
        if len(self.nics) < 1:
            out, err = func.execute_command('ifconfig | grep HWaddr')
            for data in out.read().split('\n'):
                if "Ethernet" in data:
                    raw = data.split()
                    nic = raw[0].strip()
                    self.nics.append(nic)
        # grab ip address from interface
        if len(self.ipaddr) < 1:
            for nic in self.nics:
                if nic not in self.ipaddr:
                    ipa = "0.0.0.0"
                    out, err = func.execute_command('ifconfig ' + nic + ' | grep "inet addr"')
                    data = out.read()
                    if data != "":
                        data = data.split(':')[1]
                        ipa = data.split(' ')[0]
                    self.ipaddr[nic] = ipa
        # grab rx and tx bytes
        for nic in self.nics:
            out, err = func.execute_command('cat /sys/class/net/' + nic + '/statistics/rx_bytes')
            data = out.read()
            if nic not in self.last_rx_bytes:
                showme = False
            else:
                self.rx_bytes[nic] = int(data.strip()) - int(self.last_rx_bytes[nic])
            self.last_rx_bytes[nic] = int(data.strip())
            out, err = func.execute_command('cat /sys/class/net/' + nic + '/statistics/tx_bytes')
            data = out.read()
            if nic not in self.last_tx_bytes:
                showme = False
            else:
                self.tx_bytes[nic] = int(data.strip()) - int(self.last_tx_bytes[nic])
            self.last_tx_bytes[nic] = int(data.strip())
        # grab rx and tx sum bytes
        for nic in self.nics:
            out, err = func.execute_command('ifconfig ' + nic + ' | grep byte')
            data = out.read()
            data = data.split(':')
            self.rx_sum[nic] = data[1].split()[1][1:] + " " + data[1].split()[2][:-1]
            self.tx_sum[nic] = data[2].split()[1][1:] + " " + data[2].split()[2][:-1]
        if showme:
            self.isTable = True
            self.content = []
            self.content.append(["Interface", "IP-Address", "RX Rate", "TX Rate", "RX Sum", "TX Sum"])
            for nic in self.nics:
                nic_rx_r_txt = ""
                nic_tx_r_txt = ""
                nic_ip = self.ipaddr[nic]
                nic_rx_r = self.rx_bytes[nic] * 8
                if nic_rx_r > (1024 * 1024) and nic_rx_r_txt == "":
                    nic_rx_r_txt = str(round(nic_rx_r / (1024 * 1024))) + " MBit"
                if nic_rx_r > (1024) and nic_rx_r_txt == "":
                    nic_rx_r_txt = str(round(nic_rx_r / (1024))) + " KBit"
                if nic_rx_r <= (1024) and nic_rx_r_txt == "":
                    nic_rx_r_txt = str(round(nic_rx_r)) + " Bit"
                nic_tx_r = self.tx_bytes[nic] * 8
                if nic_tx_r > (1024 * 1024) and nic_tx_r_txt == "":
                    nic_tx_r_txt = str(round(nic_tx_r / (1024 * 1024))) + " MBit"
                if nic_tx_r > (1024) and nic_tx_r_txt == "":
                    nic_tx_r_txt = str(round(nic_tx_r / (1024))) + " KBit"
                if nic_tx_r <= (1024) and nic_tx_r_txt == "":
                    nic_tx_r_txt = str(round(nic_tx_r)) + " Bit"
                nic_rx_s = str(self.rx_sum[nic])
                nic_tx_s = str(self.tx_sum[nic])
                self.content.append([nic, nic_ip, nic_rx_r_txt, nic_tx_r_txt, nic_rx_s, nic_tx_s])
```
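All of the check classes in this record follow the same contract: class attributes declare where the result is shown and which shell command feeds `self.commandOut`, and `run_check()` turns that output into `add_result()` rows. A minimal sketch of a new check in this style (the uptime check itself is hypothetical; only the base-class contract is taken from the code above):
```python
from templates import check

class check_gaia_example_uptime(check):
    page = "GAiA.0verview"
    category = "Environment"
    title = "System Uptime"
    isFirewall = True
    isManagement = True
    minVersion = 8020
    command = "uptime"          # shell command whose output lands in self.commandOut
    isCommand = True

    def run_check(self):
        # report the raw uptime line as an informational result
        for line in self.commandOut:
            self.add_result(self.title, "INFO", line.strip())
```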
{ "source": "0x7c48/mitmproxy", "score": 2 }
#### File: examples/addons/addheader.py
```python
class AddHeader:
    def __init__(self):
        self.num = 0

    def response(self, flow):
        self.num = self.num + 1
        flow.response.headers["count"] = str(self.num)


addons = [
    AddHeader()
]
```
#### File: examples/complex/tcp_message.py
```python
from mitmproxy.utils import strutils
from mitmproxy import ctx
from mitmproxy import tcp


def tcp_message(flow: tcp.TCPFlow):
    message = flow.messages[-1]
    old_content = message.content
    message.content = old_content.replace(b"foo", b"bar")

    ctx.log.info(
        "[tcp_message{}] from {} to {}:\n{}".format(
            " (modified)" if message.content != old_content else "",
            "client" if message.from_client else "server",
            "server" if message.from_client else "client",
            strutils.bytes_to_escaped_str(message.content))
    )
```
#### File: examples/simple/internet_in_mirror.py
```python
from mitmproxy import http


def response(flow: http.HTTPFlow) -> None:
    reflector = b"<style>body {transform: scaleX(-1);}</style></head>"
    flow.response.content = flow.response.content.replace(b"</head>", reflector)
```
#### File: mitmproxy/addons/replace.py
```python
import os
import re
import typing

from mitmproxy import exceptions
from mitmproxy import flowfilter
from mitmproxy import ctx


def parse_hook(s):
    """
        Returns a (pattern, regex, replacement) tuple.

        The general form for a replacement hook is as follows:

            /patt/regex/replacement

        The first character specifies the separator. Example:

            :~q:foo:bar

        If only two clauses are specified, the pattern is set to match
        universally (i.e. ".*"). Example:

            /foo/bar/

        Clauses are parsed from left to right. Extra separators are taken to be
        part of the final clause. For instance, the replacement clause below is
        "foo/bar/":

            /one/two/foo/bar/
    """
    sep, rem = s[0], s[1:]
    parts = rem.split(sep, 2)
    if len(parts) == 2:
        patt = ".*"
        a, b = parts
    elif len(parts) == 3:
        patt, a, b = parts
    else:
        raise exceptions.OptionsError(
            "Invalid replacement specifier: %s" % s
        )
    return patt, a, b


class Replace:
    def __init__(self):
        self.lst = []

    def load(self, loader):
        loader.add_option(
            "replacements", typing.Sequence[str], [],
            """
            Replacement patterns of the form "/pattern/regex/replacement", where
            the separator can be any character.
            """
        )

    def configure(self, updated):
        """
            .replacements is a list of tuples (fpat, rex, s):

            fpatt: a string specifying a filter pattern.
            rex: a regular expression, as string.
            s: the replacement string
        """
        if "replacements" in updated:
            lst = []
            for rep in ctx.options.replacements:
                fpatt, rex, s = parse_hook(rep)

                flt = flowfilter.parse(fpatt)
                if not flt:
                    raise exceptions.OptionsError(
                        "Invalid filter pattern: %s" % fpatt
                    )
                try:
                    # We should ideally escape here before trying to compile
                    re.compile(rex)
                except re.error as e:
                    raise exceptions.OptionsError(
                        "Invalid regular expression: %s - %s" % (rex, str(e))
                    )
                if s.startswith("@") and not os.path.isfile(s[1:]):
                    raise exceptions.OptionsError(
                        "Invalid file path: {}".format(s[1:])
                    )
                lst.append((rex, s, flt))
            self.lst = lst

    def execute(self, f):
        for rex, s, flt in self.lst:
            if flt(f):
                if f.response:
                    self.replace(f.response, rex, s)
                else:
                    self.replace(f.request, rex, s)

    def request(self, flow):
        if not flow.reply.has_message:
            self.execute(flow)

    def response(self, flow):
        if not flow.reply.has_message:
            self.execute(flow)

    def replace(self, obj, rex, s):
        if s.startswith("@"):
            s = os.path.expanduser(s[1:])
            try:
                with open(s, "rb") as f:
                    s = f.read()
            except IOError:
                ctx.log.warn("Could not read replacement file: %s" % s)
                return
        obj.replace(rex, s, flags=re.DOTALL)
```
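For reference, this is how `parse_hook()` splits the clauses described in its docstring (a sketch; the expected tuples follow directly from the `split(sep, 2)` logic above):
```python
parse_hook(":~q:foo:bar")        # -> ("~q", "foo", "bar")
parse_hook("/foo/bar")           # -> (".*", "foo", "bar")      two clauses: pattern defaults to ".*"
parse_hook("/one/two/foo/bar/")  # -> ("one", "two", "foo/bar/") extra separators stay in the last clause
```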
#### File: mitmproxy/addons/save.py
```python
import os.path
import typing

from mitmproxy import command
from mitmproxy import exceptions
from mitmproxy import flowfilter
from mitmproxy import io
from mitmproxy import ctx
from mitmproxy import flow
import mitmproxy.types


class Save:
    def __init__(self):
        self.stream = None
        self.filt = None
        self.active_flows: typing.Set[flow.Flow] = set()

    def load(self, loader):
        loader.add_option(
            "save_stream_file", typing.Optional[str], None,
            "Stream flows to file as they arrive. Prefix path with + to append."
        )
        loader.add_option(
            "save_stream_filter", typing.Optional[str], None,
            "Filter which flows are written to file."
        )

    def open_file(self, path):
        if path.startswith("+"):
            path = path[1:]
            mode = "ab"
        else:
            mode = "wb"
        path = os.path.expanduser(path)
        return open(path, mode)

    def start_stream_to_path(self, path, flt):
        try:
            f = self.open_file(path)
        except IOError as v:
            raise exceptions.OptionsError(str(v))
        self.stream = io.FilteredFlowWriter(f, flt)
        self.active_flows = set()

    def configure(self, updated):
        # We're already streaming - stop the previous stream and restart
        if "save_stream_filter" in updated:
            if ctx.options.save_stream_filter:
                self.filt = flowfilter.parse(ctx.options.save_stream_filter)
                if not self.filt:
                    raise exceptions.OptionsError(
                        "Invalid filter specification: %s" % ctx.options.save_stream_filter
                    )
            else:
                self.filt = None
        if "save_stream_file" in updated or "save_stream_filter" in updated:
            if self.stream:
                self.done()
            if ctx.options.save_stream_file:
                self.start_stream_to_path(ctx.options.save_stream_file, self.filt)

    @command.command("save.file")
    def save(self, flows: typing.Sequence[flow.Flow], path: mitmproxy.types.Path) -> None:
        """
            Save flows to a file. If the path starts with a +, flows are
            appended to the file, otherwise it is over-written.
        """
        try:
            f = self.open_file(path)
        except IOError as v:
            raise exceptions.CommandError(v) from v
        stream = io.FlowWriter(f)
        for i in flows:
            stream.add(i)
        f.close()
        ctx.log.alert("Saved %s flows." % len(flows))

    def tcp_start(self, flow):
        if self.stream:
            self.active_flows.add(flow)

    def tcp_end(self, flow):
        if self.stream:
            self.stream.add(flow)
            self.active_flows.discard(flow)

    def websocket_start(self, flow):
        if self.stream:
            self.active_flows.add(flow)

    def websocket_end(self, flow):
        if self.stream:
            self.stream.add(flow)
            self.active_flows.discard(flow)

    def response(self, flow):
        if self.stream:
            self.stream.add(flow)
            self.active_flows.discard(flow)

    def request(self, flow):
        if self.stream:
            self.active_flows.add(flow)

    def done(self):
        if self.stream:
            for f in self.active_flows:
                self.stream.add(f)
            self.active_flows = set([])
            self.stream.fo.close()
            self.stream = None
```
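A small illustration of the `+` prefix handling in `Save.open_file()` (the file names are made up):
```python
s = Save()
s.open_file("+flows.mitm")  # strips the "+" and opens flows.mitm in append ("ab") mode
s.open_file("flows.mitm")   # opens flows.mitm in overwrite ("wb") mode
```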
#### File: contrib/kaitaistruct/gif.py
```python
import array
import struct
import zlib
from enum import Enum
from pkg_resources import parse_version

from kaitaistruct import __version__ as ks_version, KaitaiStruct, KaitaiStream, BytesIO

if parse_version(ks_version) < parse_version('0.7'):
    raise Exception("Incompatible Kaitai Struct Python API: 0.7 or later is required, but you have %s" % (ks_version))


class Gif(KaitaiStruct):

    class BlockType(Enum):
        extension = 33
        local_image_descriptor = 44
        end_of_file = 59

    class ExtensionLabel(Enum):
        graphic_control = 249
        comment = 254
        application = 255

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self.hdr = self._root.Header(self._io, self, self._root)
        self.logical_screen_descriptor = self._root.LogicalScreenDescriptorStruct(self._io, self, self._root)
        if self.logical_screen_descriptor.has_color_table:
            self._raw_global_color_table = self._io.read_bytes((self.logical_screen_descriptor.color_table_size * 3))
            io = KaitaiStream(BytesIO(self._raw_global_color_table))
            self.global_color_table = self._root.ColorTable(io, self, self._root)

        self.blocks = []
        while True:
            _ = self._root.Block(self._io, self, self._root)
            self.blocks.append(_)
            if ((self._io.is_eof()) or (_.block_type == self._root.BlockType.end_of_file)):
                break

    class ImageData(KaitaiStruct):
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.lzw_min_code_size = self._io.read_u1()
            self.subblocks = self._root.Subblocks(self._io, self, self._root)

    class ColorTableEntry(KaitaiStruct):
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.red = self._io.read_u1()
            self.green = self._io.read_u1()
            self.blue = self._io.read_u1()

    class LogicalScreenDescriptorStruct(KaitaiStruct):
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.screen_width = self._io.read_u2le()
            self.screen_height = self._io.read_u2le()
            self.flags = self._io.read_u1()
            self.bg_color_index = self._io.read_u1()
            self.pixel_aspect_ratio = self._io.read_u1()

        @property
        def has_color_table(self):
            if hasattr(self, '_m_has_color_table'):
                return self._m_has_color_table if hasattr(self, '_m_has_color_table') else None

            self._m_has_color_table = (self.flags & 128) != 0
            return self._m_has_color_table if hasattr(self, '_m_has_color_table') else None

        @property
        def color_table_size(self):
            if hasattr(self, '_m_color_table_size'):
                return self._m_color_table_size if hasattr(self, '_m_color_table_size') else None

            self._m_color_table_size = (2 << (self.flags & 7))
            return self._m_color_table_size if hasattr(self, '_m_color_table_size') else None

    class LocalImageDescriptor(KaitaiStruct):
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.left = self._io.read_u2le()
            self.top = self._io.read_u2le()
            self.width = self._io.read_u2le()
            self.height = self._io.read_u2le()
            self.flags = self._io.read_u1()
            if self.has_color_table:
                self._raw_local_color_table = self._io.read_bytes((self.color_table_size * 3))
                io = KaitaiStream(BytesIO(self._raw_local_color_table))
                self.local_color_table = self._root.ColorTable(io, self, self._root)

            self.image_data = self._root.ImageData(self._io, self, self._root)

        @property
        def has_color_table(self):
            if hasattr(self, '_m_has_color_table'):
                return self._m_has_color_table if hasattr(self, '_m_has_color_table') else None

            self._m_has_color_table = (self.flags & 128) != 0
            return self._m_has_color_table if hasattr(self, '_m_has_color_table') else None

        @property
        def has_interlace(self):
            if hasattr(self, '_m_has_interlace'):
                return self._m_has_interlace if hasattr(self, '_m_has_interlace') else None

            self._m_has_interlace = (self.flags & 64) != 0
            return self._m_has_interlace if hasattr(self, '_m_has_interlace') else None

        @property
        def has_sorted_color_table(self):
            if hasattr(self, '_m_has_sorted_color_table'):
                return self._m_has_sorted_color_table if hasattr(self, '_m_has_sorted_color_table') else None

            self._m_has_sorted_color_table = (self.flags & 32) != 0
            return self._m_has_sorted_color_table if hasattr(self, '_m_has_sorted_color_table') else None

        @property
        def color_table_size(self):
            if hasattr(self, '_m_color_table_size'):
                return self._m_color_table_size if hasattr(self, '_m_color_table_size') else None

            self._m_color_table_size = (2 << (self.flags & 7))
            return self._m_color_table_size if hasattr(self, '_m_color_table_size') else None

    class Block(KaitaiStruct):
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.block_type = self._root.BlockType(self._io.read_u1())
            _on = self.block_type
            if _on == self._root.BlockType.extension:
                self.body = self._root.Extension(self._io, self, self._root)
            elif _on == self._root.BlockType.local_image_descriptor:
                self.body = self._root.LocalImageDescriptor(self._io, self, self._root)

    class ColorTable(KaitaiStruct):
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.entries = []
            while not self._io.is_eof():
                self.entries.append(self._root.ColorTableEntry(self._io, self, self._root))

    class Header(KaitaiStruct):
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.magic = self._io.ensure_fixed_contents(struct.pack('3b', 71, 73, 70))
            self.version = (self._io.read_bytes(3)).decode(u"ASCII")

    class ExtGraphicControl(KaitaiStruct):
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.block_size = self._io.ensure_fixed_contents(struct.pack('1b', 4))
            self.flags = self._io.read_u1()
            self.delay_time = self._io.read_u2le()
            self.transparent_idx = self._io.read_u1()
            self.terminator = self._io.ensure_fixed_contents(struct.pack('1b', 0))

        @property
        def transparent_color_flag(self):
            if hasattr(self, '_m_transparent_color_flag'):
                return self._m_transparent_color_flag if hasattr(self, '_m_transparent_color_flag') else None

            self._m_transparent_color_flag = (self.flags & 1) != 0
            return self._m_transparent_color_flag if hasattr(self, '_m_transparent_color_flag') else None

        @property
        def user_input_flag(self):
            if hasattr(self, '_m_user_input_flag'):
                return self._m_user_input_flag if hasattr(self, '_m_user_input_flag') else None

            self._m_user_input_flag = (self.flags & 2) != 0
            return self._m_user_input_flag if hasattr(self, '_m_user_input_flag') else None

    class Subblock(KaitaiStruct):
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.num_bytes = self._io.read_u1()
            self.bytes = self._io.read_bytes(self.num_bytes)

    class ExtApplication(KaitaiStruct):
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.application_id = self._root.Subblock(self._io, self, self._root)
            self.subblocks = []
            while True:
                _ = self._root.Subblock(self._io, self, self._root)
                self.subblocks.append(_)
                if _.num_bytes == 0:
                    break

    class Subblocks(KaitaiStruct):
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.entries = []
            while True:
                _ = self._root.Subblock(self._io, self, self._root)
                self.entries.append(_)
                if _.num_bytes == 0:
                    break

    class Extension(KaitaiStruct):
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.label = self._root.ExtensionLabel(self._io.read_u1())
            _on = self.label
            if _on == self._root.ExtensionLabel.application:
                self.body = self._root.ExtApplication(self._io, self, self._root)
            elif _on == self._root.ExtensionLabel.comment:
                self.body = self._root.Subblocks(self._io, self, self._root)
            elif _on == self._root.ExtensionLabel.graphic_control:
                self.body = self._root.ExtGraphicControl(self._io, self, self._root)
            else:
                self.body = self._root.Subblocks(self._io, self, self._root)
```
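The generated parser is driven from a `KaitaiStream`, the same way the class wraps its own sub-structures internally. A usage sketch (the byte string is a placeholder for real GIF data):
```python
from kaitaistruct import KaitaiStream, BytesIO

data = b"GIF89a..."  # raw GIF bytes, e.g. read from a file
g = Gif(KaitaiStream(BytesIO(data)))
print(g.hdr.version)                              # e.g. "89a"
print(g.logical_screen_descriptor.screen_width)   # canvas width in pixels
```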
#### File: mitmproxy/proxy/protocol/rawtcp.py
```python
import socket

from OpenSSL import SSL

import mitmproxy.net.tcp
from mitmproxy import tcp
from mitmproxy import flow
from mitmproxy import exceptions
from mitmproxy.proxy.protocol import base


class RawTCPLayer(base.Layer):
    chunk_size = 4096

    def __init__(self, ctx, ignore=False):
        self.ignore = ignore
        super().__init__(ctx)

    def __call__(self):
        self.connect()

        if not self.ignore:
            f = tcp.TCPFlow(self.client_conn, self.server_conn, self)
            self.channel.ask("tcp_start", f)

        buf = memoryview(bytearray(self.chunk_size))

        client = self.client_conn.connection
        server = self.server_conn.connection
        conns = [client, server]

        try:
            while not self.channel.should_exit.is_set():
                r = mitmproxy.net.tcp.ssl_read_select(conns, 10)
                for conn in r:
                    dst = server if conn == client else client
                    size = conn.recv_into(buf, self.chunk_size)
                    if not size:
                        conns.remove(conn)
                        # Shutdown connection to the other peer
                        if isinstance(conn, SSL.Connection):
                            # We can't half-close a connection, so we just close everything here.
                            # Sockets will be cleaned up on a higher level.
                            return
                        else:
                            dst.shutdown(socket.SHUT_WR)

                        if len(conns) == 0:
                            return
                        continue

                    tcp_message = tcp.TCPMessage(dst == server, buf[:size].tobytes())
                    if not self.ignore:
                        f.messages.append(tcp_message)
                        self.channel.ask("tcp_message", f)
                    dst.sendall(tcp_message.content)

        except (socket.error, exceptions.TcpException, SSL.Error) as e:
            if not self.ignore:
                f.error = flow.Error("TCP connection closed unexpectedly: {}".format(repr(e)))
                self.channel.tell("tcp_error", f)
        finally:
            if not self.ignore:
                self.channel.tell("tcp_end", f)
```
#### File: mitmproxy/release/cibuild.py
```python
import glob
import re
import contextlib
import os
import platform
import sys
import shutil
import subprocess
import tarfile
import zipfile
from os.path import join, abspath, dirname, exists, basename

import click
import cryptography.fernet

# ZipFile and tarfile have slightly different APIs. Fix that.
if platform.system() == "Windows":
    def Archive(name):
        a = zipfile.ZipFile(name, "w")
        a.add = a.write
        return a
else:
    def Archive(name):
        return tarfile.open(name, "w:gz")

PLATFORM_TAG = {
    "Darwin": "osx",
    "Windows": "windows",
    "Linux": "linux",
}.get(platform.system(), platform.system())

ROOT_DIR = abspath(join(dirname(__file__), ".."))
RELEASE_DIR = join(ROOT_DIR, "release")
BUILD_DIR = join(RELEASE_DIR, "build")
DIST_DIR = join(RELEASE_DIR, "dist")

BDISTS = {
    "mitmproxy": ["mitmproxy", "mitmdump", "mitmweb"],
    "pathod": ["pathoc", "pathod"]
}
if platform.system() == "Windows":
    BDISTS["mitmproxy"].remove("mitmproxy")

TOOLS = [
    tool
    for tools in sorted(BDISTS.values())
    for tool in tools
]

TAG = os.environ.get("TRAVIS_TAG", os.environ.get("APPVEYOR_REPO_TAG_NAME", None))
BRANCH = os.environ.get("TRAVIS_BRANCH", os.environ.get("APPVEYOR_REPO_BRANCH", None))
if TAG:
    VERSION = re.sub('^v', '', TAG)
    UPLOAD_DIR = VERSION
elif BRANCH:
    VERSION = re.sub('^v', '', BRANCH)
    UPLOAD_DIR = "branches/%s" % VERSION
else:
    print("Could not establish build name - exiting.")
    sys.exit(0)

print("BUILD PLATFORM_TAG=%s" % PLATFORM_TAG)
print("BUILD ROOT_DIR=%s" % ROOT_DIR)
print("BUILD RELEASE_DIR=%s" % RELEASE_DIR)
print("BUILD BUILD_DIR=%s" % BUILD_DIR)
print("BUILD DIST_DIR=%s" % DIST_DIR)
print("BUILD BDISTS=%s" % BDISTS)
print("BUILD TAG=%s" % TAG)
print("BUILD BRANCH=%s" % BRANCH)
print("BUILD VERSION=%s" % VERSION)
print("BUILD UPLOAD_DIR=%s" % UPLOAD_DIR)


def archive_name(bdist: str) -> str:
    if platform.system() == "Windows":
        ext = "zip"
    else:
        ext = "tar.gz"
    return "{project}-{version}-{platform}.{ext}".format(
        project=bdist,
        version=VERSION,
        platform=PLATFORM_TAG,
        ext=ext
    )


@contextlib.contextmanager
def chdir(path: str):
    old_dir = os.getcwd()
    os.chdir(path)
    yield
    os.chdir(old_dir)


@click.group(chain=True)
def cli():
    """
    mitmproxy build tool
    """
    pass


@cli.command("build")
def build():
    """
    Build a binary distribution
    """
    os.makedirs(DIST_DIR, exist_ok=True)

    if "WHEEL" in os.environ:
        whl = build_wheel()
    else:
        click.echo("Not building wheels.")

    if "WHEEL" in os.environ and "DOCKER" in os.environ:
        # Docker image requires wheels
        build_docker_image(whl)
    else:
        click.echo("Not building Docker image.")

    if "PYINSTALLER" in os.environ:
        build_pyinstaller()
    else:
        click.echo("Not building PyInstaller packages.")


def build_wheel():
    click.echo("Building wheel...")
    subprocess.check_call([
        "python", "setup.py", "-q", "bdist_wheel", "--dist-dir", DIST_DIR,
    ])

    whl = glob.glob(join(DIST_DIR, 'mitmproxy-*-py3-none-any.whl'))[0]
    click.echo("Found wheel package: {}".format(whl))

    subprocess.check_call([
        "tox", "-e", "wheeltest", "--", whl
    ])

    return whl


def build_docker_image(whl):
    click.echo("Building Docker image...")
    subprocess.check_call([
        "docker",
        "build",
        "--build-arg", "WHEEL_MITMPROXY={}".format(os.path.relpath(whl, ROOT_DIR)),
        "--build-arg", "WHEEL_BASENAME_MITMPROXY={}".format(basename(whl)),
        "--file", "docker/Dockerfile",
        "."
    ])


def build_pyinstaller():
    PYINSTALLER_SPEC = join(RELEASE_DIR, "specs")
    # PyInstaller 3.2 does not bundle pydivert's Windivert binaries
    PYINSTALLER_HOOKS = join(RELEASE_DIR, "hooks")
    PYINSTALLER_TEMP = join(BUILD_DIR, "pyinstaller")
    PYINSTALLER_DIST = join(BUILD_DIR, "binaries", PLATFORM_TAG)

    # https://virtualenv.pypa.io/en/latest/userguide.html#windows-notes
    # scripts and executables on Windows go in ENV\Scripts\ instead of ENV/bin/
    if platform.system() == "Windows":
        PYINSTALLER_ARGS = [
            # PyInstaller < 3.2 does not handle Python 3.5's ucrt correctly.
            "-p", r"C:\Program Files (x86)\Windows Kits\10\Redist\ucrt\DLLs\x86",
        ]
    else:
        PYINSTALLER_ARGS = []

    if exists(PYINSTALLER_TEMP):
        shutil.rmtree(PYINSTALLER_TEMP)
    if exists(PYINSTALLER_DIST):
        shutil.rmtree(PYINSTALLER_DIST)

    for bdist, tools in sorted(BDISTS.items()):
        with Archive(join(DIST_DIR, archive_name(bdist))) as archive:
            for tool in tools:
                # We can't have a folder and a file with the same name.
                if tool == "mitmproxy":
                    tool = "mitmproxy_main"
                # This is PyInstaller, so it messes up paths.
                # We need to make sure that we are in the spec folder.
                with chdir(PYINSTALLER_SPEC):
                    click.echo("Building PyInstaller %s binary..." % tool)
                    excludes = []
                    if tool != "mitmweb":
                        excludes.append("mitmproxy.tools.web")
                    if tool != "mitmproxy_main":
                        excludes.append("mitmproxy.tools.console")

                    subprocess.check_call(
                        [
                            "pyinstaller",
                            "--clean",
                            "--workpath", PYINSTALLER_TEMP,
                            "--distpath", PYINSTALLER_DIST,
                            "--additional-hooks-dir", PYINSTALLER_HOOKS,
                            "--onefile",
                            "--console",
                            "--icon", "icon.ico",
                            # This is PyInstaller, so setting a
                            # different log level obviously breaks it :-)
                            # "--log-level", "WARN",
                        ]
                        + [x for e in excludes for x in ["--exclude-module", e]]
                        + PYINSTALLER_ARGS
                        + [tool]
                    )
                    # Delete the spec file - we're good without.
                    os.remove("{}.spec".format(tool))

                # Test if it works at all O:-)
                executable = join(PYINSTALLER_DIST, tool)
                if platform.system() == "Windows":
                    executable += ".exe"

                # Remove _main suffix from mitmproxy executable
                if "_main" in executable:
                    shutil.move(
                        executable,
                        executable.replace("_main", "")
                    )
                    executable = executable.replace("_main", "")

                click.echo("> %s --version" % executable)
                click.echo(subprocess.check_output([executable, "--version"]).decode())

                archive.add(executable, basename(executable))
        click.echo("Packed {}.".format(archive_name(bdist)))


@cli.command("upload")
def upload():
    """
    Upload build artifacts

    Uploads the wheels package to PyPi.
    Uploads the Pyinstaller and wheels packages to the snapshot server.
    Pushes the Docker image to Docker Hub.
    """
    # Our credentials are only available from within the main repository and not forks.
    # We need to prevent uploads from all BUT the branches in the main repository.
    # Pull requests and master-branches of forks are not allowed to upload.
    is_pull_request = (
        ("TRAVIS_PULL_REQUEST" in os.environ and os.environ["TRAVIS_PULL_REQUEST"] != "false") or
        "APPVEYOR_PULL_REQUEST_NUMBER" in os.environ
    )
    if is_pull_request:
        click.echo("Refusing to upload artifacts from a pull request!")
        return

    if "AWS_ACCESS_KEY_ID" in os.environ:
        subprocess.check_call([
            "aws", "s3", "cp",
            "--acl", "public-read",
            DIST_DIR + "/",
            "s3://snapshots.mitmproxy.org/{}/".format(UPLOAD_DIR),
            "--recursive",
        ])

    upload_pypi = (
        TAG and
        "WHEEL" in os.environ and
        "TWINE_USERNAME" in os.environ and
        "TWINE_PASSWORD" in os.environ
    )
    if upload_pypi:
        whl = glob.glob(join(DIST_DIR, 'mitmproxy-*-py3-none-any.whl'))[0]
        click.echo("Uploading {} to PyPi...".format(whl))
        subprocess.check_call([
            "twine", "upload", whl
        ])

    upload_docker = (
        (TAG or BRANCH == "master") and
        "DOCKER" in os.environ and
        "DOCKER_USERNAME" in os.environ and
        "DOCKER_PASSWORD" in os.environ
    )
    if upload_docker:
        docker_tag = "dev" if BRANCH == "master" else VERSION
        click.echo("Uploading Docker image to tag={}...".format(docker_tag))

        subprocess.check_call([
            "docker", "login",
            "-u", os.environ["DOCKER_USERNAME"],
            "-p", os.environ["DOCKER_PASSWORD"],
        ])
        subprocess.check_call([
            "docker", "push",
            "mitmproxy/mitmproxy:{}".format(docker_tag),
        ])


@cli.command("decrypt")
@click.argument('infile', type=click.File('rb'))
@click.argument('outfile', type=click.File('wb'))
@click.argument('key', envvar='RTOOL_KEY')
def decrypt(infile, outfile, key):
    f = cryptography.fernet.Fernet(key.encode())
    outfile.write(f.decrypt(infile.read()))


if __name__ == "__main__":
    cli()
```
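For example, `archive_name()` composes the artifact name from the distribution, the build version, and the platform tag (the version shown here is made up):
```python
# with VERSION = "4.0.0" on a Linux builder (PLATFORM_TAG = "linux"):
archive_name("mitmproxy")  # -> "mitmproxy-4.0.0-linux.tar.gz"
# on Windows the extension switches to .zip:
# -> "mitmproxy-4.0.0-windows.zip"
```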
"test") loader.add_option("rfile", bool, self.val, "test") @command.command("readfile.reading") def readfile(self) -> bool: return self.val @command.command("replay.client.count") def creplay(self) -> int: return 1 if self.val else 0 @command.command("replay.server.count") def sreplay(self) -> int: return 1 if self.val else 0 class TKS(keepserving.KeepServing): _is_shutdown = False def shutdown(self): self.is_shutdown = True @pytest.mark.asyncio async def test_keepserving(): ks = TKS() d = Dummy(True) with taddons.context(ks) as tctx: tctx.master.addons.add(d) ks.running() assert ks.keepgoing() d.val = False assert not ks.keepgoing() await asyncio.sleep(0.3) assert ks.is_shutdown ``` #### File: mitmproxy/contentviews/__init__.py ```python def full_eval(instance): def call(data, **metadata): x = instance(data, **metadata) if x is None: return None name, generator = x return name, list(generator) return call ``` #### File: mitmproxy/contentviews/test_javascript.py ```python import pytest from mitmproxy.contentviews import javascript from . import full_eval def test_view_javascript(): v = full_eval(javascript.ViewJavaScript()) assert v(b"[1, 2, 3]") assert v(b"[1, 2, 3") assert v(b"function(a){[1, 2, 3]}") == ("JavaScript", [ [('text', 'function(a) {')], [('text', ' [1, 2, 3]')], [('text', '}')] ]) assert v(b"\xfe") # invalid utf-8 @pytest.mark.parametrize("filename", [ "simple.js", ]) def test_format_xml(filename, tdata): path = tdata.path("mitmproxy/contentviews/test_js_data/" + filename) with open(path) as f: input = f.read() with open("-formatted.".join(path.rsplit(".", 1))) as f: expected = f.read() js = javascript.beautify(input) assert js == expected ``` #### File: mitmproxy/contentviews/test_json.py ```python from mitmproxy.contentviews import json from . 
import full_eval def test_pretty_json(): assert json.pretty_json(b'{"foo": 1}') assert not json.pretty_json(b"moo") assert json.pretty_json(b'{"foo" : "\xe4\xb8\x96\xe7\x95\x8c"}') # utf8 with chinese characters assert not json.pretty_json(b'{"foo" : "\xFF"}') def test_view_json(): v = full_eval(json.ViewJSON()) assert v(b"{}") assert not v(b"{") assert v(b"[1, 2, 3, 4, 5]") ``` #### File: data/addonscripts/error.py ```python from mitmproxy import ctx def running(): ctx.log.info("error running") def request(flow): raise ValueError("Error!") ``` #### File: http/http2/test_framereader.py ```python import pytest import codecs from io import BytesIO import hyperframe.frame from mitmproxy import exceptions from mitmproxy.net.http.http2 import read_raw_frame, parse_frame def test_read_raw_frame(): raw = codecs.decode('000006000101234567666f6f626172', 'hex_codec') bio = BytesIO(raw) bio.safe_read = bio.read header, body = read_raw_frame(bio) assert header assert body def test_read_raw_frame_failed(): raw = codecs.decode('485454000000000000', 'hex_codec') bio = BytesIO(raw) bio.safe_read = bio.read with pytest.raises(exceptions.HttpException): read_raw_frame(bio) def test_parse_frame(): f = parse_frame( codecs.decode('000006000101234567', 'hex_codec'), codecs.decode('666f6f626172', 'hex_codec') ) assert isinstance(f, hyperframe.frame.Frame) def test_parse_frame_combined(): f = parse_frame( codecs.decode('000006000101234567666f6f626172', 'hex_codec'), ) assert isinstance(f, hyperframe.frame.Frame) ``` #### File: http/http2/test_utils.py ```python import pytest from mitmproxy.net.http.http2 import parse_headers class TestHttp2ParseHeaders: def test_relative(self): h = dict([ (':authority', "127.0.0.1:1234"), (':method', 'GET'), (':scheme', 'https'), (':path', '/'), ]) first_line_format, method, scheme, host, port, path = parse_headers(h) assert first_line_format == 'relative' assert method == b'GET' assert scheme == b'https' assert host == b'127.0.0.1' assert port == 1234 assert path == b'/' def test_absolute(self): h = dict([ (':authority', "127.0.0.1:1234"), (':method', 'GET'), (':scheme', 'https'), (':path', 'https://127.0.0.1:4321'), ]) first_line_format, method, scheme, host, port, path = parse_headers(h) assert first_line_format == 'absolute' assert method == b'GET' assert scheme == b'https' assert host == b'127.0.0.1' assert port == 1234 assert path == b'https://127.0.0.1:4321' @pytest.mark.parametrize("scheme, expected_port", [ ('http', 80), ('https', 443), ]) def test_without_port(self, scheme, expected_port): h = dict([ (':authority', "127.0.0.1"), (':method', 'GET'), (':scheme', scheme), (':path', '/'), ]) _, _, _, _, port, _ = parse_headers(h) assert port == expected_port def test_without_authority(self): h = dict([ (':method', 'GET'), (':scheme', 'https'), (':path', '/'), ]) _, _, _, host, _, _ = parse_headers(h) assert host == b'localhost' def test_connect(self): h = dict([ (':authority', "127.0.0.1"), (':method', 'CONNECT'), (':scheme', 'https'), (':path', '/'), ]) with pytest.raises(NotImplementedError): parse_headers(h) ``` #### File: mitmproxy/net/test_tls.py ```python import io import pytest from mitmproxy import exceptions from mitmproxy.net import tls from mitmproxy.net.tcp import TCPClient from test.mitmproxy.net.test_tcp import EchoHandler from . 
import tservers CLIENT_HELLO_NO_EXTENSIONS = bytes.fromhex( "03015658a756ab2c2bff55f636814deac086b7ca56b65058c7893ffc6074f5245f70205658a75475103a152637" "78e1bb6d22e8bbd5b6b0a3a59760ad354e91ba20d353001a0035002f000a000500040009000300060008006000" "61006200640100" ) FULL_CLIENT_HELLO_NO_EXTENSIONS = ( b"\x16\x03\x03\x00\x65" # record layer b"\x01\x00\x00\x61" + # handshake header CLIENT_HELLO_NO_EXTENSIONS ) class TestMasterSecretLogger(tservers.ServerTestBase): handler = EchoHandler ssl = dict( cipher_list="AES256-SHA" ) def test_log(self, tmpdir): testval = b"echo!\n" _logfun = tls.log_master_secret logfile = str(tmpdir.join("foo", "bar", "logfile")) tls.log_master_secret = tls.MasterSecretLogger(logfile) c = TCPClient(("127.0.0.1", self.port)) with c.connect(): c.convert_to_tls() c.wfile.write(testval) c.wfile.flush() assert c.rfile.readline() == testval c.finish() tls.log_master_secret.close() with open(logfile, "rb") as f: assert f.read().count(b"CLIENT_RANDOM") == 2 tls.log_master_secret = _logfun def test_create_logfun(self): assert isinstance( tls.MasterSecretLogger.create_logfun("test"), tls.MasterSecretLogger) assert not tls.MasterSecretLogger.create_logfun(False) class TestTLSInvalid: def test_invalid_ssl_method_should_fail(self): fake_ssl_method = 100500 with pytest.raises(exceptions.TlsException): tls.create_client_context(method=fake_ssl_method) def test_alpn_error(self): with pytest.raises(exceptions.TlsException, match="must be a function"): tls.create_client_context(alpn_select_callback="foo") with pytest.raises(exceptions.TlsException, match="ALPN error"): tls.create_client_context(alpn_select="foo", alpn_select_callback="bar") def test_is_record_magic(): assert not tls.is_tls_record_magic(b"POST /") assert not tls.is_tls_record_magic(b"\x16\x03") assert not tls.is_tls_record_magic(b"\x16\x03\x04") assert tls.is_tls_record_magic(b"\x16\x03\x00") assert tls.is_tls_record_magic(b"\x16\x03\x01") assert tls.is_tls_record_magic(b"\x16\x03\x02") assert tls.is_tls_record_magic(b"\x16\x03\x03") def test_get_client_hello(): rfile = io.BufferedReader(io.BytesIO( FULL_CLIENT_HELLO_NO_EXTENSIONS )) assert tls.get_client_hello(rfile) rfile = io.BufferedReader(io.BytesIO( FULL_CLIENT_HELLO_NO_EXTENSIONS[:30] )) with pytest.raises(exceptions.TlsProtocolException, message="Unexpected EOF"): tls.get_client_hello(rfile) rfile = io.BufferedReader(io.BytesIO( b"GET /" )) with pytest.raises(exceptions.TlsProtocolException, message="Expected TLS record"): tls.get_client_hello(rfile) class TestClientHello: def test_no_extensions(self): c = tls.ClientHello(CLIENT_HELLO_NO_EXTENSIONS) assert repr(c) assert c.sni is None assert c.cipher_suites == [53, 47, 10, 5, 4, 9, 3, 6, 8, 96, 97, 98, 100] assert c.alpn_protocols == [] assert c.extensions == [] def test_extensions(self): data = bytes.fromhex( "03033b70638d2523e1cba15f8364868295305e9c52aceabda4b5147210abc783e6e1000022c02bc02fc02cc030" "cca9cca8cc14cc13c009c013c00ac014009c009d002f0035000a0100006cff0100010000000010000e00000b65" "78616d706c652e636f6d0017000000230000000d00120010060106030501050304010403020102030005000501" "00000000001200000010000e000c02683208687474702f312e3175500000000b00020100000a00080006001d00" "170018" ) c = tls.ClientHello(data) assert repr(c) assert c.sni == 'example.com' assert c.cipher_suites == [ 49195, 49199, 49196, 49200, 52393, 52392, 52244, 52243, 49161, 49171, 49162, 49172, 156, 157, 47, 53, 10 ] assert c.alpn_protocols == [b'h2', b'http/1.1'] assert c.extensions == [ (65281, b'\x00'), (0, 
b'\x00\x0e\x00\x00\x0bexample.com'), (23, b''), (35, b''), (13, b'\x00\x10\x06\x01\x06\x03\x05\x01\x05\x03\x04\x01\x04\x03\x02\x01\x02\x03'), (5, b'\x01\x00\x00\x00\x00'), (18, b''), (16, b'\x00\x0c\x02h2\x08http/1.1'), (30032, b''), (11, b'\x01\x00'), (10, b'\x00\x06\x00\x1d\x00\x17\x00\x18') ] def test_from_file(self): rfile = io.BufferedReader(io.BytesIO( FULL_CLIENT_HELLO_NO_EXTENSIONS )) assert tls.ClientHello.from_file(rfile) rfile = io.BufferedReader(io.BytesIO( b"" )) with pytest.raises(exceptions.TlsProtocolException): tls.ClientHello.from_file(rfile) rfile = io.BufferedReader(io.BytesIO( b"\x16\x03\x03\x00\x07" # record layer b"\x01\x00\x00\x03" + # handshake header b"foo" )) with pytest.raises(exceptions.TlsProtocolException, message='Cannot parse Client Hello'): tls.ClientHello.from_file(rfile) ``` #### File: tools/console/test_palettes.py ```python import mitmproxy.tools.console.palettes as palettes from ....conftest import skip_appveyor @skip_appveyor class TestPalette: def test_helptext(self): for i in palettes.palettes.values(): assert i.palette(False) for i in palettes.palettes.values(): assert i.palette(True) ```
{ "source": "0x7Fancy/EmailBot", "score": 2 }
#### File: 0x7Fancy/EmailBot/emailbot.py
```python
import argparse
import threading
import time

# patch import path
import os
import sys
module_path = os.path.dirname(os.path.realpath(__file__))
sys.path.append(module_path)

import config
import interact
import mime
import protocol
import rule
import utils
from utils import logger

VERSION = "EmailBot v0.2 (build 20220113)"

#**********************************************************************
# @Class: EmailBot
# @Description: the mailbox main schedule logic, handles the sending and
# receiving of emails, as well as the judgment of the rules, and provides an
# external calling interface
#**********************************************************************
class EmailBot:
    #**********************************************************************
    # @Function: __init__(self, smtp="", pop3="", smtp_port=0, pop3_port=0,
    #            smtp_ssl=False, pop3_ssl=False)
    # @Description: the EmailBot object initialize
    # @Parameter: smtp="", the SMTP server address, using configure if empty
    # @Parameter: pop3="", the POP3 server address, using configure if empty
    # @Parameter: smtp_port=0, the SMTP server port, using configure if empty
    # @Parameter: pop3_port=0, the POP3 server port, using configure if empty
    # @Parameter: smtp_ssl=False, ssl is required to connect to the SMTP,
    # using configure if empty
    # @Parameter: pop3_ssl=False, ssl is required to connect to the POP3,
    # using configure if empty
    # @Return: None
    #**********************************************************************
    def __init__(self, smtp="", pop3="", smtp_port=0, pop3_port=0,
                 smtp_ssl=False, pop3_ssl=False):
        # initialize field
        self.smtp_address = smtp if smtp else config.SMTP_SERVER
        self.smtp_port = smtp_port if smtp_port else config.SMTP_PORT
        self.smtp_ssl = smtp_ssl if smtp_ssl else config.SMTP_SSL
        self.pop3_address = pop3 if pop3 else config.POP3_SERVER
        self.pop3_port = pop3_port if pop3_port else config.POP3_PORT
        self.pop3_ssl = pop3_ssl if pop3_ssl else config.POP3_SSL
        # reset/initialize value by "self.login()"
        self.username = config.USERNAME
        self.password = config.PASSWORD
        self.smtp = None
        self.pop3 = None

        # email send/receive manager
        self._send_queue = []
        self._mutex = threading.Lock()
        self._recv_cache = None

        # the rule list, add it by "interact.py" && "add_rule()"
        self.rule = []
        for name in interact.INTERACTS:
            logger.debug("load [%s] rule from config.py" % name)
            self.rule.append(interact.INTERACTS[name])
    # end __init__()

    #**********************************************************************
    # @Function: _send_manager(self)
    # @Description: send email manager, when the user sends an email, the
    # email to be sent is added to the queue, and the manager will send them
    # one by one in order; when sending fails, the manager will automatically
    # retry until it succeeds (usually due to network reasons or temporary
    # failures, because the login() check has already been passed)
    # @Parameter: None
    # @Return: None
    #**********************************************************************
    def _send_manager(self):
        while True:
            # check and get the email waiting to be sent
            self._mutex.acquire()
            wait_count = len(self._send_queue)
            if wait_count > 0:
                e = self._send_queue[0]
            else:
                e = None
            self._mutex.release()
            if e == None:
                time.sleep(10)
                continue

            logger.info("send email [%s] to %s" % (e.subject, e.receiver))
            # send email, when it fails, we will not remove this email,
            # it will try again in the next loop
            result = self.smtp.send(e)
            if result == False:
                # wait a little longer
                time.sleep(60)
                continue
            # send success, remove this email
            self._mutex.acquire()
            self._send_queue = self._send_queue[1:]
            self._mutex.release()
        # end while
    # end _send_manager()

    #**********************************************************************
    # @Function: _recv_manager(self)
    # @Description: receive email manager, each time the uidl() list is polled
    # from the email server, the hash of the new and old email lists is
    # compared to distinguish which are new emails. when new emails are
    # received, they are parsed, the rules are matched, and the callback
    # function is called.
    #
    # Here we need to use hash to distinguish new emails, instead of using the
    # number of emails directly, because:
    # 1.you can choose the time range for receiving email(eg: 30day / 90day /
    #   1year), when the time node is switched, the number of received email
    #   will change(reduce),
    # 2.when we manually delete emails, the number of mailboxes will also change
    # so the number of mailboxes cannot be directly used to determine new email
    #
    # every time we poll the hash of the inbox mail through uidl(), if there are
    # too many inbox mails and the time range for receiving mail is not set, it
    # will cause additional resource cost; the user can manually set the time
    # range for receiving mail in the mailbox to optimize the problem
    # @Parameter: None
    # @Return: None
    #**********************************************************************
    def _recv_manager(self):
        while True:
            old_cache = self._recv_cache
            self._recv_cache = self.pop3.uidl()
            # _recv_manager() need initialize or get uidl error
            if old_cache == None or self._recv_cache == None:
                time.sleep(10)
                continue
            # end if

            # find new email start position
            # we start to compare the last item of old_cache with the new
            # result. if it is not found, it means that the last email has been
            # deleted. use the previous item of old_cache to continue to find
            # the starting position of the new email.
            position = -1
            for oc in reversed(old_cache):
                for nc in reversed(self._recv_cache):
                    _, ohash = self._parse_uidl_line(oc)
                    nid, nhash = self._parse_uidl_line(nc)
                    if ohash == nhash:
                        position = nid
                        break
                # end for
                if position >= 0:
                    break
            # end for

            # receive one or more emails
            for i in range(position + 1, len(self._recv_cache) + 1):
                # receive new email
                e = self.pop3.recv(i)
                if e == None:
                    continue
                logger.info("receive new email [%s] by %s" % (e.subject, e.sender))
                # rule check and execute
                self._route_by_rules(self, e)
            # end for
            time.sleep(60)
        # end while
    # end _recv_manager()

    #**********************************************************************
    # @Function: _parse_uidl_line(self, line)
    # @Description: parse uidl single line data
    # the uidl() single response format: b'1 ZC3130-wi6DhoW5iDuIEDhYOkraUbh'
    # @Parameter: line, the single line uidl data
    # @Return: (id, hash), the email id and hash
    #**********************************************************************
    def _parse_uidl_line(self, line):
        # in the current context, the line has been checked, and the default
        # line is the correct format here
        array = line.decode("utf-8").split(" ")
        return int(array[0]), array[1]
    # end _parse_uidl_line()
# between multiple conditions is AND # @Parameter: eb, the emailbot object # @Parameter: e, the email object # @Return: None #********************************************************************** def _route_by_rules(self, eb, e): for r in self.rule: if r.execute(eb, e): break # end for # end _route_by_rules() #********************************************************************** # @Function: login(self, username="", password="") # @Description: initialize user/pass and server status, and check status # @Parameter: username="", the mailbox username, using configure if empty # @Parameter: password="", the mailbox password, using configure if empty # @Return: status, all server and user/pass is ready #********************************************************************** def login(self, username="", password=""): if username != "": self.username = username if password != "": self.password = password result = False if self.smtp_address == "" and self.pop3_address == "": logger.error("at least one of smtp/pop3 needs to be started") return result # check smtp && pop3 server status and user auth status result = 1 if self.smtp_address != "": self.smtp = protocol.SMTP(self.smtp_address, self.smtp_port, self.smtp_ssl, self.username, self.password) if self.smtp.check_status(): logger.info("smtp server is ready, user auth success") else: result = result & 0 # end if if self.pop3_address != "": self.pop3 = protocol.POP3(self.pop3_address, self.pop3_port, self.pop3_ssl, self.username, self.password) if self.pop3.check_status(): logger.info("pop3 server is ready, user auth success") else: result = result & 0 # end if return result == 1 # end login() #********************************************************************** # @Function: add_rule(self, callback, sender="", subject="", content="", func=None) # @Description: add a rule for matching receive new email, when all the rules # are empty, it means all match. # @Parameter: callback, when the rule is successfully matched, the callback # function that needs to be executed # @Parameter: sender="", regexp rule which match sender # @Parameter: subject="", regexp rule which match subject # @Parameter: content="", regexp rule which match content # @Parameter: func=None, custom rule match function # @Return: None #********************************************************************** def add_rule(self, callback, sender="", subject="", content="", func=None): r = rule.Rule(callback, sender, subject, content, func) logger.debug("load [%s] rule by 'add_rule()'" % callback.__name__) self.rule.append(r) # end add_rule() #********************************************************************** # @Function: send_email(self, to, cc="", subject="", content="", attachment="", blocking=False) # @Description: the user calls this function to send email. # if blocking=True, the email will be added to the queue to be sent, the # email will auto sent and retry. 
# if blocking=False, the email will send directly, and return send result # @Parameter: to, the email receiver # @Parameter: cc="", the email carbon copy # @Parameter: subject="", the email subject # @Parameter: content="", the email content # @Parameter: attachment="", the email attachment file path # @Parameter: blocking=False, blocking or not send mode # @Return: None #********************************************************************** def send_email(self, to, cc="", subject="", content="", attachment="", blocking=False): # check smtp object is ready if self.smtp == None: logger.error("smtp server not initialize") return # create mime Email object e = mime.Email(self.username, to, cc, subject, content, attachment) # blocking send mode if blocking: return self.smtp.send(e) # non-blocking send mode # add new email into send queue self._mutex.acquire() self._send_queue.append(e) self._mutex.release() # end send_email() #********************************************************************** # @Function: run(self, daemon=False) # @Description: the emailbot launch entrypoint # @Parameter: daemon=False, set background running, facilitate that emailbot # can be run as a service or be called as a library. # @Return: None #********************************************************************** def run(self, daemon=False): # "login()" is not called, or neither smtp/pop3 is set if self.smtp_address == "" and self.pop3_address == "": logger.critical("at least one of smtp/pop3 needs to be started") return # user want to use smtp(send email) if self.smtp_address != "": if self.smtp == None or self.smtp.status == False: logger.error("smtp/user status is not ready") return logger.info("initialize send email manager") ts = threading.Thread(target=self._send_manager) ts.start() # end if # user want to use pop3(receive email) if self.pop3_address != "": if self.pop3 == None or self.pop3.status == False: logger.error("pop3/user status is not ready") return logger.info("initialize recv email manager") tr = threading.Thread(target=self._recv_manager) tr.start() # end if # set daemon if daemon == False: if self.smtp_address != "": ts.join() if self.pop3_address != "": tr.join() # end if # end run() # end class #********************************************************************** # @Function: main() # @Description: main entry point # @Parameter: None # @Return: None #********************************************************************** if __name__ == "__main__": # arguments parse parser = argparse.ArgumentParser(description="EmailBot launch arguments as service") parser.add_argument("-u", "--username", type=str, default="", help="the emailbox username") parser.add_argument("-p", "--password", type=str, default="", help="the emailbox password") parser.add_argument("--smtp", type=str, default="", help="SMTP server address(address:port)") parser.add_argument("--smtpssl", type=bool, default=False, help="connect SMTP server with ssl") parser.add_argument("--pop3", type=str, default="", help="POP3 server address(address:port)") parser.add_argument("--pop3ssl", type=bool, default=False, help="connect POP3 server with ssl") parser.add_argument("-v", "--version", help="print emailbot version", action="store_true") args = parser.parse_args() if args.version: print(VERSION) exit(0) # end if # parse smtp/pop3 server address and port smtp, smtpport = utils.parse_args_server(args.smtp) pop3, pop3port = utils.parse_args_server(args.pop3) # launch emailbot logger.info("launch Emailbot ...") eb = EmailBot(smtp=smtp, 
smtp_port=smtpport, smtp_ssl=args.smtpssl, pop3=pop3, pop3_port=pop3port, pop3_ssl=args.pop3ssl) status = eb.login(args.username, args.password) if not status: logger.error("Emailbot check SMTP/POP3/USER failed") exit(0) #eb.add_rule() eb.run() # end main() ``` #### File: 0x7Fancy/EmailBot/protocol.py ```python import smtplib import poplib import config import mime from utils import logger #********************************************************************** # @Class: SMTP # @Description: implement and warpper SMTP protocol multi commands, and provide # external sending interface #********************************************************************** class SMTP: #********************************************************************** # @Function: __init__(self, address, port, ssl, user, passwd) # @Description: SMTP object initialize # @Parameter: address, the SMTP server address # @Parameter: port, the SMTP server port # @Parameter: ssl, ssl is required to connect to the SMTP # @Parameter: username, the mailbox username # @Parameter: password, the mailbox password # @Return: None #********************************************************************** def __init__(self, address, port, ssl, user, passwd): self.address = address self.port = port self.ssl = ssl self.user = user self.passwd = <PASSWORD> # set value by "check_status()" self.status = False # end __init__() #********************************************************************** # @Function: _login_server(self) # @Description: connect and login in SMTP server # @Parameter: None # @Return: smtp, the connectd SMTP object #********************************************************************** def _login_server(self): # connect smtp server try: if self.ssl: smtp = smtplib.SMTP_SSL(self.address, self.port) else: smtp = smtplib.SMTP(self.address, self.port) except Exception as e: logger.error(e) return False, None # if the log level is DEBUG import logging if config.LOG_LEVEL <= logging.DEBUG: smtp.set_debuglevel(1) # login in smtp server try: smtp.login(self.user, self.passwd) except Exception as e: smtp.close() logger.error(e) return False, None return True, smtp # end _login_server() #********************************************************************** # @Function: check_status(self) # @Description: check SMTP server and user/pass status # @Parameter: None # @Return: status, return True when SMTP server is ok and user/pass is authed #********************************************************************** def check_status(self): status, smtp = self._login_server() if status: self.status = True smtp.quit() return status # end check_status() #********************************************************************** # @Function: send(self, email) # @Description: send email through SMTP server # @Parameter: email, the mime email object # @Return: status, return True when email send success #********************************************************************** def send(self, email): # connect/auth smtp server and send status, smtp = self._login_server() if status == False: return False try: smtp.sendmail(email.sender, email.receiver+email.cc, email.MIME.as_string()) smtp.quit() except Exception as e: logger.error(e) smtp.close() return False return True # end send() # end class #********************************************************************** # @Class: POP3 # @Description: implement and warpper POP3 protocol multi commands, and provide # external sending interface #********************************************************************** 
class POP3: #********************************************************************** # @Function: __init__(self, address, port, ssl, user, passwd) # @Description: POP3 object initialize # @Parameter: address, the POP3 server address # @Parameter: port, the POP3 server port # @Parameter: ssl, ssl is required to connect to the POP3 # @Parameter: username, the mailbox username # @Parameter: password, the mailbox password # @Return: None #********************************************************************** def __init__(self, address, port, ssl, user, passwd): self.address = address self.port = port self.ssl = ssl self.user = user self.passwd = <PASSWORD> # set value by "check_status()" self.status = False # end __init__() #********************************************************************** # @Function: _login_server(self) # @Description: connect and login in POP3 server # @Parameter: None # @Return: pop3, the connectd POP3 object #********************************************************************** def _login_server(self): # connect pop3 server try: if self.ssl: pop3 = poplib.POP3_SSL(self.address, self.port) else: pop3 = poplib.POP3(self.address, self.port) except Exception as e: logger.error(e) return False, None # if the log level is DEBUG import logging if config.LOG_LEVEL <= logging.DEBUG: pop3.set_debuglevel(1) # login in pop3 server try: pop3.user(self.user) pop3.pass_(self.passwd) except Exception as e: logger.error(e) pop3.close() return False, None return True, pop3 # end _login_server() #********************************************************************** # @Function: check_status(self) # @Description: check POP3 server and user/pass status # @Parameter: None # @Return: status, return True when POP3 server is ok and user/pass is authed #********************************************************************** def check_status(self): status, pop3 = self._login_server() if status: self.status = True pop3.quit() return status # end check_status() #********************************************************************** # @Function: stat(self) # @Description: get mailbox status, include message count and mailbox size, # we just return message count # @Parameter: None # @Return: count, the message count, while error will return -1 #********************************************************************** def stat(self): # connect/auth pop3 server status, pop3 = self._login_server() if status == False: return -1 # get email status try: count, octets = pop3.stat() pop3.quit() except Exception as e: logger.error(e) pop3.close() return -1 return count # end stat() #********************************************************************** # @Function: uidl(self, which=0) # @Description: get all message hash or get the hash of the mail with the # specified id. # @Parameter: which=0, message id, when the value less than or equal to 0, # return the specified mail hash. 
# @Return: result, the email hash list or hash string # the single hash example: b'1 ZC3130-wi6DhoW5iDuIEDhYOkraUbh' #********************************************************************** def uidl(self, which=0): # connect/auth pop3 server status, pop3 = self._login_server() if status == False: return None # get all email message digest (unique id) list result = None try: if which > 0: # get the uuid of the specified mail line = pop3.uidl(which) # check response with "+OK" if line.decode("utf-8").startswith("+OK"): result = line[4:] else: # get the uuid of all emails resp, lines, octets = pop3.uidl() # check response with "+OK" if resp.decode("utf-8").startswith("+OK"): result = lines # end if-else pop3.quit() except Exception as e: logger.error(e) pop3.close() return None return result # end uidl() #********************************************************************** # @Function: recv(self, which) # @Description: get the email content of the specified id, the original # content of the received email is parsed through MIME. # @Parameter: which, the email id # @Return: email, our internal warpper Email object #********************************************************************** def recv(self, which): # connect/auth pop3 server status, pop3 = self._login_server() if status == False: return None # get email message by id try: resp, lines, octets = pop3.retr(which) pop3.quit() except Exception as e: logger.error(e) pop3.close() return None # check resp if resp.decode("utf-8").startswith("-ERR"): logger.warning("pop3 response ERR: %s" % resp) return None # join each line of email message content and decode the data with # utf-8 charset encoding. content = b'\r\n'.join(lines).decode("utf-8", "ignore") return mime.Email(source=content) # end recv() # end class ``` #### File: 0x7Fancy/EmailBot/rule.py ```python import re import threading from utils import logger #********************************************************************** # @Class: Rule # @Description: manage and match rules, extract the content of received emails, # and call callback functions #********************************************************************** class Rule: #********************************************************************** # @Function: __init__(self, callback, sender="", subject="", content="", func=None): # @Description: Rule object initialize # @Parameter: callback, when the rule is successfully matched, the callback # function that needs to be executed # @Parameter: sender="", regexp rule which match sender # @Parameter: subject="", regexp rule which match subject # @Parameter: content="", regexp rule which match content # @Parameter: func=None, custom rule match function # @Return: None #********************************************************************** def __init__(self, callback, sender="", subject="", content="", func=None): self.sender = sender self.subject = subject self.content = content self.func = func self.callback = callback # end __init__() #********************************************************************** # @Function: execute(self, emailbot, email): # @Description: match all rules, call the callback function after the match # is successful; (multiple conditions are AND) # @Parameter: emailbot, the emailbot object # @Parameter: email, the email object # @Return: match, rule matched or not #********************************************************************** def execute(self, emailbot, email): try: match_sender = re.search(self.sender, email.sender, re.M|re.I) match_subject = 
re.search(self.subject, email.subject, re.M|re.I) match_content = re.search(self.content, email.content, re.M|re.I) if self.func != None: match_func, regx = self.func(email) else: match_func, regx = True, {} except Exception as e: logger.error(e) return False # check each result (AND) if not (match_sender and match_subject and match_content and match_func): return False # matched and set regx dict regx["sender"] = match_sender.group() regx["subject"] = match_subject.group() regx["content"] = match_content.group() # execute callback logger.info("MATCH %s" % self) te = threading.Thread(target=self.callback, args=(emailbot, email, regx)) te.start() return True # end check() #********************************************************************** # @Function: __repr__(self) # @Description: rewrite __str__ function, print complete "Rule" object informations # @Parameter: None # @Return: str #********************************************************************** def __repr__(self): if self.func == None: funcname = "" else: funcname = self.func.__name__ return f"RULE ({self.callback.__name__}) <{self.sender}> [{self.subject}] {self.content} ({funcname})" # end __repr__() #********************************************************************** # @Function: __str__(self) # @Description: rewrite __str__ function, just call __repr__() # @Parameter: None # @Return: str #********************************************************************** def __str__(self): return self.__repr__() # end __str__() # end class ```
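A minimal usage sketch for the EmailBot API above. The callback signature `(emailbot, email, regx)` follows the thread call in `Rule.execute()`; server addresses and credentials here are placeholders:

```python
# Sketch only: assumes emailbot.py is importable and real servers/credentials
# are supplied. The rule matches the subject with a regexp, per add_rule().
from emailbot import EmailBot

def on_report(emailbot, email, regx):
    # echo the matched subject back to the sender
    emailbot.send_email(to=email.sender, subject="Re: " + regx["subject"],
                        content="received")

eb = EmailBot(smtp="smtp.example.com", pop3="pop3.example.com")
if eb.login("user@example.com", "password"):
    eb.add_rule(on_report, subject="report")  # regexp match on subject
    eb.run(daemon=False)                      # block and poll for new mail
```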
{ "source": "0x7FFFFF/asoul-remark", "score": 2 }
#### File: 0x7FFFFF/asoul-remark/app.py
```python
import uvicorn
from fastapi import FastAPI, Request
from fastapi.exceptions import RequestValidationError
from fastapi.responses import JSONResponse

from src.apis import users, marks, mark_lists, subscribe
from src.utils.exceptions import BaseError
from src.utils.return_handler import return_handler

app = FastAPI(openapi_url="")

app.include_router(users.router)
app.include_router(marks.router)
app.include_router(mark_lists.router)
app.include_router(subscribe.router)


@app.exception_handler(BaseError)
async def exception_handler(request: Request, exc: BaseError):
    return JSONResponse(
        status_code=exc.code,
        content={"code": exc.code, "msg": exc.message},
    )


@app.exception_handler(RequestValidationError)
async def validation_exception_handler(request, exc):
    return JSONResponse(
        status_code=412,
        content={"code": 412, "msg": str(exc)}
    )


@app.get("/")
async def root():
    return return_handler()


if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=8000)
```
#### File: src/databases/database.py
```python
from sqlalchemy.ext.asyncio import create_async_engine

from ..utils.config import Config
from ..utils.singleton import Singleton


class Database(Singleton):
    def __init__(self):
        self.engine = create_async_engine(Config.url, echo=True, future=True,
                                          pool_pre_ping=True)

    async def execute(self, sql):
        async with self.engine.begin() as conn:
            result = await conn.execute(sql)
            return result
```
#### File: src/databases/subscribe.py
```python
from sqlalchemy import MetaData, Table, Column, insert, delete, select
from sqlalchemy.dialects.mysql import CHAR

from .database import Database
from ..utils.verify import uuid_verify

metadata = MetaData()

subscriptions = Table(
    "subscriptions",
    metadata,
    Column("UserUUID", CHAR(32), nullable=False),
    Column("MarkListUUID", CHAR(32), nullable=False)
)

db = Database()


class Subscriptions:
    @staticmethod
    async def add(userUUID, markListUUID):
        uuid_verify(userUUID)
        uuid_verify(markListUUID)
        await db.execute(insert(subscriptions).
                         values(UserUUID=userUUID, MarkListUUID=markListUUID))

    @staticmethod
    async def remove(userUUID, markListUUID):
        uuid_verify(userUUID)
        uuid_verify(markListUUID)
        await db.execute(delete(subscriptions).
                         where(subscriptions.c.UserUUID == userUUID).
                         where(subscriptions.c.MarkListUUID == markListUUID))

    @staticmethod
    async def get(userUUID):
        uuid_verify(userUUID)
        result = await db.execute(select(subscriptions).
                                  where(subscriptions.c.UserUUID == userUUID))
        return result.all()
```
#### File: src/utils/config.py
```python
import json


class _MetaConfig(type):
    def __getattr__(cls, name):
        with open("config.json", "r") as f:
            cls._config = json.load(f)
        return cls._config.get(name)


class Config(metaclass=_MetaConfig):
    pass
```
#### File: src/utils/singleton.py
```python
import threading


def synchronized(func):
    func.__lock__ = threading.Lock()

    def lock_func(*args, **kwargs):
        with func.__lock__:
            return func(*args, **kwargs)

    return lock_func


class Singleton(object):
    instance = None

    @synchronized
    def __new__(cls, *args, **kwargs):
        if cls.instance is None:
            cls.instance = super().__new__(cls)
        return cls.instance
```
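A quick self-contained check of the `Singleton` base class above; the import path assumes the project layout shown, and `Cache` is a made-up subclass:

```python
# Assumes src/utils/singleton.py is importable as shown in the repo layout.
from src.utils.singleton import Singleton

class Cache(Singleton):  # hypothetical subclass, for illustration only
    pass

a = Cache()
b = Cache()
assert a is b  # __new__ hands back the one shared, lock-protected instance
```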
{ "source": "0x7FFFFF/bilibili_audio_download", "score": 2 }
#### File: 0x7FFFFF/bilibili_audio_download/bilibili_audio_download.py
```python
import requests
import math
import os
import json
import threading
import sys
from mutagen.id3 import ID3, APIC, TIT2, TPE1, COMM
from tenacity import retry, stop_after_attempt
from requests import get, head
import platform

media_id = input('media_id:')

ng_str = r'\/:*?"<>|'            # Windows-specific filename naming rules
translate_str = r"¦¦:x?'《》¦"  # users unhappy with the renaming can change this
trantab = str.maketrans(ng_str, translate_str)


# this code also comes from Baidu
# link: https://blog.csdn.net/weixin_38587484/article/details/97802917
def SetMp3Info(path, info):
    songFile = ID3(path)
    songFile['APIC'] = APIC(  # embed the cover image
        encoding=3,
        mime='image/png',
        type=3,
        desc=u'Cover',
        data=info['picData']
    )
    songFile['TIT2'] = TIT2(  # set the song title
        encoding=3,
        text=info['title']
    )
    songFile['TXXX'] = COMM(  # set the detailed description
        encoding=3,
        text=info['desc']
    )
    songFile['TPE1'] = TPE1(  # set the lead performer/artist
        encoding=3,
        text=info['artist']
    )
    songFile.save()


print('Start processing')


def get_video_list(media_id):
    media_id = str(media_id)
    print('Fetching favorites list data')
    like_list_info = requests.get(f'https://api.bilibili.com/x/v3/fav/resource/list?media_id={media_id}&pn=1&ps=20&jsonp=jsonp').json()
    video_count = int(like_list_info.get('data').get('info').get('media_count'))
    page_count = math.ceil(video_count/20)
    page = 1
    like_list = []
    while True:
        medias = requests.get(f'https://api.bilibili.com/x/v3/fav/resource/list?media_id={media_id}&pn={page}&ps=20&jsonp=jsonp').json().get('data').get('medias')
        for i in medias:
            like_list.append(i.get('bvid'))
        if page == page_count:
            break
        else:
            page = page + 1
    return like_list


def get_like_list_title(media_id):
    media_id = str(media_id)
    print('Fetching favorites list title')
    like_list_info = requests.get(f'https://api.bilibili.com/x/v3/fav/resource/list?media_id={media_id}&pn=1&ps=20&jsonp=jsonp').json()
    title = like_list_info.get('data').get('info').get('title')
    return title


def get_video_info(bvid):
    video_info = requests.get(f'http://api.bilibili.com/x/web-interface/view?bvid={bvid}').json()
    title = video_info.get('data').get('title')
    pic = video_info.get('data').get('pic')
    owner = video_info.get('data').get('owner').get('name')
    desc = video_info.get('data').get('desc')
    pages_cid = []
    pages_title = {}
    for i in video_info.get('data').get('pages'):
        pages_cid.append(i.get('cid'))
        if i.get('part') != '':
            pages_title[i.get('cid')] = i.get('part').translate(trantab)
        else:
            pages_title[i.get('cid')] = title.translate(trantab)
    return {'title': title, 'pic': pic, 'pages_cid': pages_cid,
            'pages_title': pages_title, 'owner': owner, 'desc': desc}


@retry(stop=stop_after_attempt(1))
def download_video(bvid, cid, like_list_title, mthead):
    cid = str(cid)
    info = get_video_info(bvid)
    print(f'Fetching video data ({bvid})')
    video_download_info = requests.get(f'http://api.bilibili.com/x/player/playurl?bvid={bvid}&cid={cid}').json()
    video_download_url = []
    for i in video_download_info.get('data').get('durl'):
        video_download_url.append(i.get('url'))
    n = 1
    title = info.get('title').translate(trantab)
    page_title = info.get('pages_title').get(int(cid))
    page_num = int(info.get('pages_cid').index(int(cid))) + 1
    for i in video_download_url:
        print(f'Downloading: {title}-{page_title}-{page_num}')
        if mthead == True:
            if platform.system() == 'Windows':
                os.system(f'aria2c.exe "{i}" -d "tmp" -s16 -x16 -k1M -j16 -o "tmp_{n}.flv" --referer "https://www.bilibili.com" -U "my-app/0.0.1" --file-allocation=none')
            else:
                os.system(f'aria2c "{i}" -d "tmp" -s16 -x16 -k1M -j16 -o "tmp_{n}.flv" --referer "https://www.bilibili.com" -U "my-app/0.0.1" --file-allocation=none')
            n = n + 1
        else:
            video = requests.get(i, headers={'user-agent': 'my-app/0.0.1', 'referer': 'https://www.bilibili.com'}).content
            video_file = open(f'tmp/tmp_{n}.flv', 'wb')
            video_file.write(video)
            video_file.close()
            n = n + 1
    video_part_list = os.listdir('tmp')
    video_part_list_str = ''
    for i in video_part_list:
        video_part_list_str = video_part_list_str + "file '" + i + "'\n"
    open('tmp/filename.txt', 'w').write(video_part_list_str)
    print('Converting...')
    if platform.system() == 'Windows':
        os.system('ffmpeg.exe -f concat -i tmp/filename.txt -c copy tmp/output.aac')
    else:
        os.system('ffmpeg -f concat -i tmp/filename.txt -c copy tmp/output.aac')
    path = f'download/{like_list_title}'
    try:
        os.makedirs(f'download/{like_list_title}')
    except:
        pass
    if platform.system() == 'Windows':
        os.system(f'ffmpeg.exe -i tmp/output.aac {path}/output.mp3')
    else:
        os.system(f'ffmpeg -i tmp/output.aac {path}/output.mp3')
    pic_data = requests.get(info.get('pic')).content
    artist = info.get('owner')
    desc = info.get('desc')
    media_info = {'picData': pic_data, 'title': title, 'artist': artist, 'desc': desc}
    try:
        if len(info.get('pages_cid')) != 1:
            os.rename(f'{path}/output.mp3', f'{path}/{title}-{page_title}-{page_num}.mp3')
            songPath = f'{path}/{title}-{page_title}-{page_num}.mp3'
        else:
            os.rename(f'{path}/output.mp3', f'{path}/{title}.mp3')
            songPath = f'{path}/{title}.mp3'
        SetMp3Info(songPath, media_info)
        print('Writing ID3 tags...')
    except:
        os.remove(f'{path}/output.mp3')
    already_list = json.loads(open(f'download/{like_list_title_get}/info.json', 'r').read())
    already_list.get('info').append(bvid)
    for i in os.listdir('tmp'):
        os.remove(f'tmp/{i}')
    open(f'{path}/info.json', 'w').write(json.dumps(already_list))


like_list = get_video_list(media_id)
like_list_title_get = get_like_list_title(media_id)
try:
    os.makedirs('download')
    os.makedirs('tmp')
except:
    pass
try:
    for i in os.listdir('tmp'):
        os.remove(f'tmp/{i}')
except:
    pass
try:
    info = open(f'download/{like_list_title_get}/info.json', 'r').read()
    already_list_file = json.loads(info)
    for i in already_list_file.get('info'):
        try:
            like_list.remove(str(i))
        except:
            pass
except:
    try:
        os.makedirs(f'download/{like_list_title_get}')
    except:
        pass
    error = input('Error while reading the download history, create a new one? (Y/N):').upper()
    if error == 'Y':
        init_json = json.dumps({'info': []})
        open(f'download/{like_list_title_get}/info.json', 'w').write(init_json)
        print('Created successfully')
    elif error == 'N':
        raise
    else:
        print('What on earth did you choose?')
        sys.exit()
thead = input('Multithreaded download (experimental)? (Y/N):').upper()
if thead == 'Y':
    mthead = True
elif thead == 'N':
    mthead = False
else:
    print('What on earth did you choose?')
    sys.exit()
for bvid in like_list:
    info = get_video_info(bvid)
    for cid in info.get('pages_cid'):
        download_video(bvid, cid, like_list_title_get, mthead)
print('Processing finished')
```
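A small self-contained check of the filename-sanitizing translation table the script above builds with `str.maketrans` (the mapping strings are taken verbatim from the script; the title is made up):

```python
# Same table as in the script: forbidden Windows filename characters are
# remapped before titles are used as filenames.
ng_str = r'\/:*?"<>|'
translate_str = r"¦¦:x?'《》¦"
trantab = str.maketrans(ng_str, translate_str)

title = 'part 1/2: "demo"<video>'  # made-up example title
print(title.translate(trantab))    # characters are substituted per the table
```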
{ "source": "0x8008135/hydrafw", "score": 3 }
#### File: contrib/bbio_hydranfc/bbio_hydranfc_init.py
```python
import hexdump
import serial
import time
import sys
import struct

# see python -m serial.tools.list_ports
#ser = serial.Serial('/dev/ttyACM0', 115200, timeout=0.01)
ser = serial.Serial('COM3', 115200, timeout=0.01)

def cmd_check_status(status):
    if status != '\x01':
        print status.encode('hex'),
        print "Error",
        print ""
    else:
        print "OK",
        print ""

def cs_on():
    ser.write('\02')
    status=ser.read(1)
    if status != '\x01':
        print "CS-ON:",
        print status.encode('hex'),
        print "Error",
        print ""

def cs_off():
    ser.write('\03')
    status=ser.read(1)
    if status != '\x01':
        print "CS-OFF:",
        print status.encode('hex'),
        print "Error",
        print ""

def configure_trf797a_gpio():
    # Configure NFC/TRF7970A in SPI mode with Chip Select
    print("Configure NFC/TRF7970A in SPI mode with Chip Select")
    ser.write("exit\n")
    print(ser.readline()),;print(ser.readline()),
    print(ser.readline()),;print(ser.readline()),
    ser.write("\n")
    print(ser.readline()),;print(ser.readline()),
    ser.write("gpio pa3 mode out off\n")
    print(ser.readline()),;print(ser.readline()),
    ser.write("gpio pa2 mode out on\n")
    print(ser.readline()),;print(ser.readline()),
    ser.write("gpio pc0 mode out on\n")
    print(ser.readline()),;print(ser.readline()),
    ser.write("gpio pc1 mode out on\n")
    print(ser.readline()),;print(ser.readline()),
    ser.write("gpio pb11 mode out off\n")
    print(ser.readline()),;print(ser.readline()),
    time.sleep(0.02);
    ser.write("gpio pb11 mode out on\n");
    print(ser.readline()),;print(ser.readline()),
    time.sleep(0.01);
    ser.write("gpio pa2-3 pc0-1 pb11 r\n");
    print(ser.readline()),;print(ser.readline()),
    print(ser.readline()),;print(ser.readline()),
    print(ser.readline()),;print(ser.readline()),
    print(ser.readline()),;print(ser.readline()),
    print ""

def enter_bbio():
    for i in xrange(20):
        ser.write("\x00")
    if "BBIO1" in ser.read(5):
        print "Into BBIO mode: OK",
        print(ser.readline()),
        print ""
    else:
        print "Could not get into bbIO mode"
        exit()

def exit_bbio():
    ser.write('\x00')
    ser.write('\x0F\n')
    ser.readline();ser.readline()

def bbio_spi_conf():
    print "Switching to SPI mode:",
    ser.write('\x01')
    print ser.read(4),
    print(ser.readline()),
    print ""
    print "Configure SPI2 polarity 0 phase 1:",
    ser.write('\x80')
    status=ser.read(1) # Read Status
    cmd_check_status(status)
    print "Configure SPI2 speed to 2620000 bits/sec:",
    ser.write('\x63')
    status=ser.read(1) # Read Status
    cmd_check_status(status)

def trf7970a_software_init():
    cs_on()
    print "Write TRF7970A Software Initialization 0x83 0x83 (no read):",
    ser.write('\x05\x00\x02\x00\x00') # Write 2 data, read 0 data
    ser.write('\x83\x83') # Data
    status=ser.read(1) # Read Status
    cmd_check_status(status)
    cs_off()

def trf7970a_write_idle():
    cs_on()
    print "Write TRF7970A Idle 0x80 0x80 (no read):",
    ser.write('\x05\x00\x02\x00\x00') # Write 2 data, read 0 data
    ser.write('\x80\x80') # Data
    status=ser.read(1) # Read Status
    cmd_check_status(status)
    cs_off()

def trf7970a_read_modulator():
    cs_on()
    print "Read TRF7970A Modulator/SYS_CLK Control Register (0x09):",
    ser.write('\x05\x00\x01\x00\x01') # Write 1 data, read 1 data
    ser.write('\x49') # Data
    status=ser.read(1) # Read Status
    modulator = ser.read(1)
    print modulator.encode('hex'), # Read Data
    if modulator == '\x91':
        print "OK",
    else:
        print "Error"
    print ""
    cs_off()
    print ""
    if modulator == '\x91':
        return 1
    else:
        return 0

def bbio_trf7970a_init():
    i = 0
    end = 10
    while True:
        trf7970a_software_init()
        trf7970a_write_idle()
        time.sleep(0.1)
        ret=trf7970a_read_modulator()
        if ret == 1:
            break
        i = i + 1
        if i > end:
            break

# main code
configure_trf797a_gpio()
enter_bbio()
bbio_spi_conf()
bbio_trf7970a_init()
exit_bbio()
```
#### File: contrib/bbio_smartcard/smartcard_bbio.py
```python
import sys
import serial
import time
import pyHydrabus

from binascii import hexlify

BBIO_SMARTCARD = 0b00001011
BBIO_SMARTCARD_CONFIG = 0b10000000

DEV_CONVENTION_INVERSE = 0x00
DEV_CONVENTION_DIRECT = 0x01

speed_dict = {640:0, 1200:1, 2400:2, 4800:3, 9600:4, 19200:5, 31250:6,
              38400:7, 57600:8, 115200:10}

Fi = [372, 372, 558, 744, 1116, 1488, 1860, "RFU", "RFU", 512, 768, 1024,
      1536, 2048, "RFU", "RFU"];
Di = ["RFU", 1, 2, 4, 8, 16, 32, 64, 12, 20, "RFU", "RFU", "RFU", "RFU",
      "RFU", "RFU"];

def smartcard_set_parity(parity):
    if parity == "even":
        hydrabus.write(bytes([BBIO_SMARTCARD_CONFIG | 0b010]))
    else:
        hydrabus.write(bytes([BBIO_SMARTCARD_CONFIG | 0b110]))
    rsp = hydrabus.read(1)
    if b"\x01" not in rsp:
        hydrabus_cleanup()
        error("Cannot change parity, try again or reset hydrabus.")

def smartcard_reverse(value):
    value = (value & 0xcc) >> 2 | (value & 0x33) << 2
    value = (value & 0xaa) >> 1 | (value & 0x55) << 1
    value = (value >> 4 | value << 4) & 0xff
    return value

def smartcard_apply_convention(data, convention):
    result = b""
    if convention == DEV_CONVENTION_INVERSE:
        for i in range(len(data)):
            result += bytes([smartcard_reverse(data[i] ^ 0xff)])
    else:
        result = data
    return result

def smartcard_crc(data):
    crc = 0
    for i in data:
        crc ^= i
    return bytes([crc])

def parse_atr(data):
    """
    Basic smartcard answer to reset (ATR) parser.

    :example:

    >>> atr = bytes([0x3B, 0x04, 0x92, 0x23, 0x10, 0x91])
    >>> convention = parse_atr(atr)
    Parsing ATR:
    TS: 0x3b, direct convention
    T0: 0x4
    TD1: absent, protocol T=0
    TA2: absent, card in negotiable mode.
    Historical bytes: 92231091
    >>>
    """
    print("Parsing ATR:")
    if len(data) < 2:
        return 0
    if data[0] == 0x3b:
        print("TS: " + hex(data[0]) + ", direct convention")
        convention = DEV_CONVENTION_DIRECT
    elif data[0] == 0x3f:
        print("TS: " + hex(data[0]) + ", inverse convention")
        convention = DEV_CONVENTION_INVERSE
    else:
        print("TS: " + hex(data[0]) + ", Non standard ATR")
        return DEV_CONVENTION_DIRECT
    print("T0: " + hex(data[1]))
    y1 = data[1] >> 4
    y = 0
    k = data[1] & 0xf
    i = 1
    if y1 & 1:
        i += 1
        F = Fi[data[i] >> 4];
        D = Di[data[i] & 0x0F];
        E = F // D;
        print("TA1: " + hex(data[i]) + ", Fi={:d}, Di={:d}, {:d} cycles/ETU".format(F, D, E))
    if y1 & 2:
        i += 1
        print("TB1: " + hex(data[i]))
    if y1 & 4:
        i += 1
        print("TC1: " + hex(data[i]) + ", extra guard time integer N={:d}".format(data[i]))
    if y1 & 8:
        i += 1
        print("TD1: " + hex(data[i]) + ", protocol T={:d}".format(data[i] & 0xf))
        y = data[i] >> 4
    else:
        print("TD1: absent, protocol T=0")
        protocol = 0
        y = 0
    if y & 1:
        i += 1
        print("TA2: " + hex(data[i]) + ", card in specific mode")
    else:
        print("TA2: absent, card in negotiable mode.")
    if y & 2:
        i += 1
        print("TB2: " + hex(data[i]))
    if y & 4:
        i += 1
        print("TC2: " + hex(data[i]))
    if y & 8:
        i += 1
        protocol = data[i] & 0xf
        print("TD2: " + hex(data[i]) + ", protocol T={:d}".format(protocol))
        y = data[i] >> 4
    else:
        y = 0
    ind = 3
    while y > 0:
        if y & 1:
            i += 1
            if protocol == 15:
                param = ", X={:d}, Y={:d}".format(data[i] >> 6, data[i] & 0x3f)
            else:
                param = ""
            print("TA" + str(ind) + ": " + hex(data[i]) + param)
        if y & 2:
            i += 1
            print("TB" + str(ind) + ": " + hex(data[i]))
        if y & 4:
            i += 1
            print("TC" + str(ind) + ": " + hex(data[i]))
        if y & 8:
            i += 1
            protocol = data[i] & 0xf
            print("TD" + str(ind) + ": " + hex(data[i]) + ", protocol T={:d}".format(protocol))
            y = data[i] >> 4
            ind += 1
        else:
            y = 0
    if k > 0:
        i += 1
        print("Historical bytes: " + hexlify(data[i:i+k]).decode())
    if protocol != 0 or data[i+k:] != b"":
        print("TCK: " + hex(data[i+k]))
    return convention

def smartcard_t1(nad, pcb, data, rcv_length):
    cmd = bytes([nad, pcb, len(data)]) + data
    cmd += smartcard_crc(cmd)
    print("Sending: " + hexlify(cmd).decode())
    rsp = scard.write_read(cmd, rcv_length)
    print("Receiving: " + hexlify(rsp).decode())

def smartcard_send_pps(F, D, T=0):
    ppss = b"\xff"
    pps0 = bytes([0x10 | T])
    pps1 = bytes([Fi.index(F) << 4 | Di.index(D)])
    pps = ppss + pps0 + pps1
    pps = pps + smartcard_crc(pps)
    print("Sending PPS: " + hexlify(pps).decode())
    data = scard.write_read(pps, len(pps))
    print("Response PPS: " + hexlify(data).decode())

if __name__ == '__main__':
    scard = pyHydrabus.Smartcard('/dev/ttyACM0')
    atr = scard.get_atr()
    print("ATR: " + hexlify(atr).decode())
    convention = parse_atr(atr)
    scard.convention = convention
    scard.close()
    sys.exit()
```
#### File: contrib/pyHydra_93xx/hydra_93xx.py
```python
from pyHydrabus import RawWire


class EEPROM93xx(RawWire):
    """
    Serial EEPROM handler for the 93xx series (93C06, 93C46, 93C86, ...)
    """
    def __init__(self, port="", address_size=8, word_size=16):
        """
        Handler constructor

        :param address_size: Address size in bits
        :type address_size: int
        :param word_size: Word size in bits
        :type word_size: int
        """
        super().__init__(port)
        if address_size < 2:
            print("Address size must be at least 2 bit long")
            exit(2)
        self.address_size = address_size
        self.word_size = word_size // 8

        self.gpio_mode = 1
        self.set_speed(5000)

        self.cs = self.AUX[0]
        self.cs.direction = 0
        self.cs.value = 0

    def write_enable(self):
        """
        Write enable: opcode 00
        """
        self.cs.value = 1
        # Start bit and opcode
        self.sda = 1
        self.clock()
        self.sda = 0
        self.clocks(2)
        self.sda = 1
        self.clocks(self.address_size)
        self.value = 0
        self.cs.value = 0

    def write_disable(self):
        """
        Write disable: opcode 00
        """
        self.cs.value = 1
        # Start bit and opcode
        self.sda = 1
        self.clock()
        self.sda = 0
        self.clocks(2)
        self.clocks(self.address_size)
        self.cs.value = 0

    def read_data(self, address, num):
        """
        Read: opcode 10

        :param address: Address to read
        :type address: int
        :param num: number of words to read
        :type num: int
        """
        data_format = "{:0" + str(self.address_size) + "b}"
        addr_bin = data_format.format(address)
        self.cs.value = 1
        # Start bit and opcode
        self.sda = 1
        self.clocks(2)
        self.sda = 0
        self.clock()
        # Write address
        for i in addr_bin:
            self.sda = int(i, 2)
            self.clock()
        # Read values
        data = self.read(num)
        self.cs.value = 0
        return data

    def write_data(self, address, data):
        """
        Write: opcode 01

        :param address: Address to write
        :type address: int
        :param data: data to write
        :type data: bytes
        """
        if len(data) % self.word_size != 0:
            print("Data length must be multiple of %d bits" % (self.word_size * 8))
            exit(2)
        data_format = "{:0" + str(self.address_size) + "b}"
        for i in range(0, len(data), self.word_size):
            addr_bin = data_format.format(address + i // self.word_size)
            self.cs.value = 1
            # Start bit and Opcode
            self.sda = 1
            self.clock()
            self.sda = 0
            self.clock()
            self.sda = 1
            self.clock()
            # Write address
            for j in addr_bin:
                self.sda = int(j, 2)
                self.clock()
            self.write(data[i:i + self.word_size])
            self.cs.value = 0

    def erase(self, address):
        """
        Erase: opcode 11

        :param address: Address to erase
        :type address: int
        """
        data_format = "{:0" + str(self.address_size) + "b}"
        addr_bin = data_format.format(address)
        self.cs.value = 1
        # Start bit and opcode
        self.sda = 1
        self.clock()
        self.sda = 1
        self.clocks(2)
        # Write address
        for i in addr_bin:
            self.sda = int(i, 2)
            self.clock()
        self.cs.value = 0


if __name__ == '__main__':
    eeprom = EEPROM93xx('/dev/ttyACM0', 6, 16)
    print(eeprom.wires)
    print(eeprom.cs.value)
    eeprom.write_enable()
    eeprom.write_data(0, b"Hello world !!")
    eeprom.write_disable()
    print(eeprom.read_data(0, 128))
```
#### File: hydrafw/scripts/tx_bench.py
```python
#!/usr/bin/env python
"""
Before launching this test, start HydraBus with the latest HydraFW, open a
console and type:
    debug test-rx
Then close the console and launch the following example benchmarks.

Examples
tx_bench.py 64 1000 >> bench.txt
tx_bench.py 128 500 >> bench.txt
tx_bench.py 256 500 >> bench.txt
tx_bench.py 500 500 >> bench.txt
tx_bench.py 512 500 >> bench.txt
tx_bench.py 1000 500 >> bench.txt
tx_bench.py 1008 500 >> bench.txt
tx_bench.py 1024 500 >> bench.txt
tx_bench.py 1280 400 >> bench.txt
tx_bench.py 2000 400 >> bench.txt
tx_bench.py 2048 400 >> bench.txt
tx_bench.py 3000 400 >> bench.txt
"""
import serial;
import time;
import sys;

def main():
    try:
        serialPort = serial.Serial('COM3',\
            115200, serial.EIGHTBITS,\
            serial.PARITY_NONE, serial.STOPBITS_ONE);
    except:
        print("Couldn't open serial port");
        exit();

    size = int(sys.argv[1]);
    num_Packets = int(sys.argv[2]);

    txData=[];
    for i in range(size):
        txData.append('a');

    packet_Length = len(txData);
    text = txData;
    print('packet_Length: %d num_Packets: %d' % (packet_Length, num_Packets));

    byteSent = 0;
    # First time slice
    t1 = time.time()
    for i in range(num_Packets):
        byteSent += serialPort.write(text);
    # Second time slice
    t2 = time.time()

    throughPut = (num_Packets * packet_Length)/(t2-t1);
    time_s = t2-t1;
    throughPut_kb = throughPut/1024;
    kbyteSent = byteSent/1024;

    print("TX Time: %.5f s " % time_s);
    print('TX Bytes/s: %d TX KBytes/s: %.2f' % (throughPut, throughPut_kb));
    print;
    """
    print;
    print('Bytes Sent: %d ' % byteSent);
    print('KBytes Sent: %.2f ' % kbyteSent);
    """

if __name__ == '__main__':
    main()
```
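A self-contained check of the inverse-convention byte transform implemented by `smartcard_reverse()`/`smartcard_apply_convention()` in smartcard_bbio.py above; the function body is restated verbatim so the snippet runs without hardware:

```python
# Restated from smartcard_bbio.py: full bit-order reversal of one byte.
def smartcard_reverse(value):
    value = (value & 0xcc) >> 2 | (value & 0x33) << 2
    value = (value & 0xaa) >> 1 | (value & 0x55) << 1
    value = (value >> 4 | value << 4) & 0xff
    return value

b = 0x3b                                 # direct-convention TS byte
inv = smartcard_reverse(b ^ 0xff)        # invert all bits, then reverse order
assert inv == 0x23
assert smartcard_reverse(inv ^ 0xff) == b  # the transform is an involution
```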
{ "source": "0x822a5b87/test-bazel-cpp-tutorial", "score": 2 }
#### File: test-bazel-cpp-tutorial/stage6/deps.bzl
```python
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

def load_deps():
    if "com_google_absl" not in native.existing_rules():
        http_archive(
            name = "com_google_absl",
            sha256 = "84b4277a9b56f9a192952beca535313497826c6ff2e38b2cac7351a3ed2ae780",
            strip_prefix = "abseil-cpp-c476da141ca9cffc2137baf85872f0cae9ffa9ad",
            url = "https://github.com/abseil/abseil-cpp/archive/c476da141ca9cffc2137baf85872f0cae9ffa9ad.zip",
        )
    if "zlib" not in native.existing_rules():
        http_archive(
            name = "zlib",
            build_file_content = """
cc_library(
    name = "zlib",
    srcs = glob(["*.c", "*.h"]),
    includes = ["."],
    visibility = ["//visibility:public"],
)
""",
            sha256 = "c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1",
            strip_prefix = "zlib-1.2.11",
            urls = ["https://mirror.bazel.build/zlib.net/zlib-1.2.11.tar.gz"],
        )
```
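A sketch of how a dependency macro like this is typically consumed from a WORKSPACE file; the `//stage6:deps.bzl` label is an assumption based on the file path shown above:

```python
# WORKSPACE (sketch; label assumes deps.bzl sits in the stage6 package)
load("//stage6:deps.bzl", "load_deps")

load_deps()  # registers com_google_absl and zlib unless already defined
```

The `native.existing_rules()` guards in `load_deps()` are what make this call safe to repeat: a repository already declared elsewhere is not redefined.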
{ "source": "0x8b/HackerRank", "score": 3 }
#### File: HackerRank/aoc2015/12.py
```python
import json
import re

content = input()
number = re.compile(r"-?\d+")
all_numbers = map(int, number.findall(content))
parsed = json.loads(content)


def traverse(node):
    if isinstance(node, dict):
        for k, v in node.items():
            if isinstance(v, dict) and "red" in v.values():
                node[k] = 0
            traverse(node[k])
    elif isinstance(node, list):
        for i in range(len(node)):
            if isinstance(node[i], dict) and "red" in node[i].values():
                node[i] = 0
            traverse(node[i])
    else:
        return


traverse(parsed)

assert sum(all_numbers) == 119433

content = json.dumps(parsed, indent=4, sort_keys=True)

assert sum(map(int, number.findall(content))) == 68466
```
#### File: HackerRank/aoc2017/07.py
```python
import fileinput
import re
from collections import Counter, defaultdict
from operator import itemgetter

c = re.compile(r"(\w+)")
values = {}
children = defaultdict(list)

lines = [line for line in fileinput.input()]

head = Counter(re.compile(r"([a-z]+)").findall("\n".join(lines))).most_common()[-1][0]

assert head == "cqmvs"


def parse(line):
    name, weight, *ch = c.findall(line)
    values[name] = int(weight)
    children[name] = ch


data = [parse(line) for line in lines]

ans = None


def t(name):
    global ans

    if children[name]:
        v, s = zip(*[(values[n], t(n)) for n in children[name]])

        if len(set(s)) > 1:
            mc = list(map(itemgetter(0), Counter(s).most_common()))
            diff = mc[-1] - mc[0]

            if not ans:
                ans = v[s.index(mc[-1])] - diff

        return values[name] + sum(s)
    else:
        return values[name]


t(head)

assert ans == 2310
```
#### File: HackerRank/codeforces/0204A-LittleElephantAndInterval.py
```python
l, r = input().split()


def count(s: str) -> int:
    if s[0] != s[-1]:
        while s[0] != s[-1]:
            s = str(int(s) - 1)

    length = len(s)
    cnt = 0

    if length == 1:
        return int(s)

    if length == 2:
        return 9 + int(s[0])

    # length > 2
    cnt += 9  # 1 2 ... 9
    cnt += 9  # 11 22 ... 99
    cnt += 1 + int(s[1:-1])  # _x..._
    cnt += (int(s[0]) - 1) * 10 ** (length - 2)
    cnt += sum(9 * 10 ** (n - 2) for n in range(3, length))

    return cnt


print((l[0] != l[-1]) * -1 + count(r) - count(l) + 1)
```
#### File: HackerRank/hackerrank/BetweenTwoSets.py
```python
import os


def getTotalX(a, b):
    c = 0
    for i in range(max(a), min(b) + 1):
        if all([i % d == 0 for d in a]) and all([d % i == 0 for d in b]):
            c += 1
    return c


if __name__ == "__main__":
    f = open(os.environ["OUTPUT_PATH"], "w")
    nm = input().split()
    n = int(nm[0])
    m = int(nm[1])
    a = list(map(int, input().rstrip().split()))
    b = list(map(int, input().rstrip().split()))
    total = getTotalX(a, b)
    f.write(str(total) + "\n")
    f.close()
```
#### File: HackerRank/hackerrank/BreakingTheRecords.py
```python
import os


def breakingRecords(scores):
    h = 0
    l = 0
    min_score = scores[0]
    max_score = scores[0]
    for s in scores[1:]:
        if s > max_score:
            max_score = s
            h += 1
        if s < min_score:
            min_score = s
            l += 1
    return h, l


if __name__ == "__main__":
    fptr = open(os.environ["OUTPUT_PATH"], "w")
    n = int(input())
    scores = list(map(int, input().rstrip().split()))
    result = breakingRecords(scores)
    fptr.write(" ".join(map(str, result)))
    fptr.write("\n")
    fptr.close()
```
#### File: HackerRank/hackerrank/CompareTheTriples.py
```python
import os


def compareTriplets(a, b):
    e = 0
    f = 0
    for [c, d] in zip(a, b):
        if c - d > 0:
            e += 1
        if c - d < 0:
            f += 1
    return [e, f]


if __name__ == "__main__":
    fptr = open(os.environ["OUTPUT_PATH"], "w")
    a = list(map(int, input().rstrip().split()))
    b = list(map(int, input().rstrip().split()))
    result = compareTriplets(a, b)
    fptr.write(" ".join(map(str, result)))
    fptr.write("\n")
    fptr.close()
```
#### File: HackerRank/hackerrank/FlippingBits.py
```python
import os


def flipping_bits(n):
    return ~n & 0xFFFFFFFF


if __name__ == "__main__":
    fptr = open(os.environ["OUTPUT_PATH"], "w")
    q = int(input())
    for q_itr in range(q):
        n = int(input())
        result = flipping_bits(n)
        fptr.write(str(result) + "\n")
    fptr.close()
```
#### File: HackerRank/hackerrank/GradingStudents.py
```python
import os
import sys


def grading_policy(g):
    if g >= 38 and (5 - g % 5) < 3:
        return g - (g % 5) + 5
    return g


def grading_students(grades):
    return map(lambda x: grading_policy(x), grades)


if __name__ == "__main__":
    f = open(os.environ["OUTPUT_PATH"], "w")
    n = int(input())
    grades = []
    for _ in range(n):
        grades_item = int(input())
        grades.append(grades_item)
    result = grading_students(grades)
    f.write("\n".join(map(str, result)))
    f.write("\n")
    f.close()
```
#### File: HackerRank/hackerrank/InsertionSortPart1.py
```python
import os


def insertionSort1(n, arr):
    el = arr[-1]
    for i in reversed(range(1, n)):
        if arr[i - 1] > el:
            arr[i] = arr[i - 1]
            print(" ".join(map(str, arr)))
        else:
            arr[i] = el
            print(" ".join(map(str, arr)))
            return
    arr[0] = el
    print(" ".join(map(str, arr)))


if __name__ == "__main__":
    n = int(input())
    arr = list(map(int, input().rstrip().split()))
    insertionSort1(n, arr)
```
#### File: HackerRank/hackerrank/TheTimeInWords.py
```python
h = int(input())
m = int(input())


def word(n):
    return [
        "zero", "one", "two", "three", "four", "five", "six",
        "seven", "eight", "nine", "ten", "eleven", "twelve",
        "thirteen", "fourteen", "fifteen", "sixteen", "seventeen",
        "eighteen", "nineteen", "twenty",
    ][n]


def time_in_words(h, m):
    if m == 0:
        print(word(h), "o' clock")
    elif m == 15:
        print("quarter past", word(h))
    elif m < 30:
        minutes = ""
        if m == 1:
            minutes = "one minute past"
        elif m > 20:
            minutes = "twenty " + word(m - 20) + " minutes past"
        else:  # 2 - 20
            minutes = word(m) + " minutes past"
        print(minutes, word(h))
    elif m == 30:
        print("half past", word(h))
    elif m == 45:
        print("quarter to", word(h + 1))
    else:  # 31 to 59 except 45
        minutes = ""
        to = 60 - m
        if to > 20:
            minutes = "twenty " + word(to - 20) + " minutes to"
        else:
            minutes = word(to) + " minutes to"
        if to == 1:
            minutes = "one minute to"
        print(minutes, word(h + 1))


time_in_words(h, m)
```
#### File: HackerRank/hackerrank/TreeInorderTraversal.py
```python
def inOrder(root):
    if root.left:
        inOrder(root.left)
    print(root, end=" ")
    if root.right:
        inOrder(root.right)
```
#### File: HackerRank/hackerrank/TreePostorderTraversal.py
```python
def postOrder(root):
    if root.left:
        postOrder(root.left)
    if root.right:
        postOrder(root.right)
    print(root, end=" ")
```
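A worked example of the 32-bit complement trick used in FlippingBits.py above; `~n` in Python yields a negative number, and masking with `0xFFFFFFFF` maps it back into the unsigned 32-bit range:

```python
def flipping_bits(n):
    return ~n & 0xFFFFFFFF

assert flipping_bits(9) == 4294967286   # 2**32 - 1 - 9
assert flipping_bits(0) == 0xFFFFFFFF   # all 32 bits flipped on
```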
{ "source": "0x8BitDev/MAPeD-SPReD", "score": 2 }
#### File: MAPeD-SPReD/scripts/MAPeD_project_stats.py
```python
def dump_inst_entity( _ent ):
    print '\t\tuid: ' + str( _ent.uid )
    print '\t\ttarget_uid: ' + str( _ent.target_uid )
    print '\t\tbase_ent_uid: ' + str( _ent.base_ent_uid )
    print '\t\tx: ' + str( _ent.x )
    print '\t\ty: ' + str( _ent.y )
    print '\t\tproperties: ' + _ent.properties + '\n'

def dump_base_entity( _ent ):
    print '\t\tname: ' + _ent.name
    print '\t\tuid: ' + str( _ent.uid )
    print '\t\twidth: ' + str( _ent.width )
    print '\t\theight: ' + str( _ent.height )
    print '\t\tpivot_x: ' + str( _ent.pivot_x )
    print '\t\tpivot_y: ' + str( _ent.pivot_y )
    print '\t\tproperties: ' + _ent.properties
    print '\t\tinst_properties: ' + _ent.inst_properties + '\n'

# Get the API version
api_ver = mpd_api_ver()
print 'API ver: ' + str( api_ver >> 8 ) + '.' + str( api_ver & 0xff )

print '*** The active project stats: ***\n'

num_banks = mpd_num_banks()
print 'Number of graphics banks: ' + str( num_banks )
print 'The active bank is ' + str( mpd_get_active_bank() )

# Run through all graphics banks
# Each graphics bank contains: CHR bank, blocks (256), tiles (256), palettes and screens
for bank_n in xrange( num_banks ):
    print '\tBank: ' + str( bank_n )
    for slot_n in xrange( mpd_get_palette_slots( bank_n ) ):
        print '\t\tPalette' + str( slot_n ) + ':'
        for plt_n in xrange( 4 ):
            print '\t\t\t' + str( plt_n ) + ': ' + str( mpd_get_palette( bank_n, plt_n, slot_n ) )

    # CHRs/Blocks/Tiles count
    print '\n\t\tCHRs(1x1):\t' + str( mpd_get_CHRs_cnt( bank_n ) )
    print '\t\tBlocks(2x2):\t' + str( mpd_get_blocks_cnt( bank_n ) )
    print '\t\tTiles(4x4):\t' + str( mpd_get_tiles_cnt( bank_n ) )

    # CHR bank
    # CHR data is stored as image 128 x 128 x Pages count (8bit)
    # Use mpd_get_CHRs_data( int _bank_n ) to get CHR's raw data
    # NES: Use mpd_export_CHR_data( int _bank_n, string _filename, bool _need_padding ) to export NES-ready data
    # SMS: Use mpd_export_CHR_data( int _bank_n, string _filename, int _bpp ) to export SMS-ready data
    # PCE/ZX: Use mpd_export_CHR_data( int _bank_n, string _filename ) to export platform-ready data
    CHR_data = mpd_get_CHRs_data( bank_n )
    print '\n\t\tCHR data size: ' + str( CHR_data.Count ) + ' --> Array[Byte]' # + str( CHR_data )

    # Dump 2x2 tiles (blocks) data
    # Each block's value (UInt32) has the following bits description:
    # NES: 15.. [ property_id ](4) [ palette ind ](2) [X](2) [ CHR ind ](8) ..0
    # SMS: 15.. [ property_id ](4) [ hv_flip ](2) [ palette ind ](1) [CHR ind](9) ..0
    # PCE: 19.. [ property_id ](4) [ palette ind ](4) [CHR ind](12) ..0
    # ZX:  15.. [ property_id ](4) [ palette ind ](1) [CHR ind](11) ..0
    # Four UInt32 values form one 2x2 tile (block)
    blocks_data = mpd_get_blocks( bank_n );
    print '\n\t\tBlocks: Array size: ' + str( blocks_data.Count ) + ' --> ' + str( blocks_data )

    # Dump 4x4 tiles data
    # Each tile value (UInt32) consists of block indices ordered left to right, up to down from low to high byte
    tiles_data = mpd_get_tiles( bank_n )
    print '\n\t\tTiles: Array size: ' + str( tiles_data.Count ) + ' --> ' + str( tiles_data )

    # Dump screens data
    # Each screen's data consists of a byte array of 4x4 or 2x2 tiles ordered left to right, up to down
    screen_mode_str = ''
    if mpd_screen_mode():
        screen_mode_str = 'Blocks2x2'
    else:
        screen_mode_str = 'Tiles4x4'
    n_screens = mpd_num_screens( bank_n )
    print '\n\t\tNumber of screens: ' + str( n_screens ) + ' / ' + screen_mode_str
    for scr_n in xrange( n_screens ):
        scr_data = mpd_get_screen_data( bank_n, scr_n )
        print '\t\tScreen' + str( scr_n ) + ': Array size: ' + str( scr_data.Count ) + ' --> ' + str( scr_data )

# Run through layouts
num_layouts = mpd_num_layouts()
print '\nNumber of layouts: ' + str( num_layouts )
for layout_n in xrange( num_layouts ):
    layout_width = mpd_layout_width( layout_n )
    layout_height = mpd_layout_height( layout_n )
    start_screen_cell = mpd_layout_start_screen_cell( layout_n )
    print '\tLayout' + str( layout_n ) + ': width: ' + str( layout_width ) + ' / height: ' + str( layout_height ) + ' ; Start screen cell: ' + str( start_screen_cell ) + ' ; Start screen ind: ' + ( str( mpd_layout_screen_ind( layout_n, start_screen_cell % layout_width, start_screen_cell / layout_width ) ) if start_screen_cell >= 0 else str( start_screen_cell ) )

    # Dump layout data
    for cell_y in xrange( layout_height ):
        for cell_x in xrange( layout_width ):
            num_screen_entities = mpd_layout_screen_num_entities( layout_n, cell_x, cell_y );
            print '\t\tCell: ' + str( cell_x + cell_y * layout_width ) + ' -> Screen index: ' + str( mpd_layout_screen_ind( layout_n, cell_x, cell_y ) ) + ' ; Marks: ' + str( hex( mpd_layout_screen_marks( layout_n, cell_x, cell_y ) ) ) + ' ; Entities: ' + str( num_screen_entities )

            # Dump entity instances
            for ent_n in xrange( num_screen_entities ):
                dump_inst_entity( mpd_layout_get_inst_entity( layout_n, cell_x, cell_y, ent_n ) )

# Dump base entities
print '\n*** Base entities: ***'
ent_group_names = mpd_group_names_of_entities()
print 'Number of groups: ' + str( ent_group_names.Count )
for group_n in xrange( ent_group_names.Count ):
    grp_name = ent_group_names[ group_n ]
    grp_num_ents = mpd_group_num_entities( grp_name )
    print '\t' + str( grp_name ) + '(' + str( grp_num_ents ) + '):'
    for ent_n in xrange( grp_num_ents ):
        dump_base_entity( mpd_group_get_entity_by_ind( grp_name, ent_n ) )
```
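A small sketch decoding the NES block bit layout documented in the comments above (bits 15..0: property_id(4), palette ind(2), X/flip(2), CHR ind(8)); the field names mirror the comment and the sample value is made up:

```python
# Decode one NES 2x2-block word per the layout in the script's comments.
def decode_nes_block(value):
    return {
        'chr_ind':     value & 0xff,         # bits 0..7
        'flip':        (value >> 8) & 0x3,   # bits 8..9  ([X] in the comment)
        'palette_ind': (value >> 10) & 0x3,  # bits 10..11
        'property_id': (value >> 12) & 0xf,  # bits 12..15
    }

print(decode_nes_block(0xA512))  # made-up sample value, for illustration
```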
{ "source": "0x90/dexsim", "score": 2 }
#### File: dexsim/dexsim/plugin.py
```python
import hashlib
import logging
import os
import re
import tempfile
from abc import abstractmethod
from json import JSONEncoder

from smaliemu.emulator import Emulator
from timeout3 import timeout

logger = logging.getLogger(__name__)


class Plugin(object):
    """
    Base class for decryption plugins.
    """
    name = 'Plugin'
    description = ''
    version = ''
    enabled = True

    # const/16 v2, 0x1a
    CONST_NUMBER = r'const(?:\/\d+) [vp]\d+, (-?0x[a-f\d]+)\s+'
    # ESCAPE_STRING = '''"(.*?)(?<!\\\\)"'''
    ESCAPE_STRING = '"(.*?)"'
    # const-string v3, "encode string"
    CONST_STRING = r'const-string [vp]\d+, ' + ESCAPE_STRING + '.*'
    # move-result-object v0
    MOVE_RESULT_OBJECT = r'move-result-object ([vp]\d+)'
    # new-array v1, v1, [B
    NEW_BYTE_ARRAY = r'new-array [vp]\d+, [vp]\d+, \[B\s+'
    # new-array v1, v1, [I
    NEW_INT_ARRAY = r'new-array [vp]\d+, [vp]\d+, \[I\s+'
    # new-array v1, v1, [C
    NEW_CHAR_ARRAY = r'new-array [vp]\d+, [vp]\d+, \[C\s+'
    # fill-array-data v1, :array_4e
    FILL_ARRAY_DATA = r'fill-array-data [vp]\d+, :array_[\w\d]+\s+'

    ARRAY_DATA_PATTERN = r':array_[\w\d]+\s*.array-data[\w\W\s]+.end array-data'

    # [{'className':'', 'methodName':'', 'arguments':'', 'id':''}, ..., ]
    json_list = []
    # [(mtd, old_content, new_content), ..., ]
    target_contexts = {}
    # data_arraies = {}
    # smali methods which have been updated
    smali_mtd_updated_set = set()

    def __init__(self, driver, smalidir):
        self.make_changes = False
        self.driver = driver
        self.smalidir = smalidir
        # self.smali_files = smali_files

        self.emu = Emulator()
        self.emu2 = Emulator()

    # def get_return_variable_name(self, line):
    #     mro_statement = re.search(self.MOVE_RESULT_OBJECT, line).group()
    #     return mro_statement[mro_statement.rindex(' ') + 1:]

    @timeout(1)
    def pre_process(self, snippet):
        """
        Preprocess sget instructions.
        """
        # emu2 = Emulator()
        args = {}
        clz_sigs = set()
        field_desc_prog = re.compile(r'^.*, (.*?->.*)$')
        for line in snippet:
            if 'sget' not in line:
                continue
            field_desc = field_desc_prog.match(line).groups()[0]
            try:
                field = self.smalidir.get_field(field_desc)
            except TypeError as ex:
                logger.warning(ex)
                logger.warning(field_desc)
                continue
            if field:
                value = field.get_value()
                if value:
                    args.update({field_desc: value})
                    continue
            clz_sigs.add(field_desc.split('->')[0])

        for clz_sig in clz_sigs:
            mtd = self.smalidir.get_method(clz_sig, '<clinit>()V')
            if mtd:
                body = mtd.get_body()
                self.emu2.call(re.split(r'\n\s*', body), thrown=False)
                self.emu2.call(re.split(r'\n\s*', body), thrown=False)
                args.update(self.emu2.vm.variables)

                for (key, value) in self.emu2.vm.variables.items():
                    if clz_sig in key:
                        field = self.smalidir.get_field(key)
                        field.set_value(value)
        # print(__name__, 'pre_process, emu2', sys.getsizeof(self.emu2))
        return args

    @staticmethod
    def convert_args(typ8, value):
        """
        Convert an argument to a JSON-friendly format, according to its type.
        """
        if value is None:
            return None

        if typ8 == 'I':
            if not isinstance(value, int):
                return None
            return 'I:' + str(value)

        if typ8 == 'B':
            if not isinstance(value, int):
                return None
            return 'B:' + str(value)

        if typ8 == 'S':
            if not isinstance(value, int):
                return None
            return 'S:' + str(value)

        if typ8 == 'C':
            # don't convert to char, avoid some unreadable chars.
            return 'C:' + str(value)

        if typ8 == 'Ljava/lang/String;':
            if not isinstance(value, str):
                return None

            import codecs
            item = codecs.getdecoder('unicode_escape')(value)[0]
            args = []
            for i in item.encode("UTF-8"):
                args.append(i)
            return "java.lang.String:" + str(args)

        if typ8 == '[B':
            if not isinstance(value, list):
                return None
            byte_arr = []
            for item in value:
                if item == '':
                    item = 0
                byte_arr.append(item)
            return '[B:' + str(byte_arr)

        if typ8 == '[C':
            if not isinstance(value, list):
                return None
            byte_arr = []
            for item in value:
                if item == '':
                    item = 0
                byte_arr.append(item)
            return '[C:' + str(byte_arr)

        logger.warning('Unsupported type %s %s', typ8, value)

    @timeout(3)
    def get_vm_variables(self, snippet, args, rnames):
        """
        snippet : smali code
        args    : method arguments
        rnames  : registers

        Get the current VM's variables.
        """
        self.emu2.call(snippet[-5:], args=args, thrown=False)

        # Note: register values may be wrong across method boundaries,
        # which can lead to garbled decryption results.
        # Register v1 of method A and register v1 of method B do not
        # necessarily hold the same content.
        # TODO: clear the state before processing the next method.
        # Instance fields could be initialized into the smali file;
        # other temporary variables are resolved by executing smali.
        result = self.varify_argments(self.emu2.vm.variables, rnames)
        if result:
            return self.emu2.vm.variables

        self.emu2.call(snippet, args=args, thrown=False)
        result = self.varify_argments(self.emu2.vm.variables, rnames)
        if result:
            return self.emu2.vm.variables

    @staticmethod
    def varify_argments(variables, arguments):
        """
        variables : variables held by the VM
        arguments : arguments of the smali method

        Verify the smali method's arguments.
        """
        for k in arguments:
            value = variables.get(k, None)
            if value is None:
                return False
        return True

    @staticmethod
    def get_json_item(cls_name, mtd_name, args):
        """
        A json item is a JSON-formatted decryption object containing an id,
        className, methodName and arguments. The emulator/phone performs the
        decryption by parsing this object.
        """
        item = {'className': cls_name, 'methodName': mtd_name,
                'arguments': args}
        item['id'] = hashlib.sha256(JSONEncoder().encode(item).encode(
            'utf-8')).hexdigest()
        return item

    def append_json_item(self, json_item, mtd, old_content, rtn_name):
        """
        Append a JSON decryption object to the json list, which stores all
        JSON-formatted decryption objects.
        """
        mid = json_item['id']
        if rtn_name:
            new_content = 'const-string %s, ' % rtn_name + '%s'
        else:
            # TODO: there may be a better way
            # const-string v0, "Dexsim"
            # const-string v1, "Decode String"
            # invoke-static {v0, v1}, Landroid/util/Log;->d(
            # Ljava/lang/String;Ljava/lang/String;)I
            new_content = ('const-string v0, "Dexsim"\n'
                           'const-string v1, %s\n'
                           'invoke-static {v0, v1}, Landroid/util/Log;->d'
                           '(Ljava/lang/String;Ljava/lang/String;)I\n')

        if mid not in self.target_contexts:
            self.target_contexts[mid] = [(mtd, old_content, new_content)]
        else:
            self.target_contexts[mid].append((mtd, old_content, new_content))

        if json_item not in self.json_list:
            self.json_list.append(json_item)

    @abstractmethod
    def run(self):
        """
        Plugin execution logic.

        Every plugin must implement this method.
        """
        pass

    def optimize(self):
        """
        Generic smali optimization.

        Usually this is sufficient; plugins may also implement their own
        optimization.
        """
        if not self.json_list or not self.target_contexts:
            return

        jsons = JSONEncoder().encode(self.json_list)
        outputs = {}
        with tempfile.NamedTemporaryFile(mode='w+', delete=False) as tfile:
            tfile.write(jsons)
            outputs = self.driver.decode(tfile.name)
        os.unlink(tfile.name)

        if not outputs:
            return

        if isinstance(outputs, str):
            return

        for key, value in outputs.items():
            if 'success' not in value:
                continue
            if key not in self.target_contexts:
                logger.warning('not found %s', key)
                continue

            if value[1] == 'null':
                continue

            # json_item, mtd, old_content, rtn_name
            for item in self.target_contexts[key]:
                old_body = item[0].get_body()
                old_content = item[1]
                new_content = item[2] % value[1]

                # It's not a string.
                if outputs[key][1] == 'null':
                    continue

                item[0].set_body(old_body.replace(old_content, new_content))
                item[0].set_modified(True)
                self.make_changes = True

        self.smali_files_update()

    def clear(self):
        """
        Clean up after every decryption pass.
        """
        self.json_list.clear()
        self.target_contexts.clear()

    def smali_files_update(self):
        '''
        write changes to smali files
        '''
        if self.make_changes:
            for sf in self.smalidir:
                sf.update()
```
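For reference, the two static helpers above can be exercised on their own. A minimal sketch of how a decryption request is assembled, using only calls defined in this file; the class name passed in is hypothetical:

```python
# Encode a string argument the way the driver expects.
arg = Plugin.convert_args('Ljava/lang/String;', 'Hi')
# -> 'java.lang.String:[72, 105]'  (UTF-8 bytes of "Hi")

# 'Lcom/example/Crypt;' is an invented class name for illustration.
item = Plugin.get_json_item('Lcom/example/Crypt;', 'decode', [arg])
# item holds className/methodName/arguments plus an 'id' field:
# the sha256 of the JSON encoding of the other three fields.
print(item['id'])
```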
{ "source": "0x90E/news-diff_parser", "score": 2 }
#### File: news-diff_parser/src/newest_arrange.py
```python
import json
import os
import glob
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from models import News, Base
from datetime import date
import shutil

media_list = {
    1: '蘋果',
    2: '中時',
    3: '中央社',
    4: '東森',
    5: '自由',
    6: '新頭殼',
    7: 'NowNews',
    8: '聯合',
    9: 'TVBS',
    10: '中廣新聞網',
    11: '公視新聞網',
    12: '台視',
    13: '華視',
    14: '民視',
    # 15: '三立',
    16: '風傳媒',
}


def inspect_josn_format(inspect_str):
    try:
        json.loads(inspect_str)
    except ValueError:
        return False

    return True


def create_output_file(f, news_event):
    print('title: ' + f.name)
    f.write('--->\n')
    f.write(news_event.get_url() + '\n\n')
    f.write(news_event.get_title() + '\n')
    f.write(news_event.get_content())


def get_news_info(news_file, session, is_diff_file):
    title = ''
    content = ''
    is_exists_id = False
    already_get_id = False

    for line in news_file:
        if inspect_josn_format(line) and line.startswith('{'):
            already_get_id = True
            is_exists_id = False
            title = ''
            content = ''
            meta = json.loads(line)
            id = meta['id']
            create_time = date.fromtimestamp(int(meta['created_at'])).strftime("%Y-%m-%d")
            try_get = session.query(News).filter_by(news_id=id).first()

            if try_get and is_diff_file:
                is_exists_id = True
                # if not try_get.inspect_expiration_date(create_time):
                session.query(News).filter_by(news_id=id).first().update_changed_count()
            elif (not try_get) and (not is_diff_file):
                url = meta['url']
                source_media = media_list[int(meta['source'])]
            else:
                already_get_id = False
                print('Error item!')
        else:
            if not already_get_id:
                continue

            if is_exists_id:
                if content == '' and title != '':
                    content = line
                    already_get_id = False
                    if not (("404" in content) or ("404" in title)):
                        session.query(News).filter_by(news_id=id).update({'title': title, 'content': content})
                if title == '':
                    title = line
            else:
                if content == '' and title != '':
                    content = line
                    already_get_id = False
                    news_add = News(news_id=id, url=url, title=title, source_media=source_media,
                                    content=content, create_time=create_time)
                    session.add(news_add)
                    session.commit()
                if title == '':
                    title = line

    return session


def create_diff_map(source_dir):
    print('source_dir: ' + source_dir)
    if os.path.exists('db.sqlite'):
        os.remove('db.sqlite')

    engine = create_engine("sqlite:///db.sqlite", echo=False)
    Base.metadata.create_all(engine)
    Session = sessionmaker(bind=engine)
    session = Session()

    for file_name in glob.glob(source_dir + '/*'):
        if 'diff' in file_name:
            continue

        with open(file_name, 'r') as f:
            print('normal: ' + file_name)
            session = get_news_info(f, session, False)

    for file_name in glob.glob(source_dir + '/*diff*'):
        print('diff: ' + file_name)
        with open(file_name, 'r') as f:
            session = get_news_info(f, session, True)

    all_of_news = session.query(News).all()
    output_dir = source_dir + '_output'
    if os.path.exists(output_dir):
        shutil.rmtree(output_dir)
    os.makedirs(output_dir)

    for news_event in all_of_news:
        print('title: ' + news_event.get_news_file_name())
        with open(output_dir + '/' + news_event.get_news_file_name(), 'w') as f:
            create_output_file(f, news_event)


def arrange(source_dir, dest_dir):
    if not os.path.exists(dest_dir):
        os.mkdir(dest_dir)

    create_diff_map(source_dir)
    '''
    for news_file in source_dir:
        with open(news_file, 'r') as news_f:
    '''

# arrange('test_input', 'tmp')
arrange('extract', 'tmp')
```
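get_news_info() expects a JSON-lines dump where each record is a metadata object followed by a title line and a content line. A hypothetical three-line input it would accept; every field value here is invented for illustration ('source' 1 maps to '蘋果' in media_list):

```python
sample_lines = [
    '{"id": "a1", "created_at": "1490000000", "url": "http://example.com/n/1", "source": "1"}\n',
    'Some headline\n',
    'Full article text\n',
]
# Any iterable of lines works, since get_news_info() just iterates the file object.
```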
{ "source": "0x90E/PyWDI", "score": 2 }
#### File: apps/auth/__init__.py
```python
from flask import Blueprint
from flask_restful import Api
from .route import init_route


def init_app(app):
    api_bp = Blueprint('api_auth', __name__, url_prefix="/api/auth")
    api = Api(api_bp)
    init_route(api)
    app.register_blueprint(api_bp)
```
#### File: PyWDI/apps/database.py
```python
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from config import config

Base = declarative_base()


class DatabaseManager:
    def __init__(self):
        self.db_session = None

    def init_db(self):
        db_engine_prefix = config.DB_ENGINE_PREFIX
        db_user = config.DB_USER
        db_password = <PASSWORD>
        db_host = config.DB_HOST
        db_port = config.DB_PORT
        db_name = config.DB_NAME
        db_engine = '{}://{}:{}@{}:{}/{}'.format(db_engine_prefix, db_user, db_password,
                                                 db_host, db_port, db_name)
        engine = create_engine(db_engine, convert_unicode=True)
        self.db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine))
        Base.query = self.db_session.query_property()

        # Imported for its model definitions, so create_all() sees the tables.
        import apps.user.models
        Base.metadata.create_all(bind=engine)

    def get_db_session(self):
        return self.db_session


database_manager = DatabaseManager()
```
#### File: PyWDI/apps/PyWDI.py
```python
from flask import Flask, request

from apps import user, auth
from apps.database import database_manager


def create_app():
    app = Flask(__name__)

    try:
        from config import config
    except ModuleNotFoundError:
        print("Please copy config.py from config_example.py")
        exit(1)

    app.config.from_object(config)
    user.init_app(app)
    auth.init_app(app)

    return app


def create_db():
    database_manager.init_db()


if __name__ == '__main__':
    app = create_app()
    db = create_db()
    app.run(host=app.config['HOST'], port=app.config['PORT'], debug=app.config['DEBUG'])
```
#### File: 0x90E/PyWDI/config_example.py
```python
class ConfigBase:
    DEBUG = True
    HOST = '127.0.0.1'
    PORT = 5000

    DB_ENGINE_PREFIX = 'mysql+pymysql'
    DB_USER = ''
    DB_PASSWORD = ''
    DB_HOST = '127.0.0.1'
    DB_PORT = 3306
    DB_NAME = 'pywdi'

    JWT_SECRET_KEY = ''
    JWT_TOKEN_EXPIRED = 60


class Production(ConfigBase):
    # Override as a class attribute so Flask's from_object() picks it up;
    # assigning a local variable inside __init__ would have no effect.
    DEBUG = False


config = ConfigBase()
```
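DatabaseManager.init_db() assembles a standard SQLAlchemy URL from the config fields. A minimal sketch of the string it builds with the config_example defaults; the user and password shown are placeholders, not values from the project:

```python
# Mirrors the format() call in init_db().
db_engine = '{}://{}:{}@{}:{}/{}'.format(
    'mysql+pymysql', 'user', 'secret', '127.0.0.1', 3306, 'pywdi')
# -> 'mysql+pymysql://user:secret@127.0.0.1:3306/pywdi'
```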
{ "source": "0x9900/aprstar", "score": 2 }
#### File: 0x9900/aprstar/aprstar.py ```python import json import logging import os import platform import sys import time from configparser import ConfigParser from io import StringIO import aprslib try: # Python 3 from urllib.request import urlopen except ImportError: # Python 2 from urllib import urlopen from aprslib.exceptions import ConnectionError CONFIG_FILE = "/etc/aprstar.conf" CONFIG_DEFAULT = u""" [APRS] call: N0CALL-1 latitude: 0 longitude: 0 sleep: 600 symbol: n symbol_table: / """ THERMAL_FILE = "/sys/class/thermal/thermal_zone0/temp" LOADAVG_FILE = "/proc/loadavg" DEFAULT_PORT = 14580 logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s', datefmt='%H:%M:%S', level=logging.INFO) class Config(object): def __init__(self): parser = ConfigParser() parser.read_file(StringIO(CONFIG_DEFAULT)) self._passcode = "" self._call = "NOCALL-1" self._longitude = 0.0 self._latitude = 0.0 self._sleep = 900 self._symbol = "n" self._symbol_table = "/" if not os.path.exists(CONFIG_FILE): logging.info('Using default config') else: try: logging.info('Reading config file') with open(CONFIG_FILE, 'r') as fdc: parser.readfp(fdc) logging.info('Config file %s read', CONFIG_FILE) except (IOError, SystemError): raise SystemError('No [APRS] section configured') self.call = parser.get('APRS', 'call') self.sleep = parser.get('APRS', 'sleep') self.symbol_table = parser.get('APRS', 'symbol_table') self.symbol = parser.get('APRS', 'symbol') lat, lon = [float(parser.get('APRS', c)) for c in ('latitude', 'longitude')] if not lat or not lon: self.latitude, self.longitude = get_coordinates() else: self.latitude, self.longitude = lat, lon if parser.has_option('APRS', 'passcode'): self.passcode = parser.get('APRS', 'passcode') else: logging.warning('Generating passcode') self.passcode = aprslib.passcode(self.call) def __repr__(self): return ("<Config> call: {0.call}, passcode: {0.passcode} - " "{0.latitude}/{0.longitude}").format(self) @property def call(self): return self._call @call.setter def call(self, val): self._call = str(val) @property def sleep(self): return self._sleep @sleep.setter def sleep(self, val): try: self._sleep = int(val) except ValueError: logging.warning('Sleep value error using 600') self._sleep = 600 @property def latitude(self): return self._latitude @latitude.setter def latitude(self, val): self._latitude = val @property def longitude(self): return self._longitude @longitude.setter def longitude(self, val): self._longitude = val @property def passcode(self): return self._passcode @passcode.setter def passcode(self, val): self._passcode = str(val) @property def symbol(self): return self._symbol @symbol.setter def symbol(self, val): self._symbol = str(val) @property def symbol_table(self): return self._symbol_table @symbol_table.setter def symbol_table(self, val): self._symbol_table = str(val) class Sequence(object): """Generate an APRS sequence number.""" def __init__(self): self.sequence_file = '/tmp/aprstar.sequence' try: with open(self.sequence_file) as fds: self._count = int(fds.readline()) except (IOError, ValueError): self._count = 0 def flush(self): try: with open(self.sequence_file, 'w') as fds: fds.write("{0:d}".format(self._count)) except IOError: pass def __iter__(self): return self def next(self): return self.__next__() def __next__(self): self._count = (1 + self._count) % 999 self.flush() return self._count def get_coordinates(): logging.warning('Trying to figure out the coordinate using your IP address') url = "http://ip-api.com/json/" try: response = urlopen(url) 
_data = response.read() data = json.loads(_data.decode()) except IOError as err: logging.error(err) return (0, 0) else: logging.warning('Position: %f, %f', data['lat'], data['lon']) return data['lat'], data['lon'] def get_load(): try: with open(LOADAVG_FILE) as lfd: loadstr = lfd.readline() except IOError: return 0 try: load15 = float(loadstr.split()[1]) except ValueError: return 0 return int(load15 * 1000) def get_freemem(): proc_file = '/proc/meminfo' try: with open(proc_file) as pfd: for line in pfd: if 'MemFree' in line: freemem = int(line.split()[1]) except (IOError, ValueError): return 0 return int(freemem / 1024) def get_temp(): try: with open(THERMAL_FILE) as tfd: _tmp = tfd.readline() temperature = int(_tmp.strip()) except (IOError, ValueError): temperature = 20000 return temperature def send_position(ais, config): packet = aprslib.packets.PositionReport() packet.fromcall = config.call packet.tocall = 'APRS' packet.symbol = config.symbol packet.symbol_table = config.symbol_table packet.timestamp = time.time() packet.latitude = config.latitude packet.longitude = config.longitude packet.comment = "{} - https://github.com/0x9900/aprstar".format(platform.node()) logging.info(str(packet)) try: ais.sendall(packet) except ConnectionError as err: logging.warning(err) def send_header(ais, config): send_position(ais, config) try: ais.sendall("{0}>APRS::{0:9s}:PARM.Temp,Load,FreeMem".format(config.call)) ais.sendall("{0}>APRS::{0:9s}:EQNS.0,0.001,0,0,0.001,0,0,1,0".format(config.call)) except ConnectionError as err: logging.warning(err) def ais_connect(config): ais = aprslib.IS(config.call, passwd=config.passcode, port=DEFAULT_PORT) for retry in range(5): try: ais.connect() except ConnectionError as err: logging.warning(err) time.sleep(10) else: return ais logging.error('Connection error exiting') sys.exit(os.EX_NOHOST) def main(): config = Config() ais = ais_connect(config) send_header(ais, config) for sequence in Sequence(): if sequence % 10 == 1: send_header(ais, config) temp = get_temp() load = get_load() freemem = get_freemem() data = "{}>APRS:T#{:03d},{:d},{:d},{:d},0,0,00000000".format( config.call, sequence, temp, load, freemem) ais.sendall(data) logging.info(data) time.sleep(config.sleep) if __name__ == "__main__": try: main() except KeyboardInterrupt: sys.exit() ```
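The telemetry packet and the EQNS header sent above are enough to decode a report by hand: the EQNS coefficients (0, 0.001, 0) mean value * 0.001 for the first two channels, and (0, 1, 0) is identity for the third. A minimal sketch, with raw values invented for illustration:

```python
# Raw telemetry as produced by main(): CALL>APRS:T#<seq>,<temp>,<load>,<freemem>,...
raw = "N0CALL-1>APRS:T#042,23500,1250,512"
_, fields = raw.split(":T#")
seq, temp, load, freemem = (int(v) for v in fields.split(","))

print(seq)            # 42
print(temp * 0.001)   # 23.5  degrees C (get_temp reports millidegrees)
print(load * 0.001)   # 1.25  (get_load multiplied load15 by 1000)
print(freemem)        # 512   MB, identity equation
```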
{ "source": "0x9900/blkin", "score": 2 }
#### File: 0x9900/blkin/belkin.py ```python import ds18x20 import gc import network import onewire import os import time import uasyncio as asyncio import ujson import uselect as select import usocket as socket import ustruct as struct from machine import Pin from machine import RTC from machine import WDT from machine import reset import logging import wificonfig as wc logging.basicConfig(level=logging.INFO) LOG = logging.getLogger("Belkin") HTML_PATH = b'/html' HTML_ERROR = """<!DOCTYPE html><html><head><title>404 Not Found</title> <body><h1>{} {}</h1></body></html> """ HTTPCodes = { 200: ('OK', 'OK'), 303: ('Moved', 'Moved'), 307: ('Temporary Redirect', 'Moved temporarily'), 400: ('Bad Request', 'Bad request'), 404: ('Not Found', 'File not found'), 500: ('Internal Server Error', 'Server erro'), } MIME_TYPES = { b'css': 'text/css', b'html': 'text/html', b'js': 'application/javascript', b'json': 'application/json', b'txt': 'text/plain', b'png': 'image/png', } MAX_TEMP = 40.0 # (date(2000, 1, 1) - date(1900, 1, 1)).days * 24*60*60 NTP_DELTA = 3155673600 def get_ntp_time(host): NTP_QUERY = bytearray(48) NTP_QUERY[0] = 0x1b addr = socket.getaddrinfo(host, 123)[0][-1] s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.settimeout(2) try: res = s.sendto(NTP_QUERY, addr) msg = s.recv(res) finally: s.close() val = struct.unpack("!I", msg[40:44])[0] return val - NTP_DELTA # There's currently no timezone support in MicroPython, so # time.localtime() will return UTC time (as if it was .gmtime()) # add timezone org, default -7 for California def settime(timezone=-7, server='us.pool.ntp.org'): try: time_ntp = get_ntp_time(server) except OSError: LOG.warning("Error fetching NTP time") return time_ntp = time_ntp + (timezone * 60 * 60) tm = time.localtime(time_ntp) RTC().datetime((tm[0], tm[1], tm[2], tm[6] + 1, tm[3], tm[4], tm[5], 0)) def parse_headers(head_lines): headers = {} for line in head_lines: if line.startswith(b'GET') or line.startswith(b'POST'): method, uri, proto = line.split() headers[b'Method'] = method headers[b'URI'] = uri headers[b'Protocol'] = proto else: try: key, val = line.split(b":", 1) headers[key] = val except: LOG.warning('header line warning: %s', line) return headers class Relay: def __init__(self, *args, **kwargs): self.pin = Pin(*args, **kwargs) self.forced = False # False for Automatic, True for forced def value(self, val=None): if val is None: return self.pin.value() return self.pin.value(val) def on(self): if self.pin.value() == 1: return self.pin.value(1) def off(self): if self.pin.value() == 0: return return self.pin.value(0) class DS1820: def __init__(self, *args, **kwargs): self.pin = Pin(*args, **kwargs) self.sensor = ds18x20.DS18X20(onewire.OneWire(self.pin)) self.temp = 0 self.lastread = 0 self.rom = None roms = self.sensor.scan() if not roms: LOG.error('DS1820 sensor not found') return self.rom = roms[0] LOG.info('Found DS devices: %s', self.rom) async def read(self): if not self.rom: LOG.warning('DS1820 sensor not found') return 0.0 LOG.debug('Read DS1820 temp') self.sensor.convert_temp() await asyncio.sleep_ms(750) self.temp = self.sensor.read_temp(self.rom) return self.temp class Server: def __init__(self, switch, temp, addr='0.0.0.0', port=80): self.addr = addr self.port = port self.open_socks = [] self.switch = switch self.temp = temp self._files = [bytes('/' + f, 'utf-8') for f in os.listdir('html')] async def run(self, loop): addr = socket.getaddrinfo(self.addr, self.port, 0, socket.SOCK_STREAM)[0][-1] s_sock = socket.socket(socket.AF_INET, 
socket.SOCK_STREAM) s_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) s_sock.bind(addr) s_sock.listen(5) self.open_socks.append(s_sock) LOG.info('Awaiting connection on %s:%d', self.addr, self.port) poller = select.poll() poller.register(s_sock, select.POLLIN) while True: if poller.poll(1): # 1ms c_sock, addr = s_sock.accept() # get client socket LOG.debug('Connection from %s:%d', *addr) loop.create_task(self.process_request(c_sock)) gc.collect() await asyncio.sleep_ms(100) async def process_request(self, sock): LOG.debug('Process request %s', sock) self.open_socks.append(sock) sreader = asyncio.StreamReader(sock) swriter = asyncio.StreamWriter(sock, '') try: head_lines = [] while True: line = await sreader.readline() line = line.rstrip() if line in (b'', b'\r\n'): break head_lines.append(line) headers = parse_headers(head_lines) uri = headers.get(b'URI') if not uri: LOG.debug('Empty request') raise OSError LOG.debug('Request %s %s', headers[b'Method'].decode(), uri.decode()) if uri == b'/api/v1/status': data = await self.get_state() await self.send_json(swriter, data) elif uri == b'/api/v1/on': self.switch.on() self.switch.forced = True data = await self.get_state() await self.send_json(swriter, data) elif uri == b'/api/v1/off': self.switch.off() self.switch.forced = True data = await self.get_state() await self.send_json(swriter, data) elif uri == b'/api/v1/auto': self.switch.forced = False data = await self.get_state() await self.send_json(swriter, data) elif uri.startswith('/api/v1/reboot'): await self.reboot(swriter) elif uri == b'/': await self.send_file(swriter, b'/index.html') elif uri in self._files: await self.send_file(swriter, uri) else: await self.send_error(swriter, 404) except OSError: pass LOG.debug("%r", self.switch) gc.collect() LOG.debug('Disconnecting %s / %d', sock, len(self.open_socks)) sock.close() self.open_socks.remove(sock) async def get_state(self): data = {} data['time'] = "{:02d}:{:02d}".format(*time.localtime()[3:5]) data['forced'] = self.switch.forced; data['switch'] = self.switch.value() data['temp'] = await self.temp.read() return data async def send_json(self, wfd, data): LOG.debug('send_json') jdata = ujson.dumps(data) await wfd.awrite(self._headers(200, b'json', content_len=len(jdata))) await wfd.awrite(jdata) gc.collect() async def send_file(self, wfd, url): fpath = b'/'.join([HTML_PATH, url.lstrip(b'/')]) mime_type = fpath.split(b'.')[-1] LOG.debug('send_file: %s mime_type: %s', url, mime_type) try: with open(fpath, 'rb') as fd: await wfd.awrite(self._headers(200, mime_type, cache=-1)) for line in fd: await wfd.awrite(line) except OSError as err: LOG.debug('send file error: %s %s', err, url) await self.send_error(wfd, 404) gc.collect() async def send_error(self, wfd, err_c): if err_c not in HTTPCodes: err_c = 400 errors = HTTPCodes[err_c] await wfd.awrite(self._headers(err_c) + HTML_ERROR.format(err_c, errors[1])) gc.collect() async def send_redirect(self, wfd, location='/'): page = HTML_ERROR.format(303, 'redirect') await wfd.awrite(self._headers(303, location=location, content_len=len(page))) await wfd.awrite(HTML_ERROR.format(303, 'redirect')) gc.collect() def close(self): LOG.debug('Closing %d sockets', len(self.open_socks)) for sock in self.open_socks: sock.close() async def reboot(self, wfd): jdata = ujson.dumps({"status": "reboot"}) await wfd.awrite(self._headers(200, b'json', content_len=len(jdata))) await wfd.awrite(jdata) await asyncio.sleep_ms(500) reset() @staticmethod def _headers(code, mime_type=None, location=None, 
content_len=0, cache=None): try: labels = HTTPCodes[code] except KeyError: raise KeyError('HTTP code (%d) not found', code) headers = [] headers.append(b'HTTP/1.1 {:d} {}'.format(code, labels[0])) headers.append(b'Content-Type: {}'.format(MIME_TYPES.get(mime_type, 'text/html'))) if location: headers.append(b'Location: {}'.format(location)) if content_len: headers.append(b'Content-Length: {:d}'.format(content_len)) if cache and cache == -1: headers.append(b'Cache-Control: public, max-age=604800, immutable') elif cache and isinstance(cache, str): headers.append(b'Cache-Control: '.format(cache)) headers.append(b'Connection: close') return b'\n'.join(headers) + b'\n\n' def wifi_connect(ssid, password): ap_if = network.WLAN(network.AP_IF) ap_if.active(False) sta_if = network.WLAN(network.STA_IF) if not sta_if.isconnected(): sta_if.active(True) sta_if.connect(ssid, password) for cnd in range(10): LOG.info('Connecting to WiFi...') if sta_if.isconnected(): break time.sleep(3) if not sta_if.isconnected(): LOG.error('Wifi connection error sleeping for 30 second before reboot') time.sleep(30) reset() LOG.info('Network config: %s', sta_if.ifconfig()) gc.collect() return sta_if async def automation(tm_on, switch, temp): # If the current hour/min is in the tm_on set the realy # is closed else the relay is open. while True: await asyncio.sleep_ms(10061) if switch.forced: continue t = time.localtime() hour, min = t[3:5] key = int("{:d}{:02d}".format(hour, min)) if key in tm_on and await temp.read() < MAX_TEMP: switch.on() else: switch.off() async def update_rtc(tzone): # Re-sync the micro-controller internal RTC with NTP every hours. settime(timezone=tzone) while True: await asyncio.sleep(1823) settime(timezone=tzone) gc.collect() async def monitor(switch, temp): while True: if switch.forced and switch.value() == 1 and await temp.read() > MAX_TEMP: switch.forced = False switch.off() await asyncio.sleep_ms(10061) # 10061 – super-prime, happy prime (10sec) async def heartbeat(): feed_time = 1500 wdt = WDT() while True: wdt.feed() await asyncio.sleep_ms(feed_time) def parse_dat(filename): global MAX_TEMP sched_data = { 'tz': -7, 'tm_on': [] } def _int(value, default): try: return int(value) except ValueError as err: pass return default try: line_no = 0 with open(filename) as fd: for line in fd: line_no += 1 line = line.rstrip() if not line or line.startswith('#'): continue if line.startswith('@maxtemp'): _, value = line.split() MAX_TEMP = _int(value, MAX_TEMP) LOG.info('Max temp: %d', MAX_TEMP) elif line.startswith('@timezone'): _, value = line.split() sched_data['tz'] = _int(value, -7) LOG.info('Time zone: %d', sched_data['tz']) else: value = _int(line, -1) if value < 0: LOG.info('Value error: %s line: %d', line, line_no) else: sched_data['tm_on'].append(value) except Exception as err: LOG.info('No scheduling "time.dat" file read error %s', err) time.sleep(300) LOG.info(sched_data['tm_on']) return sched_data def main(): wifi = wifi_connect(wc.SSID, wc.PASSWORD) try: import update update.timesfile() except: print("Update error") switch = Relay(2, Pin.OUT, value=1) ds1820 = DS1820(0, Pin.IN, Pin.PULL_UP) sched_data = parse_dat('times.dat') LOG.info('Last chance to press [^C]') time.sleep(7) LOG.info('Start server') server = Server(switch, ds1820) loop = asyncio.get_event_loop() loop.create_task(update_rtc(sched_data['tz'])) loop.create_task(heartbeat()) loop.create_task(automation(sched_data['tm_on'], switch, ds1820)) loop.create_task(server.run(loop)) loop.create_task(monitor(switch, ds1820)) try: 
loop.run_forever() except KeyboardInterrupt: LOG.info('Closing all connections') if __name__ == "__main__": main() ```
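parse_dat() accepts a plain-text schedule with @-directives and one HHMM integer per line, the same hour*100+minute key that automation() computes every cycle. A hypothetical times.dat that it would parse:

```python
# Hypothetical times.dat; '#' lines and blank lines are skipped by parse_dat():
#
#   @maxtemp 40
#   @timezone -7
#   630       relay closed while the clock reads 06:30
#   1815      and again at 18:15
#
sched = parse_dat('times.dat')  # -> {'tz': -7, 'tm_on': [630, 1815]}
```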
{ "source": "0x9900/esp-sensor", "score": 3 }
#### File: 0x9900/esp-sensor/sensor.py
```python
import gc
import json

import bme280


class Sensor:
    def __init__(self, i2c):
        self.sensor = bme280.BME280(i2c=i2c)
        self.sensor.set_measurement_settings({
            'filter': bme280.BME280_FILTER_COEFF_16,
            'standby_time': bme280.BME280_STANDBY_TIME_500_US,
            'osr_h': bme280.BME280_OVERSAMPLING_1X,
            'osr_p': bme280.BME280_OVERSAMPLING_16X,
            'osr_t': bme280.BME280_OVERSAMPLING_2X})
        self.sensor.set_power_mode(bme280.BME280_NORMAL_MODE)

    def get(self):
        data = self.sensor.get_measurement()
        data['temp'] = data.pop('temperature')
        return data

    def to_json(self):
        # json.dumps needs the measurement dict, not the bound method.
        data = bytes(json.dumps(self.get()), 'utf-8')
        gc.collect()
        return data
```
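A minimal usage sketch on a MicroPython board; the I2C pin numbers are assumptions for illustration and depend on how the BME280 is wired:

```python
from machine import I2C, Pin

from sensor import Sensor

# Pin numbers are hypothetical; match them to the board's wiring.
i2c = I2C(scl=Pin(5), sda=Pin(4))
sensor = Sensor(i2c)
print(sensor.get())      # e.g. {'temp': ..., 'pressure': ..., 'humidity': ...}
print(sensor.to_json())  # the same reading as UTF-8 encoded JSON bytes
```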
{ "source": "0x9900/gitbanner", "score": 3 }
#### File: 0x9900/gitbanner/gitbanner.py ```python import os import shlex import subprocess import sys import tempfile import yaml from datetime import datetime BANNER_INFO_FILE = 'banner.yaml' DATAFILE = 'commits.dat' SUNDAY = 6 HEADER = """# # This file will be changed just to have something to commit. # Last update: %s # """ def is_pushday(filename, today=datetime.now().date()): try: with open('banner.yaml') as fdin: banner = yaml.load(fdin) except (IOError, yaml.YAMLError) as err: raise StandardError(err) # Check if all the fields are present. for field in ('start_date', 'message'): if field not in banner: raise StandardError('The field "%s" is missing' % field) try: start_date = datetime.strptime(banner['start_date'], '%m/%d/%Y') start_date = start_date.date() if start_date.weekday() != SUNDAY: raise ValueError('The date must start on a sunday') except ValueError as err: raise StandardError(err) message = banner['message'].splitlines() # Check for the number of lines and the length of all the lines. if len(message) != 7: raise StandardError('Only 7 lines are allowed (there is 7 days/week)') if not all([len(message[0]) == len(l) for l in message[1:]]): raise StandardError('All the lines must have the same length') # Is today a push day? if today < start_date: raise StandardError('Today is before start date') number_of_days = len(message) * len(message[0]) day = (today - start_date).days try: push = True if message[day % 7][day / len(message)] == '#' else False except IndexError: raise StandardError('The banner is fully printed') return (push, day, number_of_days) def main(): try: push, day, days_left = is_pushday(BANNER_INFO_FILE) except StandardError as err: print err sys.exit(os.EX_OSERR) msg = "Push: %s, Day: %s, Days left: %s" % (push, day, days_left - day) try: with tempfile.TemporaryFile() as fdtmp: fdtmp.write(HEADER % datetime.now()) fdtmp.write(msg) fdtmp.write('\n') # copy the content of the old DATAFILE into the tmpfile. with open(DATAFILE, 'r') as fdin: for line in fdin: if line.startswith('#'): continue fdtmp.write(line) # copy the content of the tmpfile into the new datafile. fdtmp.seek(0) with open(DATAFILE, 'w') as fdout: for line in fdtmp: fdout.write(line) except IOError as err: print err else: # send everything to github.com if it's a 'commit' day. if push: command = lambda cmd: subprocess.check_call(shlex.split(cmd)) command('git commit -am "%s"' % msg ) command('git push origin') if __name__ == '__main__': main() ```
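is_pushday() expects a banner.yaml whose start_date falls on a Sunday and whose message is exactly seven equal-length lines, one per weekday row (note the function actually opens 'banner.yaml' regardless of the filename argument). A hypothetical file that passes its checks:

```python
# Hypothetical banner.yaml; '#' cells become commit days, everything else is empty.
# January 5, 2020 was a Sunday.
#
#   start_date: 01/05/2020
#   message: |
#     .#...
#     .#...
#     .#...
#     .#...
#     .#...
#     .#...
#     .#...
#
push, day, total = is_pushday(BANNER_INFO_FILE)
```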
{ "source": "0x9900/whitelist", "score": 3 }
#### File: 0x9900/whitelist/whitelist.py ```python from __future__ import print_function import DNS import re import sys from collections import defaultdict USAGE = """ whitelist.py [-d domain.tld | -f domain_list.txt] """ RE_PARSE = re.compile(r'(ip4|ip6|include|redirect)[:=](.*)', re.IGNORECASE) MAX_RECURSION = 5 def dns_txt(domain): try: resp = DNS.dnslookup(domain, 'TXT') except (DNS.ServerError, DNS.Base.TimeoutError) as err: print("{}: {}".format(domain, err.message), file=sys.stderr) return None response = [] for r in resp: response.append(''.join(r)) return response def dns_parse(txt_field): resp = defaultdict(set) for rec in txt_field: fields = rec.split() for field in fields: match = RE_PARSE.match(field) if match: resp[match.group(1)].add(match.group(2)) return resp def process(domain): domains = [domain] ip_addresses = set() for cnt in range(MAX_RECURSION): includes = set() for dom in domains: txt = dns_txt(dom) if not txt: continue spf = dns_parse(txt) ip_addresses |= spf.get('ip4', set()) ip_addresses |= spf.get('ip6', set()) includes |= spf.get('include', set()) includes |= spf.get('redirect', set()) if not includes: break domains = includes return ip_addresses if __name__ == '__main__': whitelist = set() if len(sys.argv) != 3 or sys.argv[1] not in ('-f', '-d'): print('Wrong arguments', file=sys.stderr) print(USAGE, file=sys.stderr) sys.exit(1) if sys.argv[1] == '-d': for ip in process(sys.argv[2]): whitelist.add(ip) elif sys.argv[1] == '-f': with open(sys.argv[2]) as fd: for line in fd: line = line.strip() for ip in process(line): whitelist.add(ip) for ip in sorted(whitelist): print(ip) ```
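process() follows SPF include/redirect chains up to MAX_RECURSION levels deep and returns the union of all ip4/ip6 entries it finds. Usage from the shell, or directly from Python; the domain here is just an illustrative placeholder:

```python
# Shell: python whitelist.py -d example.com
#        python whitelist.py -f domain_list.txt
for ip in sorted(process('example.com')):
    print(ip)
```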
{ "source": "0x9900/wspr", "score": 2 }
#### File: 0x9900/wspr/leaf.py ```python import argparse import collections import logging import math import os import sys from datetime import datetime, timedelta import json import matplotlib.dates as mdates import matplotlib.pyplot as plt import numpy as np import requests from scipy.interpolate import make_interp_spline try: from mpl_toolkits.basemap import Basemap except ImportError: Basemap = None logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s', datefmt='%H:%M:%S', level=logging.INFO) DXPLORER_URL = "http://dxplorer.net/wspr/tx/spots.json" DEFAULT_BAND = "20m" BANDS = collections.OrderedDict(( ("160m", 1), ("80m", 3), ("60m", 5), ("40m", 7), ("30m", 10), ("20m", 14), ("17m", 18), ("15m", 21), ("12m", 24), ("10m", 28), ("6m", 50), ("4m", 70), ("2m", 144), ("70cm", 432), ("23cm", 1296), )) class Config: """Store Configuration and global variables""" # pylint: disable=too-few-public-methods target = '/tmp' granularity = 8 percentile = 90 fig_size = (14, 6) count = 10000 timespan = 24 callsign = os.getenv("CALLSIGN", '').upper() key = os.getenv("KEY") band = 14 file = None class WsprData: """Structure storing WSPR data""" # pylint: disable=too-few-public-methods __slot__ = ["distance", "tx_call", "timestamp", "drift", "tx_grid", "rx_call", "power_dbm", "rx_grid", "azimuth", "snr", "freq", "rx_lat", "rx_long", "tx_lat", "tx_long"] def __init__(self, *_, **kwargs): for key, val, in kwargs.items(): setattr(self, key, val) if key == 'tx_grid': lat, lon = grid2latlon(val) setattr(self, 'tx_lat', lat) setattr(self, 'tx_lon', lon) elif key == 'rx_grid': lat, lon = grid2latlon(val) setattr(self, 'rx_lon', lon) setattr(self, 'rx_lat', lat) def __repr__(self): pattern = "WsprData: {0.tx_call} / {0.rx_call}, distance: {0.distance}, snr: {0.snr}" return pattern.format(self) def grid2latlon(maiden): """ Transform a maidenhead grid locator to latitude & longitude """ assert isinstance(maiden, str), "Maidenhead locator has to be a string" maiden = maiden.strip().upper() maiden_lg = len(maiden) assert len(maiden) in [2, 4, 6, 8], 'Locator length error: 2, 4, 6 or 8 characters accepted' char_a = ord("A") lon = -180.0 lat = -90.0 lon += (ord(maiden[0]) - char_a) * 20 lat += (ord(maiden[1]) - char_a) * 10 if maiden_lg >= 4: lon += int(maiden[2]) * 2 lat += int(maiden[3]) * 1 if maiden_lg >= 6: lon += (ord(maiden[4]) - char_a) * 5.0 / 60 lat += (ord(maiden[5]) - char_a) * 2.5 / 60 if maiden_lg >= 8: lon += int(maiden[6]) * 5.0 / 600 lat += int(maiden[7]) * 2.5 / 600 return lat, lon def readfile(): """Read WSPR data file""" try: with open(Config.file, 'rb') as fdi: data = json.load(fdi) except (ValueError, IOError) as err: logging.error(err) sys.exit(os.EX_OSFILE) return [WsprData(**d) for d in data] def download(): """Download WSPR data from the dxplorer website""" params = dict(callsign=Config.callsign, band=BANDS[Config.band], key=Config.key, count=Config.count, timelimit="24H") try: resp = requests.get(url=DXPLORER_URL, params=params) data = resp.json() except Exception as err: logging.error(err) raise if not data: logging.error('Empty data') sys.exit(os.EX_OSFILE) if 'Error' in data: logging.error(data['Error']) sys.exit(os.EX_OSFILE) logging.info('Downloaded %d records', len(data)) return [WsprData(**d) for d in data] def reject_outliers(data, magnitude=1.8): """Reject the statistical outliers from a list""" q25, q75 = np.percentile(data, [25, 75]) iqr = q75 - q25 qmin = q25 - (iqr * magnitude) qmax = q75 + (iqr * magnitude) return [x for x in data if qmin <= x <= qmax] def 
azimuth(wspr_data): """Display the contacts azimut / distance.""" filename = os.path.join(Config.target, 'azimuth.png') logging.info('Drawing azimuth to %s', filename) data = [] for node in wspr_data: data.append((math.radians(int(node.azimuth/Config.granularity) * Config.granularity), (node.distance / 50) * 50)) dist_count = collections.defaultdict(int) for elem in data: dist_count[elem] += 1 theta = [] distance = [] density = [] for key, cnt in dist_count.items(): theta.append(key[0]) distance.append(key[1]) density.append(cnt * 3) fig = plt.figure(figsize=(8, 8)) fig.text(.01, .02, ('http://github.com/0x9900/wspr - Distance & direction - ' 'Time span: %sH - Band: %s') % (Config.timespan, Config.band)) fig.suptitle('[{}] WSPR Stats'.format(Config.callsign), fontsize=14, fontweight='bold') ax_ = fig.add_subplot(111, projection="polar") ax_.set_theta_zero_location("N") ax_.set_theta_direction(-1) ax_.scatter(theta, distance, s=density, c=theta, cmap='PiYG', alpha=0.8) plt.savefig(filename) plt.close() def skip_plot(wspr_data): """Show the skip zones""" filename = os.path.join(Config.target, 'skipplot.png') logging.info('Drawing skip_plot to %s', filename) data = np.array([d.distance for d in wspr_data]) fig, ax_ = plt.subplots(figsize=Config.fig_size) fig.text(.01, .02, ('http://github.com/0x9900/wspr - Skip zones - Time span: ' '%sH - Band: %s') % (Config.timespan, Config.band)) fig.suptitle('[{}] WSPR Stats'.format(Config.callsign), fontsize=14, fontweight='bold') ax_.set_xlabel('Distances in Km') ax_.set_ylabel('Contacts') ax_.hist(data, bins="auto", alpha=0.9, rwidth=0.95) plt.savefig(filename) plt.close() def dist_plot(wspr_data): """Show the maximum distances""" filename = os.path.join(Config.target, 'distplot.png') logging.info('Drawing dist_plot to %s', filename) collection = collections.defaultdict(list) for data in wspr_data: date_hour = datetime.fromtimestamp(data.timestamp).replace(second=0, microsecond=0) date_hour += timedelta(minutes=5) date_hour -= timedelta(minutes=date_hour.minute % 10) collection[date_hour.timestamp()].append(data.distance) collection = {k: np.percentile(v, Config.percentile) for k, v in collection.items()} xval, yval = zip(*sorted(collection.items())) xval = np.array(xval) yval = np.array(yval) xnew = np.linspace(xval.min(), xval.max(), len(xval) * 10) k_factor = 3 if len(xval) > 10 else 1 spline = make_interp_spline(xval, yval, k=k_factor) smooth = spline(xnew) fig, ax_ = plt.subplots(figsize=Config.fig_size) fig.text(.01, .02, ('http://github.com/0x9900/wspr - Distance %sth percentile - Time span: ' '%sH - Band: %s') % (Config.percentile, Config.timespan, Config.band)) fig.suptitle('[{}] WSPR Stats'.format(Config.callsign), fontsize=14, fontweight='bold') fig.autofmt_xdate() ax_.xaxis.set_major_formatter(mdates.DateFormatter('%H:%M')) ax_.grid(True, which="both", linestyle='dotted') ax_.set_xlabel('UTC Time') ax_.set_ylabel('Km') ax_.set_yscale('log') ylim_val = int(yval.min()/3) ax_.set_ylim(ylim_val if ylim_val > 0 else 1 , yval.max()+1000) ax_.plot([datetime.utcfromtimestamp(x) for x in xnew], smooth) plt.savefig(filename) plt.close() def box_plot(wspr_data): """Box plot graph show the median, 75 and 25 percentile of the distance. 
It also show the outliers.""" filename = os.path.join(Config.target, 'boxplot.png') logging.info('Drawing box_plot to %s', filename) collection = collections.defaultdict(list) for val in wspr_data: date_hour = datetime.utcfromtimestamp(val.timestamp).replace(minute=0, second=0, microsecond=0) collection[date_hour].append(val.distance) data = sorted(collection.items()) fig, ax_ = plt.subplots(figsize=Config.fig_size) fig.text(.01, .02, ('http://github.com/0x9900/wspr - Distance quartile range - ' 'Time span: %sH - Band: %s') % (Config.timespan, Config.band)) fig.suptitle('[{}] WSPR Stats'.format(Config.callsign), fontsize=14, fontweight='bold') fig.autofmt_xdate() labels, values = zip(*data) labels = ['{}'.format(h.strftime('%R')) for h in labels] ax_.grid(True, linestyle='dotted') ax_.set_xlabel('UTC Time') ax_.set_ylabel('Km') bplot = ax_.boxplot(values, sym="b.", patch_artist=True, autorange=True, labels=labels) for patch in bplot['boxes']: patch.set(color='silver', linewidth=1) plt.savefig(filename) plt.close() def violin_plot(wspr_data): """After removing the outliers draw violin plot. This graph show where is the highest contact distances probabilities.""" filename = os.path.join(Config.target, 'violin.png') logging.info('Drawing violin to %s', filename) # get only the relevant data and reject the outliers collection = collections.defaultdict(list) for val in wspr_data: date_hour = datetime.utcfromtimestamp(val.timestamp).replace(minute=0, second=0, microsecond=0) collection[date_hour].append(val.distance) data = [] for key, values in sorted(collection.items()): data.append((key, reject_outliers(values))) labels, values = zip(*data) labels = ['{}'.format(h.strftime('%R')) for h in labels] fig, ax_ = plt.subplots(figsize=Config.fig_size) fig.text(.01, .02, ('http://github.com/0x9900/wspr - Distance and contacts density - ' 'Time span: %sH - Band: %s') % (Config.timespan, Config.band)) fig.suptitle('[{}] WSPR Stats'.format(Config.callsign), fontsize=14, fontweight='bold') ax_.xaxis.set_ticks_position('bottom') ax_.set_xticks(np.arange(1, len(labels) + 1)) ax_.set_xticklabels(labels) ax_.set_xlim(0.25, len(labels) + 0.75) ax_.set_xlabel('UTC Time') ax_.grid(True, linestyle='dotted') ax_.set_ylabel('Km') ax_.violinplot(values, showmeans=False, showmedians=True) plt.savefig(filename) plt.close() def contact_map(wspr_data): """Show all the contacts on a map""" filename = os.path.join(Config.target, 'contactmap.png') logging.info('Drawing connection map to %s', filename) __calls = [] points = [] for data in wspr_data: if data.rx_call in __calls: continue __calls.append(data.rx_call) points.append((data.rx_lon, data.rx_lat)) points = np.array(points) right, upl = points.max(axis=0) + [15., 10.] left, downl = points.min(axis=0) + [-15., -10] if right > 180 or left < -180: right, left, upl, downl = (180., -180., 90., -90.) 
fig = plt.figure(figsize=(12, 8)) fig.text(.01, .02, ('http://github/com/0x9900/wspr - Contacts map - ' 'Time span: %sH - Band: %s') % (Config.timespan, Config.band)) fig.suptitle('[{}] WSPR Stats'.format(Config.callsign), fontsize=14, fontweight='bold') logging.info("Origin lat: %f / lon: %f", wspr_data[0].tx_lat, wspr_data[0].tx_lon) bmap = Basemap(projection='mill', lon_0=wspr_data[0].tx_lon, lat_0=wspr_data[0].tx_lat, urcrnrlat=upl, urcrnrlon=right, llcrnrlat=downl, llcrnrlon=left, resolution='c') bmap.drawlsmask(land_color="#5c4033", ocean_color="#9999ff", resolution='l') bmap.drawparallels(np.arange(-90., 90., 45.)) bmap.drawmeridians(np.arange(-180., 180., 45.)) bmap.drawcountries() bmap.drawstates(linestyle='dashed', color='#777777') #bmap.drawrivers(linestyle='dotted', color='#7777ff') for lon, lat in points: bmap.drawgreatcircle(wspr_data[0].tx_lon, wspr_data[0].tx_lat, lon, lat, linewidth=.5, color='navy', del_s=1) x, y = bmap(lon, lat) bmap.plot(x, y, '*', markersize=4, alpha=.5, color='yellow') plt.savefig(filename) plt.close() def band_select(argument): """Select and validate the band passed as argument""" argument = argument.lower() if argument not in BANDS: raise argparse.ArgumentTypeError("Possible bands are:", ",".join(BANDS)) return argument def type_directory(parg): """Check expand the argument then check if it is a directory""" path = os.path.expanduser(parg) if not os.path.isdir(path): print('"{}" is not a directory'.format(path)) sys.exit(os.EX_OSERR) return path def main(): """Every good program start with a main function""" parser = argparse.ArgumentParser(description='WSPR Stats.', usage=__doc__) parser.add_argument('-D', '--debug', action='store_true', default=False, help='Print information useful for debugging') parser.add_argument('-t', '--target-dir', default='/tmp', type=type_directory, help=('Target directory where the images will be ' 'saved [default: %(default)s]')) parser.add_argument('-f', '--file', help='JSON file from DXPlorer.net') parser.add_argument('-b', '--band', type=band_select, default=DEFAULT_BAND, help=('Band to download, in Mhz [default: %(default)s]')) pargs = parser.parse_args() Config.target = pargs.target_dir Config.band = pargs.band Config.file = pargs.file if pargs.debug: _logger = logging.getLogger() _logger.setLevel('DEBUG') del _logger if not pargs.file and not any([Config.callsign, Config.key]): logging.error('Call sign or key missing') sys.exit(os.EX_NOPERM) if pargs.file: wspr_data = readfile() else: wspr_data = download() timespan = np.array([datetime.utcfromtimestamp(w.timestamp) for w in wspr_data]) Config.timespan = np.timedelta64(timespan.max() - timespan.min(), 'h').astype(int) try: box_plot(wspr_data) violin_plot(wspr_data) azimuth(wspr_data) dist_plot(wspr_data) skip_plot(wspr_data) if Basemap: contact_map(wspr_data) except ValueError as err: logging.error(err) logging.error('Your dataset is to small. Run WSPR for a longer time and gather more data') sys.exit(os.EX_DATAERR) if __name__ == "__main__": main() ```
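grid2latlon() above resolves each Maidenhead character pair in turn: field, square, then subsquare and extended square for longer locators. A quick worked check that follows the arithmetic in the function:

```python
# 'CM87': field C,M -> lon -140, lat 30; square 8,7 -> lon +16, lat +7.
lat, lon = grid2latlon('CM87')
assert (lat, lon) == (37.0, -124.0)

# The subsquare letters refine by 5/60 deg (lon) and 2.5/60 deg (lat) per step,
# so 'aa' (uppercased to 'AA' internally) adds nothing.
assert grid2latlon('CM87aa') == (37.0, -124.0)
```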
{ "source": "0x9fff00/flask-apscheduler", "score": 3 }
#### File: flask-apscheduler/examples/advanced.py ```python from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore from flask import Flask from flask_apscheduler import APScheduler class Config(object): JOBS = [ { 'id': 'job1', 'func': 'advanced:job1', 'args': (1, 2), 'trigger': 'interval', 'seconds': 10 } ] SCHEDULER_JOBSTORES = { 'default': SQLAlchemyJobStore(url='sqlite://') } SCHEDULER_EXECUTORS = { 'default': {'type': 'threadpool', 'max_workers': 20} } SCHEDULER_JOB_DEFAULTS = { 'coalesce': False, 'max_instances': 3 } SCHEDULER_API_ENABLED = True def job1(a, b): print(str(a) + ' ' + str(b)) if __name__ == '__main__': app = Flask(__name__) app.config.from_object(Config()) scheduler = APScheduler() scheduler.init_app(app) scheduler.start() app.run() ```
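With SCHEDULER_API_ENABLED set, flask-apscheduler also mounts a small REST API for job management on the app. A hedged sketch of querying it while the example runs; the /scheduler/jobs path is from memory of the project's docs and worth verifying against the installed version:

```python
import requests

# Assumes the example above is running locally on Flask's default port.
resp = requests.get('http://127.0.0.1:5000/scheduler/jobs')
print(resp.json())  # expect job1 with its interval trigger
```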
{ "source": "0xa10/PyGT521F32", "score": 2 }
#### File: PyGT521F32/gt521f32/gt521f32.py ```python import logging import contextlib import threading import time from typing import ContextManager, Optional, Callable, Tuple, ClassVar, Union, Type import PIL # type: ignore import PIL.Image # type: ignore from . import packets from .interfaces import SCSIInterface, SerialInterface, InterfaceException logger = logging.getLogger(__name__) # pylint: disable=invalid-name def retry(func: Callable[..., bool], count: int = 3) -> Callable[..., bool]: def wrapper(*args, **kwargs) -> bool: for _ in range(count): if func(*args, **kwargs): return True return False return wrapper def save_bitmap_to_file(path: str, bitmap: bytes) -> None: img = PIL.Image.frombytes("L", (202, 258), bitmap, "raw") img.save(path, "BMP") class GT521F32Exception(Exception): pass class GT521F32: _PROMPT_INTERVAL: ClassVar[float] = 0.1 _port: str _interface: Union[SerialInterface, SCSIInterface] _firmware_version: Optional[str] = None _iso_area_max_size: Optional[int] = None _device_serial_number: Optional[str] = None _cancel: threading.Event @staticmethod def _choose_interface_type( port, ) -> Union[Type[SerialInterface], Type[SCSIInterface]]: if any( port.startswith(_) for _ in ("COM", "/dev/tty") ): # Any other path patterns? return SerialInterface if port.startswith("/dev/sg") or ( port[0].isalpha() and port[1] == ":" and len(port) == 2 ): return SCSIInterface raise GT521F32Exception("Could not derive interface type from port path") def __init__(self, port: str, baudrate: Optional[None] = None): self._port = port try: interface_cls = GT521F32._choose_interface_type(port) logger.debug("Chose interface type %s", interface_cls.__name__) if baudrate is not None: if interface_cls is not SerialInterface: raise GT521F32Exception( "Baud rate can only be given for serial interfaces." 
) self._interface = interface_cls(port=port, baudrate=baudrate) else: self._interface = interface_cls(port=port) except InterfaceException as e: # pylint: disable=invalid-name logger.error("Could not open the fingerprint device: %s", e) raise GT521F32Exception("Failed to open the fingerprint device.") self._cancel = threading.Event() @staticmethod def _delay(seconds: float) -> None: time.sleep(seconds) def send_command(self, command: str, parameter: int) -> Tuple[int, int]: if command not in packets.command_codes.keys(): logger.error("Bad command.") raise GT521F32Exception("Invalid command.") command_code = packets.command_codes[command] command_packet = packets.CommandPacket( parameter=parameter, command=command_code ) self._interface.write(command_packet.to_bytes()) # read response to_read = packets.ResponsePacket().byte_size() response_bytes = self._interface.read(to_read) response_packet = packets.ResponsePacket.from_bytes(response_bytes) if response_packet is None: logger.error("Command failed.") raise GT521F32Exception("Command failed.") if not response_packet.ok: logger.debug( "Command responded with code %x and error %04x", response_packet.response_code, response_packet.parameter, ) return response_packet.response_code, response_packet.parameter def usb_internal_check(self) -> None: _, _ = self.send_command("USB_INTERNAL_CHECK", 0) def change_baud_rate(self, baudrate: int) -> None: # Not really relevant for USB (scsi) mode, but the command # is still supported response_code, parameter = self.send_command("CHANGE_BAUDRATE", baudrate) if response_code != packets.ACK_OK: logger.error( "ChangeBaudRate error: %s", packets.reverse(packets.response_error)[parameter], ) def change_baud_rate_and_reopen(self, baudrate: int) -> None: # We can send the command and it wont do any harm, but we dont want the # interface to be reopened, so unless we are already using a # serial interface, do not proceed if not isinstance(self._interface, SerialInterface): raise NotImplementedError( "Baud-rate not supported for interface type %s" % (type(self._interface),) ) self.change_baud_rate(baudrate) self._interface.close() self._interface = SerialInterface(port=self._port, baudrate=baudrate) @property def firmware_version(self): return self._firmware_version @property def iso_area_max_size(self): return self._iso_area_max_size @property def device_serial_number(self): return self._device_serial_number def open(self) -> Tuple[str, int, str]: _, _ = self.send_command("OPEN", 1) # read data response to_read = packets.OpenDataPacket().byte_size() response_bytes = self._interface.read(to_read) open_data_response = packets.OpenDataPacket.from_bytes(response_bytes) self._firmware_version, self._iso_area_max_size, self._device_serial_number = ( open_data_response.firmware_version, open_data_response.iso_area_max_size, open_data_response.device_serial_number, ) logger.info("Firmware version: %s", open_data_response.firmware_version) logger.info("Iso area max size: %s", open_data_response.iso_area_max_size) logger.info("Serial number: %s", open_data_response.device_serial_number) return ( self.firmware_version, self.iso_area_max_size, self.device_serial_number, ) def module_info(self) -> Tuple[str, str, int, int, int, int, int, int, int]: _, parameter = self.send_command("MODULE_INFO", 0) # read data response to_read = parameter + packets.DataPacket().byte_size() response_bytes = self._interface.read(to_read) module_info_known_size = packets.ModuleInfoDataPacket().byte_size() if to_read > module_info_known_size: 
logger.error("Module info returned more bytes than expected.") module_info_packet = packets.ModuleInfoDataPacket.from_bytes(response_bytes) logger.info("Sensor: %s", module_info_packet.sensor) logger.info("Engine Version: %s", module_info_packet.engine_version) logger.info("Raw Image Width: %s", module_info_packet.raw_img_width) logger.info("Raw Image Height: %s", module_info_packet.raw_img_height) logger.info("Image Height: %s", module_info_packet.img_width) logger.info("Image Width: %s", module_info_packet.img_height) logger.info("Max record count: %s", module_info_packet.max_record_count) logger.info("Enroll count: %s", module_info_packet.enroll_count) logger.info("Template size: %s", module_info_packet.template_size) return ( module_info_packet.sensor, module_info_packet.engine_version, module_info_packet.raw_img_width, module_info_packet.raw_img_height, module_info_packet.img_width, module_info_packet.img_height, module_info_packet.max_record_count, module_info_packet.enroll_count, module_info_packet.template_size, ) def __del__(self) -> None: self.close() def close(self) -> None: # does nothing self.send_command("CLOSE", 0) self.change_baud_rate(9600) self._interface.close() def enroll_start(self, user_id: int) -> bool: response_code, parameter = self.send_command("ENROLL_START", user_id) if response_code != packets.ACK_OK: logger.error( "EnrollStart error: %s", packets.reverse(packets.response_error)[parameter], ) return False return True @retry def enroll_n( # pylint: disable=invalid-name self, n: int, save_enroll_photos: bool = False ) -> bool: self.prompt_finger_and_capture() if save_enroll_photos: # Save image before proceeding out_path = "Enroll%d.bmp" % (n,) logger.info("Saving Enroll%d to %s", n, out_path) bitmap = self.get_image() if bitmap: save_bitmap_to_file(out_path, bitmap) else: logger.error("Could not save image for current enroll cycle.") response_code, parameter = self.send_command("ENROLL%d" % (n,), 0) if response_code != packets.ACK_OK: error_code = packets.reverse(packets.response_error).get(parameter, None) if error_code is None: logger.error("Enroll%d error: %s", n, f"Duplicate ID: {parameter}") return True # fast fail logger.error("Enroll%d error: %s", n, error_code) return False # Will lead to retry logger.debug("Enroll%d succeeded.", n) return True def enroll_user(self, user_id: int, save_enroll_photos: bool = False) -> bool: if not self.enroll_start(user_id): return False for i in range(1, 4): with self.prompt_finger(): if not self.enroll_n( i, save_enroll_photos ): # Not sure why this only works when reentering logger.debug("Enrollment for user id %d failed, aborting.", user_id) break else: logger.debug("Enroll user id: %d succeeded.", user_id) return True return False def identify(self) -> Optional[int]: self.prompt_finger_and_capture() response_code, parameter = self.send_command("IDENTIFY", 0) if response_code != packets.ACK_OK: logger.error( "Identify error: %s", packets.reverse(packets.response_error)[parameter] ) return None return parameter def get_raw_image_safe(self) -> Optional[bytes]: with self.led(): # Undocumented, but sensor crashes if led is off return self._get_raw_image() def _get_raw_image(self) -> Optional[bytes]: # Do not call this with the led off response_code, parameter = self.send_command("GET_RAWIMAGE", 0) if response_code != packets.ACK_OK: logger.error( "GetRawImage error: %s", packets.reverse(packets.response_error)[parameter], ) return None # read data response logger.info("Downloading raw image...") to_read = 
packets.GetRawImageDataPacket().byte_size() response_bytes = self._interface.read(to_read) get_raw_image_data_response = packets.GetRawImageDataPacket.from_bytes( response_bytes ) return get_raw_image_data_response.raw_bitmap def get_image(self) -> Optional[bytes]: response_code, parameter = self.send_command("GET_IMAGE", 0) if response_code != packets.ACK_OK: logger.error( "GetImage error: %s", packets.reverse(packets.response_error)[parameter] ) return None # read data response logger.info("Downloading image...") to_read = packets.GetImageDataPacket().byte_size() response_bytes = self._interface.read(to_read) get_image_data_response = packets.GetImageDataPacket.from_bytes(response_bytes) return get_image_data_response.bitmap @contextlib.contextmanager # type: ignore def led(self) -> ContextManager[None]: # type: ignore self.set_led(True) yield None self.set_led(False) def set_led(self, onoff: bool) -> None: assert isinstance(onoff, bool) # Cannot fail _, _ = self.send_command("CMOS_LED", int(onoff)) def capture(self, best_image: bool = False) -> bool: assert isinstance(best_image, bool) response_code, parameter = self.send_command("CAPTURE", int(best_image)) if response_code != packets.ACK_OK: logger.error( "Capture error: %s", packets.reverse(packets.response_error)[parameter] ) return False return True def get_enrolled_count(self) -> int: # Supposedly this cannot fail? _, parameter = self.send_command("ENROLL_COUNT", 0) return parameter def is_id_enrolled(self, user_id: int) -> bool: response_code, parameter = self.send_command("CHECK_ENROLLED", user_id) if response_code != packets.ACK_OK: logger.error( "CheckEnroll %d error: %s", user_id, packets.reverse(packets.response_error)[parameter], ) return False return True def delete_id(self, user_id: int) -> bool: response_code, parameter = self.send_command("DELETE_ID", user_id) if response_code != packets.ACK_OK: logger.error( "DeleteID %d error: %s", user_id, packets.reverse(packets.response_error)[parameter], ) return False return True def delete_all(self) -> bool: response_code, parameter = self.send_command("DELETE_ALL", 0) if response_code != packets.ACK_OK: logger.error( "DeleteAll error: %s", packets.reverse(packets.response_error)[parameter], ) return False return True def verify(self, user_id: int) -> bool: self.prompt_finger_and_capture() response_code, parameter = self.send_command("VERIFY", user_id) if response_code != packets.ACK_OK: logger.error( "Verify %d error: %s", user_id, packets.reverse(packets.response_error)[parameter], ) return False return True def save_image_to_bmp(self, path: str) -> None: self.prompt_finger_and_capture() bitmap = self.get_image() if bitmap: save_bitmap_to_file(path, bitmap) # Utitilies def is_finger_pressed(self) -> bool: response_code, parameter = self.send_command("IS_PRESS_FINGER", 0) if response_code != packets.ACK_OK: logger.error( "IsFingerPressed error: %s", packets.reverse(packets.response_error)[parameter], ) return False return not bool(parameter) def cancel(self) -> None: self._cancel.set() def wait_for_finger_press(self, interval: float = _PROMPT_INTERVAL) -> None: while not self._cancel.is_set() and not self.is_finger_pressed(): self._delay(interval) if self._cancel.is_set(): logger.info("Cancelled action.") self._cancel.clear() def prompt_finger_and_capture(self) -> None: with self.prompt_finger(): self.capture() @contextlib.contextmanager # type: ignore def prompt_finger(self) -> ContextManager[None]: # type: ignore with self.led(): 
self.wait_for_finger_press(self._PROMPT_INTERVAL) yield ```
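A hedged usage sketch for the driver above — the class name and its constructor argument are assumptions, since the class header sits before this excerpt; only methods defined above are called:
```python
# Hypothetical names: the driver class and its serial-port argument are not
# shown in this excerpt, so `FingerprintSensor("/dev/ttyUSB0")` is illustrative.
sensor = FingerprintSensor("/dev/ttyUSB0")
try:
    if sensor.enroll_user(user_id=1, save_enroll_photos=True):
        matched_id = sensor.identify()  # enrolled id on success, None on error
        print("Identified user id:", matched_id)
finally:
    sensor.close()  # sends CLOSE and restores 9600 baud
```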
{ "source": "0xa10/scraper", "score": 3 }
#### File: 0xa10/scraper/scrape.py ```python import asyncio import cgi from contextlib import contextmanager import functools import os import pathlib import blessings from bs4 import BeautifulSoup import progressbar from absl import app from absl import flags from absl import logging import aiofiles import aiohttp FLAGS = flags.FLAGS flags.DEFINE_string('output_dir', 'scrape_results', 'Directory to write scrape output to.') FLAGS.verbosity = -1 BANNER = "\n".join((r" ____ ", r"/ ___| ___ _ __ __ _ _ __ ___ _ __ ", r"\___ \ / __| '__/ _` | '_ \ / _ \ '__|", r" ___) | (__| | | (_| | |_) | __/ | ", r"|____/ \___|_| \__,_| .__/ \___|_| ", r" |_| ")) MAIN_BAR_WIDGETS = ['Tasks complete: ', progressbar.SimpleProgress(), ' (', progressbar.Percentage(), ') ', progressbar.AnimatedMarker()] SUB_BAR_WIDGETS = [': ', ' (', progressbar.Percentage(), ') ', progressbar.Bar(), progressbar.FileTransferSpeed()] class ProgressBarManager(object): """Draws a main progress bars and sub-task progress bars on a given blessings terminal""" SUB_LINE_OFFSET = 1 SUB_INDENT = 4 DEFAULT_COORDS = (0, 0) DEFAULT_LINE_MAX = 10 def __init__(self, terminal, main_coords=None, lines=None): """Takes a terminal object, anchor location and amount of lines for sub bars""" self._terminal = terminal self._main_coords = main_coords if main_coords else self.DEFAULT_COORDS self._sub_coords = (main_coords[0] + self.SUB_INDENT, main_coords[1] + self.SUB_LINE_OFFSET) self._lines = [None,] * (lines if lines else self.DEFAULT_LINE_MAX) def _get_sub_line(self, idx): return (self._sub_coords[0], self._sub_coords[1] + idx) def _alloc_slot(self): if None in self._lines: return self._lines.index(None) return None def _free_slot(self, slot): self._lines[slot] = None @contextmanager def install_main(self, progress_bar): """Context manager for starting and positioning main progress bar""" # Replace writer progress_bar.fd = Writer(self._main_coords) # Make bold progress_bar.widgets = [TERM.bold,] + progress_bar.widgets progress_bar.start() yield progress_bar progress_bar.finish() @contextmanager def install_sub_bar(self, progress_bar): """Context manager for starting and positioning sub progress bars""" # Request a free slot slot = self._alloc_slot() if slot is not None: self._lines[slot] = progress_bar # Add line widgets progress_bar.widgets = ["%d: " % slot,] + progress_bar.widgets # Replace writer progress_bar.fd = Writer(self._get_sub_line(slot)) else: # If no slots are free, redirect to /dev/null progress_bar.fd = open(os.devnull, 'w') # Make yellow progress_bar.widgets = [TERM.yellow, ] + progress_bar.widgets progress_bar.start() yield progress_bar progress_bar.finish() # Free the slot if slot is not None: self._free_slot(slot) def bound_concurrency(size): """Decorator to limit concurrency on coroutine calls""" sem = asyncio.Semaphore(size) def decorator(func): """Actual decorator""" @functools.wraps(func) async def wrapper(*args, **kwargs): """Wrapper""" async with sem: return await func(*args, **kwargs) return wrapper return decorator # Bind concurrency at 10 executions @bound_concurrency(10) async def download_file(url, output_folder): """Download a file at url to the output path""" try: LOGGER.debug("Downloading file from uri: %r to folder: %r", url, output_folder) async with aiohttp.ClientSession() as session: # Request the file resp = await session.get(url) # Extract the basename, for cases where the filename is unknown basename = os.path.basename(url) # Attempt to extract file name from the Content-Disposition header content_dispo = 
resp.headers.get('Content-Disposition', None)
            if content_dispo:
                # Parse disposition and extract filename
                _, params = cgi.parse_header(content_dispo)
                filename = params.get('filename')
                LOGGER.debug("Got filename %r for basename %r", filename, basename)
            else:
                # If the content disposition is not available, just use the basename from the url
                filename = basename
            # Attempt to get the file size from the Content-Length header
            content_length = resp.headers.get('Content-Length', None)
            file_size = int(content_length) if content_length else None
            # Create output file
            async with aiofiles.open(pathlib.Path(output_folder, filename), "wb") as output_file:
                # Create progress bar for this file
                chunks_bar = progressbar.ProgressBar(
                    widgets=[filename,] + SUB_BAR_WIDGETS,
                    maxval=file_size)
                chunks_bar.term_width = int(chunks_bar.term_width * 0.6)
                # Install to sub progress bar
                with PROGRESS_MANAGER.install_sub_bar(chunks_bar):
                    # Iterate over chunks and write them to file
                    async for chunk, _ in resp.content.iter_chunks():
                        if file_size:
                            # If we have the file size, update the progress bar
                            chunks_bar.update(chunks_bar.currval + len(chunk))
                        await output_file.write(chunk)
    except (aiohttp.client_exceptions.ClientError, asyncio.TimeoutError) as exc:
        LOGGER.error("Could not download file %r - %r", url, exc)
    except OSError as exc:
        LOGGER.error("Could not write file - %r", exc)


async def fetch(url):
    """Fetch a page and return the response"""
    async with aiohttp.ClientSession() as session:
        LOGGER.debug("Fetching url %r", url)
        resp = await session.get(url)
        LOGGER.debug("Got response, status %d", resp.status)
        resp.raise_for_status()
        return await resp.text()


# pylint: disable=too-few-public-methods
class Writer(object):
    """Create an object with a write method that writes to a specific place
    on the screen, defined at instantiation.
    This is the glue between blessings and progressbar.
    """
    # Taken from https://github.com/aaren/multi_progress
    def __init__(self, location):
        """Input: location - tuple of ints (x, y), the position of the bar in the terminal"""
        self.location = location

    def write(self, string):
        """Write with saved location"""
        with TERM.location(*self.location):
            print(string)


async def scrape(output_dir):
    """Scrape"""
    url = "http://speedtest.tele2.net/"
    try:
        resp_text = await fetch(url)
    except (aiohttp.client_exceptions.ClientError, asyncio.TimeoutError) as exc:
        LOGGER.error("Could not fetch page - %r", exc)
        return
    # Make soup
    LOGGER.info("Extracting 100MB link")
    soup = BeautifulSoup(resp_text, 'html.parser')
    # Filter all links whose target ends with the '.zip' suffix.
all_files = list(filter(lambda element: element.attrs.get('href', '').endswith('.zip'), soup.findAll("a", href=True))) LOGGER.info("Got %d zip links", len(all_files)) all_urls = map(lambda element: element.attrs['href'], all_files) target_url = list(filter(lambda href: href.startswith("100MB"), all_urls))[0] coros_bar = progressbar.ProgressBar( widgets=MAIN_BAR_WIDGETS, maxval=1) coros_bar.term_width = int(coros_bar.term_width * 0.6) pathlib.Path(output_dir).mkdir(parents=True, exist_ok=True) with PROGRESS_MANAGER.install_main(coros_bar): await download_file(url + target_url, output_dir) def print_banner(): """Print the colorful blinking banner""" print(TERM.magenta + TERM.blink + TERM.bold + BANNER + TERM.normal) def main(_): """Main function""" # pylint: disable=global-statement global PROGRESS_MANAGER # Enter fullscreen hidden cursor mode with TERM.fullscreen(), TERM.hidden_cursor(): # Instantiate the progress manager PROGRESS_MANAGER = ProgressBarManager(TERM, main_coords=(0, len(BANNER.splitlines()) + 1)) print_banner() # Start work print("\nStarting...") loop = asyncio.get_event_loop() loop.run_until_complete(scrape(FLAGS.output_dir)) TERM = blessings.Terminal() PROGRESS_MANAGER = None LOGGER = logging.get_absl_logger() if __name__ == "__main__": app.run(main) ```
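The `bound_concurrency` decorator above is independently reusable; a minimal sketch of how it caps in-flight coroutines (the sleep duration and counts are illustrative):
```python
import asyncio

@bound_concurrency(2)  # reuses the decorator defined above; at most 2 run at once
async def fake_download(i):
    await asyncio.sleep(0.1)
    return i

loop = asyncio.get_event_loop()
print(loop.run_until_complete(asyncio.gather(*(fake_download(i) for i in range(6)))))
```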
{ "source": "0xa5a5/PQClean", "score": 2 }
#### File: PQClean/test/pqclean.py
```python
import glob
import os
from typing import Optional

import yaml
import platform


class Scheme:
    def __init__(self):
        self.type = None
        self.name = None
        self.implementations = []

    def path(self, base='..'):
        return os.path.join(base, 'crypto_' + self.type, self.name)

    def namespace_prefix(self):
        return 'PQCLEAN_{}_'.format(self.name.upper()).replace('-', '')

    @staticmethod
    def by_name(scheme_name):
        for scheme in Scheme.all_schemes():
            if scheme.name == scheme_name:
                return scheme
        raise KeyError()

    @staticmethod
    def all_schemes():
        schemes = []
        schemes.extend(Scheme.all_schemes_of_type('kem'))
        schemes.extend(Scheme.all_schemes_of_type('sign'))
        return schemes

    @staticmethod
    def all_implementations():
        implementations = []
        for scheme in Scheme.all_schemes():
            implementations.extend(scheme.implementations)
        return implementations

    @staticmethod
    def all_supported_implementations():
        return [impl for impl in Scheme.all_implementations()
                if impl.supported_on_current_platform()]

    @staticmethod
    def all_schemes_of_type(type: str) -> list:
        schemes = []
        p = os.path.join('..', 'crypto_' + type)
        if os.path.isdir(p):
            for d in os.listdir(p):
                if os.path.isdir(os.path.join(p, d)):
                    if type == 'kem':
                        schemes.append(KEM(d))
                    elif type == 'sign':
                        schemes.append(Signature(d))
                    else:
                        # the original `assert('Unknown type')` always passed,
                        # since a non-empty string is truthy
                        assert False, 'Unknown type'
        return schemes

    def metadata(self):
        metafile = os.path.join(self.path(), 'META.yml')
        try:
            with open(metafile, encoding='utf-8') as f:
                metadata = yaml.safe_load(f.read())
            return metadata
        except Exception as e:
            print("Can't open {}: {}".format(metafile, e))
            return None

    def __repr__(self):
        return "<{}({})>".format(self.type.title(), self.name)


class Implementation:

    def __init__(self, scheme, name):
        self.scheme = scheme
        self.name = name

    def metadata(self):
        for i in self.scheme.metadata()['implementations']:
            if i['name'] == self.name:
                return i

    def path(self, base='..') -> str:
        return os.path.join(self.scheme.path(base=base), self.name)

    def libname(self) -> str:
        if os.name == 'nt':
            return "lib{}_{}.lib".format(self.scheme.name, self.name)
        return "lib{}_{}.a".format(self.scheme.name, self.name)

    def cfiles(self) -> [str]:
        return glob.glob(os.path.join(self.path(), '*.c'))

    def hfiles(self) -> [str]:
        return glob.glob(os.path.join(self.path(), '*.h'))

    def ofiles(self) -> [str]:
        return glob.glob(os.path.join(self.path(), '*.o' if os.name != 'nt' else '*.obj'))

    @staticmethod
    def by_name(scheme_name, implementation_name):
        scheme = Scheme.by_name(scheme_name)
        for implementation in scheme.implementations:
            if implementation.name == implementation_name:
                return implementation
        raise KeyError()

    @staticmethod
    def all_implementations(scheme: Scheme) -> list:
        implementations = []
        for d in os.listdir(scheme.path()):
            if os.path.isdir(os.path.join(scheme.path(), d)):
                implementations.append(Implementation(scheme, d))
        return implementations

    @staticmethod
    def all_supported_implementations(scheme: Scheme) -> list:
        return [impl for impl in Implementation.all_implementations(scheme)
                if impl.supported_on_current_platform()]

    def namespace_prefix(self):
        return '{}{}_'.format(self.scheme.namespace_prefix(),
                              self.name.upper()).replace('-', '')

    def supported_on_os(self, os: Optional[str] = None) -> bool:
        """Check if we support the OS

        If no OS is specified, the current OS is checked.
        """
        if os is None:
            os = platform.system()

        for platform_ in self.metadata().get('supported_platforms', []):
            if 'operating_systems' in platform_:
                if os not in platform_['operating_systems']:
                    return False

        return True

    def 
supported_on_current_platform(self) -> bool: if 'supported_platforms' not in self.metadata(): return True if platform.machine() == 'ppc': return False if not self.supported_on_os(): return False if not hasattr(Implementation, 'CPUINFO'): import cpuinfo Implementation.CPUINFO = cpuinfo.get_cpu_info() CPUINFO = Implementation.CPUINFO for platform_ in self.metadata()['supported_platforms']: if platform_['architecture'] == CPUINFO['arch'].lower(): # Detect actually running on emulated i386 if (platform_['architecture'] == 'x86_64' and platform.architecture()[0] == '32bit'): continue if all([flag in CPUINFO['flags'] for flag in platform_['required_flags']]): return True return False def __str__(self): return "{} implementation of {}".format(self.name, self.scheme.name) def __repr__(self): return "<Implementation({}, {})>".format(self.scheme.name, self.name) class KEM(Scheme): def __init__(self, name: str): self.type = 'kem' self.name = name self.implementations = Implementation.all_implementations(self) @staticmethod def all_kems() -> list: return Scheme.all_schemes_of_type('kem') class Signature(Scheme): def __init__(self, name: str): self.type = 'sign' self.name = name self.implementations = Implementation.all_implementations(self) @staticmethod def all_sigs(): return Scheme.all_schemes_of_type('sign') ``` #### File: PQClean/test/test_duplicate_consistency.py ```python import difflib import os import sys import yaml import helpers import pqclean def pytest_generate_tests(metafunc): ids = [] argvalues = [] for scheme in pqclean.Scheme.all_schemes(): for implementation in scheme.implementations: if os.path.isfile( os.path.join( 'duplicate_consistency', '{}_{}.yml'.format(scheme.name, implementation.name))): metafile = os.path.join( 'duplicate_consistency', '{}_{}.yml'.format(scheme.name, implementation.name)) with open(metafile, encoding='utf-8') as f: metadata = yaml.safe_load(f.read()) for group in metadata['consistency_checks']: source = pqclean.Implementation.by_name( group['source']['scheme'], group['source']['implementation']) for file in group['files']: argvalues.append((implementation, source, file)) ids.append( "{scheme.name} {implementation.name} {source.scheme.name}: {file}" .format(scheme=scheme, source=source, implementation=implementation, file=file)) metafunc.parametrize(('implementation', 'source', 'file'), argvalues, ids=ids) def file_get_contents(filename): with open(filename) as f: return f.read() @helpers.skip_windows() @helpers.filtered_test def test_duplicate_consistency(implementation, source, file): target_path = os.path.join(source.path(), file) this_path = os.path.join(implementation.path(), file) target_src = file_get_contents(target_path) this_src = file_get_contents(this_path) this_transformed_src = this_src.replace( implementation.namespace_prefix(), '') target_transformed_src = target_src.replace(source.namespace_prefix(), '') if not this_transformed_src == target_transformed_src: diff = difflib.unified_diff( this_transformed_src.splitlines(keepends=True), target_transformed_src.splitlines(keepends=True), fromfile=this_path, tofile=target_path) raise AssertionError( "Files differed:\n" + ''.join(diff)) if __name__ == '__main__': import pytest pytest.main(sys.argv) ``` #### File: PQClean/test/test_linter.py ```python import os import platform import unittest from glob import glob import pytest import helpers import pqclean additional_flags = [] #['-fix-errors'] @pytest.mark.parametrize( 'implementation', pqclean.Scheme.all_supported_implementations(), ids=str, ) 
@helpers.skip_windows() @helpers.filtered_test def test_clang_tidy(implementation: pqclean.Implementation): if platform.machine() in ['i386']: raise unittest.SkipTest("Clang-tidy has false-positives on i386") helpers.ensure_available('clang-tidy') cfiles = implementation.cfiles() common_files = glob(os.path.join('..', 'common', '*.c')) (returncode, _) = helpers.run_subprocess( ['clang-tidy', '-quiet', '-header-filter=.*', *additional_flags, *cfiles, *common_files, '--', '-iquote', os.path.join('..', 'common'), '-iquote', implementation.path()], expected_returncode=None, ) # Detect and gracefully avoid segfaults if returncode == -11: raise unittest.SkipTest("clang-tidy segfaulted") assert returncode == 0, "Clang-tidy returned %d" % returncode if __name__ == "__main__": import sys pytest.main(sys.argv) ```
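A sketch of driving the `pqclean` helpers above from a REPL — it assumes the working directory is the repo's `test/` directory, since the relative `../crypto_*` paths are hard-coded:
```python
import pqclean  # the helper module defined above

for scheme in pqclean.Scheme.all_schemes():
    for impl in scheme.implementations:
        print(scheme.type, scheme.name, impl.name,
              impl.supported_on_current_platform())
```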
{ "source": "0xAA23/monte_carlo", "score": 4 }
#### File: monte_carlo/montecarlo/freak.py
```python
import collections
import itertools


def frequencies(outcomes):
    """Return the unique elements of the data together with their frequencies.

    Parameters:
        outcomes = iterable of outcomes (currently a simple list; randomness is assumed)

    Returns:
        An iterable of (outcome, count) pairs, as produced by collections.Counter.items()
    """
    if outcomes is not None:  # the original checked `type(outcomes)`, which is never None
        # Return all of the items in Counter
        return collections.Counter(outcomes).items()


def groups(outcomes, g=True):
    """Generate groups of consecutive outcomes as [value, run_length] pairs.

    Example:
        [ "H", "H", "T", "T", "H", "H", "H", "H", "T", "T" ]
        ["H", 2] ["T", 2] ["H", 4] ["T", 2]

    Parameters:
        outcomes = iterable of outcomes
        g = True for a generator, False for a list
    """
    # A function body containing `yield` is always a generator, so the original
    # if/else (yield vs. return a list) could never hand the list back to the
    # caller; build a generator expression and materialise it on demand instead.
    gen = ([val, sum(1 for _ in streak)] for val, streak in itertools.groupby(outcomes))
    return gen if g else list(gen)


def p_event(outcomes):
    """Return the probability (a real number between 0 and 1) of each outcome.

    Example: for outcomes drawn from { "H", "T" }, how often does each occur
    relative to the total number of trials? I.e. normalised frequencies.

    Parameters:
        outcomes = data containing outcomes of trials

    Returns:
        dict mapping each outcome to a real number between 0 and 1
    """
    # collections.Counter returns a dict of all outcomes and their frequencies
    c = collections.Counter(outcomes)
    # convert the dictionary to one of probabilities instead
    return dict((key, value / sum(c.values())) for (key, value) in c.items())
```
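A quick worked example for the three helpers above; the expected values follow directly from the coin-flip data:
```python
outcomes = ["H", "H", "T", "T", "H", "H", "H", "H", "T", "T"]
print(list(frequencies(outcomes)))  # [('H', 6), ('T', 4)] (pair order may vary)
print(list(groups(outcomes)))       # [['H', 2], ['T', 2], ['H', 4], ['T', 2]]
print(p_event(outcomes))            # {'H': 0.6, 'T': 0.4}
```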
{ "source": "0xAA55/WebGL4WASM", "score": 3 }
#### File: 0xAA55/WebGL4WASM/testhost.py
```python
import http.server
import socketserver

PORT = 8000
DIRECTORY = "testwww"

# allow_reuse_address is a class attribute of TCPServer; assigning it on the
# socketserver module (as the original did) has no effect.
socketserver.TCPServer.allow_reuse_address = True


class Handler(http.server.SimpleHTTPRequestHandler):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, directory=DIRECTORY, **kwargs)


with socketserver.TCPServer(("0.0.0.0", PORT), Handler) as httpd:
    print("Serving at port", PORT)
    httpd.serve_forever()
```
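To sanity-check the server above from another shell, any HTTP client works; `requests` here is an assumption, not a project dependency:
```python
import requests  # assumed to be installed; `curl http://localhost:8000/` works too

resp = requests.get("http://localhost:8000/")
print(resp.status_code)  # 200 once testwww/ exists and serves an index
```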
{ "source": "0xAAAAAA/ml-titanic-survival-predictor", "score": 3 }
#### File: 0xAAAAAA/ml-titanic-survival-predictor/main.py ```python import numpy as np import pandas as pd import os from string import Template from flask import Flask from itertools import * from sklearn.model_selection import train_test_split from sklearn.neighbors import KNeighborsClassifier app = Flask(__name__, static_folder="assets") df = pd.read_csv('train.csv') test = pd.read_csv('test.csv') df.drop('Cabin', axis=1, inplace=True) test.drop('Cabin', axis=1, inplace=True) def mean_age(cols): Age = cols[0] Pclass = cols[1] if pd.isnull(Age): if Pclass == 1: return 37 elif Pclass == 2: return 29 else: return 24 else: return Age def generateRows(result, names): returner = [] for row, name in zip(result.itertuples(index=True), names): string = "<tr><td>{0}</td><td>{1}</td><td>{2}</td></tr>".format(row[0][0], name, row[1]) returner.append(string) return "".join(row for row in returner) df['Age'] = df[['Age', 'Pclass']].apply(mean_age, axis=1) test['Age'] = test[['Age', 'Pclass']].apply(mean_age, axis=1) age_range = [0, 0, 0, 0, 0, 0, 0, 0, 0] for i in df['Age']: age_range[int(i//10)] = age_range[int(i//10)] + 1 '''Drop all remaining NaN''' df.dropna(inplace=True) test.dropna(inplace=True) '''Remove Perfect predictors from dataframe''' sex = pd.get_dummies(df['Sex'], drop_first=True) embark = pd.get_dummies(df['Embarked'], drop_first=True) sex_test = pd.get_dummies(test['Sex'], drop_first=True) embark_test = pd.get_dummies(test['Embarked'], drop_first=True) # making dummy because the column has only 3 values (1,2,3) # TODO: maybe remove this dummy and not drop it 7 lines below if algorithm doesnt work pclass = pd.get_dummies(df['Pclass'], drop_first=True) pclass_test = pd.get_dummies(test['Pclass'], drop_first=True) '''Modify Dataframe with training series''' df = pd.concat([df, sex, embark, pclass], axis=1) test = pd.concat([test, sex_test, embark_test, pclass_test], axis=1) passenger_ids = test['PassengerId'] names = test['Name'] test.drop(['Sex', 'Embarked', 'Ticket', 'Name', 'PassengerId', 'Parch', 'Pclass'], axis=1, inplace=True) X = df.drop(['Sex', 'Embarked', 'Name', 'Ticket', 'Parch', 'PassengerId', 'Pclass', 'Survived'], axis=1) y = df['Survived'] X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=1, random_state=101) knn = KNeighborsClassifier() knn.fit(X_train, y_train) predictions = knn.predict(test) result = pd.DataFrame( data=predictions, index=[passenger_ids, names], columns=['Survived']) relationships = df.SibSp.value_counts().to_dict() relation_labels = list(relationships.keys()) relation_vals = list(relationships.values()) params = { 'maleCount': len(df[(df.Sex == "male") & (df.Survived == 1)]), 'femaleCount': len(df[(df.Sex == "female") & (df.Survived == 1)]), 'firstClass': len(df[(df.Pclass == 1) & (df.Survived == 1)]), 'secondClass': len(df[(df.Pclass == 2) & (df.Survived == 1)]), 'thirdClass': len(df[(df.Pclass == 3) & (df.Survived == 1)]), 'relationshipLabels': str(relation_labels), 'relationshipValues': str(relation_vals), 'ageRange': str(age_range), 'rowsWithData': generateRows(result, names) } template = Template(''' <!DOCTYPE HTML> <html> <head> <title>Titanic</title> <meta charset="utf-8" /> <meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no" /> <script src="https://cdnjs.cloudflare.com/ajax/libs/Chart.js/2.7.3/Chart.min.js"></script> <link rel="stylesheet" href="assets/css/main.css" /> <noscript><link rel="stylesheet" href="assets/css/noscript.css" /></noscript> </head> <body class="is-preload"> <!-- Wrapper 
-->
    <div id="wrapper">

        <!-- Header -->
        <header id="header" class="alt">
            <h1>Titanic</h1>
            <p>Passenger Survival Predictor using KNN Algorithm</p>
        </header>

        <!-- Nav -->
        <nav id="nav">
            <ul>
                <li><a href="#gender">Gender</a></li>
                <li><a href="#relationship">Relationship</a></li>
                <li><a href="#pclass">Class</a></li>
                <li><a href="#age">Age</a></li>
                <li><a href="#results">Result</a></li>
            </ul>
        </nav>

        <!-- Main -->
        <div id="main">
            <section id="gender" class="main special">
                <header class="major">
                    <h2>Gender Distribution</h2>
                    <p>Most of the survivors were females</p>
                </header>
                <canvas id="genderCanvas"></canvas>
            </section>
            <section id="relationship" class="main special">
                <header class="major">
                    <h2>Relationship Distribution</h2>
                    <p>Survival rate is inversely proportional to number of relatives on board</p>
                </header>
                <canvas id="relationCanvas"></canvas>
            </section>
            <section id="pclass" class="main special">
                <header class="major">
                    <h2>Passenger Class Distribution</h2>
                    <p>Highest number of First class passengers survived</p>
                </header>
                <canvas id="pclassCanvas"></canvas>
            </section>
            <section id="age" class="main special">
                <header class="major">
                    <h2>Age Distribution</h2>
                    <p>Most survivors were in their 20-30s</p>
                </header>
                <canvas id="ageCanvas"></canvas>
            </section>
            <section id="results" class="main special">
                <header class="major">
                    <h2>Results</h2>
                </header>
                <div class="table-wrapper">
                    <table>
                        <tr>
                            <th>Passenger Id</th>
                            <th>Name</th>
                            <th>Survived?</th>
                        </tr>
                        $rowsWithData
                    </table>
                </div>
            </section>
        </div>

        <footer id="footer">
            <section>
                <h2>Made by</h2>
                <p>By <b><NAME></b> (16101A0015), <b>Sanket Udapi</b> (16101A0014) and <b>Amey Nikam</b> (16101A0017)</p>
            </section>
            <section>
                <h2>Dataset</h2>
                <p><a href="https://www.kaggle.com/c/titanic/data">Available here</a></p>
            </section>
        </footer>
    </div>

    <!-- Scripts -->
    <script src="assets/js/jquery.min.js"></script>
    <script src="assets/js/jquery.scrollex.min.js"></script>
    <script src="assets/js/jquery.scrolly.min.js"></script>
    <script src="assets/js/browser.min.js"></script>
    <script src="assets/js/breakpoints.min.js"></script>
    <script src="assets/js/util.js"></script>
    <script src="assets/js/main.js"></script>
    <script>
        const gender = document.getElementById('genderCanvas').getContext('2d')
        new Chart(gender, {
            type: 'pie',
            data: {
                labels: ['Males', 'Females'],
                datasets: [{
                    label: 'Survivors',
                    data: [$maleCount, $femaleCount],
                    backgroundColor: [
                        'rgba(255, 99, 132, 1)',
                        'rgba(54, 162, 235, 1)',
                    ],
                    borderColor: [
                        'rgba(255, 99, 132, 1)',
                        'rgba(54, 162, 235, 1)',
                    ],
                    borderWidth: 1
                }]
            }
        })
        const pclass = document.getElementById('pclassCanvas').getContext('2d')
        new Chart(pclass, {
            type: 'doughnut',
            data: {
                labels: ['First', 'Second', 'Third'],
                datasets: [{
                    label: 'Survivors',
                    data: [$firstClass, $secondClass, $thirdClass],
                    backgroundColor: [
                        'rgba(255, 99, 132, 1)',
                        'rgba(54, 162, 235, 1)',
                        'rgba(255, 206, 86, 1)'
                    ],
                    borderColor: [
                        'rgba(255, 99, 132, 1)',
                        'rgba(54, 162, 235, 1)',
                        'rgba(255, 206, 86, 1)',
                    ],
                    borderWidth: 1
                }]
            }
        })
        const relation = document.getElementById('relationCanvas').getContext('2d')
        new Chart(relation, {
            type: 'line',
            data: {
                labels: $relationshipLabels,
                datasets: [{
                    label: "Survivors",
                    data: $relationshipValues,
                    backgroundColor: [
                        'rgba(255, 99, 132, 1)',
                        'rgba(54, 162, 235, 1)',
                        'rgba(255, 206, 86, 1)',
                        'rgba(75, 192, 192, 1)',
                        'rgba(153, 102, 255, 1)',
                        'rgba(255, 159, 64, 1)'
                    ],
                    borderColor: [
                        'rgba(255, 99, 132, 1)',
                        'rgba(54, 162, 235, 1)',
                        'rgba(255, 206, 86, 1)',
                        'rgba(75, 192, 192, 1)',
                        'rgba(153, 102, 255, 1)',
                        'rgba(255, 159, 64, 1)'
                    ],
borderWidth: 1 }] } }) const age = document.getElementById('ageCanvas').getContext('2d') new Chart(age, { type: 'bar', data: { labels: ["0-9", "10-19", "20-29", "30-39", "40-49", "50-59", "60-69", "70-79", "80+"], datasets: [{ label: "Survivors", data: $ageRange, backgroundColor: [ 'rgba(255, 99, 132, 1)', 'rgba(54, 162, 235, 1)', 'rgba(255, 206, 86, 1)', 'rgba(75, 192, 192, 1)', 'rgba(153, 102, 255, 1)', 'rgba(255, 159, 64, 1)' ], borderColor: [ 'rgba(255, 99, 132, 1)', 'rgba(54, 162, 235, 1)', 'rgba(255, 206, 86, 1)', 'rgba(75, 192, 192, 1)', 'rgba(153, 102, 255, 1)', 'rgba(255, 159, 64, 1)' ], borderWidth: 1 }] } }) </script> </body> </html> ''').safe_substitute(params) @app.route("/") def index(): return template @app.route("/favicon.ico") def favicon(): return "False" if __name__ == '__main__': app.run() ```
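The `mean_age` imputer above keys entirely off passenger class when age is missing; a small check of its behaviour (the values mirror the hard-coded class medians):
```python
print(mean_age([None, 1]))   # 37: missing age, first class
print(mean_age([None, 3]))   # 24: missing age, third class
print(mean_age([42.0, 2]))   # 42.0: known ages pass through unchanged
```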
{ "source": "0xAalaoui/netbox", "score": 2 }
#### File: netbox/extras/views.py ```python from __future__ import unicode_literals from django.contrib import messages from django.contrib.auth.mixins import PermissionRequiredMixin from django.http import Http404 from django.shortcuts import get_object_or_404, redirect, render from django.utils.safestring import mark_safe from django.views.generic import View from utilities.forms import ConfirmationForm from utilities.views import ObjectDeleteView, ObjectEditView from .forms import ImageAttachmentForm from .models import ImageAttachment, ReportResult, UserAction from .reports import get_report, get_reports # # Image attachments # class ImageAttachmentEditView(PermissionRequiredMixin, ObjectEditView): permission_required = 'extras.change_imageattachment' model = ImageAttachment model_form = ImageAttachmentForm def alter_obj(self, imageattachment, request, args, kwargs): if not imageattachment.pk: # Assign the parent object based on URL kwargs model = kwargs.get('model') imageattachment.parent = get_object_or_404(model, pk=kwargs['object_id']) return imageattachment def get_return_url(self, request, imageattachment): return imageattachment.parent.get_absolute_url() class ImageAttachmentDeleteView(PermissionRequiredMixin, ObjectDeleteView): permission_required = 'extras.delete_imageattachment' model = ImageAttachment def get_return_url(self, request, imageattachment): return imageattachment.parent.get_absolute_url() # # Reports # class ReportListView(View): """ Retrieve all of the available reports from disk and the recorded ReportResult (if any) for each. """ def get(self, request): reports = get_reports() results = {r.report: r for r in ReportResult.objects.all()} ret = [] for module, report_list in reports: module_reports = [] for report in report_list: report.result = results.get(report.full_name, None) module_reports.append(report) ret.append((module, module_reports)) return render(request, 'extras/report_list.html', { 'reports': ret, }) class ReportView(View): """ Display a single Report and its associated ReportResult (if any). """ def get(self, request, name): # Retrieve the Report by "<module>.<report>" module_name, report_name = name.split('.') report = get_report(module_name, report_name) if report is None: raise Http404 # Attach the ReportResult (if any) report.result = ReportResult.objects.filter(report=report.full_name).first() return render(request, 'extras/report.html', { 'report': report, 'run_form': ConfirmationForm(), }) class ReportRunView(PermissionRequiredMixin, View): """ Run a Report and record a new ReportResult. """ permission_required = 'extras.add_reportresult' def post(self, request, name): # Retrieve the Report by "<module>.<report>" module_name, report_name = name.split('.') report = get_report(module_name, report_name) if report is None: raise Http404 form = ConfirmationForm(request.POST) if form.is_valid(): # Run the Report. A new ReportResult is created. report.run() result = 'failed' if report.failed else 'passed' msg = "Ran report {} ({})".format(report.full_name, result) messages.success(request, mark_safe(msg)) UserAction.objects.log_create(request.user, report.result, msg) return redirect('extras:report', name=report.full_name) ```
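The views above run report classes discovered from disk via `get_reports`; by NetBox convention a report is a `Report` subclass with `test_*` methods — a hedged sketch (the `Report` base class and its logging helpers live in `extras.reports`, outside this excerpt):
```python
from extras.reports import Report  # assumed import; only the views are shown above

class ExampleReport(Report):
    description = "Illustrative report; the check below is a placeholder"

    def test_placeholder(self):
        # A real check would iterate model instances and call
        # self.log_success()/self.log_failure() per object.
        self.log_success(None, "placeholder check passed")
```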
{ "source": "0xabu/bricknil", "score": 3 }
#### File: bricknil/examples/technic_4x4.py ```python import logging from asyncio import sleep from bricknil import attach, start from bricknil.hub import CPlusHub from bricknil.sensor.motor import CPlusXLMotor @attach(CPlusXLMotor, name='front_drive', port=0) @attach(CPlusXLMotor, name='rear_drive', port=1) class Truck(CPlusHub): async def run(self): self.message_info("Running") await self.front_drive.set_speed(-100) await self.rear_drive.set_speed(-100) await sleep(20) # Give it enough time to gather data async def system(): hub = Truck('truck', True) if __name__ == '__main__': logging.basicConfig(level=logging.DEBUG) start(system) ```
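A minimal variation on the script above using the same `attach`/`start` pattern — treating `set_speed(0)` as a stop command is an assumption:
```python
@attach(CPlusXLMotor, name='front_drive', port=0)
class MiniTruck(CPlusHub):

    async def run(self):
        await self.front_drive.set_speed(50)   # half speed forward
        await sleep(5)
        await self.front_drive.set_speed(0)    # assumed to stop the motor

async def mini_system():
    hub = MiniTruck('mini_truck', True)
```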
{ "source": "0xabu/pdfannots", "score": 2 }
#### File: pdfannots/pdfannots/cli.py
```python
import argparse
import logging
import sys
import typing

from pdfminer.layout import LAParams

from . import __doc__, __version__, process_file
from .printer import Printer
from .printer.markdown import MarkdownPrinter, GroupedMarkdownPrinter
from .printer.json import JsonPrinter

MD_FORMAT_ARGS = ['print_filename', 'remove_hyphens', 'wrap_column', 'condense', 'sections']
"""Names of the arguments passed to the markdown printers."""


def _float_or_disabled(x: str) -> typing.Optional[float]:
    if x.lower().strip() == "disabled":
        return None
    try:
        return float(x)
    except ValueError as ex:
        raise argparse.ArgumentTypeError("invalid float value: {}".format(x)) from ex


def parse_args() -> typing.Tuple[argparse.Namespace, LAParams]:
    p = argparse.ArgumentParser(prog='pdfannots', description=__doc__)

    p.add_argument('--version', action='version',
                   version='%(prog)s ' + __version__)

    p.add_argument("input", metavar="INFILE", type=argparse.FileType("rb"),
                   help="PDF files to process", nargs='+')

    g = p.add_argument_group('Basic options')
    g.add_argument("-p", "--progress", default=False, action="store_true",
                   help="Emit progress information to stderr.")
    g.add_argument("-o", metavar="OUTFILE", type=argparse.FileType("w"), dest="output",
                   default=sys.stdout, help="Output file (default is stdout).")
    g.add_argument("-n", "--cols", default=None, type=int, metavar="COLS", dest="cols",
                   help="Assume a fixed top-to-bottom left-to-right page layout with this many "
                        "columns per page. If unset, PDFMiner's layout detection logic is used.")
    g.add_argument("--keep-hyphens", dest="remove_hyphens", default=True, action="store_false",
                   help="When capturing text across a line break, don't attempt to remove hyphens.")
    g.add_argument("-f", "--format", choices=["md", "json"], default="md",
                   help="Output format (default: markdown).")

    g = p.add_argument_group('Options controlling markdown output')
    g.add_argument("-s", "--sections", metavar="SEC", nargs="*",
                   choices=GroupedMarkdownPrinter.ALL_SECTIONS,
                   default=GroupedMarkdownPrinter.ALL_SECTIONS,
                   help=("sections to emit (default: %s)" %
                         ', '.join(GroupedMarkdownPrinter.ALL_SECTIONS)))
    g.add_argument("--no-condense", dest="condense", default=True, action="store_false",
                   help="Emit annotations as a blockquote regardless of length.")
    g.add_argument("--no-group", dest="group", default=True, action="store_false",
                   help="Emit annotations in order, don't group into sections.")
    g.add_argument("--print-filename", dest="print_filename", default=False,
                   action="store_true", help="Print the name of each file with annotations.")
    g.add_argument("-w", "--wrap", dest="wrap_column", metavar="COLS", type=int,
                   help="Wrap text at this many output columns.")

    g = p.add_argument_group(
        "Advanced options affecting PDFMiner text layout analysis")
    laparams = LAParams()
    g.add_argument(
        "--line-overlap", metavar="REL_HEIGHT", type=float, default=laparams.line_overlap,
        help="If two characters have more overlap than this they are considered to be "
             "on the same line. The overlap is specified relative to the minimum height "
             "of both characters. Default: %s" % laparams.line_overlap)
    g.add_argument(
        "--char-margin", metavar="REL_WIDTH", type=float, default=laparams.char_margin,
        help="If two characters are closer together than this margin they "
             "are considered to be part of the same line. The margin is "
             "specified relative to the character width. 
Default: %s" % laparams.char_margin) g.add_argument( "--word-margin", metavar="REL_WIDTH", type=float, default=laparams.word_margin, help="If two characters on the same line are further apart than this " "margin then they are considered to be two separate words, and " "an intermediate space will be added for readability. The margin " "is specified relative to the character width. Default: %s" % laparams.word_margin) g.add_argument( "--line-margin", metavar="REL_HEIGHT", type=float, default=laparams.line_margin, help="If two lines are close together they are considered to " "be part of the same paragraph. The margin is specified " "relative to the height of a line. Default: %s" % laparams.line_margin) g.add_argument( "--boxes-flow", type=_float_or_disabled, default=laparams.boxes_flow, help="Specifies how much a horizontal and vertical position of a " "text matters when determining the order of lines. The value " "should be within the range of -1.0 (only horizontal position " "matters) to +1.0 (only vertical position matters). You can also " "pass 'disabled' to disable advanced layout analysis, and " "instead return text based on the position of the bottom left " "corner of the text box. Default: %s" % laparams.boxes_flow) # The next two booleans are described as if they default off, so let's ensure that. assert not laparams.detect_vertical assert not laparams.all_texts g.add_argument( "--detect-vertical", default=laparams.detect_vertical, action="store_const", const=(not laparams.detect_vertical), help="Consider vertical text during layout analysis.") g.add_argument( "--all-texts", default=laparams.all_texts, action="store_const", const=(not laparams.all_texts), help="Perform layout analysis on text in figures.") args = p.parse_args() # Propagate parsed layout parameters back to LAParams object for param in ("line_overlap", "char_margin", "word_margin", "line_margin", "boxes_flow", "detect_vertical", "all_texts"): setattr(laparams, param, getattr(args, param)) return args, laparams def main() -> None: args, laparams = parse_args() logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.WARNING) # construct appropriate Printer printer: Printer if args.format == "md": mdargs = {k: getattr(args, k) for k in MD_FORMAT_ARGS} printer = (GroupedMarkdownPrinter if args.group else MarkdownPrinter)(**mdargs) elif args.format == "json": printer = JsonPrinter(remove_hyphens=args.remove_hyphens) def write_if_nonempty(s: str) -> None: if s: args.output.write(s) write_if_nonempty(printer.begin()) # iterate over files for file in args.input: doc = process_file( file, columns_per_page=args.cols, emit_progress_to=(sys.stderr if args.progress else None), laparams=laparams) for line in printer.print_file(file.name, doc): args.output.write(line) write_if_nonempty(printer.end()) ```
{ "source": "0xACAT1DEA/entropy_calculator", "score": 4 }
#### File: 0xACAT1DEA/entropy_calculator/entopy.py
```python
import math
import string

magic = 5  # this is the minimum length for a word to be considered a dictionary word and not random


def complex_password(password):
    '''
    this will return the complexity, i.e. how many 'rounds' it would take to
    bruteforce a password with common bruteforce algorithms. Used properly, it
    is safer than common password rules, since this function is not based on rules.
    '''
    length = len(password)
    complexity = 0
    lower = string.ascii_lowercase
    upper = string.ascii_uppercase
    nums = string.digits
    has_lower = False
    has_upper = False
    has_sym = False
    has_num = False
    has_outwards_sym = False
    has_outwards_num = False
    has_outwards_upper = False
    symbols = ''.join(c for c in string.printable if not c in lower and not c in upper and not c in nums)
    non_print = ''.join(chr(n) for n in xrange(1, 255) if not chr(n) in string.printable)  # disregard null byte
    for index in range(0, length):
        if password[index] in non_print:
            return 254 ** length  # non printable characters (the original used an undefined name `x` here)
        if password[index] in lower:
            has_lower = True
        if password[index] in upper:
            if index == 0 or index == (length - 1):  # this is to check if it's like Password
                has_outwards_upper = True
            else:
                has_upper = True
        if password[index] in symbols:
            if index == 0 or index == (length - 1):  # this is a check if it's like password1
                has_outwards_sym = True
            else:
                has_sym = True
        if password[index] in nums:
            if index == 0 or index == (length - 1):  # this is a check if it's like password1
                has_outwards_num = True
            else:
                has_num = True
    if has_lower:
        complexity += 26
    if has_upper:
        complexity += 26
        has_outwards_upper = False
    if has_sym:
        complexity += 38
        has_outwards_sym = False
    if has_num:
        complexity += 10
        has_outwards_num = False
    if complexity == 0:
        complexity = 1
    combinations = complexity ** length
    if has_outwards_num:
        combinations *= 10
    if has_outwards_upper:
        combinations *= 26
    if has_outwards_sym:
        combinations *= 38
    return combinations


def load_dict(book='/usr/share/dict/words'):
    with open(book) as f:
        return f.readlines()


def min_complex_password(password):
    return len(''.join(set(password))) ** len(password)


def complex_zero(password):
    total = 0
    for x in range(0, len(password)):
        total += complex_password(password[x:])
    return total


def min_zero(password):
    total = 0
    for x in range(0, len(password)):
        total += min_complex_password(password[x:])
    return total


def calc_entropy(value):
    entropy = 0
    while value > 2:
        value >>= 1
        entropy += 1
    return entropy


while True:
    div = 1
    a = raw_input('Enter a password: ')
    words = load_dict()
    # TODO: a password like 'cheesecheese' isn't as secure as this makes it seem
    for x in words:
        x = x.rstrip('\r\n')
        if len(x) > magic and a.find(x) != -1:
            div *= complex_password(x) / len(words)
            print '%s sounds like a dictionary word, dividing your result by %s' % (x, div)
            print '%s is calculated by %s\'s own entropy (%s) multiplied by any previous found words divided by the length of the dictionary (%s)' % \
                (div, x, complex_password(x), len(words))
    b = complex_password(a)/div
    c = min_complex_password(a)
    d = complex_zero(a)/div
    e = min_zero(a)
    f = math.log(complex_password(a)/div, 2)
    print '%s would approximately take %s tries to bruteforce knowing the exact length' % (a, b)
    print '%s would approximately take %s tries to bruteforce by knowing the exact character set and length' % (a, c)
    print '%s would approximately take %s tries to bruteforce without knowing the exact length or character set' % (a, d)
    print '%s would approximately take %s tries to bruteforce by knowing the character set but not the length' % (a, e)
    print '%s has ~%s bits of entropy' % (a, f)
```
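Rough expected outputs for the calculator above, assuming its functions are lifted out of the interactive `while True` loop (importing the module as-is would block on `raw_input`); a lowercase-only password exercises the 26-character set:
```python
pw = "hunter"                  # 6 lowercase letters; complex_password ignores the dictionary
print(complex_password(pw))    # 26 ** 6 = 308915776
print(calc_entropy(26 ** 6))   # 27 (integer-shift approximation of log2 ~ 28.2)
```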
{ "source": "0xack13/PyDispatcher", "score": 3 }
#### File: PyDispatcher/examples/simple_sample.py ```python from pydispatch import dispatcher def doSomethingUseful( table, signal, sender ): """Sample method to receive signals""" print ' doSomethingUseful', repr(table), signal, sender def doSomethingElse( signal, **named ): """Sample method to receive signals This method demonstrates the use of the **named parameter, which allows a method to receive all remaining parameters from the send call. """ print ' doSomethingElse', named def doDefault( ): """Sample method to receive All signals Note that this function will be registered for all signals from a given object. It does not have the same interface as any of the other functions registered for those signals. The system will automatically determine the appropriate calling signature for the function. """ print ' doDefault (no arguments)' class Node(object): """Sample object to send signals, note lack of dispatcher-aware code""" def __init__( self, name="an object" ): self.name = name def __repr__( self ): return "%s( %r )"%( self.__class__.__name__, self.name ) DO_LOTS = 0 DO_SOMETHING = ('THIS','IS','A','MORE','COMPLEX','SIGNAL') DO_SOMETHING_ELSE = Node() ourObjects = [ Node(), Node(), Node(), ] if __name__ == "__main__": # Establish some "routing" connections dispatcher.connect ( doSomethingUseful, signal = DO_LOTS, sender = ourObjects[0], ) dispatcher.connect ( doSomethingElse, signal = DO_SOMETHING, sender = ourObjects[0], ) dispatcher.connect( doDefault, signal = dispatcher.Any, # this is actually the default, sender = ourObjects[0], ) print "Sending DO_LOTS from first object" dispatcher.send( signal = DO_LOTS, sender = ourObjects[0], table = "Table Argument", ) print "Sending DO_SOMETHING from first object" dispatcher.send( signal = DO_SOMETHING, sender = ourObjects[0], table = "Table Argument", ) print "Sending DO_SOMETHING_ELSE from first object" dispatcher.send( signal = DO_SOMETHING_ELSE, sender = ourObjects[0], table = "Table Argument", ) ```
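The sample above only wires connections up; teardown is symmetric via `dispatcher.disconnect`, which is part of the same PyDispatcher API used above:
```python
# Mirror of the connect call earlier in the sample; same Python 2 style.
dispatcher.disconnect(
    doSomethingUseful,
    signal = DO_LOTS,
    sender = ourObjects[0],
)
```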
{ "source": "0xack13/PyStacks", "score": 2 }
#### File: PyStacks/PyStacks/auth.py ```python import os import boto3 class authenticate: def __init__(self, region): self.stsClient = boto3.client('sts') self.assumedRole = os.environ.get('ASSUMED_ROLE') self.mfaSerial = os.environ.get('MFA_SERIAL') self.awsMfaToken = os.environ.get('TOKEN') self.awsRegion = region self.sess = None def getSession(self): if self.sess is None: self.sess = self.newSession() return self.sess def newSession(self): if self.assumedRole is not None: if self.mfaSerial is None: creds = self.stsClient.assume_role( RoleArn=self.assumedRole, RoleSessionName='ecs-deploy-session', DurationSeconds=3600 ) else: creds = self.stsClient.assume_role( RoleArn=self.assumedRole, RoleSessionName='ecs-deploy-session', DurationSeconds=3600, SerialNumber=self.mfaSerial, TokenCode=self.awsMfaToken ) session = boto3.session.Session( aws_access_key_id=creds['Credentials']['AccessKeyId'], aws_secret_access_key=creds['Credentials']['SecretAccessKey'], aws_session_token=creds['Credentials']['SessionToken'], region_name=self.awsRegion ) else: session = boto3.session.Session( region_name=self.awsRegion ) return session ``` #### File: PyStacks/PyStacks/autoscaling.py ```python class AutoScaling: def __init__(self, session): self.as_client = session.client('autoscaling') def get_instance_ids_from_auto_scaling_group(self, autoscaling_groups): instance_ids = [] asg_groups_response = self.as_client.describe_auto_scaling_groups(AutoScalingGroupNames=autoscaling_groups) for asg_group in asg_groups_response["AutoScalingGroups"]: instance_ids += [instance['InstanceId'] for instance in asg_group["Instances"]] return instance_ids ``` #### File: PyStacks/PyStacks/cloudformation_helpers.py ```python import logging import os import cloudformation import auth import sys import yaml def init_cf_action(parameters, parameters_file, stack_name, region, stack_file): parameters_list_from_file = [] parameters_list_from_args = [] if parameters_file: logging.info('Using passed in parameters for File for Template') parameters_list_from_file = load_cf_params_from_file(region, parameters_file) if parameters: logging.info('Using passed in parameters for CF Template') logging.info(parameters) parameters_list_from_args = load_cf_params_from_string(parameters) parameters_list = parameters_list_from_file + parameters_list_from_args logging.info("Parameters to be used for stack: %s" % parameters_list) print ("[INFO] Parameters to be used for stack: %s" % parameters_list) session, stack_content = get_cf_stack_details(stack_name, region, stack_file) cf = cloudformation.stack(session) logging.info('Validating Template') cf.validate_template_from_file(stack_content=stack_content) return cf, stack_content, parameters_list def final_cf_action(cf, stack_name): print "====== Stack Status =======" state, status = cf.statusPoller(stackname=stack_name) print "=============================" print "\n" print "====== Stack Events History =======" display_cf_events(cf.latest_stack_events(stack_name)) print "=============================" if state != 0: sys.exit(1) else: resources = cf.getStackResources(stackname=stack_name) print "====== Stack Resources =======" for resource in resources: print "%s -- %s [%s]" % (resource["LogicalResourceId"], resource["PhysicalResourceId"], resource["ResourceStatus"]) print "=============================" def handle_cf_error(err): logging.error(err) if 'does not exist' in str(err): print "Stack Removed (or) does not exist" sys.exit(1) if 'AlreadyExists' in str(err): logging.error("Stack Exists") 
        sys.exit(1)
    return


def get_cf_stack_details(stack_name, region, stack_file):
    """
    :param stack_name: stack name to create
    :param region: aws region
    :param stack_file: local yaml file
    :return: (aws session, yaml file content)
    """
    stack_content = load_stack_file(stack_file, region)
    authentication = auth.authenticate(region)
    session = authentication.getSession()
    stack_components = stack_name.split('-')
    print "====== Stack Details ======="
    if len(stack_components) == 3:
        print "Project Name: ", stack_components[0]
        print "Application Name: ", stack_components[1]
        print "Stack Suffix: ", stack_components[2]
    print "Region: ", region
    print "Stack YAML File :", stack_file
    print "============================="
    return session, stack_content


def load_stack_file(stack_file, region):
    """
    Load the stack YAML file for the given region and return its contents.
    :param stack_file: yaml file name
    :param region: AWS region directory, e.g. aps2
    :return: file contents as a string
    """
    directory = os.path.dirname(__file__)
    cf_yaml = '../configs/user/region/{region}/{file}'.format(
        region=region,
        file=stack_file)
    stack_content = None
    try:
        with open(cf_yaml, "r") as input_file:
            stack_content = input_file.read()
    except IOError as err:
        print 'The file {file} does not exist'.format(file=os.path.join(directory, cf_yaml))
        print(err)
        raise err
    return stack_content


def load_cf_params_from_string(parameters):
    """
    Converts a json string to Cloudformation Parameter/Value pairs.
    :param parameters: string of format 'key1=val1,key2=val2'
    :return: python list of format
    [
        {
            'ParameterKey': 'string',
            'ParameterValue': 'string',
            'UsePreviousValue': True|False
        },
    ]
    """
    cf_param_list = []
    split_params = parameters.split(",")
    for split_param in split_params:
        split_kv = split_param.split("=")
        if split_kv[1] == 'UsePreviousValue':
            cf_param_list.append({'ParameterKey': split_kv[0], 'UsePreviousValue': True})
        else:
            cf_param_list.append({'ParameterKey': split_kv[0], 'ParameterValue': split_kv[1]})
    print "[INFO] Using Parameters from args - %s for stack" % str(cf_param_list)
    return cf_param_list


def display_cf_events(events_array):
    if len(events_array) == 0:
        return False
    status_reason = ""
    for event in events_array:
        if "ResourceStatusReason" in event:
            status_reason = event["ResourceStatusReason"]
        print "%s --> %s - %s ( %s ) ( %s )" % (str(event["Timestamp"]), event["ResourceStatus"],
                                                event["LogicalResourceId"], event["ResourceType"],
                                                status_reason)
    return True


def load_cf_params_from_file(region, yaml_file):
    cf_param_list = []
    yaml_file = '../configs/user/region/{region}/{file}'.format(
        region=region,
        file=yaml_file)
    with open(yaml_file, 'r') as stream:
        data_loaded = yaml.load(stream)
    for k, v in data_loaded.iteritems():
        cf_param_list.append({'ParameterKey': k, 'ParameterValue': v})
    print "[INFO] Adding Parameters from file %s - %s for stack" % (yaml_file, str(cf_param_list))
    return cf_param_list


def stack_exists(cf, stack_name):
    """
    :param cf: cloudformation stack helper
    :param stack_name: name of the stack to check
    :return: True if the stack exists, False otherwise
    """
    if not cf.does_stack_exist(stack_name):
        print "ERROR: Stack %s does not exist" % stack_name
        return False
    return True


def get_resources(cf, stack_name, resource_type):
    """
    :param cf: cloudformation stack helper
    :param stack_name: name of the stack
    :param resource_type: CloudFormation resource type to filter on
    :return: list of physical resource ids
    """
    resources = cf.list_stack_resources(stack_name)
    resource_names = []
    for r in resources:
        if r['ResourceType'] == resource_type:
            r_description = cf.describe_stack_resource(stack_name, r['LogicalResourceId'])
            resource_names.append(r_description["PhysicalResourceId"])
    return resource_names
```
#### File: PyStacks/PyStacks/kmstasks.py
```python
import kms


class kmstasks:
    def __init__(self):
        pass

    def decrypt_secrets(self, session, **secrets):
        config = 
{} for k, v in secrets.iteritems(): vde = self.decrypt(session, v) config[k] = vde return config def decrypt(self, session, string): crypto = kms.kms(session) decrypted = crypto.decrypt(string) return decrypted def encrypt(self, string, session, key_alias): crypto = kms.kms(session) encrypted = crypto.encrypt(string, key_alias) print encrypted return encrypted ``` #### File: PyStacks/PyStacks/lambdaapi.py ```python from verification import ensure_http_success class LambdaAPI(object): def __init__(self, session): self.client = session.client('lambda') @ensure_http_success def list_versions_by_function(self, function_name, **_): return self.client.list_versions_by_function( FunctionName=function_name, ) @ensure_http_success def list_aliases(self, function_name, **_): return self.client.list_aliases(FunctionName=function_name) @ensure_http_success def update_alias(self, function_name, alias_name, function_version, alias_desc=None, **_): params = { "FunctionName": function_name, "Name": alias_name, "FunctionVersion": function_version, "Description": alias_desc, } return self.client.update_alias(**{k: v for k, v in params.items() if v}) @ensure_http_success def create_alias(self, function_name, alias_name, function_version, alias_desc=None, **_): params = { "FunctionName": function_name, "Name": alias_name, "FunctionVersion": function_version, "Description": alias_desc, } return self.client.create_alias(**{k: v for k, v in params.items() if v}) @ensure_http_success def publish_version(self, function_name, latest_hash=None, version_desc=None, **_): params = { "FunctionName": function_name, "CodeSha256": latest_hash, # update_function_code_response['CodeSha256'], # Use to ensure matches $LATEST "Description": version_desc, } return self.client.publish_version(**{k: v for k, v in params.items() if v}) ``` #### File: PyStacks/PyStacks/recordsets.py ```python class recordsets(): def __init__(self, name, rectype, value, ttl): self.name = name self.rectype = rectype self.value = value self.ttl = ttl ``` #### File: PyStacks/PyStacks/route53.py ```python import json import pprint class route53: def __init__(self, session): self.dnsClient = session.client('route53') def waiterRecordSet(self, session, Id): waiter = self.dnsClient.get_waiter('resource_record_sets_changed') waiter.wait(Id=Id) def listRecords(self, zoneid, logging=None): recordset = [] response = self.dnsClient.list_resource_record_sets( HostedZoneId=zoneid) recordset.extend(response["ResourceRecordSets"]) while response["IsTruncated"]: response = self.dnsClient.list_resource_record_sets( HostedZoneId=zoneid, StartRecordName=response["NextRecordName"], StartRecordType=response["NextRecordType"]) recordset.extend(response["ResourceRecordSets"]) return recordset def getZoneID(self, zonename=None, logging=None): zoneids = [] response = self.dnsClient.list_hosted_zones() if logging == 'Full': pprint.pprint(response) zoneids.extend(response["HostedZones"]) while response["IsTruncated"]: response = self.dnsClient.list_hosted_zones( Marker=response["Marker"]) zoneids.extend(response["HostedZones"]) if logging: pprint.pprint(response) if zonename: for x in zoneids: if x["Name"] == zonename: return [x] else: return zoneids def getDelegationSets(self): response = self.dnsClient.list_reusable_delegation_sets() sets = response["DelegationSets"] print(json.dumps(sets)) return sets def getNameServers(self, setid): response = self.dnsClient.get_hosted_zone(Id=setid) return response["DelegationSet"]["NameServers"] def createDelegationSet(self, caller): 
self.dnsClient.create_reusable_delegation_set( CallerReference='string' ) def createZone(self, zonename, caller, setid): self.dnsClient.create_hosted_zone( Name=zonename, CallerReference=caller, HostedZoneConfig={'Comment': caller}, DelegationSetId=setid ) def createRecord(self, changebatch, hostzoneid, logging=None): if logging: pprint.pprint(changebatch) pprint.pprint(hostzoneid) self.dnsClient.change_resource_record_sets( HostedZoneId=hostzoneid, ChangeBatch=changebatch ) def createChangeBatch(self, batchset, action, comment, logging=None): changeset = {} if logging: pprint.pprint(batchset) recordset = json.loads(batchset) changeset["Comment"] = comment changeset["Changes"] = [] for x in recordset: temprec = {} temprec["Action"] = action temprec["ResourceRecordSet"] = x changeset["Changes"].append(temprec) if logging: pprint.pprint(changeset) return changeset ``` #### File: test/templates/test_dynamodb_tables.py ```python import unittest from PyStacks.PyStacks.template import templateCF class TestTemplate(unittest.TestCase): def test_templateCF_GenericDynamoDbTables(self): resources = { 'dynamodb_tables': { 'testTable': { 'name': 'testTable', 'attributes': [ { 'name': 'keyAttribute', 'type': 'S' } ], 'key_schema': [ { 'name': 'keyAttribute', 'type': 'HASH' }, ], 'throughput': { 'read_units': 10, 'write_units': 1 }, 'stream': { 'type': "NEW_IMAGE" }, }, }, } expected = { "testTable": { "Type": "AWS::DynamoDB::Table", "Properties": { "AttributeDefinitions": [ { "AttributeName": "keyAttribute", "AttributeType": "S" }, ], "KeySchema": [ { "AttributeName": "keyAttribute", "KeyType": "HASH" }, ], "ProvisionedThroughput": { "ReadCapacityUnits": "10", "WriteCapacityUnits": "1" }, "StreamSpecification": { "StreamViewType": "NEW_IMAGE" }, "TableName": "testTable" } } } actual = templateCF(resources, 'resources') self.assertDictEqual(actual, expected) if __name__ == '__main__': unittest.main() ``` #### File: test/templates/test_elasticache.py ```python import unittest from PyStacks.PyStacks.template import templateCF class TestTemplate(unittest.TestCase): def test_templateCF_ElastiCacheSubnet(self): resources = { 'elasticachesubnet': { 'TestElastiCacheSubnet': { 'name': '<NAME>', 'description': 'Some Description...', 'subnets': [ 'testsubnet1', 'testsubnet2' ] } } } expected = { 'TestElastiCacheSubnet': { 'Type': 'AWS::ElastiCache::SubnetGroup', 'Properties': { 'CacheSubnetGroupName': 'Some Name', 'Description': 'Some Description...', 'SubnetIds': [ { "Fn::ImportValue": { "Fn::Sub": [ "${VPCStack}-Subnet-testsubnet1", { "VPCStack": { "Ref": "VPCStack" } } ] } }, { "Fn::ImportValue": { "Fn::Sub": [ "${VPCStack}-Subnet-testsubnet2", { "VPCStack": { "Ref": "VPCStack" } } ] } } ] } } } actual = templateCF(resources, 'resources') self.assertDictEqual(actual, expected) def test_templateCF_ElastiCache(self): resources = { 'elasticache': { 'SimpleElastiCache': { 'name': '<NAME>', 'cacheSubnetGroup': 'subnetGroup1', 'secgroups': [ 'securityGroup1' ] } } } expected = { 'SimpleElastiCache': { 'Type': 'AWS::ElastiCache::CacheCluster', 'Properties': { 'CacheNodeType': 'cache.t2.micro', 'ClusterName': 'some redis', 'Engine': 'redis', 'NumCacheNodes': 1, 'CacheSubnetGroupName': { 'Fn::ImportValue': { 'Fn::Sub': [ '${CacheSubnetStack}-ElastiCache-subnetGroup1-Subnet', { 'CacheSubnetStack': { 'Ref': 'CacheSubnetStack' } } ] } }, 'VpcSecurityGroupIds': [ { 'Fn::ImportValue': { 'Fn::Sub': [ '${SecurityStack}-SecGroup-securityGroup1', { 'SecurityStack': { 'Ref': 'SecurityStack' } } ] } } ] } } } actual = 
templateCF(resources, 'resources')
        self.assertDictEqual(actual, expected)


if __name__ == '__main__':
    unittest.main()
```
#### File: test/templates/test_elasticsearch.py
```python
import unittest

from PyStacks.PyStacks.template import templateCF


class TestTemplate(unittest.TestCase):

    def test_templateCF_ElasticSearch(self):
        self.maxDiff = None
        resources = {
            "elasticsearch": {
                "ElasticSearchTest": {
                    "version": 5.5,
                    "dedicatedmaster": True,
                    "instancecount": 4,
                    "instancetype": "m4.large.elasticsearch",
                    "mastertype": "m4.large.elasticsearch",
                    "mastercount": 2,
                    "zoneid": "testaws",
                    "zonesuffix": "test.aws",
                    "ebsoptions": {
                        "iops": 0,
                        "size": 60,
                        "type": "gp2"
                    },
                    "snapshotoptions": {
                        "AutomatedSnapshotStartHour": 0
                    },
                    "advancedoptions": {
                        "rest.action.multi.allow_explicit_index": "true"
                    },
                    "policy": {
                        "Action": "*",
                        "Effect": "Allow",
                        "Resource": "*"
                    }
                }
            }
        }
        expected = {
            "ElasticSearchTest": {
                "Properties": {
                    "AccessPolicies": {
                        "Statement": [
                            {
                                "Action": "*",
                                "Effect": "Allow",
                                "Resource": "*"
                            }
                        ],
                        "Version": "2012-10-17"
                    },
                    "AdvancedOptions": {
                        "rest.action.multi.allow_explicit_index": "true"
                    },
                    "DomainName": "ElasticSearchTest",
                    "EBSOptions": {
                        "EBSEnabled": "true",
                        "Iops": 0,
                        "VolumeSize": 60,
                        "VolumeType": "gp2"
                    },
                    "ElasticsearchClusterConfig": {
                        "DedicatedMasterCount": "2",
                        "DedicatedMasterEnabled": "true",
                        "DedicatedMasterType": "m4.large.elasticsearch",
                        "InstanceCount": "4",
                        "InstanceType": "m4.large.elasticsearch",
                        "ZoneAwarenessEnabled": "true"
                    },
                    "ElasticsearchVersion": "5.5",
                    "SnapshotOptions": {
                        "AutomatedSnapshotStartHour": "0"
                    }
                },
                "Type": "AWS::Elasticsearch::Domain"
            },
            "ElasticSearchTestDNS": {
                "Properties": {
                    "Comment": "ElasticSearchTest Records by default",
                    "HostedZoneId": {
                        "Fn::ImportValue": {
                            "Fn::Sub": [
                                "${DNSStack}-Route53-testaws-Zone",
                                {
                                    "DNSStack": {
                                        "Ref": "DNSStack"
                                    }
                                }
                            ]
                        }
                    },
                    "RecordSets": [
                        {
                            "Name": "ElasticSearchTest.es.test.aws",
                            "ResourceRecords": [
                                {
                                    "Fn::GetAtt": [
                                        "ElasticSearchTest",
                                        "DomainEndpoint"
                                    ]
                                }
                            ],
                            "SetIdentifier": "ElasticSearchTest.es.test.aws",
                            "TTL": "60",
                            "Type": "CNAME",
                            "Weight": "10"
                        }
                    ]
                },
                "Type": "AWS::Route53::RecordSetGroup"
            }
        }
        actual = templateCF(resources, 'resources')
        self.assertDictEqual(actual, expected)


if __name__ == '__main__':
    unittest.main()
```
#### File: test/templates/test_s3.py
```python
import unittest

from PyStacks.PyStacks.template import templateCF


class TestTemplate(unittest.TestCase):

    def test_templateCF_S3(self):
        resources = {
            's3': {
                'S3Bucket': {
                    'name': 'stuff.holder',
                    'accesscontrol': 'PublicRead',
                    'versioning': True,
                    'tags': {
                        'Name': 'Api'
                    },
                    'notices': {
                        'lamda': [{
                            'event': 's3:ObjectCreated:*',
                            'function': 'somelambdaarn'
                        }]
                    }
                }
            },
            's3_policies': {
                'S3BucketPolicies': {
                    'policy': '"what": "on earth"'
                }
            }
        }
        expected = {
            'S3BucketPolicies': {
                'Type': 'AWS::S3::BucketPolicy',
                'Properties': {
                    'what': 'on earth'
                }
            },
            'S3Bucket': {
                'Type': 'AWS::S3::Bucket',
                'Properties': {
                    'AccessControl': 'PublicRead',
                    'NotificationConfiguration': {
                        'LambdaConfigurations': [
                            {
                                'Event': 's3:ObjectCreated:*',
                                'Function': 'somelambdaarn'
                            }
                        ]
                    },
                    'VersioningConfiguration': {
                        'Status': 'Enabled'
                    },
                    'BucketName': 'stuff.holder',
                    'Tags': [
                        {
                            'Key': 'Name',
                            'Value': 'Api'
                        }
                    ]
                }
            }
        }
        self.maxDiff = None  # the original assigned the string 'None', which is not a valid maxDiff
        actual = templateCF(resources, 'resources')
        self.assertDictEqual(actual, expected)


if __name__ == '__main__':
    unittest.main()
```
#### File: test/templates/test_sqs.py
```python
import unittest

from PyStacks.PyStacks.template import templateCF


class TestTemplate(unittest.TestCase):

    def test_templateCF_SQS(self):
        resources = {
            'sqs': {
'testQueue': { 'name': 'testQueueName', 'delay': 20, 'maxsize': 10, 'retention': 1024, 'waittime': 30, 'redirectpolicy': { 'deadletterqueue': 'abc', 'count': 3 }, 'visibilitytimeout': 60 } } } expected = { "testQueue": { "Type": "AWS::SQS::Queue", "Properties": { "DelaySeconds": "20", "MaximumMessageSize": "10", "MessageRetentionPeriod": "1024", "QueueName": "testQueueName", "ReceiveMessageWaitTimeSeconds": "30", "RedrivePolicy": { "deadLetterTargetArn": {"Fn::GetAtt": ["abc", "Arn"]}, "maxReceiveCount": "3" }, "VisibilityTimeout": "60" } } } actual = templateCF(resources, 'resources') self.assertDictEqual(actual, expected) if __name__ == '__main__': unittest.main() ``` #### File: PyStacks/test/test_iot.py ```python import unittest from mock import MagicMock from PyStacks.PyStacks.iot import IoTAPI class TestIot(unittest.TestCase): def test_create_thing(self): pass def test_create_keys_and_certificate(self): pass def test_attach_thing_principal(self): pass def test_attach_policy(self): pass ```
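All of these tests drive `templateCF` the same way: build a nested `resources` dict, render it, and compare dicts. As a minimal sketch of that pattern outside `unittest` — assuming only the `templateCF(resources, section)` signature visible in the tests, with resource keys copied from the SQS fixture above (the corpus does not show which keys are optional, so all are kept; the queue name `jobQueue` is hypothetical):

```python
# Hypothetical direct use of templateCF, mirroring the SQS fixture above.
from PyStacks.PyStacks.template import templateCF

resources = {
    'sqs': {
        'jobQueue': {
            'name': 'jobQueueName',
            'delay': 0,
            'maxsize': 262144,
            'retention': 345600,
            'waittime': 20,
            'redirectpolicy': {
                'deadletterqueue': 'jobDeadLetterQueue',
                'count': 5
            },
            'visibilitytimeout': 30
        }
    }
}

rendered = templateCF(resources, 'resources')
# Based on the expected dicts above, each resource renders to a
# CloudFormation fragment keyed by its logical name.
assert rendered['jobQueue']['Type'] == 'AWS::SQS::Queue'
```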
{ "source": "0xADE1A1DE/Rositaplusplus", "score": 2 }
#### File: Rositaplusplus/ROSITAPP/Config.py
```python
import os


class CFG:
    cfg_leak_mark = "*"
    cfg_keep_orginal = False
    cfg_mod_line_end_marker = "\t\t\t@ edited"
    cfg_leak_search_threads = 4
    cfg_monte_carlo_reps = 50
    cfg_monte_carlo_std_multplier = 3
    cfg_nprops = 28
    cwd = ""

    ELMO = os.getenv('ELMO_DIR')
    if ELMO is None:
        print('Set $ELMO_DIR!')
        exit(-1)
    TOOLCHAIN = os.getenv('TOOLCHAIN_DIR')
    if TOOLCHAIN is None:
        print('Set $TOOLCHAIN_DIR!')
        exit(-1)

    elmo_asm_output = ELMO + "/test/output/asmoutput/asmtrace00001.txt"
    elmo_tvalues = ELMO + "/test/output/fixedvsrandomtstatistics.txt"
    elmo_tvalues_biv = ELMO + "/test/main/power-biv-fottest.npy"
    elmo_tvalues_biv_single = ELMO + "/test/power-biv-fottest.npy"
    elmo_rand_tvalues_biv = ELMO + "/test/rand/power-biv-fottest.npy"
    elmo_term_dump = ELMO + "/test/main/terms-dump.npy"
    elmo_rand_term_dump = ELMO + "/test/rand/terms-dump.npy"
    elmo_ptvalues = ELMO + "/test/output/fixedvsrandompropttest.txt"
    elmo_meta = ELMO + "/test/output/meta.json"
    elmo_cwd = ELMO + "/test"
    elmo_command = ELMO + "/elmo"

    cc = TOOLCHAIN + "/arm-none-eabi-gcc"
    asm = TOOLCHAIN + "/arm-none-eabi-as"
    objdump = TOOLCHAIN + "/arm-none-eabi-objdump"
    objcopy = TOOLCHAIN + "/arm-none-eabi-objcopy"
    readelf = TOOLCHAIN + "/arm-none-eabi-readelf"
    ld = TOOLCHAIN + "/arm-none-eabi-gcc"

    def __init__(self):
        if os.getenv("ASM_EDITOR_CWD"):
            self.cwd = os.getenv("ASM_EDITOR_CWD")
            if not os.path.exists(self.cwd):
                raise FileNotFoundError(self.cwd)
            self.elmo_tvalues = self.cwd + "/../ttest/ttesttestvectors.txt"
            self.elmo_asm_output = self.cwd + "/../asmoutput/asmtrace00001.txt"


__g_cfg_inst = CFG()


def get_config():
    return __g_cfg_inst
```
#### File: Rositaplusplus/ROSITAPP/ELMO.py
```python
from subprocess import check_call

import Config
import DeviceContext
import ELMOVisitor
import ASMParser
import ARMParserMode
import Factory


class VisitException(BaseException):
    def __init__(self, line, lineno):
        pass


class CoverageModelException(BaseException):
    pass


class Interop:
    def run(self, binfile, args):
        cfg = Config.get_config()
        argslist = [cfg.elmo_command, '-analysis', args.elmo_analysis]
        if args.elmo_output_dir != "":
            argslist += ['-outputdir', args.elmo_output_dir]
        argslist += [binfile]
        check_call(argslist, cwd=cfg.elmo_cwd)


class ExecutionTracer:
    asm_trace_lines = None
    asm_inst_lines = None

    def _get_debug_info(self, elmo_asm_line):
        ret = self.ib.build_from_elmoout(elmo_asm_line)
        if ret:
            return self.bin_info.find_source_line_at(int(ret.get_addr(), 16))
        return None

    def find_inst_idx(self, insts, inst):
        idx = 0
        for z in insts:
            if z == inst:
                return idx
            idx += 1
        return None

    def run(self, visitor, trigs):
        # match starttrigger point with binary's start
        addrs = -1
        idx = 0
        exec_cycles = 0
        for line in self.asm_trace_lines:
            if self.ib.isstarttrig(line):
                gg = self.ib.build_from_elmoout(line)
                addrs = int(gg.get_addr(), 16)
                idx += 1
                break
            idx += 1
        if addrs == -1:
            raise CoverageModelException()
        # get func addrs and name
        # faddrs, fname = self.bin_info.find_func_called_from(addrs)
        asm_trace_idx = idx
        # get corresponding function from assembly file lines
        asm_inst_idx = -1
        # fname = ":"
        # for trig in trigs:
        #     idx = 0
        #     for line in trig.get_lines():
        #         func_name = line.get_func_name()
        #         print(line)
        #         if func_name == fname:
        #             asm_inst_idx = idx
        #             self.asm_inst_lines = trig.get_lines()
        #             break;
        #         idx += 1
        asm_inst_idx = 2
        self.asm_inst_lines = trigs[0].get_lines()
        if asm_inst_idx == -1:
            raise CoverageModelException()
        i = asm_trace_idx
        j = asm_inst_idx - 2
        # cycles_left = 0
        for tlin in self.asm_trace_lines:
            tlin = tlin.strip()
        stack = []
        while i < len(self.asm_trace_lines) - 1:
            if self.ib.iscall(self.asm_trace_lines[i]):
                call_addr = self.ib.build_from_elmoout(self.asm_trace_lines[i]).get_addr()
                # get function name and addrs from the binary
                faddrs, fname = self.bin_info.find_func_called_from(int(call_addr, 16))
                # if endtrigger is reached stop
                if fname == 'endtrigger':
                    break
                if ASMParser.ASMFuncDefs.islibrary(fname):
                    stack.append((j - 1, self.asm_inst_lines))
                    # set new source
                    asmfile = ASMParser.ASMFile("", ARMParserMode.ARMMode())
                    self.asm_inst_lines = [ASMParser.ASMInst("", 0, asmfile)] * 1024
                    # jump to function start
                    j = 0
                    # elmo outputs two lines per func
                    i += 1
                else:
                    # get function source
                    func = self.fm.get()[fname]
                    stack.append((j, self.asm_inst_lines))
                    # set new source
                    self.asm_inst_lines = func.lines
                    # jump to function start
                    j = func.start - 1
                    # elmo outputs two lines per func
                    i += 1
            # the source inst list also includes labels within it, ignore them
            while j < len(self.asm_inst_lines) and self.asm_inst_lines[j].is_label():
                j += 1
            visitor.on_visit(self, exec_cycles, self.statreader, self.asm_inst_lines, j, self.asm_trace_lines, i)
            # check for jumps
            jmploc = self.ib.isjump(self.asm_trace_lines[i], self.asm_trace_lines[i + 1])
            # cycle accuracy is turned off in ELMO because of discrepancies
            # http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.ddi0432c/CHDCICDF.html
            if jmploc:
                file = self.asm_inst_lines[j].get_file()
                jump_name = self.asm_inst_lines[j].get_jump_name()
                inst = self.labels[file][jump_name]
                # jump
                temp = self.find_inst_idx(self.insts_by_file[file], inst)
                # assuming that the next line is not a jump too
                next_inst = self.insts_by_file[file][temp + 1]
                # -1 is compensation for j += 1 below, which would happen every loop
                j = self.find_inst_idx(self.asm_inst_lines, next_inst) - 1
            # remove multiple instructions after push and pop (elmo adds these)
            # to show individual push and pop operations
            if self.ib.ispush(self.asm_trace_lines[i]):
                exec_cycles += 2
                if self.asm_trace_lines[i + 1].startswith('r'):
                    i += 1
                    while self.asm_trace_lines[i].startswith('r'):
                        i += 1
                        exec_cycles += 2
                    # recover last line
                    i -= 2
                else:
                    j += 1
            elif self.ib.ispop(self.asm_trace_lines[i]):
                exec_cycles += 2
                if self.asm_trace_lines[i + 1].startswith('r'):
                    i += 1
                    while self.asm_trace_lines[i].startswith('r'):
                        i += 1
                        exec_cycles += 2
                    # recover last line
                    i -= 2
                else:
                    j += 1
            else:
                if not self.asm_trace_lines[i].startswith('r'):
                    inst = self.ib.build_from_elmoout(self.asm_trace_lines[i])
                    if inst is None:
                        raise VisitException(self.asm_trace_lines[i], i)
                    # calc executed cycles
                    exec_cycles += self.dc.get_cyclecounter().get_cycle_count(inst)
                j += 1
            if len(stack) > 0:
                F = self.asm_trace_lines[i + 1].find("pop") != -1 and self.asm_trace_lines[i + 1].find("pc") != -1
                if self.asm_inst_lines[j].is_func_end() or F:
                    self.asm_inst_lines = stack[-1][1]
                    j = stack[-1][0]
                    stack.pop(-1)
            i += 1
        visitor.on_visiting_done()

    def __init__(self, dc: DeviceContext, elmo_asm_output, elmo_tvalues, elmo_ptvalues, bin_info, fm, trigs, labels, insts_by_file, args, xprint=False):
        fasm = open(elmo_asm_output, "r")
        self.asm_trace_lines = list(fasm.readlines())
        fasm.close()
        self.statreader = Factory.build_stat_reader(elmo_ptvalues)
        self.insts_by_file = insts_by_file
        self.asm_trace_lines = list(map(str.strip, self.asm_trace_lines))
        self.bin_info = bin_info
        self.fm = fm
        self.labels = labels
        self.dc = dc
        self.ib = dc.get_instbuilder()
        if xprint:
            self.run(ELMOVisitor.Printer(), trigs)
```
#### File: Rositaplusplus/ROSITAPP/LeakyTermSearcher.py
```python
import TermSearcher
import TraceNpy as tf
import LeakCheck
import Utils
import Config
import Logger
import Factory

import numpy as np
import random
import time
import os
import sys
from functools import reduce


def norm(x):
    return x - np.mean(x)


# tvalue for difference between two distributions
# and const relative mean
# x - values from fixed vs. random test
# D - mean difference from random vs. random test
# v - variance of values from the random set
def tvaluediff(x, D, v):
    n = np.shape(x)[0]
    y = x[n//2:]
    x = x[:n//2]
    vx = np.var(x)
    vy = np.var(y)
    Xc1 = D + np.sqrt(v) * 6 / np.sqrt(n//2)
    Xc0 = D - np.sqrt(v) * 6 / np.sqrt(n//2)
    mx = np.mean(x)
    my = np.mean(y)
    # print('hhh:', t0, pv0, pn0, t1, pv1, pn1, pv)
    t0 = np.sqrt(n//2) * ((mx - my) - Xc0) / np.sqrt(vx + vy)
    t1 = np.sqrt(n//2) * (Xc1 - (mx - my)) / np.sqrt(vx + vy)
    return t0, t1


def meandiff(x):
    n = np.shape(x)[0]
    y = x[n//2:]
    x = x[:n//2]
    return (np.mean(x) - np.mean(y), np.var(x - y))


def tvalue(x):
    n = np.shape(x)[0]
    y = x[n//2:]
    x = x[:n//2]
    vx = np.var(x)
    vy = np.var(y)
    t = np.sqrt(n//2) * (np.mean(x) - np.mean(y)) / (np.sqrt(vx + vy))
    V1 = vx / vy
    V1 *= V1
    V2 = vy / vx
    V2 *= V2
    df = (n//2 - 1) * (1 + 2 / (V1 + V2))
    return (t, df)


def setsub(a, b):
    return list(set(a) - set(b))


def defaulttermset():
    termset = list(range(0, 22))
    termset.extend(list(range(24, Config.get_config().cfg_nprops)))
    return termset


def pickterms(sampleterms, termset):
    return np.sum(sampleterms[:, termset], axis=1)


def get_interact_samples(sampleterms, termsets, sampleinds):
    interactsamples = norm(pickterms(sampleterms[0], termsets[0]))
    for i in range(1, len(sampleinds)):
        interactsamples = np.multiply(interactsamples, norm(pickterms(sampleterms[i], termsets[i])))
    return interactsamples


class LeakyTermSearcher(TermSearcher.TermSearcher):
    def __init__(self):
        self.reader = Factory.build_stat_reader(None)
        self.tftraces = tf.TraceNpy()
        self.tftraces.open(Config.get_config().elmo_term_dump)
        self.tfrandtraces = tf.TraceNpy()
        self.tfrandtraces.open(Config.get_config().elmo_rand_term_dump)
        self.results = {}
        # cleanup
        self.ms = [
            'C', 'PI', 'SI', 'O1',            # 3
            'O2', 'B1', 'B2', 'WP1',          # 7
            'WP2', 'DP1', 'DP2', 'WS1',       # 11
            'WS2', 'DS1', 'DS2', 'Op1I',      # 15
            'Op2I', 'B1I', 'B2I', 'L',        # 19
            'S', 'Op1Op2', 'Op1', 'Op2',      # 23
            'E', 'M', 'Op1*Op2', 'Op2*Op1']
        self.ns = self.tftraces.get_nsamples()
        self.nt = self.tftraces.get_ntraces()
        self.nterms = self.tftraces.get_nterms()
        self.traces = self.tftraces.get_traces()
        self.randtraces = self.tfrandtraces.get_traces()
        self.is_transposed = self.tftraces.is_transposed()
        self.TH = LeakCheck.tvaluethres(Utils.nc2(self.ns))
        self.tost_runs = 0
        self.monte_runs = 0
        print('TH', self.TH)
        print('ns', self.ns)
        print('nt', self.nt)
        print('nterms', self.nterms)

    def loadsampledata(self, sampleinds, traces):
        self.sampleterms = []
        print("sampleinds::", sampleinds)
        for s in range(0, len(sampleinds)):
            self.sampleterms += [np.empty((self.nt, self.nterms), dtype=np.float32)]
        for sampleindind in range(0, len(sampleinds)):
            sampleterm = self.sampleterms[sampleindind]
            sampleind = sampleinds[sampleindind]
            for t in range(0, self.nterms):
                if self.is_transposed:
                    sampleterm[:, t] = np.transpose(traces[self.ns * t + sampleind, :])
                else:
                    sampleterm[:, t] = traces[:, self.ns * t + sampleind]

    def run(self, sampleindslist):
        retmap = {}
        # st: sample index tuple
        for s, st in enumerate(sampleindslist):
            print("info: ", s, st)
            self.loadsampledata(st, self.randtraces)
            startt = time.perf_counter()
            self.process_rand_dist(st)
            print("ttt", time.perf_counter() - startt)
            self.loadsampledata(st, self.traces)
            res = self.run_tost(st)
            # res = self.run_montecarlo(st)
            if len(res) == 0:
                print("search failed for", st)
                continue
            for i, sampleind in enumerate(st):
                print("sss", res)
                mstrset = set(map(lambda a: self.ms[a], res[i]))
                print('info: ext', st, sampleind, mstrset)
                if sampleind in retmap:
                    retmap[sampleind].update(mstrset)
                else:
                    retmap[sampleind] = mstrset
        print("retmap", retmap)
        return retmap

    def run_tost(self, sampleinds):
        print('sample inds', sampleinds)
        termsets = [defaulttermset().copy()] * len(sampleinds)
        termset = defaulttermset().copy()
        nzterms = len(termsets[0])
        termsets_o = termsets.copy()
        relations = []
        print(sampleinds)
        for s, sampleind in enumerate(sampleinds):
            somerelation = []
            tvals = []
            for t in range(0, nzterms):
                termsets = termsets_o.copy()
                termsets[s] = setsub(termset, [termset[t]])
                meandiff, v = self.meandiffs[(sampleind, t)]
                interactsamples = get_interact_samples(self.sampleterms, termsets, sampleinds)
                t0, t1 = tvaluediff(interactsamples, meandiff, v)
                # One side t-distribution t values for 0.00001 confidence
                if t0 > 4.5 and t1 > 4.5:
                    somerelation += [termset[t]]
                    # tvals += [(tvalue0, tvalue1)]
            relations += [somerelation]
            if len(somerelation) > 5:
                Logger.log_info("sample inds:", sampleinds, "high rel", tvals, self.reader.gettval(sampleind))
        if reduce(lambda x, y: x + len(y), relations, 0) == 0:
            return self.run_montecarlo(sampleinds)
        self.tost_runs += 1
        print("rel", relations)
        return relations

    def process_rand_dist(self, sampleinds):
        print('rand sample inds', sampleinds)
        termsets = [defaulttermset().copy()] * len(sampleinds)
        termset = defaulttermset().copy()
        nzterms = len(termsets[0])
        termsets_o = termsets.copy()
        # collect mean differences and variances for random value set
        self.meandiffs = {}
        for s, sampleind in enumerate(sampleinds):
            for t in range(0, nzterms):
                termsets = termsets_o.copy()
                termsets[s] = setsub(termset, [termset[t]])
                interactsamples = get_interact_samples(self.sampleterms, termsets, sampleinds)
                self.meandiffs[(sampleind, t)] = meandiff(interactsamples)
                print('mean diff:', (sampleind, t), self.meandiffs[(sampleind, t)])

    # run Monte Carlo simulation for finding leaky terms where
    # there exist more than two leakages in a sample set
    def run_montecarlo(self, sampleinds):
        termsets = [defaulttermset().copy()] * len(sampleinds)
        npdefterms = np.asarray(defaulttermset().copy())
        nzterms = len(termsets[0])
        bins = [
            np.zeros((nzterms), dtype=np.int32),
            np.zeros((nzterms), dtype=np.int32)
        ]
        totruns = 0
        leakyruns = 0
        ret = [[].copy()] * len(sampleinds)
        while totruns < Config.get_config().cfg_monte_carlo_reps:
            newtermsets = []
            newtermsetsidx = []
            for ts in termsets:
                newts = random.sample(list(enumerate(ts)), k=len(ts)//2)
                newtermsets += [list(map(lambda x: x[1], newts))]
                newtermsetsidx += [list(map(lambda x: x[0], newts))]
            tvalue, df = self.calc_tvalue_termsets(newtermsets, sampleinds)
            if self.TH < np.abs(tvalue):  # np.abs(tvalue) > 4.5:
                for idx in newtermsetsidx[0]:
                    bins[0][idx] += 1
                for idx in newtermsetsidx[1]:
                    bins[1][idx] += 1
                # if totruns > 400:
                #     plt.plot(kk)
                #     plt.show()
                # print(leakyruns, totruns)
                leakyruns += 1
            totruns += 1
        ret = [[].copy()] * len(sampleinds)
        for i, b in enumerate(bins):
            tup = np.nonzero(np.abs(b - np.mean(b)) > Config.get_config().cfg_monte_carlo_std_multplier * np.std(b))
            ret[i] = npdefterms[np.asarray(tup[0])]
        if reduce(lambda x, y: x + len(y), ret, 0) > 0:
            self.monte_runs += 1
        # plt.plot(vals)
        # plt.show()
        print("info monte:", ret)
        return ret

    def get_stats(self):
        return {'monte': self.monte_runs, 'tost': self.tost_runs, 'total': (self.tost_runs + self.monte_runs)}

    # tvalues for termsets from each sample point
    # each sample point in the code has some number of terms (self.nterms); the
    # sum of these is the emulated differential voltage at that point
    def calc_tvalue_termsets(self, termsets, sampleinds):
        interactsamples = get_interact_samples(self.sampleterms, termsets, sampleinds)
        return tvalue(interactsamples)
```
#### File: Rositaplusplus/ROSITAPP/LeakyTermSearchWorker.py
```python
import Meta
import Args
import MarkerListFO
import TermSearcher
import LeakyTermSearcher
import Utils
import Config
import Logger

from collections import defaultdict
import numpy as np
import random
from multiprocessing import Process, Queue
from queue import Empty
import pickle
import os
import time
import sys
import pprint


def p_run(qin: Queue, qout: Queue, con: Queue):
    obj = LeakyTermSearcher.LeakyTermSearcher()
    while con.empty():
        try:
            arg = qin.get(False)
            startt = time.perf_counter()
            ret = obj.run(arg)
            endt = time.perf_counter()
            print('info: ret', ret, 'time:', endt - startt)
            qout.put((tuple(arg), ret))
        except Empty:
            time.sleep(1)
            pass
    print("joining", obj.get_stats())
    Logger.log_info("stat:", obj.get_stats())


class LeakyTermSearchWorkerSave(TermSearcher.TermSearcher):
    def __init__(self):
        self.qin = Queue()
        self.qout = Queue()
        self.con = Queue()
        self.p = []
        self.sentset = set([])
        for i in range(0, Config.get_config().cfg_leak_search_threads):
            self.p += [Process(target=p_run, args=(self.qin, self.qout, self.con))]
            print("info: started work on %d" % (i))
            self.p[i].daemon = True
            self.p[i].start()

    def run(self, sampleindslist):
        tt = tuple(sampleindslist)
        if tt not in self.sentset:
            self.qin.put(sampleindslist)
            self.sentset.add(tt)
        # no result can be provided at this moment as
        # the worker threads are loaded, so return {} and
        # wait for results to be written to file
        # only ./rosita-$suffix -d -m <bc> gives correct
        # output
        return MarkerListFO.MarkerListFO(set([]))

    def run_once(self, sampleinds):
        return []

    def finit(self):
        print("info: main thread waiting")
        dumplist = {}
        while len(self.sentset) != 0:
            try:
                ret = self.qout.get(block=False)
                dumplist[ret[0]] = ret[1]
                self.sentset.remove(tuple(ret[0]))
            except Empty:
                time.sleep(2)
        fnobjs = Config.get_config().cwd + "/objs.pickled"
        if os.path.exists(fnobjs):
            Utils.backupfile(fnobjs)
        fout = open(fnobjs, "wb")
        pickle.dump(dumplist, fout)
        fout.close()
        ftout = open("objs.txt", "w")
        pprint.pprint(dumplist, ftout)
        ftout.close()
        print("info: output written")
        self.con.put(None)
        for i in range(0, Config.get_config().cfg_leak_search_threads):
            print("info: %d joined" % (i))
            self.p[i].join()


class LeakyTermSearchWorkerLoad(TermSearcher.TermSearcher):
    def __init__(self):
        self.dummy = True
        objsname = Config.get_config().cwd + "/objs.pickled"
        self.marker_map_by_inst = defaultdict(set)
        if os.path.exists(objsname):
            fin = open(objsname, "rb")
            self.marker_map = pickle.load(fin)
            print('info c:', self.marker_map)
            self.dummy = False
            # merge all results
            for key, smap in self.marker_map.items():
                for skey, sval in smap.items():
                    self.marker_map_by_inst[skey].update(sval)
        # else:
        #     raise FileNotFoundError(objsname)

    def run(self, sampleindslist):
        if self.dummy == False:
            try:
                return MarkerListFO.MarkerListFO(self.marker_map_by_inst[sampleindslist[0][0]])
            except:
                print('not found:', tuple(sampleindslist), file=sys.stderr)
                print(self.marker_map, file=sys.stderr)
                exit(-1)
        return MarkerListFO.MarkerListFO(set())

    def run_once(self, sampleinds):
        return []


class LeakyTermSearchWorker(TermSearcher.TermSearcher):
    def __init__(self):
        self.wrapped = None
        if Args.get_args().get().save:
            self.wrapped = LeakyTermSearchWorkerSave()
        else:
            self.wrapped = LeakyTermSearchWorkerLoad()

    def run(self, sampleindslist):
        return self.wrapped.run(sampleindslist)

    def run_once(self, sampleinds):
        return self.wrapped.run_once(sampleinds)

    def finit(self):
        return self.wrapped.finit()
```
#### File: Rositaplusplus/ROSITAPP/MarkerListFO.py
```python
import MarkerList


class MarkerListFO(MarkerList.MarkerList):
    def __init__(self, markerset):
        self.markerset = set(markerset)

    def find(self, marker: str) -> int:
        if marker in self.markerset:
            return 1
        else:
            return -1

    def intersection(self, markers):
        return self.markerset.intersection(markers)

    def makeset(self):
        return self.markerset

    def isempty(self):
        return len(self.markerset) == 0

    def __str__(self):
        return str(self.markerset)
```
#### File: Rositaplusplus/ROSITAPP/StatAnalyzer.py
```python
import Config
import MarkerListFO


class StatAnalyzer:
    def __init__(self):
        self.indexes = [15, 19, 20, 21, 22, 23, 24, 25]
        self.markers = ['Op1I', 'L', 'S', 'Op1Op2', 'Op1', 'Op2', 'E', 'M']

    def is_leaky(self, ptvals):
        for i in range(0, len(self.indexes)):
            if abs(ptvals[self.indexes[i]]) > 4.5:
                return True
        return False

    def get_markers(self, ptvals):
        markers = set([])
        for i in range(0, len(self.indexes)):
            if abs(ptvals[self.indexes[i]]) > 4.5:
                markers.add(self.markers[i])
        return MarkerListFO.MarkerListFO(markers)
```
#### File: xoodoo/tests/testuptochi.py
```python
import struct


class statemap():
    def __init__(self, state):
        self.state = state

    def __getitem__(self, key):
        return self.state[key[0]*4 + key[1]]

    def __setitem__(self, key, value):
        self.state[key[0]*4 + key[1]] = value

    def __str__(self):
        rep = ""
        for i in range(0, 3):
            for j in range(0, 4):
                rep += hex(self.state[4*i + j]) + ","
            rep += "\n"
        return rep


def rotl(v, sh):
    shmask = (2**sh - 1) << (32 - sh)
    shmaskr = (2**(32 - sh) - 1)
    return ((v & shmask) >> (32 - sh)) | ((v & shmaskr) << sh)


def runuptochi(a, b, c):
    va1 = 0
    va2 = 0
    vb1 = 0
    vb2 = 0
    vc1 = 0
    vc2 = 0

    a[0,0] ^= b[0,0] ^ c[0,0]
    a[0,1] ^= b[0,1] ^ c[0,1]
    a[0,2] ^= b[0,2] ^ c[0,2]
    a[0,3] ^= b[0,3] ^ c[0,3]
    a[1,0] ^= b[1,0] ^ c[1,0]
    a[1,1] ^= b[1,1] ^ c[1,1]
    a[1,2] ^= b[1,2] ^ c[1,2]
    a[1,3] ^= b[1,3] ^ c[1,3]
    a[2,0] ^= b[2,0] ^ c[2,0]
    a[2,1] ^= b[2,1] ^ c[2,1]
    a[2,2] ^= b[2,2] ^ c[2,2]
    a[2,3] ^= b[2,3] ^ c[2,3]

    va1 = a[0,3] ^ a[1,3] ^ a[2,3]
    va2 = a[0,0] ^ a[1,0] ^ a[2,0]
    va1 = rotl(va1, 5) ^ rotl(va1, 14)
    # va1 = (((va1) << 5) ^ ((va1) >> 27)) ^ (((va1) << 14) ^ ((va1) >> 18))
    a[0,0] ^= va1
    a[1,0] ^= va1
    a[2,0] ^= va1
    va1 = a[0,1] ^ a[1,1] ^ a[2,1]
    va2 = rotl(va2, 5) ^ rotl(va2, 14)
    # va2 = (((va2) << 5) ^ ((va2) >> 27)) ^ (((va2) << 14) ^ ((va2) >> 18))
    a[0,1] ^= va2
    a[1,1] ^= va2
    a[2,1] ^= va2
    va2 = a[0,2] ^ a[1,2] ^ a[2,2]
    va1 = rotl(va1, 5) ^ rotl(va1, 14)
    # va1 = (((va1) << 5) ^ ((va1) >> 27)) ^ (((va1) << 14) ^ ((va1) >> 18))
    a[0,2] ^= va1
    a[1,2] ^= va1
    a[2,2] ^= va1
    va2 = rotl(va2, 5) ^ rotl(va2, 14)
    # va2 = (((va2) << 5) ^ ((va2) >> 27)) ^ (((va2) << 14) ^ ((va2) >> 18))
    a[0,3] ^= va2
    a[1,3] ^= va2
    a[2,3] ^= va2

    vb1 = b[0,3] ^ b[1,3] ^ b[2,3]
    vb2 = b[0,0] ^ b[1,0] ^ b[2,0]
    vb1 = rotl(vb1, 5) ^ rotl(vb1, 14)
    # vb1 = (((vb1) << 5) ^ ((vb1) >> 27)) ^ (((vb1) << 14) ^ ((vb1) >> 18))
    b[0,0] ^= vb1
    b[1,0] ^= vb1
    b[2,0] ^= vb1
    vb1 = b[0,1] ^ b[1,1] ^ b[2,1]
    vb2 = rotl(vb2, 5) ^ rotl(vb2, 14)
    # vb2 = (((vb2) << 5) ^ ((vb2) >> 27)) ^ (((vb2) << 14) ^ ((vb2) >> 18))
    b[0,1] ^= vb2
    b[1,1] ^= vb2
    b[2,1] ^= vb2
    vb2 = b[0,2] ^ b[1,2] ^ b[2,2]
    vb1 = rotl(vb1, 5) ^ rotl(vb1, 14)
    # vb1 = (((vb1) << 5) ^ ((vb1) >> 27)) ^ (((vb1) << 14) ^ ((vb1) >> 18))
    b[0,2] ^= vb1
    b[1,2] ^= vb1
    b[2,2] ^= vb1
    vb2 = rotl(vb2, 5) ^ rotl(vb2, 14)
    # vb2 = (((vb2) << 5) ^ ((vb2) >> 27)) ^ (((vb2) << 14) ^ ((vb2) >> 18))
    b[0,3] ^= vb2
    b[1,3] ^= vb2
    b[2,3] ^= vb2

    vc1 = c[0,3] ^ c[1,3] ^ c[2,3]
    vc2 = c[0,0] ^ c[1,0] ^ c[2,0]
    vc1 = rotl(vc1, 5) ^ rotl(vc1, 14)
    # vc1 = (((vc1) << 5) ^ ((vc1) >> 27)) ^ (((vc1) << 14) ^ ((vc1) >> 18))
    c[0,0] ^= vc1
    c[1,0] ^= vc1
    c[2,0] ^= vc1
    vc1 = c[0,1] ^ c[1,1] ^ c[2,1]
    vc2 = rotl(vc2, 5) ^ rotl(vc2, 14)
    # vc2 = (((vc2) << 5) ^ ((vc2) >> 27)) ^ (((vc2) << 14) ^ ((vc2) >> 18))
    c[0,1] ^= vc2
    c[1,1] ^= vc2
    c[2,1] ^= vc2
    vc2 = c[0,2] ^ c[1,2] ^ c[2,2]
    vc1 = rotl(vc1, 5) ^ rotl(vc1, 14)
    # vc1 = (((vc1) << 5) ^ ((vc1) >> 27)) ^ (((vc1) << 14) ^ ((vc1) >> 18))
    c[0,2] ^= vc1
    c[1,2] ^= vc1
    c[2,2] ^= vc1
    vc2 = rotl(vc2, 5) ^ rotl(vc2, 14)
    # vc2 = (((vc2) << 5) ^ ((vc2) >> 27)) ^ (((vc2) << 14) ^ ((vc2) >> 18))
    c[0,3] ^= vc2
    c[1,3] ^= vc2
    c[2,3] ^= vc2

    a[2,0] = rotl(a[2,0], 11)
    # a[2,0] = (((a[2,0]) << 11) ^ ((a[2,0]) >> 21))
    a[2,1] = rotl(a[2,1], 11)
    # a[2,1] = (((a[2,1]) << 11) ^ ((a[2,1]) >> 21))
    a[2,2] = rotl(a[2,2], 11)
    # a[2,2] = (((a[2,2]) << 11) ^ ((a[2,2]) >> 21))
    a[2,3] = rotl(a[2,3], 11)
    # a[2,3] = (((a[2,3]) << 11) ^ ((a[2,3]) >> 21))
    va1 = a[1,3]
    a[1,3] = a[1,2]
    a[1,2] = a[1,1]
    a[1,1] = a[1,0]
    a[1,0] = va1

    b[2,0] = rotl(b[2,0], 11)
    # b[2,0] = (((b[2,0]) << 11) ^ ((b[2,0]) >> 21))
    b[2,1] = rotl(b[2,1], 11)
    # b[2,1] = (((b[2,1]) << 11) ^ ((b[2,1]) >> 21))
    b[2,2] = rotl(b[2,2], 11)
    # b[2,2] = (((b[2,2]) << 11) ^ ((b[2,2]) >> 21))
    b[2,3] = rotl(b[2,3], 11)
    # b[2,3] = (((b[2,3]) << 11) ^ ((b[2,3]) >> 21))
    vb1 = b[1,3]
    b[1,3] = b[1,2]
    b[1,2] = b[1,1]
    b[1,1] = b[1,0]
    b[1,0] = vb1

    c[2,0] = rotl(c[2,0], 11)
    # c[2,0] = (((c[2,0]) << 11) ^ ((c[2,0]) >> 21))
    c[2,1] = rotl(c[2,1], 11)
    # c[2,1] = (((c[2,1]) << 11) ^ ((c[2,1]) >> 21))
    c[2,2] = rotl(c[2,2], 11)
    # c[2,2] = (((c[2,2]) << 11) ^ ((c[2,2]) >> 21))
    c[2,3] = rotl(c[2,3], 11)
    # c[2,3] = (((c[2,3]) << 11) ^ ((c[2,3]) >> 21))
    vc1 = c[1,3]
    c[1,3] = c[1,2]
    c[1,2] = c[1,1]
    c[1,1] = c[1,0]
    c[1,0] = vc1

    a[0,0] ^= 0x00000058
    return


def init_state(state):
    state[0] = 0xa144f6af
    state[1] = 0x0a09bef3
    state[2] = 0xf69b27da
    state[3] = 0xebf1aa2f
    state[4] = 0x79ec427d
    state[5] = 0x394b82c3
    state[6] = 0x15d52030
    state[7] = 0xe3d85ac4
    state[8] = 0x661b20a0
    state[9] = 0x01fc8349
    state[10] = 0x76868f25
    state[11] = 0x98fcda2a


def state_absorb(state, inp8):
    for i in range(0, 4):
        state[i] = state[i] ^ struct.unpack("I", inp8[4*i:4*i+4])[0]


def init_masks(masks, inp8):
    for i in range(0, 12):
        masks[i] = struct.unpack("I", inp8[4*i:4*i+4])[0]


def share(inp8, masks8):
    state = [0] * 12
    statemasks = [0] * 12
    init_state(state)
    state_absorb(state, inp8[0:16])
    masks0 = [0] * 12
    init_masks(masks0, masks8[0:48])
    masks1 = [0] * 12
    init_masks(masks1, masks8[48:96])
    a = statemap(state)
    b = statemap(masks0)
    c = statemap(masks1)
    runuptochi(a, b, c)
    return a, b, c


a, b, c = share(bytes.fromhex('33445566225588993344556622558899'), bytes.fromhex('00'*96))
print(a)
```
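The mask-based rotate in `rotl` above is the kind of bit manipulation that is easy to get subtly wrong, so a standalone cross-check against the conventional masked-shift formula may be worth keeping nearby (my sketch, not part of the original test file; `rotl` is copied verbatim so the snippet runs on its own):

```python
# Cross-check of the mask-based 32-bit rotate-left from testuptochi.py
# against the usual shift-or-shift formula with an explicit 32-bit mask.
def rotl(v, sh):
    shmask = (2**sh - 1) << (32 - sh)
    shmaskr = (2**(32 - sh) - 1)
    return ((v & shmask) >> (32 - sh)) | ((v & shmaskr) << sh)

def rotl_ref(v, sh):
    v &= 0xFFFFFFFF
    return ((v << sh) | (v >> (32 - sh))) & 0xFFFFFFFF

for v in (0x00000001, 0x80000000, 0xA144F6AF, 0xDEADBEEF):
    for sh in (1, 5, 11, 14, 31):
        assert rotl(v, sh) == rotl_ref(v, sh), (hex(v), sh)
print("rotl agrees with the masked-shift reference")
```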
{ "source": "0xADE1A1DE/Rosita", "score": 2 }
#### File: Rosita/ROSITA/ARMCycleCounterImpl.py
```python
def check_inst(inst, mask, match):
    return inst & mask == match


def cycles_push(inst):
    if check_inst(inst, 0b1111111000000000, 0b1011010000000000):
        return 1 + bin(inst & 0x00ff).count('1')
    return -1


def cycles_pop(inst):
    # Format 14: push/pop registers
    if check_inst(inst, 0b1111111000000000, 0b1011110000000000):
        return 1 + bin(inst & 0x00ff).count('1')
    return -1


def cycles_pop_pc(inst):
    # Format 14: push/pop registers
    if check_inst(inst, 0b1111111100000000, 0b1011110100000000):
        return 4 + bin(inst & 0x00ff).count('1')
    return -1


def cycles_add(inst):
    # Format 2: add/subtract
    if check_inst(inst, 0b1111101000000000, 0b0001100000000000):
        return 1
    return -1


def cycles_add_pc(inst):
    return -1


def cycles_rot(inst):
    # Format 4
    if check_inst(inst, 0b1111111111000000, 0b0100000111000000):
        return 1
    return -1


def cycles_ldr(inst):
    # Format 7
    # Format 9
    if check_inst(inst, 0b1111101000000000, 0b0101100000000000) or \
            check_inst(inst, 0b1110100000000000, 0b0110100000000000):
        return 2
    return -1


def cycles_str(inst):
    # Format 7
    # Format 9
    if check_inst(inst, 0b1111101000000000, 0b0101000000000000) or \
            check_inst(inst, 0b1110100000000000, 0b0110000000000000):
        return 2
    return -1


def cycles_mov(inst):
    # Format 1: move shifted register
    # Format 3: move/compare/add/subtract immediate
    # Format 5: Hi register operations/branch exchange
    if check_inst(inst, 0b1111111111000000, 0b0000000000000000) or \
            check_inst(inst, 0b1111100000000000, 0b0010000000000000) or \
            check_inst(inst, 0b1111111100000000, 0b0100011000000000):
        return 1
    return -1


def cycles_mov_pc(inst):
    # Format 5: dest = pc
    if check_inst(inst, 0b1111111101000111, 0b0100011001000111):
        return 3
    return -1


__cycle_counts = [
    [cycles_mov, cycles_mov_pc],
    [cycles_add, cycles_add_pc],
    [cycles_ldr],
    [cycles_str],
    [cycles_rot],
    [cycles_pop, cycles_pop_pc],
    [cycles_push],
]


def get_cycle_counts():
    return __cycle_counts
```
#### File: Rosita/ROSITA/ARMParserMode.py
```python
from ASMParser import DefaultMode
import re


class ARMMode(DefaultMode):
    ret = re.compile(r"bx\s+lr|pop\s+{[r0-9, ]*pc}|pop\s+pc")
    call = re.compile(r"bl\s+([_a-zA-Z0-9]+)")
    jmp_b = re.compile(r"b\s+(\.[_a-zA-Z0-9]+)")
    jmp_ble = re.compile(r"ble\s+(\.[_a-zA-Z0-9]+)")
    jmp_bls = re.compile(r"bls\s+(\.[_a-zA-Z0-9]+)")
    jmp_bgt = re.compile(r"bgt\s+(\.[_a-zA-Z0-9]+)")
    jmp_bne = re.compile(r"bne\s+(\.[_a-zA-Z0-9]+)")
    jml_bhi = re.compile(r"bhi\s+(\.[_a-zA-Z0-9]+)")
    jumps = [jmp_b, jmp_ble, jmp_bls, jmp_bne, jmp_bgt, jml_bhi]
    oper = re.compile(r"(ldr|ldrb|str|strb|movs|movb|ror|rors|lsls|lsl|eors|eor|orrs|bl|bx|push|pop|asrs|asr|and|ands|cmp|b)")

    def strip_comments(self, line):
        return line.split("@")[0]

    def __str__(self):
        return "ARM"

    def get_op(self, line: str):
        res = self.oper.match(line.split(' ')[0])
        if res:
            return res[1]
        return None
```
#### File: Rosita/ROSITA/Build.py
```python
import Utils
import CTemplate

from subprocess import call
from subprocess import Popen, PIPE
from os import listdir
from os.path import isfile, join, splitext
from os import devnull
import json

OPT_SINGLE = "-c"
OPT_ASM_OUT = "-S"
OPT_OBJ_OUT = "-o"
EXT_ASM = ".s"
EXT_OBJ = ".o"


class Builder():
    def __init__(self, src_config={}, cc="", asm="", ld="", cxx="", cxxflags="", cflags="", lflags="", ldflags="", ldendflags="", cwd=".", objcopy="", buildfile=""):
        self.cc = cc
        self.cxx = cxx
        self.cflags = cflags
        self.cxxflags = cxxflags
        self.lflags = lflags
        self.ldflags = ldflags
        self.ldendflags = ldendflags
        self.asm_files = []
        self.cwd = cwd
        self.asm = asm
        self.ld = ld
        self.objcopy = objcopy
        self.c_files_preconfig = []
        if buildfile == "":
            buildfile = "build.json"
        buildjson = join(cwd, buildfile)
        if isfile(buildjson):
            A = json.loads(open(buildjson).read())
            self.c_tmpl_files = A['c_tmpl_files']
            self.c_files_preconfig = A['c_files']
            self.c_files = A['c_files']
            if src_config != {}:
                tc = open(join(cwd, 'tracecount.txt'), 'w')
                tc.write(src_config['NTRACES'])
                tc.close()
            for tf in self.c_tmpl_files:
                tc = splitext(join(cwd, tf))[0] + '_tmpl.c'
                if src_config == {}:
                    raise Exception('Empty source config')
                CTemplate.CTemplate(src_config, join(cwd, tf), tc)
                self.c_files.append(splitext(tf)[0] + '_tmpl.c')
            self.cflags = A['cflags']
            self.res_reg_files = A['reserve_register_in_files']
            self.res_reg_cflags = A['reserve_register_cflags']
            self.res_reg_cxxflags = A['reserve_register_cxxflags']
            self.cxxflags = A['cxxflags']
            self.ldflags = A['ldflags']
            self.cxx_files = A['cxx_files']
            self.obj_files = A['obj_files']
            self.asm_files = A['asm_files']
        else:
            raise FileNotFoundError(buildfile)

    def source_cfg_update(self, source_cfg):
        tc = open(join(self.cwd, 'tracecount.txt'), 'w')
        tc.write(source_cfg['NTRACES'])
        tc.close()
        for tf in self.c_tmpl_files:
            tc = splitext(join(self.cwd, tf))[0] + '_tmpl.c'
            if source_cfg == {}:
                raise Exception('Empty source config')
            CTemplate.CTemplate(source_cfg, join(self.cwd, tf), tc)

    def build_asm_for_tmpl(self):
        for tf in self.c_tmpl_files:
            tc = splitext(join(self.cwd, tf))[0] + '_tmpl.c'
            self._build_asm(tc)

    def build_asm(self):
        # todo: add logic to check modification time and
        # not replace asm files that are modified after
        # c file mod time
        onlyfiles = list(self.c_files)
        onlyfiles.extend(self.cxx_files)
        for file in onlyfiles:
            self._build_asm(file)

    def build_obj(self):
        for file in self.get_asm_files():
            self._build_obj_from_asm(file)

    def get_asm_files(self, fullpath=False):
        onlyfiles = list(self.c_files)
        onlyfiles.extend(self.cxx_files)
        asmfiles = self._switchext(onlyfiles, EXT_ASM)
        asmfiles.extend(self.asm_files)
        return self._switchext(asmfiles, EXT_ASM, fullpath)

    def build_obj_from_sources(self):
        onlyfiles = list(self.c_files)
        onlyfiles.extend(self.cxx_files)
        for file in onlyfiles:
            if Utils.iscxxfile(file):
                self._run_command([self.cxx, self.cxxflags.replace('$file', file), self._obj_out(file)])
            elif Utils.iscfile(file):
                self._run_command([self.cc, self.cflags.replace('$file', file), self._obj_out(file)])

    def _build_asm(self, file):
        res_reg_cflag = ""
        res_reg_cxxflag = ""
        if file in set(self.res_reg_files):
            res_reg_cflag = self.res_reg_cflags
            res_reg_cxxflag = self.res_reg_cxxflags
        if Utils.iscxxfile(file):
            self._run_command([self.cxx, self.cxxflags.replace('$file', file), res_reg_cxxflag, self._asm_out(file)])
            # self._run_command([self.cxx, self.cxxflags, OPT_SINGLE, file, self._obj_out(file)])
        elif Utils.iscfile(file):
            self._run_command([self.cc, self.cflags.replace('$file', file), res_reg_cflag, self._asm_out(file)])
            # self._run_command([self.cc, self.cflags, OPT_SINGLE, file, self._obj_out(file)])

    def _build_obj_from_asm(self, file):
        if Utils.isasfile(file):
            self._run_command([self.asm, file, self._obj_out(file)])
            # self._run_command([self.cxx, self.cxxflags, OPT_SINGLE, file, self._obj_out(file)])

    def _switchext(self, files, ext, fullpath=False):
        nfiles = []
        for file in files:
            if fullpath:
                nfiles.append(join(self.cwd, splitext(file)[0]) + ext)
            else:
                nfiles.append(splitext(file)[0] + ext)
        return nfiles

    def link(self, final):
        onlyfiles = list(self.c_files)
        onlyfiles.extend(self.cxx_files)
        onlyfiles.extend(self.asm_files)
        objs = list(self.obj_files)
        objs.extend(self._switchext(onlyfiles, EXT_OBJ))
        self._run_command([self.ld, self.ldflags.replace("$files", " ".join(objs)), OPT_OBJ_OUT, final])

    def makeflatbin(self, final):
        self._run_command([self.objcopy, "-Obinary", final, final + ".bin"])

    def _run_command(self, command):
        args = []
        for cmd in command:
            args.extend(cmd.split())
        print(" ".join(args))
        # w = open('cmdout', 'w')
        # ret = call(args, cwd=self.cwd, stdout=w.fileno())
        p = Popen(args, cwd=self.cwd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
        output, err = p.communicate(b"")
        if p.returncode != 0:
            print(output, err)
            raise OSError(command)
        # call(args, cwd=self.cwd)

    def _asm_out(self, file):
        return OPT_ASM_OUT

    def _obj_out(self, file):
        return OPT_OBJ_OUT + " " + file.split(".")[0] + '.o'
```
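`get_cycle_counts` in `ARMCycleCounterImpl.py` returns groups of candidate decoders that return `-1` on no-match, but the caller that walks this table lives elsewhere in ROSITA and is not shown in this excerpt, so the dispatch loop below is a guess at the intended usage (the encoding `0xB4FF` is Thumb `push {r0-r7}`, Format 14, which these decoders cost at 1 + 8 cycles):

```python
# Hypothetical caller for get_cycle_counts(): try every decoder and
# take the first non-negative cycle count.
from ARMCycleCounterImpl import get_cycle_counts

def cycle_count(inst):
    for group in get_cycle_counts():
        for decoder in group:
            cycles = decoder(inst)
            if cycles >= 0:
                return cycles
    return -1  # unknown encoding

print(cycle_count(0xB4FF))  # push {r0-r7} -> 9 cycles under this model
```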
{ "source": "0xaesc/cryptor", "score": 2 }
#### File: 0xaesc/cryptor/file_handler.py
```python
def read_cipher(infile):
    print('reading ' + infile)
```
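`read_cipher` is only a stub that logs its argument; a minimal completion consistent with the name would look like the sketch below. This is pure assumption — the cryptor repository excerpt defines no ciphertext file format:

```python
# Hypothetical completion of the read_cipher stub above; treating the
# ciphertext as raw bytes is an assumption, not the repo's design.
def read_cipher(infile):
    print('reading ' + infile)
    with open(infile, 'rb') as fh:
        return fh.read()
```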
{ "source": "0xaf1f/cactus", "score": 2 }
#### File: cactus/preprocessor/dnabrnnMasking.py ```python import os import re import sys import shutil from toil.lib.threading import cpu_count from sonLib.bioio import catFiles from cactus.shared.common import cactus_call from cactus.shared.common import RoundedJob from cactus.shared.common import cactusRootPath from cactus.shared.common import getOptionalAttrib from cactus.shared.common import makeURL from toil.realtimeLogger import RealtimeLogger def loadDnaBrnnModel(toil, configNode, maskAlpha = False): """ store the model in a toil file id so it can be used in any workflow """ for prepXml in configNode.findall("preprocessor"): if prepXml.attrib["preprocessJob"] == "dna-brnn": if maskAlpha or getOptionalAttrib(prepXml, "active", typeFn=bool, default=False): dnabrnnOpts = getOptionalAttrib(prepXml, "dna-brnnOpts", default="") if '-i' in dnabrnnOpts: model_path = dnabrnnOpts[dnabrnnOpts.index('-i') + 1] else: model_path = os.path.join(cactusRootPath(), 'attcc-alpha.knm') os.environ["CACTUS_DNA_BRNN_MODEL_ID"] = toil.importFile(makeURL(model_path)) class DnabrnnMaskJob(RoundedJob): def __init__(self, fastaID, dnabrnnOpts, cpu, minLength=None, action=None, inputBedID=None, eventName=None): memory = 4*1024*1024*1024 disk = 2*(fastaID.size) cores = min(cpu_count(), cpu) RoundedJob.__init__(self, memory=memory, disk=disk, cores=cores, preemptable=True) self.fastaID = fastaID self.minLength = minLength self.action = action self.dnabrnnOpts = dnabrnnOpts self.inputBedID = inputBedID #todo: moved to fileMasking --> remove from here self.eventName = eventName def run(self, fileStore): """ mask alpha satellites with dna-brnn. returns (masked fasta, dna-brnn's raw output bed, filtered bed used for masking) where the filter bed has the minLength filters applied. 
When clip is the selected action, suffixes get added to the contig names in the format of :<start>-<end> (one-based, inclusive) """ work_dir = fileStore.getLocalTempDir() fastaFile = os.path.join(work_dir, 'seq.fa') fileStore.readGlobalFile(self.fastaID, fastaFile) # download the model modelFile = os.path.join(work_dir, 'model.knm') assert os.environ.get("CACTUS_DNA_BRNN_MODEL_ID") is not None modelID = os.environ.get("CACTUS_DNA_BRNN_MODEL_ID") fileStore.readGlobalFile(modelID, modelFile) # download the input bed (which we'll merge in with the bed we compute here) if self.inputBedID: inputBedFile = os.path.join(work_dir, 'input-regions.bed') fileStore.readGlobalFile(self.inputBedID, inputBedFile) else: inputBedFile = None # ignore existing model flag if '-i' in self.dnabrnnOpts: i = self.dnabrnnOpts.index('-i') del self.dnabrnnOpts[i] del self.dnabrnnOpts[i] cmd = ['dna-brnn', fastaFile] + self.dnabrnnOpts.split() + ['-i', modelFile] if self.cores: cmd += ['-t', str(self.cores)] bedFile = os.path.join(work_dir, 'regions.bed') # run dna-brnn to make a bed file cactus_call(outfile=bedFile, parameters=cmd) if self.minLength is None: self.minLength = 0 # load the fasta sequence information (needed for below clipping and/or bed merging) if inputBedFile or self.action == "clip": cactus_call(parameters=['samtools', 'faidx', fastaFile]) # load the contig lengths contig_lengths = {} with open(fastaFile + '.fai', 'r') as fai: for line in fai: toks = line.strip().split('\t') contig_lengths[toks[0]] = int(toks[1]) # merge in the input bed file if inputBedFile: input_line_count = 0 with open(inputBedFile, 'r') as inputBedStream, open(bedFile, 'a') as bedStream: if self.eventName: eventPrefix = 'id={}|'.format(self.eventName) else: eventPrefix = '' for line in inputBedStream: toks = line.split('\t') if toks: # our PAF file probably has prefixes like id=EVENT| which won't match up to the fasta # so we strip here: from_event = toks[0].startswith(eventPrefix) if from_event: toks[0] = toks[0][len(eventPrefix):] # we may have given a whole-genome paf, so filter down our bed to the # relevant contigs for this fasta (won't change masking output, but bed output will be cleaner) if toks[0] in contig_lengths: assert from_event bedStream.write('\t'.join(toks)) input_line_count += 1 RealtimeLogger.info("Merged in {} bed lines from input bed file for eventPrefix=\"{}\"".format(input_line_count, eventPrefix)) # merge up the intervals into a new bed file mergedBedFile = os.path.join(work_dir, 'filtered.bed') merge_cmd = [] merge_cmd.append(['awk', '{{if($3-$2 > {}) print}}'.format(self.minLength), bedFile]) merge_cmd.append(['bedtools', 'sort', '-i', '-']) merge_cmd.append(['bedtools', 'merge', '-i', '-', '-d', str(self.minLength)]) cactus_call(outfile=mergedBedFile, parameters=merge_cmd) maskedFile = os.path.join(work_dir, 'masked.fa') if self.action in ('softmask', 'hardmask'): mask_cmd = ['cactus_fasta_softmask_intervals.py', '--origin=zero', mergedBedFile] if self.minLength: mask_cmd += ['--minLength={}'.format(self.minLength)] if self.action == 'hardmask': mask_cmd += ['--mask=N'] # do the softmasking cactus_call(infile=fastaFile, outfile=maskedFile, parameters=mask_cmd) else: assert self.action == "clip" # to clip, we need a bed of the regions we want to *keep*. 
We'll start with the whole thing allRegionsFile = os.path.join(work_dir, 'chroms.bed') cactus_call(outfile=allRegionsFile, parameters=['awk', '{print $1 "\\t0\\t" $2}', fastaFile + '.fai']) # now we cut out the regions clippedRegionsFile = os.path.join(work_dir, 'clipped.bed') cactus_call(outfile=clippedRegionsFile, parameters=['bedtools', 'subtract', '-a', allRegionsFile, '-b', mergedBedFile]) # now we make a fiadx input regions faidxRegionsFile = os.path.join(work_dir, 'faidx_regions.txt') with open(clippedRegionsFile, 'r') as clipFile, open(mergedBedFile, 'a') as mergeFile, open(faidxRegionsFile, 'w') as listFile: for line in clipFile: toks = line.strip().split("\t") if len(toks) > 2: seq, start, end = toks[0], int(toks[1]), int(toks[2]) if end - start > self.minLength or contig_lengths[seq] <= self.minLength: region = seq if end - start < contig_lengths[seq]: # go from 0-based end exlusive to 1-based end inclusive when # converting from BED to samtools region region += ':{}-{}'.format(start + 1, end) else: assert start == 0 and end == contig_lengths[seq] listFile.write('{}\n'.format(region)) else: # the region was too small, we remember it in our filtered bed file mergeFile.write(line) # and cut the fasta apart with samtools cactus_call(outfile=maskedFile, parameters=['samtools', 'faidx', fastaFile, '-r', faidxRegionsFile]) return fileStore.writeGlobalFile(maskedFile), fileStore.writeGlobalFile(bedFile), fileStore.writeGlobalFile(mergedBedFile) def computePAFCoverage(job, config_node, paf_id): """ compute the gaps in PAF coverage, store them as a bed file, and add the bed file's filestore id into the config's dna-brnn xml element """ paf_file = job.fileStore.readGlobalFile(paf_id) bed_file = job.fileStore.getLocalTempFile() dnabrnn_node = None for node in config_node.findall("preprocessor"): if getOptionalAttrib(node, "preprocessJob") == 'dna-brnn': dnabrnn_node = node break assert dnabrnn_node is not None min_length = max(1, getOptionalAttrib(dnabrnn_node, 'minLength', typeFn=int, default=0)) cactus_call(parameters=['pafcoverage', paf_file, '-g', '-m', str(min_length)], outfile=bed_file) dnabrnn_node.attrib["inputBedID"] = job.fileStore.writeGlobalFile(bed_file) return config_node ``` #### File: cactus/preprocessor/fileMasking.py ```python import os import re import sys import shutil import xml.etree.ElementTree as ET from toil.lib.threading import cpu_count from Bio import SeqIO from Bio.SeqRecord import SeqRecord from sonLib.bioio import catFiles from cactus.shared.common import cactus_call from cactus.shared.common import RoundedJob from cactus.shared.common import getOptionalAttrib from cactus.shared.common import makeURL from toil.realtimeLogger import RealtimeLogger class FileMaskingJob(RoundedJob): def __init__(self, fastaID, inputBedID=None, eventName=None, minLength=None): disk = 2*(fastaID.size) memory = fastaID.size RoundedJob.__init__(self, disk=disk, memory=memory, preemptable=True) self.fastaID = fastaID self.minLength = minLength self.inputBedID = inputBedID self.eventName = eventName def run(self, fileStore): """ extract any existing masking, merge it with the input bed, then apply it to the fasta """ work_dir = fileStore.getLocalTempDir() fastaFile = os.path.join(work_dir, 'seq.fa') fileStore.readGlobalFile(self.fastaID, fastaFile) # download the input bed (which we'll merge in with the bed we compute here) inputBedFile = os.path.join(work_dir, 'input-regions.bed') fileStore.readGlobalFile(self.inputBedID, inputBedFile) if self.minLength is None: self.minLength = 0 # 
extract the existing masked regions to merge in bedFile = get_mask_bed_from_fasta(self, self.eventName, None, fastaFile, self.minLength, work_dir) # load the fasta sequence information (needed for below clipping and/or bed merging) cactus_call(parameters=['samtools', 'faidx', fastaFile]) # load the contig lengths contig_lengths = {} with open(fastaFile + '.fai', 'r') as fai: for line in fai: toks = line.strip().split('\t') contig_lengths[toks[0]] = int(toks[1]) # merge in the input bed file if inputBedFile: input_line_count = 0 with open(inputBedFile, 'r') as inputBedStream, open(bedFile, 'a') as bedStream: if self.eventName: eventPrefix = 'id={}|'.format(self.eventName) else: eventPrefix = '' for line in inputBedStream: toks = line.split('\t') if toks: # our PAF file probably has prefixes like id=EVENT| which won't match up to the fasta # so we strip here: from_event = toks[0].startswith(eventPrefix) if from_event: toks[0] = toks[0][len(eventPrefix):] # we may have given a whole-genome paf, so filter down our bed to the # relevant contigs for this fasta (won't change masking output, but bed output will be cleaner) if toks[0] in contig_lengths: assert from_event bedStream.write('\t'.join(toks)) input_line_count += 1 RealtimeLogger.info("Merged in {} bed lines from input bed file for eventPrefix=\"{}\"".format(input_line_count, eventPrefix)) # merge up the intervals into a new bed file mergedBedFile = os.path.join(work_dir, 'filtered.bed') merge_cmd = [] merge_cmd.append(['awk', '{{if($3-$2 > {}) print $1\"\\t\"$2\"\\t\"$3}}'.format(self.minLength), bedFile]) merge_cmd.append(['bedtools', 'sort', '-i', '-']) merge_cmd.append(['bedtools', 'merge', '-i', '-', '-d', str(self.minLength)]) if self.eventName: merge_cmd.append(['sed', '-e', 's/id={}|//g'.format(self.eventName)]) cactus_call(outfile=mergedBedFile, parameters=merge_cmd) maskedFile = os.path.join(work_dir, 'masked.fa') mask_cmd = ['cactus_fasta_softmask_intervals.py', '--origin=zero', mergedBedFile] if self.minLength: mask_cmd += ['--minLength={}'.format(self.minLength)] # do the softmasking cactus_call(infile=fastaFile, outfile=maskedFile, parameters=mask_cmd) return fileStore.writeGlobalFile(maskedFile), fileStore.writeGlobalFile(bedFile), fileStore.writeGlobalFile(mergedBedFile) def maskJobOverride(job, config_node, mask_file_path, mask_file_id, min_length): """ return a hijacked config file that does just one preprocessing job: mask each fasta sequence with the given bed file. if paf_length is specified, the file is treated as a PAF file, and a BED is extracted from it using coverage gaps of at least the given length. 
""" # this was unzipped upstream if mask_file_path.endswith('.gz'): mask_file_path = mask_file_path[:-3] if mask_file_path.endswith('.paf'): # convert the PAF to BED paf_file = job.fileStore.readGlobalFile(mask_file_id) bed_file = job.fileStore.getLocalTempFile() if not min_length: min_length = 1 cactus_call(parameters=['pafcoverage', paf_file, '-g', '-m', str(min_length)], outfile=bed_file) mask_file_id = job.fileStore.writeGlobalFile(bed_file) # rewrite the config for node in config_node.findall("preprocessor"): config_node.remove(node) mask_node = ET.SubElement(config_node, 'preprocessor') mask_node.attrib['preprocessJob'] = 'maskFile' mask_node.attrib['inputBedID'] = mask_file_id return config_node def get_mask_bed_from_fasta(job, event, fa_id, fa_path, min_length, work_dir = None): """ make a bed file from one fasta""" return_id = False # hack in a toggle (work_dir) that lets this be called as a job or a function if not work_dir: work_dir = job.fileStore.getLocalTempDir() return_id = True bed_path = os.path.join(work_dir, os.path.basename(fa_path) + '.mask.bed') fa_path = os.path.join(work_dir, os.path.basename(fa_path)) is_gz = fa_path.endswith(".gz") if return_id: job.fileStore.readGlobalFile(fa_id, fa_path, mutable=is_gz) if is_gz: cactus_call(parameters=['gzip', '-fd', fa_path]) fa_path = fa_path[:-3] cactus_call(parameters=['cactus_softmask2hardmask', fa_path, '-b', '-m', str(min_length)], outfile=bed_path) if return_id: return job.fileStore.writeGlobalFile(bed_path) else: return bed_path ``` #### File: cactus/refmap/cactus_graphmap_join.py ```python import os from argparse import ArgumentParser import xml.etree.ElementTree as ET import copy import timeit from operator import itemgetter from cactus.progressive.seqFile import SeqFile from cactus.progressive.multiCactusTree import MultiCactusTree from cactus.shared.common import setupBinaries, importSingularityImage from cactus.progressive.multiCactusProject import MultiCactusProject from cactus.shared.experimentWrapper import ExperimentWrapper from cactus.shared.common import cactusRootPath from cactus.shared.configWrapper import ConfigWrapper from cactus.pipeline.cactus_workflow import CactusWorkflowArguments from cactus.pipeline.cactus_workflow import addCactusWorkflowOptions from cactus.pipeline.cactus_workflow import CactusTrimmingBlastPhase from cactus.shared.common import makeURL, catFiles from cactus.shared.common import enableDumpStack from cactus.shared.common import cactus_override_toil_options from cactus.shared.common import cactus_call from cactus.shared.common import getOptionalAttrib, findRequiredNode from cactus.shared.common import unzip_gz, write_s3 from cactus.preprocessor.fileMasking import get_mask_bed_from_fasta from toil.job import Job from toil.common import Toil from toil.lib.bioio import logger from toil.lib.bioio import setLoggingFromOptions from toil.realtimeLogger import RealtimeLogger from toil.lib.threading import cpu_count from sonLib.nxnewick import NXNewick from sonLib.bioio import getTempDirectory, getTempFile, catFiles def main(): parser = ArgumentParser() Job.Runner.addToilOptions(parser) addCactusWorkflowOptions(parser) parser.add_argument("--vg", required=True, nargs='+', help = "Input vg files (PackedGraph or HashGraph format)") parser.add_argument("--outDir", required=True, type=str, help = "Output directory") parser.add_argument("--outName", required=True, type=str, help = "Basename of all output files") parser.add_argument("--reference", required=True, type=str, help = "Reference event 
name") parser.add_argument("--vcfReference", type=str, help = "Reference event for VCF (if different from --reference)") parser.add_argument("--rename", nargs='+', default = [], help = "Path renaming, each of form src>dest (see clip-vg -r)") parser.add_argument("--clipLength", type=int, default=None, help = "clip out unaligned sequences longer than this") parser.add_argument("--wlineSep", type=str, help = "wline separator for vg convert") parser.add_argument("--indexCores", type=int, default=1, help = "cores for indexing processes") parser.add_argument("--decoyGraph", help= "decoy sequences vg graph to add (PackedGraph or HashGraph format)") parser.add_argument("--hal", nargs='+', default = [], help = "Input hal files (for merging)") #Progressive Cactus Options parser.add_argument("--configFile", dest="configFile", help="Specify cactus configuration file", default=os.path.join(cactusRootPath(), "cactus_progressive_config.xml")) parser.add_argument("--latest", dest="latest", action="store_true", help="Use the latest version of the docker container " "rather than pulling one matching this version of cactus") parser.add_argument("--containerImage", dest="containerImage", default=None, help="Use the the specified pre-built containter image " "rather than pulling one from quay.io") parser.add_argument("--binariesMode", choices=["docker", "local", "singularity"], help="The way to run the Cactus binaries", default=None) options = parser.parse_args() setupBinaries(options) setLoggingFromOptions(options) enableDumpStack() if options.outDir and not options.outDir.startswith('s3://'): if not os.path.isdir(options.outDir): os.makedirs(options.outDir) if options.hal and len(options.hal) != len(options.vg): raise RuntimeError("If --hal and --vg should specify the same number of files") # Mess with some toil options to create useful defaults. 
cactus_override_toil_options(options) start_time = timeit.default_timer() runCactusGraphMapJoin(options) end_time = timeit.default_timer() run_time = end_time - start_time logger.info("cactus-graphmap-join has finished after {} seconds".format(run_time)) def runCactusGraphMapJoin(options): with Toil(options) as toil: importSingularityImage(options) #Run the workflow if options.restart: wf_output = toil.restart() else: options.cactusDir = getTempDirectory() #load cactus config configNode = ET.parse(options.configFile).getroot() config = ConfigWrapper(configNode) config.substituteAllPredefinedConstantsWithLiterals() # load up the vgs vg_ids = [] for vg_path in options.vg: logger.info("Importing {}".format(vg_path)) vg_ids.append(toil.importFile(makeURL(vg_path))) # tack on the decoys if options.decoyGraph: logger.info("Importing decoys {}".format(options.decoyGraph)) vg_ids.append(toil.importFile(makeURL(options.decoyGraph))) # we'll treat it like any other graph downstream, except clipping # where we'll check first using the path name options.vg.append(options.decoyGraph) # load up the hals hal_ids = [] for hal_path in options.hal: logger.info("Importing {}".format(hal_path)) hal_ids.append(toil.importFile(makeURL(hal_path))) # run the workflow wf_output = toil.start(Job.wrapJobFn(graphmap_join_workflow, options, config, vg_ids, hal_ids)) #export the split data export_join_data(toil, options, wf_output[0], wf_output[1], wf_output[2]) def graphmap_join_workflow(job, options, config, vg_ids, hal_ids): root_job = Job() job.addChild(root_job) # run clip-vg on each input clipped_vg_ids = [] for vg_path, vg_id in zip(options.vg, vg_ids): clip_job = root_job.addChildJobFn(clip_vg, options, config, vg_path, vg_id, disk=vg_id.size * 2, memory=vg_id.size * 4) clipped_vg_ids.append(clip_job.rv()) # join the ids join_job = root_job.addFollowOnJobFn(join_vg, options, config, clipped_vg_ids, disk=sum([f.size for f in vg_ids])) clipped_vg_ids = join_job.rv() # make a gfa for each gfa_root_job = Job() join_job.addFollowOn(gfa_root_job) clipped_gfa_ids = [] for i in range(len(options.vg)): vg_path = options.vg[i] clipped_id = join_job.rv(i) vg_id = vg_ids[i] gfa_job = gfa_root_job.addChildJobFn(vg_to_gfa, options, config, vg_path, clipped_id, disk=vg_id.size * 5) clipped_gfa_ids.append(gfa_job.rv()) # merge up the gfas and make the various vg indexes gfa_merge_job = gfa_root_job.addFollowOnJobFn(vg_indexes, options, config, clipped_gfa_ids, cores=options.indexCores, disk=sum(f.size for f in vg_ids) * 5) if hal_ids: merge_hal_id = job.addChildJobFn(merge_hal, options, hal_ids, disk=sum(f.size for f in hal_ids) * 2).rv() else: merge_hal_id = None return clipped_vg_ids, gfa_merge_job.rv(), merge_hal_id def clip_vg(job, options, config, vg_path, vg_id): """ run clip-vg """ work_dir = job.fileStore.getLocalTempDir() is_decoy = vg_path == options.decoyGraph vg_path = os.path.join(work_dir, os.path.basename(vg_path)) job.fileStore.readGlobalFile(vg_id, vg_path) out_path = vg_path + '.clip' cmd = ['clip-vg', vg_path, '-f'] if options.clipLength is not None and not is_decoy: cmd += ['-u', str(options.clipLength)] for rs in options.rename: cmd += ['-r', rs] if options.reference: cmd += ['-e', options.reference] if getOptionalAttrib(findRequiredNode(config.xmlRoot, "hal2vg"), "includeMinigraph", typeFn=bool, default=False): # our vg file has minigraph sequences -- we'll filter them out, along with any nodes # that don't appear in a non-minigraph path graph_event = getOptionalAttrib(findRequiredNode(config.xmlRoot, 
"graphmap"), "assemblyName", default="_MINIGRAPH_") cmd += ['-d', graph_event] # sort while we're at it cmd = [cmd, ['vg', 'ids', '-s', '-']] cactus_call(parameters=cmd, outfile=out_path) # worth it cactus_call(parameters=['vg', 'validate', out_path]) return job.fileStore.writeGlobalFile(out_path) def join_vg(job, options, config, clipped_vg_ids): """ run vg ids -j """ work_dir = job.fileStore.getLocalTempDir() vg_paths = [] for vg_path, vg_id in zip(options.vg, clipped_vg_ids): vg_path = os.path.join(work_dir, os.path.basename(vg_path)) job.fileStore.readGlobalFile(vg_id, vg_path, mutable=True) vg_paths.append(vg_path) cactus_call(parameters=['vg', 'ids', '-j'] + vg_paths) return [job.fileStore.writeGlobalFile(f) for f in vg_paths] def vg_to_gfa(job, options, config, vg_path, vg_id): """ run gfa conversion """ work_dir = job.fileStore.getLocalTempDir() vg_path = os.path.join(work_dir, os.path.basename(vg_path)) job.fileStore.readGlobalFile(vg_id, vg_path) out_path = vg_path + '.gfa' cmd = ['vg', 'convert', '-f', '-Q', options.reference, os.path.basename(vg_path), '-B'] if options.wlineSep: cmd += ['-w', options.wlineSep] # important, when options.wlineSep is ., it throws off prepareWorkDir in cactus_call # so important to specify the work_dir below cactus_call(parameters=cmd, outfile=out_path, work_dir=work_dir) return job.fileStore.writeGlobalFile(out_path) def vg_indexes(job, options, config, gfa_ids): """ merge of the gfas, then make gbwt / xg / snarls / vcf """ work_dir = job.fileStore.getLocalTempDir() vg_paths = [] merge_gfa_path = os.path.join(work_dir, 'merged.gfa') with open(merge_gfa_path, 'w') as merge_gfa_file: merge_gfa_file.write('H\tVN:Z:1.0\n') # merge the gfas for vg_path, gfa_id in zip(options.vg, gfa_ids): gfa_path = os.path.join(work_dir, os.path.basename(vg_path) + '.gfa') job.fileStore.readGlobalFile(gfa_id, gfa_path, mutable=True) cactus_call(parameters=['grep', '-v', '^H', gfa_path], outfile=merge_gfa_path, outappend=True) os.remove(gfa_path) # make the gbwt gbwt_path = os.path.join(work_dir, 'merged.gbwt') gg_path = os.path.join(work_dir, 'merged.gg') trans_path = os.path.join(work_dir, 'merged.trans') cactus_call(parameters=['vg', 'gbwt', '-G', merge_gfa_path, '-o', gbwt_path, '-g', gg_path, '--translation', trans_path]) # zip the gfa cactus_call(parameters=['bgzip', merge_gfa_path, '--threads', str(job.cores)]) gfa_path = merge_gfa_path + '.gz' # make the xg xg_path = os.path.join(work_dir, 'merged.xg') cactus_call(parameters=['vg', 'convert', gg_path, '-b', gbwt_path, '-x', '-t', str(job.cores)], outfile=xg_path) # worth it cactus_call(parameters=['vg', 'validate', xg_path]) # make the snarls snarls_path = os.path.join(work_dir, 'merged.snarls') cactus_call(parameters=['vg', 'snarls', xg_path, '-T', '-t', str(job.cores)], outfile=snarls_path) # make the vcf vcf_path = os.path.join(work_dir, 'merged.vcf.gz') vcf_ref = options.vcfReference if options.vcfReference else options.reference cactus_call(parameters=[['vg', 'deconstruct', xg_path, '-P', vcf_ref, '-a', '-r', snarls_path, '-g', gbwt_path, '-T', trans_path, '-t', str(job.cores)], ['bgzip', '--threads', str(job.cores)]], outfile=vcf_path) cactus_call(parameters=['tabix', '-p', 'vcf', vcf_path]) # compress the trans cactus_call(parameters=['bgzip', trans_path, '--threads', str(job.cores)]) trans_path += '.gz' return { 'gfa.gz' : job.fileStore.writeGlobalFile(gfa_path), 'gbwt' : job.fileStore.writeGlobalFile(gbwt_path), 'gg' : job.fileStore.writeGlobalFile(gg_path), 'trans.gz' : 
job.fileStore.writeGlobalFile(trans_path), 'xg' : job.fileStore.writeGlobalFile(xg_path), 'snarls' : job.fileStore.writeGlobalFile(snarls_path), 'vcf.gz' : job.fileStore.writeGlobalFile(vcf_path), 'vcf.gz.tbi' : job.fileStore.writeGlobalFile(vcf_path + '.tbi') } def merge_hal(job, options, hal_ids): """ call halMergeChroms to make one big hal file out of the chromosome hal files """ work_dir = job.fileStore.getLocalTempDir() hal_paths = [] for in_path, hal_id in zip(options.hal, hal_ids): hal_path = os.path.join(work_dir, os.path.basename(in_path)) job.fileStore.readGlobalFile(hal_id, hal_path) hal_paths.append(hal_path) merged_path = os.path.join(work_dir, '__merged__.hal') assert merged_path not in hal_paths # note: cactus_call tries to sort out relative paths by itself for docker. but the comma-separated list # will likely throw it off, so we take care to specify it relative manually. # also note: most hal commands need --inMemory to run at scale, but the access patterns for chrom # merging are linear enough that it shouldn't be needed cmd = ['halMergeChroms', ','.join([os.path.basename(p) for p in hal_paths]), os.path.basename(merged_path), '--progress'] cactus_call(parameters=cmd, work_dir = work_dir) return job.fileStore.writeGlobalFile(merged_path) def export_join_data(toil, options, clip_ids, idx_map, merge_hal_id): """ download all the output data """ # download the clip vgs clip_base = os.path.join(options.outDir, 'clip-{}'.format(options.outName)) if not clip_base.startswith('s3://') and not os.path.isdir(clip_base): os.makedirs(clip_base) for vg_path, vg_id in zip(options.vg, clip_ids): toil.exportFile(vg_id, makeURL(os.path.join(clip_base, os.path.basename(vg_path)))) # download everything else for ext, idx_id in idx_map.items(): toil.exportFile(idx_id, makeURL(os.path.join(options.outDir, '{}.{}'.format(options.outName, ext)))) # download the merged hal if merge_hal_id: toil.exportFile(merge_hal_id, makeURL(os.path.join(options.outDir, '{}.hal'.format(options.outName)))) if __name__ == "__main__": main() ```
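The workflow above is built almost entirely out of Toil's promise mechanism: `.rv()` hands a job's eventual return value to jobs scheduled later, before that value exists. A minimal sketch of the same pattern, separate from Cactus (the `increment` function and the `./jobstore` path are illustrative only, not part of the code above):

```python
from toil.common import Toil
from toil.job import Job


def increment(job, i):
    # runs on a worker; the promise below resolves to this return value
    return i + 1


if __name__ == "__main__":
    parser = Job.Runner.getDefaultArgumentParser()
    options = parser.parse_args()  # run as e.g. `python toy.py ./jobstore`
    first = Job.wrapJobFn(increment, 1)
    # first.rv() is a promise for first's return value (here, 2)
    second = first.addChildJobFn(increment, first.rv())
    with Toil(options) as toil:
        toil.start(first)
```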
{ "source": "0xangelo/nnrl", "score": 3 }
#### File: nn/distributions/abstract.py ```python from typing import Dict, List import numpy as np import torch from torch import nn from . import flows class ConditionalDistribution(nn.Module): """Implements torch.distribution.Distribution interface as a nn.Module. If passed a Distribution, wraps the unconditional distribution to be used with the ConditionalDistribution interface. """ # pylint:disable=abstract-method,unused-argument,not-callable def __init__(self, *, distribution: nn.Module = None): super().__init__() self.distribution = distribution @torch.jit.export def sample(self, params: Dict[str, torch.Tensor], sample_shape: List[int] = ()): """ Generates a sample_shape shaped sample or sample_shape shaped batch of samples if the distribution parameters are batched. Returns a (sample, log_prob) pair. """ if self.distribution is not None: return self.distribution.sample(sample_shape) return torch.tensor(np.nan).float(), torch.tensor(np.nan).float() @torch.jit.export def rsample(self, params: Dict[str, torch.Tensor], sample_shape: List[int] = ()): """ Generates a sample_shape shaped reparameterized sample or sample_shape shaped batch of reparameterized samples if the distribution parameters are batched. Returns a (rsample, log_prob) pair. """ if self.distribution is not None: return self.distribution.rsample(sample_shape) return torch.tensor(np.nan).float(), torch.tensor(np.nan).float() @torch.jit.export def log_prob(self, value: torch.Tensor, params: Dict[str, torch.Tensor]): """ Returns the log of the probability density/mass function evaluated at `value`. """ if self.distribution is not None: return self.distribution.log_prob(value) return torch.tensor(np.nan).float().expand_as(value) @torch.jit.export def cdf(self, value: torch.Tensor, params: Dict[str, torch.Tensor]): """Returns the cumulative density/mass function evaluated at `value`.""" if self.distribution is not None: return self.distribution.cdf(value) return torch.tensor(np.nan).float().expand_as(value) @torch.jit.export def icdf(self, value: torch.Tensor, params: Dict[str, torch.Tensor]): """Returns the inverse cumulative density/mass function evaluated at `value`.""" if self.distribution is not None: return self.distribution.icdf(value) return torch.tensor(np.nan).float().expand_as(value) @torch.jit.export def entropy(self, params: Dict[str, torch.Tensor]): """Returns entropy of distribution.""" if self.distribution is not None: return self.distribution.entropy() return torch.tensor(np.nan).float() @torch.jit.export def perplexity(self, params: Dict[str, torch.Tensor]): """Returns perplexity of distribution.""" return self.entropy(params).exp() @torch.jit.export def reproduce(self, value: torch.Tensor, params: Dict[str, torch.Tensor]): """Produce a reparametrized sample with the same value as `value`.""" if self.distribution is not None: return self.distribution.reproduce(value) return ( torch.tensor(np.nan).float().expand_as(value), torch.tensor(np.nan).float().expand_as(value), ) @torch.jit.export def deterministic(self, params: Dict[str, torch.Tensor]): """ Generates a deterministic sample or batch of samples if the distribution parameters are batched. Returns a (rsample, log_prob) pair. """ if self.distribution is not None: return self.distribution.deterministic() return torch.tensor(np.nan).float(), torch.tensor(np.nan).float() class Distribution(nn.Module): """Unconditional Distribution. If passed a ConditionalDistribution, wraps the unconditional distribution to be used with the Distribution interface. 
`nn.Parameter`s passed as distribution parameters will be registered as module parameters, making the distribution learnable. Otherwise, parameters will be registered as buffers. """ # pylint:disable=abstract-method,not-callable params: Dict[str, torch.Tensor] def __init__( self, *, cond_dist: nn.Module = None, params: Dict[str, torch.Tensor] = None ): super().__init__() self.cond_dist = cond_dist self.params = params or {} for name, param in self.params.items(): if isinstance(param, nn.Parameter): self.register_parameter(name, param) else: self.register_buffer(name, param) @torch.jit.export def sample(self, sample_shape: List[int] = ()): """ Generates a sample_shape shaped sample or sample_shape shaped batch of samples if the distribution parameters are batched. Returns a (sample, log_prob) pair. """ if self.cond_dist is not None: return self.cond_dist.sample(self.params, sample_shape) return torch.tensor(np.nan).float(), torch.tensor(np.nan).float() @torch.jit.export def rsample(self, sample_shape: List[int] = ()): """ Generates a sample_shape shaped reparameterized sample or sample_shape shaped batch of reparameterized samples if the distribution parameters are batched. Returns a (rsample, log_prob) pair. """ if self.cond_dist is not None: return self.cond_dist.rsample(self.params, sample_shape) return torch.tensor(np.nan).float(), torch.tensor(np.nan).float() @torch.jit.export def log_prob(self, value): """ Returns the log of the probability density/mass function evaluated at `value`. """ if self.cond_dist is not None: return self.cond_dist.log_prob(value, self.params) return torch.tensor(np.nan).float().expand_as(value) @torch.jit.export def cdf(self, value): """Returns the cumulative density/mass function evaluated at `value`.""" if self.cond_dist is not None: return self.cond_dist.cdf(value, self.params) return torch.tensor(np.nan).float().expand_as(value) @torch.jit.export def icdf(self, value): """Returns the inverse cumulative density/mass function evaluated at `value`.""" if self.cond_dist is not None: return self.cond_dist.icdf(value, self.params) return torch.tensor(np.nan).float().expand_as(value) @torch.jit.export def entropy(self): """Returns entropy of distribution.""" if self.cond_dist is not None: return self.cond_dist.entropy(self.params) return torch.tensor(np.nan).float() @torch.jit.export def perplexity(self): """Returns perplexity of distribution.""" return self.entropy().exp() @torch.jit.export def reproduce(self, value): """Produce a reparametrized sample with the same value as `value`.""" if self.cond_dist is not None: return self.cond_dist.reproduce(value, self.params) return ( torch.tensor(np.nan).float().expand_as(value), torch.tensor(np.nan).float().expand_as(value), ) @torch.jit.export def deterministic(self): """ Generates a deterministic sample or batch of samples if the distribution parameters are batched. Returns a (rsample, log_prob) pair. 
""" if self.cond_dist is not None: return self.cond_dist.deterministic(self.params) return torch.tensor(np.nan).float(), torch.tensor(np.nan).float() class Independent(ConditionalDistribution): """Reinterprets some of the batch dims of a distribution as event dims.""" # pylint:disable=abstract-method def __init__(self, base_dist, reinterpreted_batch_ndims): super().__init__() self.base_dist = base_dist self.reinterpreted_batch_ndims = reinterpreted_batch_ndims def forward(self, inputs): # pylint:disable=arguments-differ,missing-function-docstring return self.base_dist(inputs) @torch.jit.export def sample(self, params: Dict[str, torch.Tensor], sample_shape: List[int] = ()): out, base_log_prob = self.base_dist.sample(params, sample_shape) return ( out, flows.utils.sum_rightmost(base_log_prob, self.reinterpreted_batch_ndims), ) @torch.jit.export def rsample(self, params: Dict[str, torch.Tensor], sample_shape: List[int] = ()): out, base_log_prob = self.base_dist.rsample(params, sample_shape) if out is not None: return ( out, flows.utils.sum_rightmost( base_log_prob, self.reinterpreted_batch_ndims ), ) return out, base_log_prob @torch.jit.export def log_prob(self, value: torch.Tensor, params: Dict[str, torch.Tensor]): base_log_prob = self.base_dist.log_prob(value, params) return flows.utils.sum_rightmost(base_log_prob, self.reinterpreted_batch_ndims) @torch.jit.export def cdf(self, value: torch.Tensor, params: Dict[str, torch.Tensor]): return self.base_dist.cdf(value, params) @torch.jit.export def icdf(self, value: torch.Tensor, params: Dict[str, torch.Tensor]): return self.base_dist.icdf(value, params) @torch.jit.export def entropy(self, params: Dict[str, torch.Tensor]): base_entropy = self.base_dist.entropy(params) return flows.utils.sum_rightmost(base_entropy, self.reinterpreted_batch_ndims) @torch.jit.export def reproduce(self, value: torch.Tensor, params: Dict[str, torch.Tensor]): sample_, log_prob_ = self.base_dist.reproduce(value, params) return ( sample_, flows.utils.sum_rightmost(log_prob_, self.reinterpreted_batch_ndims), ) @torch.jit.export def deterministic(self, params: Dict[str, torch.Tensor]): sample, log_prob = self.base_dist.deterministic(params) return ( sample, flows.utils.sum_rightmost(log_prob, self.reinterpreted_batch_ndims), ) class TransformedDistribution(ConditionalDistribution): """ Extension of the ConditionalDistribution class, which applies a sequence of transformations to a base distribution. 
""" # pylint:disable=abstract-method def __init__(self, base_dist, transform): super().__init__() self.base_dist = ( ConditionalDistribution(distribution=base_dist) if isinstance(base_dist, Distribution) else base_dist ) self.transform = ( flows.ConditionalTransform(transform=transform) if isinstance(transform, flows.Transform) else transform ) @torch.jit.export def sample(self, params: Dict[str, torch.Tensor], sample_shape: List[int] = ()): base_sample, base_log_prob = self.base_dist.sample(params, sample_shape) transformed, log_abs_det_jacobian = self.transform(base_sample, params) return transformed.detach(), base_log_prob - log_abs_det_jacobian @torch.jit.export def rsample(self, params: Dict[str, torch.Tensor], sample_shape: List[int] = ()): base_rsample, base_log_prob = self.base_dist.rsample(params, sample_shape) transformed, log_abs_det_jacobian = self.transform(base_rsample, params) return transformed, base_log_prob - log_abs_det_jacobian @torch.jit.export def log_prob(self, value: torch.Tensor, params: Dict[str, torch.Tensor]): latent, log_abs_det_jacobian = self.transform(value, params, reverse=True) base_log_prob = self.base_dist.log_prob(latent, params) return base_log_prob + log_abs_det_jacobian @torch.jit.export def reproduce(self, value: torch.Tensor, params: Dict[str, torch.Tensor]): latent, _ = self.transform(value, params, reverse=True) latent_, base_log_prob_ = self.base_dist.reproduce(latent, params) value_, log_abs_det_jacobian_ = self.transform(latent_, params) return value_, base_log_prob_ - log_abs_det_jacobian_ @torch.jit.export def deterministic(self, params: Dict[str, torch.Tensor]): base_sample, base_log_prob = self.base_dist.deterministic(params) transformed, log_abs_det_jacobian = self.transform(base_sample, params) return transformed, base_log_prob - log_abs_det_jacobian ``` #### File: distributions/flows/abstract.py ```python from typing import Dict import torch from torch import nn from .utils import sum_rightmost class Transform(nn.Module): """A diffeomorphism. Transforms are differentiable bijections with tractable Jacobians. All transforms map samples from a latent space to another (f(z) -> x) Use the `reverse` flag to invert the transformation (f^{-1}(x) -> z). """ params: Dict[str, torch.Tensor] def __init__(self, *, cond_transform=None, params=None, event_dim=0): super().__init__() self.event_dim = ( event_dim if cond_transform is None else cond_transform.event_dim ) self.cond_transform = cond_transform self.params = params or {} for name, param in self.params.items(): if isinstance(param, nn.Parameter): self.register_parameter(name, param) else: self.register_buffer(name, param) def forward(self, inputs, reverse: bool = False): # pylint:disable=arguments-differ,missing-function-docstring return self.decode(inputs) if reverse else self.encode(inputs) def encode(self, inputs): """ Computes the transform `z => x` and the log det jacobian `log |dz/dx|` """ return self.cond_transform.encode(inputs, self.params) def decode(self, inputs): """ Inverts the transform `x => z` and the log det jacobian `log |dx/dz|`, or `- log |dz/dx|`. 
""" return self.cond_transform.decode(inputs, self.params) class ConditionalTransform(nn.Module): """A Transform conditioned on some external variable(s).""" def __init__(self, *, transform=None, event_dim=0): super().__init__() self.event_dim = event_dim if transform is None else transform.event_dim self.transform = transform def forward(self, inputs, params: Dict[str, torch.Tensor], reverse: bool = False): # pylint:disable=arguments-differ,missing-function-docstring return self.decode(inputs, params) if reverse else self.encode(inputs, params) def encode(self, inputs, params: Dict[str, torch.Tensor]): """ Computes the transform `(z, y) => x`. """ # pylint:disable=unused-argument return self.transform.encode(inputs) def decode(self, inputs, params: Dict[str, torch.Tensor]): """ Inverts the transform `(x, y) => z`. """ # pylint:disable=unused-argument return self.transform.decode(inputs) class InverseTransform(ConditionalTransform): """Invert the transform, effectively swapping the encoding/decoding directions.""" def __init__(self, transform): super().__init__(event_dim=transform.event_dim) self.transform = ( ConditionalTransform(transform=transform) if isinstance(transform, Transform) else transform ) def encode(self, inputs, params: Dict[str, torch.Tensor]): return self.transform.decode(inputs, params) def decode(self, inputs, params: Dict[str, torch.Tensor]): return self.transform.encode(inputs, params) class CompositeTransform(ConditionalTransform): # pylint:disable=missing-docstring def __init__(self, transforms, event_dim=None): event_dim = event_dim or max(t.event_dim for t in transforms) super().__init__(event_dim=event_dim) assert self.event_dim >= max(t.event_dim for t in transforms), ( "CompositeTransform cannot have an event_dim smaller than any " "of its components'" ) transforms = self.unpack(transforms) self.transforms = nn.ModuleList(transforms) self.inv_transforms = nn.ModuleList(transforms[::-1]) @staticmethod def unpack(transforms): """Recursively unfold CompositeTransforms in a list.""" result = [] for trans in transforms: if isinstance(trans, CompositeTransform): result.extend(trans.unpack(trans.transforms)) elif isinstance(trans, Transform): result += [ConditionalTransform(transform=trans)] else: result += [trans] return result def encode(self, inputs, params: Dict[str, torch.Tensor]): out = inputs log_abs_det_jacobian = 0.0 for transform in self.transforms: out, log_det = transform(out, params, reverse=False) log_abs_det_jacobian += sum_rightmost( log_det, self.event_dim - transform.event_dim ) return out, log_abs_det_jacobian def decode(self, inputs, params: Dict[str, torch.Tensor]): out = inputs log_abs_det_jacobian = 0.0 for transform in self.inv_transforms: out, log_det = transform(out, params, reverse=True) log_abs_det_jacobian += sum_rightmost( log_det, self.event_dim - transform.event_dim ) return out, log_abs_det_jacobian ``` #### File: distributions/flows/affine_constant.py ```python import torch from torch import nn from .abstract import Transform class AffineConstantFlow(Transform): """ Scales + Shifts the flow by (learned) constants per dimension. 
In NICE paper there is a Scaling layer which is a special case of this where t is None """ def __init__(self, shape, scale=True, shift=True, **kwargs): super().__init__(**kwargs) if scale: self.scale = nn.Parameter(torch.randn(shape)) else: self.register_buffer("scale", torch.zeros(shape)) if shift: self.loc = nn.Parameter(torch.randn(shape)) else: self.register_buffer("loc", torch.zeros(shape)) def encode(self, inputs): out = inputs * torch.exp(self.scale) + self.loc # log |dy/dx| = log |torch.exp(scale)| = scale log_abs_det_jacobian = self.scale return out, log_abs_det_jacobian def decode(self, inputs): out = (inputs - self.loc) * torch.exp(-self.scale) # log |dx/dy| = - log |dy/dx| = - scale log_abs_det_jacobian = -self.scale return out, log_abs_det_jacobian class ActNorm(Transform): """ Really an AffineConstantFlow but with a data-dependent initialization, where on the very first batch we clever initialize the s,t so that the output is unit gaussian. As described in Glow paper. """ def __init__(self, affine_const): super().__init__(event_dim=0) self.affine_const = affine_const self.data_dep_init_done = not ( isinstance(self.affine_const.scale, nn.Parameter) and isinstance(self.affine_const.loc, nn.Parameter) ) def encode(self, inputs): # first batch is used for init if not self.data_dep_init_done: scale = self.affine_const.scale loc = self.affine_const.loc # pylint:disable=unnecessary-comprehension dims = [i for i in range(inputs.dim() - self.event_dim)] # pylint:enable=unnecessary-comprehension std = -inputs.std(dim=dims).log().detach() scale.data.copy_(torch.where(torch.isnan(std), scale, std)) mean = -torch.mean(inputs * scale.exp(), dim=dims).detach() loc.data.copy_(mean) self.data_dep_init_done = True return self.affine_const.encode(inputs) def decode(self, inputs): return self.affine_const.decode(inputs) ``` #### File: distributions/flows/masks.py ```python import torch def create_alternating_binary_mask(features, even=True): """ Creates a binary mask of a given dimension which alternates its masking. :param features: Dimension of mask. :param even: If True, even values are assigned 1s, odd 0s. If False, vice versa. :return: Alternating binary mask of type torch.Tensor. """ mask = torch.zeros(features).byte() start = 0 if even else 1 mask[start::2] += 1 return mask def create_mid_split_binary_mask(features): """ Creates a binary mask of a given dimension which splits its masking at the midpoint. :param features: Dimension of mask. :return: Binary mask split at midpoint of type torch.Tensor """ mask = torch.zeros(features).byte() midpoint = features // 2 if features % 2 == 0 else features // 2 + 1 mask[:midpoint] += 1 return mask def create_random_binary_mask(features): """ Creates a random binary mask of a given dimension with half of its entries randomly set to 1s. :param features: Dimension of mask. :return: Binary mask with half of its entries set to 1s, of type torch.Tensor. """ mask = torch.zeros(features).byte() weights = torch.ones(features).float() num_samples = features // 2 if features % 2 == 0 else features // 2 + 1 indices = torch.multinomial( input=weights, num_samples=num_samples, replacement=False ) mask[indices] += 1 return mask ``` #### File: model/stochastic/ensemble.py ```python from typing import List import torch from torch import Tensor, nn from torch.jit import fork, wait from nnrl.nn.distributions.types import SampleLogp from nnrl.types import TensorDict from .single import StochasticModel class SME(nn.ModuleList): """Stochastic Model Ensemble. 
A static NN module list of `N` stochastic dynamics models. Implements the StochasticModel API but returns python lists of `N` outputs, one for each model in the ensemble. Assumes inputs are lists of the same length as the model ensemble. Applies each model in the ensemble to one of the inputs in the list. Args: models: List of StochasticModel modules Notes: `O` is the observation shape and `A` is the action shape. """ # pylint:disable=abstract-method def __init__(self, models: List[StochasticModel]): cls_name = type(self).__name__ assert all( isinstance(m, StochasticModel) for m in models ), f"All modules in {cls_name} must be instances of StochasticModel." super().__init__(models) def forward(self, obs: List[Tensor], act: List[Tensor]) -> List[TensorDict]: # pylint:disable=arguments-differ,missing-function-docstring return [m(obs[i], act[i]) for i, m in enumerate(self)] @torch.jit.export def sample(self, params: List[TensorDict]) -> List[SampleLogp]: """Compute samples and likelihoods for each model in the ensemble. Args: obs: List of `N` observation tensors of shape `(*,) + O` action: List of `N` action tensors of shape `(*,) + A` sample_shape: Sample shape argument for each model in the ensemble Returns: List of `N` tuples of sample and log-likelihood tensors of shape `S + (*,) + O` and `S + (*,)` respectively, where `S` is the `sample_shape`. """ return [m.sample(params[i]) for i, m in enumerate(self)] @torch.jit.export def rsample(self, params: List[TensorDict]) -> List[SampleLogp]: """Compute reparameterized samples and likelihoods for each model. Uses the same semantics as :meth:`SME.sample`. """ return [m.rsample(params[i]) for i, m in enumerate(self)] @torch.jit.export def log_prob(self, new_obs: List[Tensor], params: List[TensorDict]) -> List[Tensor]: """Compute likelihoods for each model in the ensemble. Args: obs: List of `N` observation tensors of shape `(*,) + O` action: List of `N` action tensors of shape `(*,) + A` new_obs: List of `N` observation tensors of shape `(*,) + O` Returns: List of `N` log-likelihood tensors of shape `(*,)` """ return [m.log_prob(new_obs[i], params[i]) for i, m in enumerate(self)] @torch.jit.export def deterministic(self, params: List[TensorDict]) -> List[SampleLogp]: """Compute deterministic new observations and their likelihoods for each model. Uses the same semantics as :meth:`SME.sample`. 
""" return [m.deterministic(params[i]) for i, m in enumerate(self)] class ForkedSME(SME): """Stochastic Model Ensemble with parallelized methods.""" # pylint:disable=abstract-method def forward(self, obs: List[Tensor], act: List[Tensor]) -> List[TensorDict]: futures = [fork(m, obs[i], act[i]) for i, m in enumerate(self)] return [wait(f) for f in futures] @torch.jit.export def sample(self, params: List[TensorDict]) -> List[SampleLogp]: futures = [fork(m.sample, params[i]) for i, m in enumerate(self)] return [wait(f) for f in futures] @torch.jit.export def rsample(self, params: List[TensorDict]) -> List[SampleLogp]: futures = [fork(m.rsample, params[i]) for i, m in enumerate(self)] return [wait(f) for f in futures] @torch.jit.export def log_prob(self, new_obs: List[Tensor], params: List[TensorDict]) -> List[Tensor]: futures = [fork(m.log_prob, new_obs[i], params[i]) for i, m in enumerate(self)] return [wait(f) for f in futures] @torch.jit.export def deterministic(self, params: List[TensorDict]) -> List[SampleLogp]: futures = [fork(m.deterministic, params[i]) for i, m in enumerate(self)] return [wait(f) for f in futures] ``` #### File: model/stochastic/svg.py ```python from dataclasses import dataclass import torch from gym.spaces import Box from torch import nn import nnrl.nn as nnx import nnrl.nn.distributions as ptd from nnrl.nn.init import initialize_ from nnrl.nn.networks.mlp import StateActionMLP from .single import StochasticModel @dataclass class SVGModelSpec(StateActionMLP.spec_cls): """Specifications for stochastic mlp model network. Inherits parameters from `StateActionMLP.spec_cls`. Args: units: Number of units in each hidden layer activation: Nonlinearity following each linear layer delay_action: Whether to apply an initial preprocessing layer on the observation before concatenating the action to the input. input_dependent_scale: Whether to parameterize the Gaussian standard deviation as a function of the state and action residual: Whether to build model as a residual one, i.e., that predicts the change in state rather than the next state itself """ input_dependent_scale: bool = True residual: bool = True class SVGModel(StochasticModel): """Model from Stochastic Value Gradients.""" # pylint:disable=abstract-method spec_cls = SVGModelSpec def __init__(self, obs_space: Box, action_space: Box, spec: SVGModelSpec): params = SVGDynamicsParams(obs_space, action_space, spec) dist = ptd.Independent(ptd.Normal(), reinterpreted_batch_ndims=1) super().__init__(params, dist) def initialize_parameters(self, initializer_spec: dict): """Initialize all encoder parameters. Args: initializer_spec: Dictionary with mandatory `name` key corresponding to the initializer function name in `torch.nn.init` and optional keyword arguments. """ self.params.initialize_parameters(initializer_spec) class SVGDynamicsParams(nn.Module): """ Neural network module mapping inputs to distribution parameters through parallel subnetworks for each output dimension. 
""" # pylint:disable=abstract-method spec_cls = SVGModelSpec def __init__(self, obs_space, action_space, spec: SVGModelSpec): super().__init__() def make_encoder(): return StateActionMLP(obs_space, action_space, spec) self.logits = nn.ModuleList([make_encoder() for _ in range(obs_space.shape[0])]) self._activation = spec.activation def make_param(in_features): kwargs = dict( event_size=1, input_dependent_scale=spec.input_dependent_scale ) return nnx.NormalParams(in_features, **kwargs) self.params = nn.ModuleList([make_param(m.out_features) for m in self.logits]) def forward(self, obs, act): # pylint:disable=arguments-differ,missing-function-docstring params = [p(l(obs, act)) for p, l in zip(self.params, self.logits)] loc = torch.cat([d["loc"] for d in params], dim=-1) scale = torch.cat([d["scale"] for d in params], dim=-1) return {"loc": loc, "scale": scale} def initialize_parameters(self, initializer_spec: dict): # pylint:disable=missing-docstring self.logits.apply(initialize_(activation=self._activation, **initializer_spec)) ``` #### File: nn/modules/activation.py ```python import torch from torch import nn class Swish(nn.Module): r"""Swish activation function. Notes: Applies the mapping :math:`x \mapsto x \cdot \sigma(x)`, where :math:`sigma` is the sigmoid function. Reference: Eger, Steffen, <NAME>, and <NAME>. "Is it time to swish? Comparing deep learning activation functions across NLP tasks." arXiv preprint arXiv:1901.02671 (2019). """ def forward(self, value: torch.Tensor) -> torch.Tensor: # pylint:disable=arguments-differ,no-self-use,missing-function-docstring return value * value.sigmoid() ``` #### File: nn/modules/leaf_parameter.py ```python import torch from torch import nn class LeafParameter(nn.Module): """Holds a single paramater vector an expands it to match batch shape of inputs.""" def __init__(self, in_features): super().__init__() self.bias = nn.Parameter(torch.zeros(in_features)) def forward(self, inputs): # pylint:disable=arguments-differ return self.bias.expand(inputs.shape[:-1] + (-1,)) ``` #### File: nnrl/optim/kfac.py ```python from __future__ import annotations import abc import contextlib import torch import torch.nn.functional as F from torch import Tensor, nn from torch.optim import Optimizer class KFACMixin(metaclass=abc.ABCMeta): """Adds methods for forward hooks, covariance computation and updating.""" # pylint:disable=invalid-name,no-member _hookable_modules: list[nn.Module] @contextlib.contextmanager def record_stats(self): """Activate registered forward and backward hooks.""" self.zero_grad() fwd_handles, bwd_handles = self.register_hooks() try: yield finally: self.remove_hooks(fwd_handles, bwd_handles) def register_hooks(self) -> tuple[list, list]: """Adds hooks to the monitored module for storing inputs and grads.""" fwd_handles, bwd_handles = [], [] for mod in self._hookable_modules: fwd_handles += [mod.register_forward_pre_hook(self.save_input)] bwd_handles += [mod.register_full_backward_hook(self.save_grad_out)] return fwd_handles, bwd_handles @staticmethod def remove_hooks(fwd_handles: list, bwd_handles: list): """Removes hooks from the monitored module.""" for handle in fwd_handles + bwd_handles: handle.remove() def save_input(self, mod, inputs): """Saves input of layer to compute covariance. Note: inputs must be divided by the batch size to weight them appropriately when computing the average whitening matrix. 
""" inputs = inputs[0].detach() if isinstance(mod, nn.Linear): inputs = inputs.reshape(-1, inputs.shape[-1]) elif isinstance(mod, nn.Conv2d): inputs = inputs.reshape(-1, *inputs.shape[-3:]) self.state[mod.weight]["x"] = inputs / inputs.shape[0] def save_grad_out(self, mod, _, grad_outputs): """Saves grad on output of layer to compute covariance. Note: grads are already properly weighted when the final loss function uses .mean() to aggregate element-wise losses. Since this is always the case when computing the entropy (average negative log-likelihood), we don't weight them here. """ grad_outputs = grad_outputs[0].detach() if isinstance(mod, nn.Linear): grad_outputs = grad_outputs.reshape(-1, grad_outputs.shape[-1]) elif isinstance(mod, nn.Conv2d): grad_outputs = grad_outputs.reshape(-1, *grad_outputs.shape[-3:]) self.state[mod.weight]["gy"] = grad_outputs def step(self): # pylint:disable=arguments-differ """Preconditions and applies gradients.""" fisher_norm = 0.0 for group in self.param_groups[:-1]: # Getting parameters params = group["params"] weight, bias = params if len(params) == 2 else (params[0], None) state = self.state[weight] # Update convariances and inverses state.setdefault("step", 0) state.update(self._compute_covs(group, state)) if state["step"] % self.update_freq == 0: state.update(self._process_covs(state)) state["step"] += 1 # Preconditionning gw, gb, new_state = self._precond(weight, bias, group, state) state.update(new_state) # Updating gradients fisher_norm += (weight.grad * gw).sum() weight.grad.data = gw if bias is not None: fisher_norm += (bias.grad * gb).sum() bias.grad.data = gb # Cleaning self.state[weight].pop("x", None) self.state[weight].pop("gy", None) fisher_norm += sum( (p.grad * p.grad).sum() for p in self.param_groups[-1]["params"] ) # Eventually scale the norm of the gradients and apply each scale = min(self.eta, torch.sqrt(self.state["kl_clip"] / fisher_norm)) for group in self.param_groups: for param in group["params"]: param.grad.data.mul_(scale) param.data.sub_(param.grad.data, alpha=group["lr"]) @abc.abstractmethod def _compute_covs(self, group, state): """Computes the covariances.""" @abc.abstractmethod def _process_covs(self, state): """Process the covariances for preconditioning gradients later.""" @abc.abstractmethod def _precond(self, weight, bias, group, state) -> tuple[Tensor, Tensor, dict]: """Applies preconditioning.""" class KFAC(KFACMixin, Optimizer): """K-FAC Optimizer for Linear and Conv2d layers. Computes the K-FAC of the second moment of the gradients. It works for Linear and Conv2d layers and silently skip other layers. Args: net: Network to optimize. eps: Tikhonov regularization parameter for the inverses. sua: Applies SUA (Spatially Uncorrelated Activations) approximation. pi: Computes pi correction for Tikhonov regularization. update_freq: Perform inverses every update_freq updates. alpha: Running average parameter (if == 1, no r. ave.). kl_clip: Scale the gradients by the squared fisher norm. eta: upper bound for gradient scaling. """ # pylint:disable=invalid-name,too-many-instance-attributes def __init__( self, net: nn.Module, eps: float, sua: bool = False, pi: bool = False, update_freq: int = 1, alpha: float = 1.0, kl_clip: float = 1e-3, eta: float = 1.0, lr: float = 1.0, ): # pylint:disable=too-many-arguments,too-many-locals assert isinstance(net, nn.Module), "KFAC needs access to module structure." 
self.eps = eps self.sua = sua self.pi = pi self.update_freq = update_freq self.alpha = alpha self.eta = eta self._hookable_modules = [] param_groups = [] param_set = set() for mod in net.modules(): mod_class = type(mod).__name__ if mod_class in ["Linear", "Conv2d"]: self._hookable_modules += [mod] info = ( (mod.kernel_size, mod.padding, mod.stride) if mod_class == "Conv2d" else None ) params = [mod.weight] if mod.bias is not None: params.append(mod.bias) param_groups.append( {"params": params, "info": info, "layer_type": mod_class} ) param_set.update(set(params)) param_groups.append( {"params": [p for p in net.parameters() if p not in param_set]} ) super().__init__(param_groups, {"lr": lr}) self.state["kl_clip"] = kl_clip def _compute_covs(self, group, state): x, gy = state["x"], state["gy"] # Computation of xxt if group["layer_type"] == "Conv2d": if not self.sua: kernel_size, padding, stride = group["info"] x = F.unfold(x, kernel_size, padding=padding, stride=stride) else: x = x.view(x.shape[0], x.shape[1], -1) x = x.data.permute(1, 0, 2).reshape(x.shape[1], -1) else: x = x.data.T if len(group["params"]) == 2: ones = torch.ones_like(x[:1]) x = torch.cat([x, ones], dim=0) # Computation of xxt xxt = x @ x.T if "xxt" in state: xxt = state["xxt"] * (1.0 - self.alpha) + xxt * self.alpha # Computation of ggt if group["layer_type"] == "Conv2d": gy = gy.data.permute(1, 0, 2, 3) num_locations = gy.shape[2] * gy.shape[3] gy = gy.reshape(gy.shape[0], -1) else: gy = gy.data.T num_locations = 1 ggt = gy @ gy.T if "ggt" in state: ggt = state["ggt"] * (1.0 - self.alpha) + ggt * self.alpha return {"xxt": xxt, "ggt": ggt, "num_locations": num_locations} def _process_covs(self, state): xxt, ggt, num_locations = (state[k] for k in "xxt ggt num_locations".split()) # Computes pi pi = 1.0 if self.pi: pi = (torch.trace(xxt) * ggt.shape[0]) / (torch.trace(ggt) * xxt.shape[0]) # Regularizes and inverts eps = self.eps / num_locations diag_xxt = torch.diag(torch.empty(xxt.shape[0]).fill_(torch.sqrt(eps * pi))) diag_ggt = torch.diag(torch.empty(ggt.shape[0]).fill_(torch.sqrt(eps / pi))) ixxt = (xxt + diag_xxt).inverse() iggt = (ggt + diag_ggt).inverse() return {"ixxt": ixxt, "iggt": iggt} def _precond(self, weight, bias, group, state): if group["layer_type"] == "Conv2d" and self.sua: return self._precond_sua(weight, bias, state) g = weight.grad.data if group["layer_type"] == "Conv2d": g = g.reshape(g.shape[0], -1) if bias is not None: gb = bias.grad.data g = torch.cat([g, gb.view(gb.shape[0], 1)], dim=1) ixxt, iggt = state["ixxt"], state["iggt"] g = iggt @ g @ ixxt if group["layer_type"] == "Conv2d": g /= state["num_locations"] if bias is not None: gb = g[:, -1].reshape_as(bias) g = g[:, :-1] else: gb = None g = g.reshape_as(weight) return g, gb, {} @staticmethod def _precond_sua(weight, bias, state): """Preconditioning for KFAC SUA.""" g = weight.grad.data s = g.shape g = g.permute(1, 0, 2, 3).contiguous() if bias is not None: gb = bias.grad.view(1, -1, 1, 1).expand(1, -1, s[2], s[3]) g = torch.cat([g, gb], dim=0) ixxt, iggt = state["ixxt"], state["iggt"] g = ixxt @ g.reshape(-1, s[0] * s[2] * s[3]) g = g.reshape(-1, s[0], s[2], s[3]).permute(1, 0, 2, 3) g = iggt @ g.reshape(s[0], -1) g = g.reshape(s[0], -1, s[2], s[3]) / state["num_locations"] if bias is not None: gb = g[:, -1, s[2] // 2, s[3] // 2] g = g[:, :-1] else: gb = None return g, gb, {} class EKFAC(KFACMixin, Optimizer): """EKFAC Optimizer for Linear layers. It works for Linear layers and silently skip other layers. 
Note: unlike the paper's pseudocode, we maintain running averages of the Kronecker-factored covariance matrices. Args: net: Network to optimize. eps: Tikhonov regularization parameter for the inverses. update_freq: Perform inverses every update_freq updates. alpha: Running average parameter (if == 1, no r. ave.). kl_clip: Scale the gradients by the squared fisher norm. eta: upper bound for gradient scaling. """ # pylint:disable=invalid-name def __init__( self, net: nn.Module, eps: float, update_freq: int = 1, alpha: float = 1.0, kl_clip: float = 1e-3, eta: float = 1.0, lr: float = 1.0, ): # pylint:disable=too-many-arguments assert isinstance(net, nn.Module), "EKFAC needs access to module structure." self.eps = eps self.update_freq = update_freq self.alpha = alpha self.eta = eta self._hookable_modules = [] param_groups = [] param_set = set() for mod in net.modules(): mod_class = type(mod).__name__ if mod_class in ["Linear"]: self._hookable_modules += [mod] info = None params = [mod.weight] if mod.bias is not None: params.append(mod.bias) param_groups.append( {"params": params, "info": info, "layer_type": mod_class} ) param_set.update(set(params)) param_groups.append( {"params": [p for p in net.parameters() if p not in param_set]} ) super().__init__(param_groups, {"lr": lr}) self.state["kl_clip"] = kl_clip def _compute_covs(self, group, state): x, gy = state["x"], state["gy"] # Computation of xxt x = x.data.T if len(group["params"]) == 2: x = torch.cat([x, torch.ones_like(x[:1])], dim=0) xxt = x @ x.T if "xxt" in state: xxt = state["xxt"] * (1.0 - self.alpha) + xxt * self.alpha # Computation of ggt gy = gy.data.T num_locations = 1 ggt = gy @ gy.T if "ggt" in state: ggt = state["ggt"] * (1.0 - self.alpha) + ggt * self.alpha return {"xxt": xxt, "ggt": ggt, "num_locations": num_locations} def _process_covs(self, state): xxt, ggt = state["xxt"], state["ggt"] # Regularizes and inverts pi = (torch.trace(xxt) * ggt.shape[0]) / (torch.trace(ggt) * xxt.shape[0]) eps = self.eps diag_xxt = torch.diag(torch.empty(xxt.shape[0]).fill_(torch.sqrt(eps * pi))) diag_ggt = torch.diag(torch.empty(ggt.shape[0]).fill_(torch.sqrt(eps / pi))) sa, ua = torch.symeig(xxt + diag_xxt, eigenvectors=True) sb, ub = torch.symeig(ggt + diag_ggt, eigenvectors=True) m2 = sb.unsqueeze(1) * sa.unsqueeze(0) return {"ua": ua, "ub": ub, "m2": m2} def _precond(self, weight, bias, group, state): g = weight.grad.data if bias is not None: gb = bias.grad.data g = torch.cat([g, gb.view(gb.shape[0], 1)], dim=1) bs = state["x"].size(0) ua, ub = state["ua"], state["ub"] projected = ub.T @ g @ ua m2 = projected ** 2 if "m2" in state: m2 = state["m2"] * self.alpha + (1.0 - self.alpha) * bs * m2 scaled = projected / (m2 + self.eps) g = ub @ scaled @ ua.T if bias is not None: gb = g[:, -1].reshape_as(bias) g = g[:, :-1] else: gb = None g = g.reshape_as(weight) return g, gb, {"m2": m2} ``` #### File: actor/policy/test_deterministic.py ```python import pytest import torch from gym.spaces import Box from torch import Tensor @pytest.fixture(scope="module") def module_cls(): from nnrl.nn.actor.policy.deterministic import MLPDeterministicPolicy return MLPDeterministicPolicy @pytest.fixture(params=(0.1, 1.2), ids=lambda x: f"NormBeta({x})") def norm_beta(request) -> float: return request.param @pytest.fixture def spec(module_cls, norm_beta): return module_cls.spec_cls(norm_beta=norm_beta) @pytest.fixture def action_space(cont_space: Box) -> Box: return cont_space @pytest.fixture def module(module_cls, obs_space, action_space, spec): return 
module_cls(obs_space, action_space, spec) def test_unconstrained_action(module, obs: Tensor, action_space, norm_beta): action_dim = action_space.shape[0] policy_out = module.unconstrained_action(obs) norms = policy_out.norm(p=1, dim=-1, keepdim=True) / action_dim assert policy_out.shape[-1] == action_dim assert policy_out.dtype == torch.float32 assert (norms <= (norm_beta + torch.finfo(torch.float32).eps)).all() ``` #### File: tests/nn/conftest.py ```python import numpy as np import pytest import torch from gym import spaces from torch import Tensor from nnrl.nn.actor import ( DeterministicPolicy, MLPContinuousPolicy, MLPDeterministicPolicy, ) from nnrl.nn.critic import ActionValueCritic from nnrl.utils import fake_space_samples @pytest.fixture(scope="module", params=((1,), (4,)), ids=("Obs1Dim", "Obs4Dim")) def obs_space(request): return spaces.Box(-10, 10, shape=request.param) @pytest.fixture(scope="module", params=((1,), (4,)), ids=("Act1Dim", "Act4Dim")) def action_space(request): return spaces.Box(-1, 1, shape=request.param) @pytest.fixture( params=(pytest.param(True, marks=pytest.mark.slow), False), ids=("TorchScript", "Eager"), scope="module", ) def torch_script(request): return request.param @pytest.fixture def batch_size() -> int: return 32 @pytest.fixture def obs(obs_space: spaces.Space, batch_size: int) -> Tensor: return fake_space_samples(obs_space, batch_size) @pytest.fixture def action(action_space: spaces.Space, batch_size: int) -> Tensor: return fake_space_samples(action_space, batch_size) @pytest.fixture def next_obs(obs_space: spaces.Space, batch_size: int) -> Tensor: return fake_space_samples(obs_space, batch_size) @pytest.fixture def rew(batch_size: int) -> Tensor: return torch.as_tensor(np.random.randn(batch_size).astype(np.float32)) @pytest.fixture def deterministic_policies(obs_space, action_space): spec = MLPDeterministicPolicy.spec_cls( units=(32,), activation="ReLU", norm_beta=1.2 ) policy = MLPDeterministicPolicy(obs_space, action_space, spec) target_policy = DeterministicPolicy.add_gaussian_noise(policy, noise_stddev=0.3) return policy, target_policy @pytest.fixture(params=(True, False), ids=(f"PiScaleDep({b})" for b in (True, False))) def policy_input_scale(request): return request.param @pytest.fixture def stochastic_policy(obs_space, action_space, policy_input_scale): config = {"encoder": {"units": (32,)}} mlp_spec = MLPContinuousPolicy.spec_cls.from_dict(config) return MLPContinuousPolicy( obs_space, action_space, mlp_spec, input_dependent_scale=policy_input_scale ) @pytest.fixture(params=(1, 2), ids=(f"Critics({n})" for n in (1, 2))) def action_critics(request, obs_space, action_space): config = { "encoder": {"units": [32]}, "double_q": request.param == 2, "parallelize": False, } spec = ActionValueCritic.spec_cls.from_dict(config) act_critic = ActionValueCritic(obs_space, action_space, spec) return act_critic.q_values, act_critic.target_q_values ``` #### File: nn/critic/test_action_value.py ```python import pytest import torch @pytest.fixture(scope="module") def module_cls(): from nnrl.nn.critic.action_value import ActionValueCritic return ActionValueCritic @pytest.fixture(params=(True, False), ids="DoubleQ SingleQ".split()) def double_q(request): return request.param @pytest.fixture(params=(True, False), ids=lambda x: f"Parallelize({x})") def parallelize(request): return request.param @pytest.fixture def spec(module_cls, double_q, parallelize): return module_cls.spec_cls(double_q=double_q, parallelize=parallelize) @pytest.fixture def module(module_cls, 
obs_space, action_space, spec): return module_cls(obs_space, action_space, spec) def test_module_creation(module, obs, action, spec): double_q = spec.double_q for attr in "q_values target_q_values".split(): assert hasattr(module, attr) expected_n_critics = 2 if double_q else 1 assert len(module.q_values) == expected_n_critics q_values, targets = module.q_values, module.target_q_values vals = [m(obs, action) for ensemble in (q_values, targets) for m in ensemble] for val in vals: assert val.shape == obs.shape[:-1] assert val.dtype == torch.float32 assert all( torch.allclose(p, t) for p, t in zip(q_values.parameters(), targets.parameters()) ) def test_script(module): torch.jit.script(module) ``` #### File: distributions/flows/test_affine_constant.py ```python import pytest import torch from torch import nn from nnrl.nn.distributions.flows import ActNorm, AffineConstantFlow @pytest.fixture(params=(True, False), ids=("LearnScale", "ConstScale")) def scale(request): return request.param @pytest.fixture(params=(True, False), ids=("LearnShift", "ConstShift")) def shift(request): return request.param @pytest.fixture(params=(True, False), ids=("AffineConst", "ActNorm")) def module(request, scale, shift): def make_mod(shape): mod = AffineConstantFlow(shape, scale, shift) return ActNorm(mod) if request.param else mod return make_mod @pytest.fixture(params=((1,), (2,), (4,))) def shape(request): return request.param @pytest.fixture(params=((), (1,), (4,))) def inputs(request, shape): input_shape = request.param + shape return torch.randn(*input_shape).requires_grad_() def test_affine_constant(module, inputs, torch_script): module = module(inputs.shape[-1:]) module = torch.jit.script(module) if torch_script else module scale = module.scale if "scale" in dir(module) else module.affine_const.scale latent, log_det = module(inputs) if isinstance(scale, nn.Parameter): log_det.sum().backward(retain_graph=True) assert scale.grad is not None latent.sum().backward() assert inputs.grad is not None latent = latent.detach().requires_grad_() input_, log_det = module(latent, reverse=True) assert torch.allclose(input_, inputs, atol=1e-6) if isinstance(scale, nn.Parameter): log_det.sum().backward(retain_graph=True) assert scale.grad is not None input_.sum().backward() assert latent.grad is not None ``` #### File: distributions/flows/test_couplings.py ```python import pytest import torch from nnrl.nn.distributions.flows.coupling import ( AdditiveCouplingTransform, AffineCouplingTransform, PiecewiseRQSCouplingTransform, ) from nnrl.nn.distributions.flows.masks import create_alternating_binary_mask from nnrl.nn.networks import MLP, ResidualNet PARITIES = (True, False) IN_SIZES = (2, 3) COUPLINGS = ( AffineCouplingTransform, AdditiveCouplingTransform, PiecewiseRQSCouplingTransform, ) @pytest.fixture(params=PARITIES, ids=(f"Parity({p})" for p in PARITIES)) def parity(request): return request.param @pytest.fixture(params=IN_SIZES, ids=(f"InSize({i})" for i in IN_SIZES)) def mask(request, parity): return create_alternating_binary_mask(request.param, even=parity) @pytest.fixture(params=(MLP, ResidualNet)) def transform_net_create_fn(request): return lambda i, o: request.param(i, o, 6) @pytest.fixture(params=COUPLINGS) def cls(request): return request.param @pytest.mark.filterwarnings("ignore:Inputs to the softmax are not scaled::nnrl") def test_creation(cls, mask, transform_net_create_fn): coupling = cls(mask, transform_net_create_fn) torch.jit.script(coupling) @pytest.mark.filterwarnings("ignore:Inputs to the softmax are not 
scaled::nnrl") def test_call(cls, mask, transform_net_create_fn): coupling = cls(mask, transform_net_create_fn) coupling = torch.jit.script(coupling) inputs = torch.randn(10, *mask.shape) params = {} out, logabsdet = coupling(inputs, params) latent, logdet = coupling(out, params, reverse=True) assert out.shape == inputs.shape assert latent.shape == inputs.shape assert logabsdet.shape == (10,) assert logdet.shape == (10,) assert torch.allclose(inputs, latent, atol=1e-5) assert torch.allclose(logabsdet, -logdet, atol=1e-5) ```
{ "source": "0xApeToshi/ERC721-snapshot", "score": 2 }
#### File: 0xApeToshi/ERC721-snapshot/main.py ```python import json import os from collections import deque from time import time import requests from dotenv import load_dotenv from web3 import Web3 class QueueWindow: def __init__(self, max_size): self.q = deque() self.max_size = max_size self._len = 0 def get_len(self): # len starts at 0 and increases up to max_size return self._len def get_sum(self): return sum(self.q) def __repr__(self): return str(list(self.q)) def add(self, element): """Add an element to the queue. Caps the max size.""" self.q.append(element) self._len += 1 if self._len == self.max_size: # Update the function, just for fun :) def add(element): self.q.append(element) self.q.popleft() self.add = add if __name__ == "__main__": load_dotenv() # Start & end tokenId (inclusive) START = 1 END = 15 TOTAL = END - START + 1 # Contract address (needs to be verified to get ABI) CONTRACT_ADDR = "0x1cBB182322Aee8ce9F4F1f98d7460173ee30Af1F" # Needs a .env file in the same folder INFURA_ID = os.getenv("INFURA_ID") url = f"https://api.etherscan.io/api?module=contract&action=getabi&address={CONTRACT_ADDR}" w3 = Web3(Web3.HTTPProvider(f"https://mainnet.infura.io/v3/{INFURA_ID}")) response = requests.get(url) ABI = json.loads(response.json().get("result")) contract = w3.eth.contract(w3.toChecksumAddress(CONTRACT_ADDR), abi=ABI) snapshot = {} window = QueueWindow(10) eta = "?" animation = "|/-\\" for i, tokenId in enumerate(range(START, END+1)): t1 = time() wallet_owner = contract.functions.ownerOf(tokenId).call() if snapshot.get(wallet_owner): # If the wallet owns multiple tokens snapshot[wallet_owner].append(tokenId) else: # If the wallet appears for the first time, # make a list and add the found token to it snapshot.update({wallet_owner: [tokenId]}) t2 = time() window.add(t2-t1) remaining = TOTAL - i - 1 # Find the average time and multiply by remaining if i % 10 == 0: eta = round(remaining * window.get_sum()/window.get_len(), 1) print( f"{animation[i % len(animation)]} {tokenId:5}/{END} | ETA: {eta:9.2f}s", end="\r") print("--> Saving to snapshot.txt") with open("snapshot.txt", "w") as f: f.write(json.dumps(snapshot)) print("--> Done!") ```
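`QueueWindow` above is a fixed-size moving window used to smooth per-token timings into the ETA estimate. A small sketch of the averaging it enables, using the class as defined in `main.py` above (the timing values are illustrative):

```python
window = QueueWindow(max_size=3)
for seconds in (2.0, 4.0, 6.0, 8.0):
    window.add(seconds)
# once full, each add evicts the oldest sample: the window holds [4.0, 6.0, 8.0]
average = window.get_sum() / window.get_len()  # 6.0
eta = 5 * average  # e.g. 5 tokens remaining -> 30.0 seconds
```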
{ "source": "0xb00d1e/ioc_fetch", "score": 2 }
#### File: lib/decorators/auth.py
```python
from functools import wraps

from flask import request

from ioc_fetch.api.lib.responses import *
from ioc_fetch.api.models.user import User


def require_roles(roles=[]):
    def require_roles_decorator(view_function):
        @wraps(view_function)
        def _require_roles(*args, **kwargs):
            key = request.args.get('key', '')
            if not key:
                key = request.headers.get('X-API-Key', '')
            if key:
                user = User.get_user_by_apikey(key)
                if not user:
                    return make_error(UNAUTHORIZED)
                if not user.is_active:
                    return make_error(FORBIDDEN)
                if user:
                    kwargs['username'] = user.username
                    if set(roles) <= set(user.role_names()):
                        return view_function(*args, **kwargs)
                    else:
                        return make_error(FORBIDDEN)
                else:
                    return make_error(UNAUTHORIZED)
            else:
                return make_error(UNAUTHORIZED)
            return view_function(*args, **kwargs)
        return _require_roles
    return require_roles_decorator
```
#### File: lib/responses/__init__.py
```python
from flask import jsonify


def make_error(code_message):
    response = jsonify({
        'status_code': code_message[0],
        'message': code_message[1]
    })
    response.status_code = code_message[0]
    return response


INVALID_JSON_SENT = (400, 'invalid json was sent to the server')
UNAUTHORIZED = (401, 'unauthorized')
FORBIDDEN = (403, 'forbidden')
USER_EXISTS = (429, 'user already exists')
USER_NOT_FOUND = (404, 'user not found')
ERROR_USER_NOT_FOUND = (400, 'user not found')
ROLE_EXISTS = (429, 'role already exists')
ROLE_NOT_FOUND = (404, 'role not found')
ERROR_ROLE_NOT_FOUND = (400, 'role not found')
INVALID_IPV4_SENT = (400, 'invalid ipv4 was sent to the server')
INVALID_DOMAIN_SENT = (400, 'invalid domain was sent to the server')
INVALID_MD5_SENT = (400, 'invalid md5 was sent to the server')
INVALID_SHA1_SENT = (400, 'invalid sha1 was sent to the server')
INVALID_SHA256_SENT = (400, 'invalid sha256 was sent to the server')
```
#### File: api/models/user.py
```python
import secrets
import string
from datetime import datetime

from flask_user import UserMixin
from werkzeug.security import generate_password_hash

from ioc_fetch.app import db
from ioc_fetch.api.models.model import Model
from ioc_fetch.api.models.role import Role


class User(Model, UserMixin):
    __tablename__ = 'users'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    is_active = db.Column(db.Boolean, nullable=False, server_default='0')
    created = db.Column(db.DateTime(timezone=True), nullable=False,
                        default=datetime.utcnow)
    username = db.Column(db.Text, nullable=False, unique=True)
    password = db.Column(db.Text, nullable=False)
    api_key = db.Column(db.Text, nullable=False, unique=True)

    roles = db.relationship('Role', secondary='user_roles')

    def __init__(self, username, password, role_names):
        self.username = username
        self.password = generate_password_hash(password)
        self.api_key = self.generate_api_key()
        for role in Role.get_by_names(role_names):
            self.roles.append(role)

    @classmethod
    def create(cls, username, password, role_names):
        new_user = cls(username, password, role_names)
        db.session.add(new_user)
        db.session.commit()
        return new_user

    @classmethod
    def get_user_by_apikey(cls, api_key):
        return db.session.query(cls).filter((cls.api_key == api_key)).first()

    def role_names(self):
        return [role.name for role in self.roles]

    def generate_api_key(self):
        return ''.join(secrets.choice(string.ascii_lowercase +
                                      string.ascii_uppercase +
                                      string.digits) for _ in range(64))

    def json(self):
        result = {}
        for column in self.__table__.columns:
            if column.name == 'password':
                continue
            result[column.name] = getattr(self, column.name)
result['role_names'] = [r.name for r in self.roles] return result ``` #### File: api/sources/abuse_ip_db.py ```python import requests from ioc_fetch.api.sources.source import Source from ioc_fetch.lib.util import get_env_variable class AbuseIPDB(Source): def __init__(self): self.url = 'https://api.abuseipdb.com/api/v2/check' self.api_key = get_env_variable('ABUSE_IP_DB_API_KEY') def check_ipv4(self, ipv4): response = requests.get( f'{self.url}', headers={ 'Accept': 'application/json', 'Key': self.api_key }, params={ 'ipAddress': ipv4 } ) return response ``` #### File: api/views/domain.py ```python import requests import socket from flask import jsonify, request from ioc_fetch.api import api from ioc_fetch.api.lib.decorators.auth import require_roles from ioc_fetch.api.lib.responses import * from ioc_fetch.api.sources import all_sources from ioc_fetch.logger import get_logger logger = get_logger(__name__) @api.route('/domain/<domain>', methods=['GET']) @require_roles(roles=['user']) def get_domain(domain, **kwargs): if not is_valid_domain(domain): return make_error(INVALID_DOMAIN_SENT) result = {} for source in all_sources: try: response = source.check_domain(domain) result[source.__class__.__name__.lower()] = response.json() except NotImplementedError: continue except Exception as e: logger.exception(e) return jsonify(result) def is_valid_domain(domain): # TODO Add better validation # Distinguish between domain and ipv4 if '.' not in domain: return False return True ``` #### File: 0xb00d1e/ioc_fetch/app.py ```python import sys from pathlib import Path sys.path.insert(0, '..') from dotenv import load_dotenv from flask import Flask from flask_script import Manager from flask_migrate import Migrate, MigrateCommand #from extensions import db #from lib.util import CustomJSONEncoder from ioc_fetch.extensions import db from ioc_fetch.lib.util import CustomJSONEncoder def create_app(environment): app = Flask( __name__, instance_relative_config=True ) load_env_vars(Path(__file__).resolve().parent) configure_app(app, environment) configure_extensions(app) configure_addons(app) configure_blueprints(app) return app def configure_app(app, environment): app.url_map.strict_slashes = False if environment == 'test': app.config.from_object('ioc_fetch.config.TestConfig') elif environment == 'dev': app.config.from_object('ioc_fetch.config.DevConfig') elif environment == 'prod': app.config.from_object('ioc_fetch.config.ProdConfig') def load_env_vars(path): env_vars = Path(path) / '.env' load_dotenv(dotenv_path=env_vars) def configure_addons(app): app.json_encoder = CustomJSONEncoder def configure_blueprints(app): from ioc_fetch.api import api bps = [api] for bp in bps: app.register_blueprint(bp) def configure_extensions(app): db.init_app(app) def migrations(app, db): migrate = Migrate(app, db) manager = Manager(app) manager.add_command('db', MigrateCommand) manager.run() if __name__ == '__main__': app = create_app('dev') app.run(host='0.0.0.0', port=8080, debug=True) ``` #### File: 0xb00d1e/ioc_fetch/logger.py ```python import logging import os from logging.handlers import TimedRotatingFileHandler def get_logger(name): logger = logging.getLogger(name) if not logger.handlers: this_dir = os.path.dirname(os.path.abspath(__file__)) log_file = os.path.join(this_dir, 'logs', f'{name}.log') logger.setLevel(logging.DEBUG) file_handler = TimedRotatingFileHandler( log_file, when='d', interval=7, backupCount=52 ) console_handler = logging.StreamHandler() console_handler.setLevel(logging.DEBUG) formatter = 
logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') file_handler.setFormatter(formatter) console_handler.setFormatter(formatter) logger.addHandler(file_handler) logger.addHandler(console_handler) return logger ``` #### File: 0xb00d1e/ioc_fetch/manage.py ```python import os import sys from getpass import getpass import sqlalchemy from flask_script import Manager from flask_migrate import Migrate, MigrateCommand sys.path.insert(0, '..') from ioc_fetch.app import create_app, db from ioc_fetch.api.models.role import Role from ioc_fetch.api.models.user import User app = create_app('prod') migrate = Migrate(app, db) manager = Manager(app) manager.add_command('db', MigrateCommand) @manager.command def seed(): print('[*] Creating roles...') roles = ['admin', 'user'] for role in roles: try: Role.create(role) print(f'[*] Created {role} role...') except sqlalchemy.exc.IntegrityError: db.session.rollback() print(f'[*] {role} role already exists') print('[*] Creating admin user...') user = db.session.query(User).filter(User.username=='admin').first() if not user: while True: password = getpass('[*] Enter new admin user password: ') password2 = getpass('[*] Enter password again: ') if password == password2: break print("[*] Passwords didn't match...") try: user = User.create('admin', password, roles) user.is_active = True db.session.commit() print('[*] admin user created') except sqlalchemy.exc.IntegrityError: print('[*] admin user already exists...') else: print('[*] admin user already exists...') if __name__ == '__main__': manager.run() ``` #### File: ioc_fetch/tests/__init__.py ```python import unittest from ioc_fetch.app import create_app, db from ioc_fetch.api.models import Role, User class AppTest(unittest.TestCase): def setUp(self): self.app = create_app('test') self.app_context = self.app.app_context() self.app_context.push() db.create_all() try: self.create_test_user(db) except Exception: db.session.rollback() user = db.session.query(User).filter(User.username=='test').first() self.key = user.api_key self.client = self.app.test_client() def create_test_user(self, db): r1 = Role('admin') r2 = Role('user') db.session.add(r1) db.session.add(r2) db.session.commit() u = User('test', 'test', ['admin', 'user']) db.session.add(u) u.is_active = True self.key = u.api_key db.session.commit() def tearDown(self): db.session.remove() db.drop_all() self.app_context.pop() def http_status_code(self, resource): response = self.client.get(resource) return response.status_code ```
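For reference, a minimal client-side sketch of how the `require_roles` decorator above expects credentials to arrive; the base URL assumes the dev server started by `app.py` (port 8080) with the `api` blueprint mounted at the root, and the key value is a placeholder for one issued by `User.generate_api_key()`:

```python
# Hypothetical client for the /domain endpoint; URL prefix and key are assumptions.
import requests

BASE_URL = "http://localhost:8080"
API_KEY = "replace-with-a-64-char-api-key"

# _require_roles accepts the key as an X-API-Key header ...
r = requests.get(f"{BASE_URL}/domain/example.com", headers={"X-API-Key": API_KEY})
print(r.status_code, r.json())

# ... or as a ?key= query parameter.
r = requests.get(f"{BASE_URL}/domain/example.com", params={"key": API_KEY})
print(r.status_code, r.json())
```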
{ "source": "0xb0b/a0", "score": 3 }
#### File: a0/two048/model.py ```python class Model: def __init__(self, trajectory=None): self.trajectory = [] if trajectory is None else trajectory[:] def update(self, observed_state): self.trajectory.append(observed_state) ``` #### File: a0/two048/test_game.py ```python import pytest from game import Game, init_randomness def rotate(state, size, num_rotations=1): # rotate the input state counter clockwise given number of times and return # the rotated state if num_rotations == 0: return state for _ in range(num_rotations): temp_state = state[:] for i, tile in enumerate(temp_state): state[size * (size - 1 - i % size) + i // size] = tile return state @pytest.mark.parametrize("input_state, rotated_state, size", [ ([3], [3], 1), ([1, 2, 3, 4], [2, 4, 1, 3], 2), ([1, 2, 3, 4, 5, 6, 7, 8, 9], [3, 6, 9, 2, 5, 8, 1, 4, 7], 3), ([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5], [3, 7, 1, 5, 2, 6, 0, 4, 1, 5, 9, 3, 0, 4, 8, 2], 4) ]) def test_rotate(input_state, rotated_state, size): assert rotate(input_state, size) == rotated_state @pytest.fixture def game_instance(): init_randomness() return Game() interact_state_examples = [ # (initial state, final state) # each example pair is defined for ActionSpace.RIGHT, all other actions with # correspondingly rotated states have to be tested in addition # empty state does not change ([0] * 16, [0] * 16), # non empty state does not change # 0 1 2 3 0 1 2 3 # 0 0 0 4 --> 0 0 0 4 # 5 2 1 6 5 2 1 6 # 0 0 0 0 0 0 0 0 ([0, 1, 2, 3, 0, 0, 0, 4, 5, 2, 1, 6, 0, 0, 0, 0], [0, 1, 2, 3, 0, 0, 0, 4, 5, 2, 1, 6, 0, 0, 0, 0]), # terminal state does not change # 3 1 2 3 3 1 2 3 # 1 2 8 4 --> 1 2 8 4 # 5 3 1 6 5 3 1 6 # 1 2 3 5 1 2 3 5 ([3, 1, 2, 3, 1, 2, 8, 4, 5, 3, 1, 6, 1, 2, 3, 5], [3, 1, 2, 3, 1, 2, 8, 4, 5, 3, 1, 6, 1, 2, 3, 5]), # tiles moved, no merging # 0 1 2 0 0 0 1 2 # 0 4 0 0 --> 0 0 0 4 # 5 2 1 0 0 5 2 1 # 6 0 3 0 0 0 6 3 ([0, 1, 2, 0, 0, 4, 0, 0, 5, 2, 1, 0, 6, 0, 3, 0], [0, 0, 1, 2, 0, 0, 0, 4, 0, 5, 2, 1, 0, 0, 6, 3]), # tiles moved and merged # 0 1 2 2 0 0 1 3 # 0 4 0 4 --> 0 0 0 5 # 2 2 1 0 0 0 3 1 # 3 0 3 7 0 0 4 7 ([0, 1, 2, 2, 0, 4, 0, 4, 2, 2, 1, 0, 3, 0, 3, 7], [0, 0, 1, 3, 0, 0, 0, 5, 0, 0, 3, 1, 0, 0, 4, 7]), # double merges # 1 1 2 2 0 0 2 3 # 0 5 4 4 --> 0 0 5 5 # 2 0 2 3 0 0 3 3 # 3 3 3 3 0 0 4 4 ([1, 1, 2, 2, 0, 5, 4, 4, 2, 0, 2, 3, 3, 3, 3, 3], [0, 0, 2, 3, 0, 0, 5, 5, 0, 0, 3, 3, 0, 0, 4, 4]), ] @pytest.mark.parametrize("initial_state, final_state", interact_state_examples) def test_change_state(game_instance, initial_state, final_state): # test that state is changed according to the rules # initial and final state parameters are defined for ActionSpace.RIGHT # all other actions are tested by rotating states # ActionSpace is expected to be ordered counter clockwise starting from # ActionSpace.RIGHT for action in game_instance.actions(): game_instance.set_state(initial_state[:]) game_instance.change_state(action) assert game_instance.get_state() == final_state rotate(initial_state, game_instance.size) rotate(final_state, game_instance.size) possible_actions_state_examples = [ # (state, possible actions) # empty state, no actions possible ([0] * 16, ()), # non empty state, all actions possible # 0 1 2 0 # 0 4 0 0 # 5 2 1 0 # 6 0 3 0 ([0, 1, 2, 0, 0, 4, 0, 0, 5, 2, 1, 0, 6, 0, 3, 0], ("RIGHT", "UP", "LEFT", "DOWN")), # all actions except RIGHT possible # 0 1 2 3 # 0 0 0 4 # 5 2 1 6 # 0 0 0 0 ([0, 1, 2, 3, 0, 0, 0, 4, 5, 2, 1, 6, 0, 0, 0, 0], ("UP", "LEFT", "DOWN")), # only actions LEFT and DOWN possible # 0 1 2 1 # 0 3 1 4 # 0 0 2 5 # 0 0 0 0 
([0, 1, 2, 1, 0, 3, 1, 4, 0, 0, 2, 5, 0, 0, 0, 0], ("LEFT", "DOWN")), # only actions LEFT and RIGHT possible # 0 1 0 3 # 0 3 0 1 # 0 2 0 2 # 0 5 0 4 ([0, 1, 0, 3, 0, 3, 0, 1, 0, 2, 0, 2, 0, 5, 0, 4], ("RIGHT", "LEFT")), # only action UP possible # 0 0 0 0 # 0 0 0 0 # 6 2 1 3 # 3 1 2 7 ([0, 0, 0, 0, 0, 0, 0, 0, 6, 2, 1, 3, 3, 1, 2, 7], ("UP",)), # terminal state # 3 1 2 3 # 1 2 8 4 # 5 3 1 6 # 1 2 3 5 ([3, 1, 2, 3, 1, 2, 8, 4, 5, 3, 1, 6, 1, 2, 3, 5], ()) ] @pytest.mark.parametrize("state, possible_actions", possible_actions_state_examples) def test_get_possible_actions(game_instance, state, possible_actions): game_instance.set_state(state) assert game_instance.get_possible_actions() == [ game_instance.ActionSpace[name] for name in possible_actions] def test_is_terminal(game_instance): game_instance.set_state([0, 1, 2, 0, 0, 4, 0, 0, 5, 2, 1, 0, 6, 0, 3, 0]) assert not game_instance.is_finished() game_instance.set_state([3, 1, 2, 3, 1, 2, 8, 4, 5, 3, 1, 6, 1, 2, 3, 5]) assert game_instance.is_finished() def test_generate_tile(game_instance): num_empty_tiles = game_instance.size * game_instance.size - 1 for _ in range(num_empty_tiles): game_instance.generate_tile() state = game_instance.get_state() for tile in state: assert tile == 1 or tile == 2 game_instance.generate_tile() assert game_instance.get_state() == state update_score_2048_state_examples = [ # (state, score) # tiles do not move, score does not change # 0 1 2 3 0 1 2 3 # 0 0 0 4 --> 0 0 0 4 # 5 2 1 6 5 2 1 6 # 0 0 0 0 0 0 0 0 ([0, 1, 2, 3, 0, 0, 0, 4, 5, 2, 1, 6, 0, 0, 0, 0], 0), # terminal state, score does not change # 3 1 2 3 3 1 2 3 # 1 2 8 4 --> 1 2 8 4 # 5 3 1 6 5 3 1 6 # 1 2 3 5 1 2 3 5 ([3, 1, 2, 3, 1, 2, 8, 4, 5, 3, 1, 6, 1, 2, 3, 5], 0), # tiles moved, no merging, score does not change # 0 1 2 0 0 0 1 2 # 0 4 0 0 --> 0 0 0 4 # 5 2 1 0 0 5 2 1 # 6 0 3 0 0 0 6 3 ([0, 1, 2, 0, 0, 4, 0, 0, 5, 2, 1, 0, 6, 0, 3, 0], 0), # tiles moved and merged # 0 0 0 0 0 0 0 0 # 0 1 0 1 --> 0 0 0 2 # 0 0 0 0 0 0 0 0 # 0 0 0 0 0 0 0 0 ([0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], 4), # tiles moved and merged # 0 1 2 2 0 0 1 3 # 0 4 0 4 --> 0 0 0 5 # 2 2 1 0 0 0 3 1 # 3 0 3 7 0 0 4 7 ([0, 1, 2, 2, 0, 4, 0, 4, 2, 2, 1, 0, 3, 0, 3, 7], 32 + 16 + 8 + 8), # double merges # 1 1 2 2 0 0 2 3 # 0 5 4 4 --> 0 0 5 5 # 2 0 2 3 0 0 3 3 # 3 3 3 3 0 0 4 4 ([1, 1, 2, 2, 0, 5, 4, 4, 2, 0, 2, 3, 3, 3, 3, 3], 32 + 16 + 16 + 8 + 8 + 4), ] @pytest.mark.parametrize("state, score", update_score_2048_state_examples) def test_score_2048(game_instance, state, score): game_instance.set_state(state) game_instance.set_value() game_instance.accept(game_instance.ActionSpace.RIGHT) assert game_instance.get_value() == score update_score_threes_state_examples = [ # (state, score) # state with only minimal tiles # 0 1 0 0 # 0 0 0 0 # 0 0 1 0 # 0 0 0 0 ([0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0], 0), # non terminal state # 0 1 2 0 # 0 4 0 0 # 5 2 1 0 # 6 0 3 0 ([0, 1, 2, 0, 0, 4, 0, 0, 5, 2, 1, 0, 6, 0, 3, 0], 2 + 26 + 80 + 2 + 242 + 8), # terminal state # 3 1 2 3 # 1 2 8 4 # 5 3 1 6 # 1 2 3 5 ([3, 1, 2, 3, 1, 2, 8, 4, 5, 3, 1, 6, 1, 2, 3, 5], 8 + 2 + 8 + 2 + 2186 + 26 + 80 + 8 + 242 + 2 + 8 + 80), ] @pytest.mark.parametrize("state, score", update_score_threes_state_examples) def test_score_threes(state, score): game_instance = Game(state=state, scoring="threes") game_instance.update_score() assert game_instance.get_value() == score ```
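As a cross-check of the index arithmetic in the `rotate` helper above, the same counter-clockwise rotation can be written with `numpy.rot90`; this is an illustrative sketch, not part of the test suite:

```python
import numpy as np

def rotate_np(state, size, num_rotations=1):
    # np.rot90 rotates counter clockwise, matching rotate() above
    grid = np.array(state).reshape(size, size)
    return list(np.rot90(grid, k=num_rotations).flatten())

assert rotate_np([1, 2, 3, 4], 2) == [2, 4, 1, 3]  # same pair as in the parametrized cases
```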
{ "source": "0xb0b/problems", "score": 3 }
#### File: problems/Kattis_Paradox_test/Kattis_Paradox_B.py
```python
import math


def cut_evenly(weights, threshold):
    min_weight = min(weights)
    weights = [w / min_weight for w in weights]
    intervals = [None] * len(weights)
    base_cuts = 0
    for i, weight in enumerate(weights):
        if weight > 1:
            a, b = weight * threshold, weight / threshold
            intervals[i] = (a, b)
            for coefficient in range(1, math.ceil(1 / (b - a)) + 1):
                # scale the original interval each time, instead of compounding a and b
                # across iterations
                ca, cb = coefficient * a, coefficient * b
                min_integer = math.ceil(ca)
                if min_integer == int(ca):
                    min_integer += 1
                if min_integer < cb:
                    base_cuts = max(base_cuts, coefficient - 1)
                    break
    while True:
        coefficient = base_cuts + 1
        min_total_cuts = 0
        for interval in intervals:
            if interval is None:
                min_total_cuts += base_cuts
            else:
                a, b = coefficient * interval[0], coefficient * interval[1]
                min_integer = math.ceil(a)
                if min_integer == int(a):
                    min_integer += 1
                if min_integer < b:
                    min_total_cuts += min_integer - 1
                else:
                    break
        else:
            break
        base_cuts += 1
    return min_total_cuts


if __name__ == "__main__":
    threshold, _ = input().strip().split(" ")
    threshold = float(threshold)
    veggies = [int(weight) for weight in input().strip().split(" ")]
    min_cuts = cut_evenly(veggies, threshold)
    print(min_cuts)
```
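An illustrative call with made-up values rather than Kattis judge input (when run as a script the solver reads the threshold and the weights from stdin):

```python
# Two veggies with relative weights 1:2 and an evenness threshold of 0.9;
# prints the minimum number of cuts cut_evenly finds.
print(cut_evenly([300, 600], 0.9))
```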
{ "source": "0xB0C5/SMMPix", "score": 3 }
#### File: 0xB0C5/SMMPix/dither.py
```python
from PIL import Image
import sys
import random

color_data = '''252 4 21
191 6 20
251 241 209
178 137 90
247 255 0
251 195 13
7 255 6
9 199 7
8 255 243
11 9 249
191 108 255
149 5 195
250 193 253
191 4 152
192 190 192
0 0 0
255 255 255'''

palette = map(lambda x: tuple(map(int, x.split(' '))), color_data.split('\n'))


def closest_palette_color(in_color):
    return palette[closest_palette_index(in_color)]


def closest_palette_index((r, g, b)):
    # larger than any real distance^2
    min_square_dist = float('inf')
    index = None
    for i, (r2, g2, b2) in enumerate(palette):
        dr = r - r2
        dg = g - g2
        db = b - b2
        square_dist = dr*dr + dg*dg + db*db
        if square_dist < min_square_dist:
            min_square_dist = square_dist
            index = i
    return index


def add_to_pixel(pixels, (x, y), scale, values):
    if y < 0 or y >= len(pixels):
        return
    if x < 0 or x >= len(pixels[0]):
        return
    r, g, b = pixels[y][x]
    r += scale * values[0]
    g += scale * values[1]
    b += scale * values[2]
    pixels[y][x] = (r, g, b)


def dither(pixels, image):
    max_dist = 128
    for y in xrange(len(pixels)):
        for x in xrange(len(pixels[0])):
            old_pixel = pixels[y][x]
            og_pixel = image.getpixel((x, y))[:3]
            diffs = tuple(a-b for a, b in zip(old_pixel, og_pixel))
            dist = sum(d*d for d in diffs) ** 0.5
            if dist > max_dist:
                old_pixel = tuple(a+d*max_dist/dist for (a, d) in zip(og_pixel, diffs))
            new_pixel = closest_palette_color(old_pixel)
            pixels[y][x] = new_pixel
            quant_error = tuple(a-b for a, b in zip(old_pixel, new_pixel))
            add_to_pixel(pixels, (x+1, y), 7.0/16.0, quant_error)
            add_to_pixel(pixels, (x-1, y+1), 3.0/16.0, quant_error)
            add_to_pixel(pixels, (x, y+1), 5.0/16.0, quant_error)
            add_to_pixel(pixels, (x+1, y+1), 1.0/16.0, quant_error)


def map_to_nearest(pixels):
    for y in xrange(len(pixels)):
        for x in xrange(len(pixels[0])):
            pixels[y][x] = closest_palette_color(pixels[y][x])
    return pixels


def load_palette_indices(filename):
    image = Image.open(filename)
    pixels = [[image.getpixel((x, y))[:3] for x in xrange(image.width)] for y in xrange(image.height)]
    dither(pixels, image)
    #map_to_nearest(pixels)
    image_palette_indices = [[0 for x in xrange(image.width)] for y in xrange(image.height)]
    for y in xrange(image.height):
        for x in xrange(image.width):
            image_palette_indices[y][x] = closest_palette_index(pixels[y][x])
            image.putpixel((x, y), tuple(int(v) for v in pixels[y][x]))
    parts = filename.split('.')
    extension = parts[-1]
    name = ''.join(parts[:-1])
    image.save(name + '_dithered.' + extension, 'png')
    return image_palette_indices
```
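The 7/16, 3/16, 5/16 and 1/16 weights above are the classic Floyd-Steinberg error-diffusion kernel. A minimal usage sketch (Python 2, like the file itself; the input filename is a placeholder): `load_palette_indices` saves a `*_dithered.png` preview next to the input and returns the per-pixel palette indices:

```python
# Hypothetical input file; any RGB(A) image PIL can open works.
indices = load_palette_indices('level.png')
print('%d rows x %d cols' % (len(indices), len(indices[0])))
```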
{ "source": "0xbadc0ffe/AutoProdigit", "score": 3 }
#### File: 0xbadc0ffe/AutoProdigit/PROdigit.py
```python
import requests
import os
from datetime import date, datetime, timedelta
from getpass import getpass
from time import sleep
import json
from urllib.parse import urlencode
import re

if os.name == "nt":
    CLEAR_STR = "cls"
else:
    CLEAR_STR = "clear"


def clear():
    os.system(CLEAR_STR)


def add_days(date_str, delta):
    date_str = get_date(date_str) + timedelta(days=delta)
    return date_str.strftime("%d/%m/%Y")


def get_date(date_str):
    return date(*[int(d) for d in date_str.split("/")][::-1])


def makequery(personal_data, booking_data, click, iddoc):
    pd = personal_data
    bd = booking_data
    sched = bd['hours']
    name = personal_data["name"]
    surname = personal_data["surname"]
    CF = personal_data["CF"]
    query_form = {
        "__Click": f"{click}",
        "%%Surrogate_codiceedificio": "1",
        "codiceedificio": f"{bd['building']}",
        "%%Surrogate_aula": "1",
        "aula": f"{bd['classroom']} -- {bd['siram']}",
        "%%Surrogate_selezsettimana": "1",
        "selezsettimana": f"{bd['week']}",
        "%%Surrogate_dalleore1": "1",
        "dalleore1": f"{sched['lun'][0]}",
        "%%Surrogate_alleore1": "1",
        "alleore1": f"{sched['lun'][1]}",
        "%%Surrogate_dalleore2": "1",
        "dalleore2": f"{sched['mar'][0]}",
        "%%Surrogate_alleore2": "1",
        "alleore2": f"{sched['mar'][1]}",
        "%%Surrogate_dalleore3": "1",
        "dalleore3": f"{sched['mer'][0]}",
        "%%Surrogate_alleore3": "1",
        "alleore3": f"{sched['mer'][1]}",
        "%%Surrogate_dalleore4": "1",
        "dalleore4": f"{sched['gio'][0]}",
        "%%Surrogate_alleore4": "1",
        "alleore4": f"{sched['gio'][1]}",
        "%%Surrogate_dalleore5": "1",
        "dalleore5": f"{sched['ven'][0]}",
        "%%Surrogate_alleore5": "1",
        "alleore5": f"{sched['ven'][1]}",
        "%%Surrogate_dalleore6": "1",
        "dalleore6": f"{sched['sab'][0]}",
        "%%Surrogate_alleore6": "1",
        "alleore6": f"{sched['sab'][1]}",
        "%%Surrogate_dichiarazione": "1",
        "dichiarazione": ":",
        "database": "prenotazioni/prenotaaule.nsf",
        "ruolodomino": "$$WebClient",
        "utente": f"{pd['matricola']}",
        "form": "prenotaposto-in-aula",
        "ruolo": "studente",
        "iddoc": f"{iddoc}",
        "cancella": "",
        "recorddeleted": "",
        "SaveOptions": "0",
        "matricola": f"{pd['matricola']}",
        "codicefiscale": f"{pd['CF']}",
        #"datanasc": "",
        "numerobadge": "",
        "corsodistudio": "",
        "cognome": f"{surname}",
        "nome": f"{name}",
        "codicecorso": "",
        "email": f"{pd['email']}",
        "facolta": "",
        "nuovo_documento": "0",
        "fila": "",
        "posto": "",
        "seriale": "",
        "codicesiram": f"{bd['siram']}",
        "webdb": "/prenotazioni/prenotaaule.nsf/",
        "Message": "",
        "cancellato": "NO",
        "flag": "0",
        "controllomatricole": f"{pd['range-mat']}#{bd['week']}",
        "numerosettimane": "",
        "appo": f"{bd['week']}#",
        "directoryaule": "prenotazioni/prenotaaule.nsf",
        "directory": "prenotazioni",
        "servername": "prodigit.uniroma1.it",
        "appo22": "",
        "systemreaders": "[admin]",
        "userreaders": f"uid={pd['matricola']}/ou=students/ou=users/dc=uniroma1/dc=it",
        "prenotaappo": "SI",
        "controllomatr": f"{bd['week']}#",
        "indirizzo": f"{bd['addr']}",
        "ubicazione": f"{bd['street-addr']}",
        "data1": f"{bd['week']}",
        "data2": f"{add_days(bd['week'],1)}",
        "data3": f"{add_days(bd['week'],2)}",
        "data4": f"{add_days(bd['week'],3)}",
        "data5": f"{add_days(bd['week'],4)}",
        "data6": f"{add_days(bd['week'],5)}",
        "$$HTMLFrontMatter": "<!DOCTYPE html>",
        "$$HTMLTagAttributes": "lang=\"it\"",
        "httpcookie": "1"
    }
    res = urlencode(query_form)
    return res


# Returns next weekday "day" after the date "date". day=0 => monday, day=6 => sunday
def next_weekday(date, day=0):
    return date + timedelta(days=(day-date.weekday()+7)%7)


# format some fields of a booking_data dictionary
def _format_bd(data):
    global siram_codes, addresses, classrooms
    try:
        if data['classroom'] not in classrooms[data['building']]:
            print(f"\nError: unlisted classroom \"{data['classroom']}\" in {data['building']}, be sure to use the EXACT same name used in Prodigit")
            input("\nPress Enter to exit")
            close()
        else:
            siram = siram_codes[data["building"]+"#"+data["classroom"]]
    except KeyError:
        print(f"\nError: unlisted building code \"{data['building']}\" in {data['building']}, be sure to use the EXACT same name used in Prodigit")
        input("\nPress Enter to exit")
        close()

    days = set(["lun", "mar", "mer", "gio", "ven", "sab"])  # TODO: MON TUE WED THU FRI SAT ?
    for day in data["hours"]:
        days -= set([day])
        # if hours are incorrectly set, this resets them to the default. The last
        # condition in each if is there to catch values like "10:--" and similar
        from_h = data["hours"][day][0]
        to_h = data["hours"][day][1]
        if from_h == "" or from_h == ":" or (not ":" in from_h) or ("--" in from_h):
            data["hours"][day][0] = "--:--"
            data["hours"][day][1] = "--:--"
            continue
        if to_h == "" or to_h == ":" or (not ":" in to_h) or ("--" in to_h):
            data["hours"][day][0] = "--:--"
            data["hours"][day][1] = "--:--"

    # completing remaining days if not present.
    # TODO: ordering the data[hours] entries by day? it is not needed but I will consider
    for day in list(days):
        data["hours"][day] = ["--:--", "--:--"]

    # Set Siram code
    data["siram"] = siram
    # Set the week data to next monday
    today = date.today()
    data["week"] = next_weekday(today).strftime("%d/%m/%Y")
    # Set the building complex name and street-address if present on the buildings-info.json list
    data["addr"] = addresses[data["building"]]["addr"]
    data["street-addr"] = addresses[data["building"]]["street-addr"]


# format some fields of a personal_data dictionary
def _format_pd(data):
    if data["range-mat"] == "":
        mat = data["matricola"]
        data["range-mat"] = str(round(float("0."+mat[-2:]))*50)+"-"+str(round(float("0."+mat[-2:]))*50+49)
    if data["email"] == "":
        # address domain assumed (redacted in the source); surname.matricola student pattern
        data["email"] = f"{data['surname'].lower()}.{data['matricola']}@studenti.uniroma1.it"


def close(timesl=1):
    # close the program
    os.system(CLEAR_STR)
    print("\n\n\n Bye ,(è >è)/\n\n\n")
    sleep(timesl)
    os.system(CLEAR_STR)
    exit()


def void_req(s, personal_data, iddoc):
    book = {
        "classroom": "AULA A3",
        "building": "RM102",
        "week": "25/10/2021",
        "siram": "RM102-E01PR1L008",
        "hours": {
            "lun": ["--:--", "--:--"],
            "mar": ["--:--", "--:--"],
            "mer": ["--:--", "--:--"],
            "gio": ["--:--", "--:--"],
            "ven": ["--:--", "--:--"],
            "sab": ["--:--", "--:--"]
        },
        "addr": "",
        "street-addr": ""
    }
    r = s.post(book_url, data=makequery(personal_data, book, click="$Refresh", iddoc=iddoc), timeout=5)
    return r


# Retrieves some session and user data
def get_data(s):
    try:
        r = s.get(book_url, timeout=5)
        #print(r.text)
        #input()
    except requests.exceptions.RequestException as e:
        print(f"\nError in connection with Prodigit\n")
        print(e)

    click_mark = "\_doClick\(\'[^$][^\']+\'\,"
    m = re.search(click_mark, r.text)
    if m is not None:
        click = m.group(0)[10:-2]
        #print(f"Click: {click}")
        #click = click[:-4]+"1EEE"
        #print(f"Click: {click}")
    else:
        print(f"\nError in retrieving data from Prodigit\n")
        input("\n\nPress Enter to exit\n\n")
        close()

    cf_mark = "<input name=\"codicefiscale\" type=\"hidden\" value=\"[^\"]+"
    m = re.search(cf_mark, r.text)
    if m is not None:
        cf = m.group(0)[49:]
        #print(f"CF: {cf}")
    else:
        print(f"\nError in retrieving data from Prodigit\n")
        if personal_data["CF"] == "":
            input("\n\nPress Enter to exit\n\n")
            close()

    iddoc_mark = "<input name=\"iddoc\" type=\"hidden\" value=\"[^\"]+"
    m = re.search(iddoc_mark, r.text)
    if m is not None:
        iddoc = m.group(0)[41:]
        #print(f"iddoc: {iddoc}")
    else:
        print(f"\nError in retrieving data from Prodigit\n")
        input("\n\nPress Enter to exit\n\n")
        close()

    surname_mark = "<input name=\"cognome\" type=\"hidden\" value=\"[^\"]+"
    m = re.search(surname_mark, r.text)
    if m is not None:
        surname = m.group(0)[43:]
        #print(f"Surname: {surname}")
    else:
        print(f"\nError in retrieving data from Prodigit\n")
        input("\n\nPress Enter to exit\n\n")
        close()

    name_mark = "<input name=\"nome\" type=\"hidden\" value=\"[^\"]+"
    m = re.search(name_mark, r.text)
    if m is not None:
        name = m.group(0)[40:]
        #print(f"Name: {name}")
    else:
        print(f"\nError in retrieving data from Prodigit\n")
        input("\n\nPress Enter to exit\n\n")
        close()

    per_data = {
        "CF": cf,
        "name": name,
        "surname": surname,
    }
    _format_pd2(personal_data, per_data)
    r = void_req(s, personal_data, iddoc)

    click_mark = "\_doClick\(\'[^$][^\']+\'\,"
    m = re.search(click_mark, r.text)
    if m is not None:
        click = m.group(0)[10:-2]
        #print(f"\nClick: {click}")
    else:
        print(f"\nError in retrieving data from Prodigit\n")
        input("\n\nPress Enter to exit\n\n")
        close()

    active_mark = "Le prenotazioni delle aule per le lezioni non sono attive"
    if active_mark in r.text:
        active = False
        #print(r.text)
        #input()
    else:
        active = True

    data = {
        "click": click,
        "CF": cf,
        "iddoc": iddoc,
        "active": active,
        "name": name,
        "surname": surname,
    }
    return data


# format some fields of a personal_data dictionary
def _format_pd2(personal_data, s_data):
    mat = personal_data["matricola"]
    personal_data["range-mat"] = str(round(float("0."+mat[-2:]))*50)+"-"+str(round(float("0."+mat[-2:]))*50+49)
    # address domain assumed (redacted in the source); surname.matricola student pattern
    personal_data["email"] = f"{s_data['surname'].lower()}.{personal_data['matricola']}@studenti.uniroma1.it"
    personal_data["surname"] = s_data["surname"]
    personal_data["name"] = s_data["name"]
    personal_data["CF"] = s_data["CF"]


booking_dict = {
    "classroom": "",
    "building": "",
    "hours": {
        "lun": ["--:--", "--:--"],
        "mar": ["--:--", "--:--"],
        "mer": ["--:--", "--:--"],
        "gio": ["--:--", "--:--"],
        "ven": ["--:--", "--:--"],
        "sab": ["--:--", "--:--"]
    }
}


if __name__ == "__main__":
    clear()
    json_file_name = "config.json"
    buildings_info_json = "buildings-info.json"
    SLEEP_TIME = 0.5

    with open(json_file_name, "r") as jfile:
        config_data = json.load(jfile)
    with open(buildings_info_json, "r") as jfile:
        buildings_info = json.load(jfile)

    personal_data = config_data["personal_data"]
    bookings = config_data["booking_list"]
    siram_codes = buildings_info["siram_codes"]
    addresses = buildings_info["addresses"]
    class_list = buildings_info["classrooms"]
    buildings_list = buildings_info["buildings"]

    classrooms = {}
    for key in siram_codes:
        classrooms[key.split("#")[0]] = []
    for key in siram_codes:
        classrooms[key.split("#")[0]].append(key.split("#")[1])

    url = "https://prodigit.uniroma1.it"
    book_url = "https://prodigit.uniroma1.it/prenotazioni/prenotaaule.nsf/prenotaposto-in-aula?"

    # User Agent
    user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'
    headers = {'User-Agent': user_agent}

    login_data = {
        "Username": personal_data["matricola"],
        "Password": personal_data["password"]
    }

    user_not_setted = login_data['Username'] == ""
    if user_not_setted:
        login_data['Username'] = input("\nMatricola: ").strip()
    else:
        print(f"\nMatricola: {login_data['Username']}")
    pass_not_setted = login_data['Password'] == ""
    if pass_not_setted:
        login_data["Password"] = getpass("Password: ")
        #login_data["Password"] = input("Password: ")  # if you prefer to not hide password entry
    clear()

    with requests.Session() as s:
        s.headers.update(headers)

        # Login
        try:
            clear()
            r = s.post(url+"/names.nsf?Login", data=login_data, timeout=5)
            #print(r.text)
            if "Autenticazione non effettuata" in r.text:
                print("\nAccess Failed\n")
                input("\n\nPress Enter to exit\n\n")
                close()
            else:
                print("\nLogged in!")
                input("\n\nPress Enter to start booking\n\n")
        except requests.exceptions.RequestException as e:
            print(f"\nError in Login\n")
            print(e)

        # Retrieving some session and user data
        s_data = get_data(s)

        # format data
        _format_pd2(personal_data, s_data)

        # Checking booking availability. Not available for now since not stable
        '''
        if not s_data["active"]:
            clear()
            print("\n\n ,(t.t),(t.t),(t.t),(t.t),(t.t),(t.t),\n ,(t.t), ,(t.t),\n ,(t.t), Booking not available ,(t.t),\n ,(t.t), ,(t.t),\n ,(t.t),(t.t),(t.t),(t.t),(t.t),(t.t),")
            #print("\nBooking not available ,(t.t),")
            input("\n\nPress Enter to exit\n\n")
            close()
        '''

        # Bookings
        for booking_data in bookings:
            try:
                if booking_data["classroom"] == "" or booking_data["building"] == "":
                    continue
                clear()
                _format_bd(booking_data)
                r = s.post(book_url, data=makequery(personal_data, booking_data, click=s_data["click"], iddoc=s_data["iddoc"]), timeout=5)
                #print(makequery(personal_data, booking_data, click=s_data["click"], iddoc=s_data["iddoc"]))
                #print("\n\n\n\n")
                #print(r.text)
                #input()
                if "PRENOTAZIONI EFFETTUATE" in r.text:
                    print(f"\nReservation successfully made for {booking_data['classroom']} at {booking_data['building']}\n")
                elif "Sovrapposizione in data" in r.text:
                    print(f"\nReservation already (or partially) present for {booking_data['classroom']} at {booking_data['building']}\n")
                else:
                    print(f"\nError in reservation for {booking_data['classroom']} at {booking_data['building']}\n")
                sleep(SLEEP_TIME)
            except requests.exceptions.RequestException as e:
                print(f"\nError in reservation\n")
                print(e)

    input("\n\nPress Enter to exit\n\n")
    close()
```
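As a quick check of the week arithmetic above (an illustrative sketch with an arbitrary date): `next_weekday` always lands on the upcoming Monday, or today when today is already Monday, which is exactly the `week` value `_format_bd` writes into each booking:

```python
from datetime import date

d = date(2021, 10, 20)                       # a Wednesday
assert next_weekday(d).weekday() == 0        # Monday
print(next_weekday(d).strftime("%d/%m/%Y"))  # 25/10/2021, the week used in void_req's sample booking
```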
{ "source": "0xbadc0ffe/BruteAttitude", "score": 3 }
#### File: 0xbadc0ffe/BruteAttitude/testscript.py
```python
import BruteAttitude as BF
import crypt
import hashlib


# Test function, verify if crypt.crypt(guess) == hash, where crypt is the DES based hashing function
def test(guess, hash):
    salt = hash[:2]
    hguess = crypt.crypt(guess, salt)
    print(f"[ {hash} ] {guess} ", end="\r")
    if hguess == hash:
        print(f"[ {hash} ] {guess} ")
        return True
    return False


# Test if the md5 of guess+salt is equal to the hash
def test2(guess, salt, hash):
    print(f"[ {hash} ] {guess} ", end="\r")
    if hashlib.md5((guess + salt).encode()).hexdigest() == hash:
        print(f"[ {hash} ] {guess} ")
        return True
    return False


# Random test function
def test3(guess):
    if len(guess) == 5:
        if guess.startswith("lol"):
            if guess.endswith("8"):
                print(guess)
                return True
    return False


dictname = "10-million-password-list-top-1000000.txt"
#charlist = "abcdefghijklmnopqrstuvwxyz0123456789@_-#"
charlist = "abcdefghijklmnopqrstuvwxyz0123456789"
#charlist = "abcdefghijklmnopqrstuvwxyz"

iterset = []
with open(dictname) as file:
    iterset = [guess.strip() for guess in file.readlines()]

psw = "lol"
#hash = "7azfT5tIdyh0I"
hash = crypt.crypt(psw, "ea")
lam = lambda x: test(x, hash)

print(f"### Dictionary attack on {hash}")
input()
if BF.brute_attitude(lam, mode="dictionary", filename=dictname, iterset=iterset):
    pass
else:
    print(f"Failed to recover password from [ {hash} ]")

print(f"\n### Brainless Bruteforce on {hash}")
input()
if BF.brute_attitude(lam, mode="brainless", dim=[1,4], charlist=charlist):
    pass
else:
    print(f"Failed to recover password from [ {hash} ]")

print(f"\n### Masked Dictionary attack on {hash} with default mask")
input()
if BF.brute_attitude(lam, mode="masked-dictionary", filename=dictname, iterset=iterset):  #mask="$iter$iter####")
    pass
else:
    print(f"Failed to recover password from [ {hash} ]")

psw = "lollol"
hash = crypt.crypt(psw, "ea")
mask = "$iter1$iter2"
print(f"\n### Multi Dictionary Mask attack {hash} with mask {mask}")
input()
iterset_dict = {
    "$iter1": iterset[:100000],  # Only the first 100000 entries of the dictionary
    "$iter2": iterset[:20]}      # Only the first 20 entries of the dictionary
filename_dict = {
    "$iter1": dictname,
    "$iter2": dictname}
#if BF.brute_attitude(lam, mode="MDM", filename_dict=filename_dict, mask=mask):
if BF.brute_attitude(lam, mode="MDM", iterset_dict=iterset_dict, mask=mask):
    pass
else:
    print(f"Failed to recover password from [ {hash} ]")


#######

hash = 'f2b31b3a7a7c41093321d0c98c37f5ad'
salt = '<PASSWORD>'
lam2 = lambda x: test2(x, salt, hash)

print(f"\n### Dictionary attack on {hash}")
input()
if BF.brute_attitude(lam2, mode="dictionary", filename=dictname, iterset=iterset):
    pass
else:
    print(f"Failed to recover password from [ {hash} ]")

print(f"\n### Brainless Bruteforce on {hash}")
input()
if BF.brute_attitude(lam2, mode="brainless", dim=[1,6], charlist="pabcdefghijklmnoqrstuvwxyz"):
    pass
else:
    print(f"Failed to recover password from [ {hash} ]")


##########

print(f"\n### Printing all 4 digits numbers")
input()
BF.brute_attitude(print, mode="brainless", dim=[1,5], charlist="0123456789")

print(f"\n### Test3")
input()
BF.brute_attitude(test3, mode="D", filename=dictname)

print()
```
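In the same spirit, a sketch of one more custom test callback, here against a SHA-256 digest; `brute_attitude` is invoked exactly as in the examples above, only the verification function changes (the target string is made up):

```python
import hashlib
import BruteAttitude as BF

target = hashlib.sha256(b"ab1").hexdigest()  # made-up demo target

def test_sha256(guess):
    return hashlib.sha256(guess.encode()).hexdigest() == target

BF.brute_attitude(test_sha256, mode="brainless", dim=[1, 4],
                  charlist="abcdefghijklmnopqrstuvwxyz0123456789")
```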
{ "source": "0xbadc0ffe/DeepChaos", "score": 2 }
#### File: DeepChaos/models/NNmodel.py
```python
import torch
from torch.functional import Tensor
import torch.nn as nn
import torch.nn.functional as F
from utils.algorithms import SLE, FLE
import numpy as np


def build_ESN(H=200, d=3, lambda_coeff=0.4, dym_sys=3, sigma_in=0.15, activation="Tanh", output_size=3, device="cpu"):
    model = ESN()
    model.build_model(H, d, lambda_coeff, dym_sys, sigma_in, activation, output_size=output_size, device=device)
    return model


class ESN(nn.Module):
    def __init__(
        self,
        Win: torch.Tensor = None,
        W: torch.Tensor = None,
        activation: str = "Tanh",
        output_size: int = 3,
        h_0: torch.Tensor = None
    ) -> None:
        super().__init__()
        self.Win = Win
        self.W = W
        self.activation = activation
        self.h = h_0
        if W is None:
            return
        if activation == "LeakyReLU":
            self.act = torch.nn.LeakyReLU()
        elif activation == "Tanh":
            self.act = torch.nn.Tanh()
        elif activation == "ELU":
            self.act = torch.nn.ELU(alpha=1.0, inplace=False)
        elif activation == "ModTanh":
            self.mod = nn.Linear(W.shape[0], 1)
            self.act = torch.nn.Tanh()
        elif activation == "PrModTanh":
            self.mod = nn.Linear(W.shape[0], 1)
            self.act = torch.nn.Tanh()
        elif activation == "ConvModTanh1" or activation == "ConvModTanh2":
            self.mod2 = nn.Conv2d(in_channels=1, out_channels=1, kernel_size=(W.shape[0], 1))
            self.mod3 = nn.Linear(W.shape[0], 1)
            self.act = torch.nn.Tanh()
        else:
            # Default activation function
            self.act = torch.nn.Tanh()
        self.fco = nn.Linear(W.shape[0], output_size)

    def forward(self, u: torch.Tensor, h_i: torch.Tensor) -> torch.Tensor:
        #x = self.fci(u)
        xu = torch.einsum("ik, k -> i", self.Win, u)
        #x, h_o = self.resvoir(x, h_i)
        x = xu + torch.einsum("ij, j -> i", self.W, h_i)
        #h_o = torch.tanh(x)    # x(n+1) = tanh(Win*u(n) + W*x(n))
        h_o = self.act(x)
        if self.activation == "ModTanh":
            h_o = (torch.tanh(self.mod(h_o))+1)*h_o
        elif self.activation == "PrModTanh":
            pr = torch.tanh(torch.einsum("ij, j -> i", self.W, h_o) + torch.einsum("ik, k -> i", self.Win, self.fco(h_o)))
            h_o = (torch.tanh(self.mod(pr))+1)*h_o
        elif self.activation == "ConvModTanh1":
            h_conv = torch.einsum("ij, j -> i", self.W, h_o)
            h_conv = torch.einsum("i, ij, k -> jk", h_o, self.W, h_conv)
            h_conv = self.mod2(torch.reshape(h_conv, (1, 1, self.W.shape[0], self.W.shape[0])))[0, 0, 0, ...]
            h_o = (torch.tanh(self.mod3(h_conv))+1)*h_o
        elif self.activation == "ConvModTanh2":
            h_conv = self.W*h_o
            h_conv = self.mod2(torch.reshape(h_conv, (1, 1, self.W.shape[0], self.W.shape[0])))[0, 0, 0, ...]
            h_o = (torch.tanh(self.mod3(h_conv))+1)*h_o
        x = self.fco(h_o)
        return x, h_o

    # like forward, but handles the hidden state internally
    def step(self, u: torch.Tensor) -> torch.Tensor:
        x, h_i = self.forward(u, self.h)
        self.h = h_i
        return x, h_i

    def init_reservoir(self, H=200, d=3, lambda_coeff=0.4, device="cpu"):
        self.H = H
        W = torch.rand([H, H])*2 - 1
        ind = np.diag_indices(W.shape[0])
        W[ind[0], ind[1]] = 0
        for i in range(W.shape[0]):
            for j in range(W.shape[1]):
                if torch.rand(1) > d/(H-1):
                    W[i, j] = 0
        cnt = 0
        for i in range(W.shape[0]):
            for j in range(W.shape[1]):
                if abs(W[i, j]) > 0:
                    cnt += 1
        self.connectivity = cnt/H
        # Forcing largest eigenvalue norm to lambda to ensure ESP
        eig = SLE(W)  #FLE(W)
        W = W*lambda_coeff/eig
        W = W.to(device)
        self.W = W

    # TODO: introduce probability for the i-th row of Win to depend on 1 output.
    def init_Win(self, H=None, dym_sys=3, sigma_in=0.15, device="cpu"):
        if H is None:
            H = self.H
        In_acc = (torch.rand([H], device=device)*2 - 1)*sigma_in
        if dym_sys == 1:
            Win = torch.ones([H, dym_sys], device=device)
        else:
            Win = torch.zeros([H, dym_sys], device=device)
            for i in range(In_acc.shape[0]):
                # high is exclusive in torch.randint, so sample over all dym_sys input columns
                Win[i, torch.randint(high=dym_sys, size=[1], device=device)] = In_acc[i]
        self.Win = Win

    def set_activation(self, activation, H=None, device="cpu"):
        if H is None:
            H = self.H
        self.activation = activation
        if activation == "LeakyReLU":
            self.act = torch.nn.LeakyReLU()
        elif activation == "Tanh":
            self.act = torch.nn.Tanh()
        elif activation == "ELU":
            self.act = torch.nn.ELU(alpha=1.0, inplace=False)
        elif activation == "ModTanh":
            self.mod = nn.Linear(H, 1, device=device)
            self.act = torch.nn.Tanh()
        elif activation == "PrModTanh":
            self.mod = nn.Linear(H, 1, device=device)
            self.act = torch.nn.Tanh()
        elif activation == "ConvModTanh1" or activation == "ConvModTanh2":
            self.mod2 = nn.Conv2d(in_channels=1, out_channels=1, kernel_size=(H, 1), device=device)
            self.mod3 = nn.Linear(H, 1, device=device)
            self.act = torch.nn.Tanh()
        else:
            # Default activation function
            self.act = torch.nn.Tanh()

    def set_fc_output(self, output_size=3, H=None, device="cpu"):
        if H is None:
            H = self.H
        self.fco = nn.Linear(H, output_size, device=device)

    def build_model(self, H=200, d=3, lambda_coeff=0.4, dym_sys=3, sigma_in=0.15, activation="Tanh", output_size=3, device="cpu"):
        self.init_reservoir(H=H, d=d, lambda_coeff=lambda_coeff, device=device)
        self.init_Win(dym_sys=dym_sys, sigma_in=sigma_in, device=device)
        self.set_activation(activation, device=device)
        self.set_fc_output(output_size, device=device)

    def rand_h0(self, range=1, H=None, device="cpu"):
        if H is None:
            H = self.H
        self.h_0 = (torch.rand([H], device=device)*2-1)*range
        return self.h_0
```

#### File: DeepChaos/utils/nn_utils.py
```python
import torch
import torch.optim as optim
import random
import numpy as np


def set_reproducibility(seed=42):
    # reproducibility stuff
    torch.manual_seed(seed)
    np.random.seed(seed)
    random.seed(seed)
    torch.cuda.manual_seed(seed)
    torch.backends.cudnn.deterministic = True  # Note that this Deterministic mode can have a performance impact
    torch.backends.cudnn.benchmark = False


def count_parameters(model: torch.nn.Module) -> int:
    """
    Counts the number of trainable parameters of a module
    :param model: model that contains the parameters to count
    :returns: the number of parameters in the model
    """
    return sum(p.numel() for p in model.parameters() if p.requires_grad)


def get_model_optimizer(model: torch.nn.Module, opt_type: str) -> torch.optim.Optimizer:
    """
    Encapsulate the creation of the model's optimizer, to ensure that we use the same optimizer everywhere
    :param model: the model that contains the parameter to optimize
    :returns: the model's optimizer
    """
    if opt_type == "Adam":
        return optim.Adam(model.parameters(), lr=0.001, weight_decay=1e-5)
    elif opt_type == "SGD":
        return optim.SGD(model.parameters(), lr=0.01, momentum=0.1, weight_decay=1e-5)
    else:
        # default
        return optim.Adam(model.parameters(), lr=0.001, weight_decay=1e-5)
```
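For orientation, a minimal usage sketch of the ESN class above; the hyperparameters are illustrative and the import path is an assumption based on the repo layout (`models/NNmodel.py`):

```python
# Hypothetical driver script; only build_ESN's documented defaults are used.
import torch
from models.NNmodel import build_ESN

esn = build_ESN(H=200, d=3, lambda_coeff=0.4, dym_sys=3,
                sigma_in=0.15, activation="Tanh", output_size=3)
h = esn.rand_h0()         # random initial hidden state in [-1, 1]
u = torch.zeros(3)        # placeholder input sample u(n)
x, h = esn.forward(u, h)  # output estimate and next hidden state
print(x.shape, h.shape)   # torch.Size([3]) torch.Size([200])
```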
{ "source": "0xbadc0ffe/Denavit-Hartenberg", "score": 3 }
#### File: 0xbadc0ffe/Denavit-Hartenberg/dhtable.py ```python import numpy as np import os import time import platform if platform.system() == 'Windows': CLEAR_STR = "cls" else: CLEAR_STR = "clear" print(platform.system()) def mat_str(mat, numspace=4 ,trunc=False, large=False): # convert matrix in a string, eventually truncating its values (or making them int with # trunc = int or trunc = "int". The option large is made to fit any value length ... but it's large ... # numspace parameter specify the number of space IN which print the value, or another spacing in large mode res = "" if isinstance(trunc, int): if numspace < trunc + 3: numspace = trunc + 4 if large: distances=[] for row in mat: for i in range(len(row)): le = len(str(row[i])) if len(distances) < len(row): distances.append(le) else: if le > distances[i]: distances[i] = le for row in mat: res += "[" for i in range(len(row)): e = row[i] if trunc == "int" or trunc == int: e = int(e) elif isinstance(trunc, int) and trunc > 0: if e - int(e) >= float('0.'+'9'*trunc): # small correction for x.9999... elements, eg e=x.999314 trunc=3 => e= x+1 e = int(e) + 1 e = truncate(e, trunc) # removing the minus if e contains only zeros if e.replace('0','') == '-.': e = e[1:] # if x.000 => x if '.'+'0'*trunc in e: e = e[: -trunc - 1] if large: res += " "*(distances[i]+ numspace - len(str(e))) + f"{e}" else: res += " "*(numspace - len(str(e))) + f"{e}" res += " "*(2) + "]\n" return res def print_mat(mat, numspace=4 ,trunc=False, large=False): # stringify the matrix and print it print(mat_str(mat, numspace=numspace, trunc=trunc, large=large)) def truncate(f, n): #Truncates/pads a float f to n decimal places without rounding s = '{}'.format(f) if 'e' in s or 'E' in s: return '{0:.{1}f}'.format(f, n) i, p, d = s.partition('.') return '.'.join([i, (d+'0'*n)[:n]]) def hom_transf_matrix( joint1, joint2 ): # compute homogeneous transformation matrix between consecutive joints frame vectors # TODO, possible only if implementing joint reference parameters (from which to derive dh parameters) return def gen_dh_tabel(joint_list): # generates the Denavit–Hartenberg parameters table from the joint obj list # TABLE ENTRIES: theta, alpha, a, d dh_table = [] for joint in joint_list: dh_line = [joint.theta, joint.alpha, joint.a, joint.d] dh_table.append(dh_line) dh_table = np.array(dh_table) return dh_table def gen_hom_matrix_from_table(index, dh_table): # generate homogeneous transformation matrix from dh_table and joint index i = index hom_mat = np.array([[np.cos(dh_table[i,0]), -np.sin(dh_table[i,0]) * np.cos(dh_table[i,1]), np.sin(dh_table[i,0]) * np.sin(dh_table[i,1]), dh_table[i,2] * np.cos(dh_table[i,0])], [np.sin(dh_table[i,0]), np.cos(dh_table[i,0]) * np.cos(dh_table[i,1]), -np.cos(dh_table[i,0]) * np.sin(dh_table[i,1]), dh_table[i,2] * np.sin(dh_table[i,0])], [0, np.sin(dh_table[i,1]), np.cos(dh_table[i,1]), dh_table[i,3]], [0, 0, 0, 1]]) return hom_mat def input_joint_list(joint_list=[]): # a simple bash interface used to take the joints parameters in input global baseframe, effectorframe, b_e_changed print("\nHi, press Enter to start ...\n") input() os.system(CLEAR_STR) #joint_list = [] while(True): ans = { "1": True, "Y": True, "y": True, "yes": True, "0": False, "N": False, "n": False, "no": False, "2": "status", "S": "status", "s": "status", "status": "status", "3": "remove", "R": "remove", "r": "remove", "remove": "remove", "4": "baseframe", "B": "baseframe", "b": "baseframe", "baseframe": "baseframe", "5": "effectorframe", "E": "effectorframe", "e": 
"effectorframe", "effectorframe": "effectorframe" } print(f"\nCurrent number of joints: {len(joint_list)}") print("\nWould you like to add a new joint? \n\n") print("1/Y/yes: yes 0/N/no: no, compute \n\n") print("2/S/status: show status 3/R/remove: remove joint\n\n") print("4/B/baseframe: change base frame 5/E/effector: change effector frame\n\n\n") inp = input() try: sw = ans[inp] if isinstance(sw, str): if sw == "status": os.system(CLEAR_STR) if len(joint_list) > 0: print_joint_list(joint_list) else: print("\nJoint list is empty ...") print("\n\n[Base Frame transformation]:") print_mat(baseframe, trunc=3) print("\n\n[Effector Frame transformation]:") print_mat(effectorframe, trunc=3) input("\n\nPress Enter to return\n\n") os.system(CLEAR_STR) continue if sw == "remove": os.system(CLEAR_STR) if len(joint_list) > 0: print_joint_list(joint_list) while True: n = input("\n\nJoint to remove: ") try: n = int(n) if n-1 < len(joint_list): break else: print("Joint not in list") except ValueError: print("Wrong Fromat") joint_list.pop(n-1) # renaming joints for i in range(len(joint_list)): joint_list[i].num = i + 1 os.system(CLEAR_STR) print("\nJoint successfully removed!") input("\n\nPress Enter to return\n\n") os.system(CLEAR_STR) continue else: print("\nJoint list is empty ...") input("\n\nPress Enter to return\n\n") os.system(CLEAR_STR) continue if sw == "baseframe": # Takes hom. transformation matrix from baseframe to starting joint in input os.system(CLEAR_STR) print("\nDescribe the homogeneous transformation matrix from base frame to the starting joint\n\n") frame_mat = [] for i in range(4): while(True): r_i = input(f"[Row {i+1}] | ") ans = input("Enter to confirm, \"r\" to repeat\n") if ans != "r": try: r_i = r_i.split() if len(r_i) != 4 : print("Enter 4 numbers e.g 1 2.4 0 0.93\n") continue frame_mat_line = [] for number in r_i: frame_mat_line.append(float(number)) frame_mat.append(frame_mat_line) break except ValueError: print("Wrong number format\n") continue baseframe = np.array(frame_mat) b_e_changed = True os.system(CLEAR_STR) print() print_mat(baseframe, trunc=3) input("\nPress Enter to return ...") os.system(CLEAR_STR) continue if sw == "effectorframe": # Takes hom. 
transformation matrix from ending joint to effector frame in input os.system(CLEAR_STR) print("\nDescribe the homogeneous transformation matrix from the ending joint to the effector frame\n\n") frame_mat = [] for i in range(4): while(True): r_i = input(f"[Row {i+1}] | ") ans = input("Enter to confirm, \"r\" to repeat\n") if ans != "r": try: r_i = r_i.split() if len(r_i) != 4 : print("Enter 4 numbers e.g 1 2.4 0 0.93\n") continue frame_mat_line = [] for number in r_i: frame_mat_line.append(float(number)) frame_mat.append(frame_mat_line) break except ValueError: print("Wrong number format\n") continue effectorframe = np.array(frame_mat) b_e_changed = True os.system(CLEAR_STR) print() print_mat(effectorframe, trunc=3) input("\nPress Enter to return ...") os.system(CLEAR_STR) continue if sw: # Addin a joint os.system(CLEAR_STR) index = len(joint_list) + 1 print(f"\nJoint n° {index}:\n") while True: try: theta = input(f"\nTheta{index} (degrees): ") theta = float(theta) break except ValueError: print("\nWrong format\n") while True: try: alpha = input(f"\nAlpha{index} (degrees): ") alpha = float(alpha) break except ValueError: print("\nWrong format\n") while True: try: a = input(f"\nA{index} (cm): ") a = float(a) break except ValueError: print("\nWrong format") while True: try: d = input(f"\nD{index} (cm): ") d = float(d) break except ValueError: print("\nWrong format\n") while True: inp = input("\n\nConfirm? 1/Y/yes: yes 0/N/no: no\n\n") try: sw = ans[inp] break except KeyError: print("\n\nPlease use only the given possible answers") continue if sw: joint = Joint(index, theta, alpha, a, d) joint_list.append(joint) os.system(CLEAR_STR) continue else: if not len(joint_list): close() else: os.system(CLEAR_STR) return joint_list except KeyError: os.system(CLEAR_STR) print("\nPlease use only the given possible answers\n") input("\n\nPress Enter to return\n\n") os.system(CLEAR_STR) continue def close(timesl=1): # close the program os.system(CLEAR_STR) print("\n\n\n Bye ,(è >è)/\n\n\n") time.sleep(timesl) os.system(CLEAR_STR) exit() def print_joint_list(joint_list): # print the joint list print("\nJoint List:\n") for j in joint_list: print() print(j) print() def compute_all(joint_list, trunc=3, large=False): # Firstly it computes all the relative frames hom. 
transformation [Ai-1->i] # Then it generate the final transformation from the starting joint frame to the last joint frame # IF a baseframe or an effectorframe are given this function also computes the Hom transformation between # base and effector frames print_joint_list(joint_list) input("\n\nPress Enter to compute all homogeneous transformation matrices\n\n") os.system(CLEAR_STR) hom_list = [] for j in joint_list: print(f"\nMatrix A{j.num - 1}->{j.num}(q{j.num})\n") print_mat(j.hom_mat, trunc=trunc, large=large) hom_list.append(j.hom_mat) input("\nPress Enter to show next ...\n") os.system(CLEAR_STR) print(f"\nTransformation Frame {0} -> Frame {len(joint_list)}:\n") hom_0_n = hom_list[0] for h in hom_list[1:]: hom_0_n = hom_0_n @ h print_mat(hom_0_n, trunc=trunc, large=large) global baseframe, effectorframe, b_e_changed if b_e_changed: input("\nPress Enter to show next ...\n") os.system(CLEAR_STR) print(f"\nTransformation base frame -> effector frame:\n") hom_b_e = baseframe @ hom_0_n @ effectorframe print_mat(hom_b_e, trunc=trunc, large=large) return hom_b_e return hom_0_n class Joint(): # This class define the joint characteristics given by the D-H parameters def __init__(self, num, theta, alpha, a, d, give_deg2rad=True): self.num = num # joint number if give_deg2rad: self.theta = np.deg2rad(theta) # angle from xi-1 and xi around zi, from degrees self.alpha = np.deg2rad(alpha) # angle from zi-1 and zi around xi, from degrees else: self.theta = theta # angle from zi-1 and zi around xi self.alpha = alpha # angle from zi-1 and zi around xi self.a = a # distance of origin of frame i-1 to origin of frame i along xi-1 self.d = d # distance of origin of frame i-1 to origin of frame i along zi-1 self.hom_mat = None # homogeneous transformation matrix from frame i-1 to frame i self.gen_hom_matrix() def gen_hom_matrix(self): # generate the homogeneous transformation matrix hom_mat = np.array([[np.cos(self.theta), -np.sin(self.theta) * np.cos(self.alpha), np.sin(self.theta) * np.sin(self.alpha), self.a * np.cos(self.theta) ], [np.sin(self.theta), np.cos(self.theta) * np.cos(self.alpha), -np.cos(self.theta) * np.sin(self.alpha), self.a * np.sin(self.theta) ], [0, np.sin(self.alpha), np.cos(self.alpha), self.d ], [0, 0, 0, 1]]) self.hom_mat = hom_mat return hom_mat def __str__(self): # string representation th = truncate(self.theta, 4) al = truncate(self.alpha, 4) a = truncate(self.a, 1) d = truncate(self.d, 1) res = f"[Joint {self.num}] Theta: {th} [rad] Alpha: {al} [rad] A: {a} [cm] D: {d} [cm]" return res ''' Another way to compute stuff # create a Joint obj joint1 = Joint(1, 90, 90, 0, 2) # print a matrix truncating at 3rd decimal print_mat(joint1.hom_mat, trunc=3) # adding joint to joint list joint_list = [joint1] # generate dh table dh_table = gen_dh_tabel(joint_list) print(dh_table) # generate and print the homogeneous transf. 
matrix from the dh table and the hom_mat field of the Joint objects print_mat(gen_hom_matrix_from_table(0, dh_table), trunc=3) ''' ###### MAIN if __name__ == "__main__": # Initializing Base and Effector frames baseframe = np.identity(4) effectorframe = np.identity(4) # b_e_changed keeps track of baseframe or effectorframe changes b_e_changed = False # generate joint list and data joint_list = input_joint_list() # compute all transformations hom_0_n = compute_all(joint_list, trunc=3) input("\nPress Enter to exit\n\n") close() ``` #### File: 0xbadc0ffe/Denavit-Hartenberg/euler_angles.py ```python from eulerangles import euler2matrix, EulerAngleConvention, matrix2euler import eulerangles import numpy as np import os import sys import platform if platform.system() == 'Windows': CLEAR_STR = "cls" else: CLEAR_STR = "clear" def cos(x, deg=True): if deg: x = np.deg2rad(x) return np.cos(x) def sin(x, deg=True): if deg: x = np.deg2rad(x) return np.sin(x) def rot(i, ang): if i =="z" or i == "Z": return np.matrix([[cos(ang),-sin(ang),0],[sin(ang),cos(ang),0],[0,0,1]]) if i =="y" or i == "Y": return np.matrix([[cos(ang),0,sin(ang)],[0,1,0],[-sin(ang),0,cos(ang)]]) if i =="x" or i == "X": return np.matrix([[1,0,0],[0,cos(ang),-sin(ang)],[0,sin(ang),cos(ang)]]) else: return np.matrix([[1,0,0],[0,1,0],[0,0,1]]) os.system(CLEAR_STR) if len(sys.argv) > 1: if sys.argv[1] == "-i": matrix = [] np.set_printoptions(precision=4, suppress=True) conv = input("\nchoose convetion [eg ZYX]: ") try: inp = input("\nrow 1 : ").split() row = [] for i in inp: row.append(float(i)) matrix.append(row) inp = input("\nrow 2 : ").split() row = [] for i in inp: row.append(float(i)) matrix.append(row) inp = input("\nrow 3 : ").split() row = [] for i in inp: row.append(float(i)) matrix.append(row) except: print("\nFailure in matrix submission") exit() #matrix = np.matrix(matrix) #print(matrix) #eulers = matrix2euler([matrix], target_axes=conv, target_intrinsic=True, target_positive_ccw=True) try: eulers = matrix2euler(matrix, target_axes=conv, target_intrinsic=True, target_positive_ccw=True) except: print("failure") print(eulers) exit() if sys.argv[1] == "-aa": r = [] try: r.append(float(input("\nr1 : "))) r.append(float(input("\nr2 : "))) r.append(float(input("\nr3 : "))) ang = float(input("\nangle (degrees) : ")) R = [ [r[0]**2*(1-cos(ang))+cos(ang), r[0]*r[1]*(1-cos(ang))-r[2]*sin(ang), r[0]*r[2]*(1-cos(ang))+r[1]*sin(ang)], [ r[0]*r[1]*(1-cos(ang))+r[2]*sin(ang),r[1]**2*(1-cos(ang))+cos(ang), r[1]*r[2]*(1-cos(ang))-r[0]*sin(ang)],[r[0]*r[2]*(1-cos(ang))-r[1]*sin(ang), r[1]*r[2]*(1-cos(ang))+r[0]*sin(ang), r[2]**2*(1-cos(ang))+cos(ang)]] print() print(np.matrix(R)) print() except: print("\nFailure in data submission") exit() np.set_printoptions(precision=4, suppress=True) conv = input("\nchoose convetion [eg ZYX]: ") eulers=[] eulers.append(float(input("\nangle 1 (degrees): "))) eulers.append(float(input("\nangle 2 (degrees): "))) eulers.append(float(input("\nangle 3 (degrees): "))) if len(sys.argv) > 1: if sys.argv[1] == "-ext": try: rotation_matrix = euler2matrix(eulers, axes=conv, extrinsic=True, positive_ccw=True) except ValueError: print("\nWrong Euler angles convetion "+conv) exit() else: R = np.matrix([[1,0,0],[0,1,0],[0,0,1]]) for i in range(3): R = R @ rot(conv[i],eulers[i]) #print(rot(conv[i],eulers[i])) rotation_matrix = R base_matrix= np.matrix([[1,0,0],[0,1,0],[0,0,1]]) print() print(base_matrix @ rotation_matrix) print() ```
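A quick sanity check of the `rot` helper above (a sketch assumed to run in the same file's scope): composing intrinsic Z-Y-X rotations with zero Y and X angles must reduce to a plain rotation about z.

```python
import numpy as np

R = rot("z", 90) @ rot("y", 0) @ rot("x", 0)
assert np.allclose(R, rot("z", 90))
print(np.round(R, 4))  # [[0 -1 0], [1 0 0], [0 0 1]]
```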
{ "source": "0xbadc0ffe/FedSimulate", "score": 2 }
#### File: 0xbadc0ffe/FedSimulate/FedSimulate.py
```python
from __future__ import print_function, division
from cProfile import label
from logging import raiseExceptions
from typing import Mapping, Union, Optional, Callable, Dict
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import os
from tqdm import tqdm, trange
from torchsummary import summary
import utils_alg
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
import platform
import models
import nn_utils
import copy
from timeit import default_timer as timer
from datetime import timedelta
import matplotlib.pyplot as plt


def clean():
    plat = platform.system()
    if plat == "Windows":
        os.system("cls")
    else:
        os.system("clear")


class FedDevice():
    def __init__(self, trainer: Union[nn_utils.Trainer, nn_utils.MFTrainer], state_dict, tag: str, pk: float, mask_weights: torch.tensor = None, nk=1):
        self.trainer = trainer
        self.state_dict = copy.deepcopy(state_dict)
        self.tag = tag  # Device label
        self.pk = pk    # probability to be picked for a training round
        self.nk = nk    # we can assign here the Device weight (e.g. number of examples/tot or some other weighing logic)
        self.mask_weights = mask_weights
        self.major_class = self.eval_major_class()

    def __str__(self):
        return f"Device {self.tag} | Rounds Completed: {self.trainer.rounds_completed}"

    def round_fit(self, model):
        acc, loss = self.trainer.round_fit_from_checkpoint(model, checkpoint=self.state_dict)
        self.state_dict = copy.deepcopy(model.state_dict())
        return acc, loss

    def load_state_dict(self, state_dict):
        self.state_dict = copy.deepcopy(state_dict)

    def set_mu(self, mu):
        self.trainer.mu = mu

    def free(self):
        self.state_dict = None

    def eval_major_class(self):
        if self.mask_weights is None or not (self.mask_weights-1).any():
            return None  # all weights are 1
        else:
            return torch.argmax(self.mask_weights)


class FedServer():
    def __init__(self, model, trainer, tag: str = "server", weights_generator: Union[Callable, str] = None):
        self.model = model
        self.state_dict = model.state_dict()
        self.tag = tag
        self.trainer = trainer
        self.updates_cnt = 0
        if weights_generator is None or weights_generator == "average":
            self.gen_method = self.dicts_avg
            self.weights_generator = "average"
        elif weights_generator == "first":
            self.gen_method = self.dicts_first
            self.weights_generator = "first"
        elif weights_generator == "top-k_avg":
            self.gen_method = self.dicts_top_k_avg
            self.weights_generator = "top-k_avg"
        else:
            self.weights_generator = "custom"
            self.gen_method = weights_generator

    def __str__(self):
        return f"Device {self.tag} | Rounds Completed: {self.updates_cnt}"

    def round_fit(self, model):
        acc, loss = self.trainer.round_fit_from_checkpoint(model, checkpoint=self.state_dict)
        self.state_dict = copy.deepcopy(model.state_dict())
        return acc, loss

    def update(self, *args):
        result = self.gen_method(*args)
        self.updates_cnt += 1
        if self.weights_generator == "custom":
            self.load_state_dict(result)

    # Takes the average of the dicts as the new server state dict.
    def dicts_avg(self, wk_list):
        if wk_list is None or len(wk_list) == 0:
            self.model.load_state_dict(self.state_dict)
            return None
        if len(wk_list) == 1:
            self.state_dict = copy.deepcopy(wk_list[0])
            self.model.load_state_dict(self.state_dict)
            return self.state_dict
        # cloning first element in state_dict
        self.state_dict = copy.deepcopy(wk_list[0])
        for key in wk_list[0]:
            tot = wk_list[0][key]
            for client_wk in wk_list[1:]:
                tot = tot + client_wk[key]
            self.state_dict[key] = tot/len(wk_list)
        # cloning result in model_dict
        self.model.load_state_dict(self.state_dict)
        return self.state_dict

    # Pick the first of the list as the new server state dict.
    # If the list is given already ordered by accuracy/loss or whatever this
    # will be like picking the most fitting trained instance.
    # ALERT: it is not advisable to use this when heterogeneous clients data is
    # involved or when the single clients trainings differ.
    # dicts_avg can be also used in this way by giving a singleton list with
    # the maximal state_dict
    def dicts_first(self, wk_list):
        if wk_list is None or len(wk_list) == 0:
            self.model.load_state_dict(self.state_dict)
            return None
        else:
            self.state_dict = copy.deepcopy(wk_list[0])
            self.model.load_state_dict(self.state_dict)
            return self.state_dict

    def dicts_top_k_avg(self, wk_dict, perform, K):
        if wk_dict is None or len(wk_dict) == 0:
            self.model.load_state_dict(self.state_dict)
            return None
        elif len(wk_dict) == 1:
            self.state_dict = copy.deepcopy(list(wk_dict.values())[0])
            self.model.load_state_dict(self.state_dict)
            return self.state_dict
        else:
            K = max(1, K)
            top_devs = {k: v for k, v in sorted(perform.items(), key=lambda item: item[1])[::-1]}
            top_devs = list(top_devs.keys())[:K]
            top_k_weights = [wk_dict[tag] for tag in top_devs]
            self.state_dict = copy.deepcopy(top_k_weights[0])
            for key in top_k_weights[0]:
                tot = top_k_weights[0][key]
                for client_wk in top_k_weights[1:]:
                    tot = tot + client_wk[key]
                self.state_dict[key] = tot/len(top_k_weights)  # len(..) can differ from K if top_devs is shorter
            self.model.load_state_dict(self.state_dict)
            return self.state_dict

    def load_state_dict(self, state_dict):
        self.model.load_state_dict(state_dict)
        self.state_dict = copy.deepcopy(state_dict)

    def test(self):
        return self.trainer.test(self.model)

    def set_mu(self, mu):
        self.trainer.mu = mu


def update_weights(devices_list, server_weights):
    for dev in devices_list:
        dev.load_state_dict(server_weights)


def update_mu(devices_list, mu):
    for dev in devices_list:
        dev.set_mu(mu)


def free_all(devices_list):
    for dev in devices_list:
        dev.free()


nn_utils.set_reproducibility()
clean()

##### Dataset
n_channels = 3
input_size_w = 32
input_size_h = 32
input_size = input_size_w*input_size_h

##### Model Hyper params

# Multi Layer Perceptron
# n_hidden = 9
#model = models.MLP(input_size, n_channels, n_hidden, models.CIFAR10_output_size)

# Convolutional Neural Network
n_features = 12
model = models.CNN(input_size, n_channels, n_features, models.CIFAR10_output_size)

##### Training Hyper params
device = torch.device("cpu")  #torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

train_dict = {
    "device": device,
    "output_dim": models.CIFAR10_output_size,  # 10
    "epochs": 1,
    "batch_size": 128,
    "batch_size_val": 1000,
    "data_transform": "RGB",
    "opt_name": "Adam",
    "lr": 0.003,
    "momentum": 0.1,
    "scheduler_bool": True,
    "gamma": 0.9,
    #"perm": nn_utils.permute_pixels,
    "mu": 0.005  # if mu=0 => FedAvg
}

model.to(device)

test_trainer = False
if test_trainer:
    #trainer = nn_utils.Trainer(model=model, train_dict=train_dict)  # Model-based trainer
    trainer = nn_utils.MFTrainer(train_dict=train_dict)              # Model-free trainer
    print(trainer.fit(model))


#### FedAVG/Prox Hyper params

emulated_devices = 200
rounds = 15
train_loner = True
pool = 20  # pool = emulated_devices => FedAvg
p_uniform = pool/emulated_devices  # uniform probability to be chosen
adaptive_mu = False
adaptive_phase = 5
mu_inc = 0.1

# Synthetic Data Heterogeneity (alpha = beta = 0 homogeneous case)
# Imbalance follows this power law : clip(exp(vals*-alpha*numb_of_classes)+beta, min=0)
alpha = 0.09  # power factor
beta = 0  #0.2  # constant factor

devices_list = []
w_generators = ["average", "first", "top-k_avg"]
weights_generator = w_generators[0]
pick_top_k = pool  # for top-k_avg

fn_list = ["uniform", "normal"]
sample_prob_fn = fn_list[0]

if sample_prob_fn == "uniform":
    sample_prob = lambda: np.random.uniform(0, 1)
    # Note: this is not so useful in a uniform device probability scenario
elif sample_prob_fn == "normal":
    from scipy.stats import norm
    norm_mean = 0.5
    sigma = 0.3/emulated_devices
    sample_prob = lambda: norm.cdf(np.random.uniform(-4, 4))  #np.random.normal(norm_mean, sigma)

# Test Sampling
#utils_alg.test_sampling(pool, emulated_devices, sample_prob)

# Using a single data loaders pair for the homogeneous case may improve the
# performances but it could also not be ideal for some specific models:
# https://stackoverflow.com/questions/60311307/how-does-one-reset-the-dataloader-in-pytorch
train_loader, test_loader, train_dataset, test_dataset = models.get_CIFARloaders(train_dict["batch_size"], train_dict["batch_size_val"], train_dict["data_transform"], ret_datasets=True)
data_loaders = (train_loader, test_loader)

# Server Device Initialization
trainer = nn_utils.MFTrainer(data_loaders=data_loaders, train_dict=train_dict)  # this is needed only for the testing phase
server = FedServer(model, trainer, tag="server", weights_generator=weights_generator)

if train_loner:
    train_dict_loner = copy.deepcopy(train_dict)
    train_dict_loner["mu"] = 0
    # Loner Device used for comparison (weights do not update with server)
    trainer = nn_utils.MFTrainer(data_loaders=data_loaders, train_dict=train_dict_loner)
    loner = FedDevice(trainer=trainer, state_dict=server.state_dict, tag="loner", pk=1)

# Initializing devices to emulate
for i in range(emulated_devices):
    # resetting state_dict is not necessary since they are gonna train after a global model update by the server
    # nn_utils.reset_model_params(model)
    # initial_state_dict = model.state_dict()

    # Note: hard_mask=True slows the Devices Initialization but is more realistic (especially when simulating few devices)
    train_loader, mask_weights = utils_alg.SIP(train_dataset, torch.arange(models.CIFAR10_output_size), train_dict["batch_size"], alpha=alpha, beta=beta, hard_mask=True)
    data_loaders = (train_loader, data_loaders[1])
    trainer = nn_utils.MFTrainer(data_loaders=data_loaders, train_dict=train_dict)
    dev = FedDevice(trainer=trainer, state_dict=None, tag=str(i), pk=p_uniform, mask_weights=mask_weights)
    devices_list.append(dev)
    print(f"Building Federation Clients (devices): {i}/{emulated_devices}", end="\r")

# Test initial accuracy
test_out, test_string = devices_list[0].trainer.test(model)
init_loss = test_out["loss_averager"](None).detach().numpy()
print("\n\n"+test_string)


# Testing FedAvg

seq_runs = 0          # counts the number of sequential model trainings (counting the loner device also)
start_time = timer()  # timer to get the total elapsed time
sampled = []          # store at each round the number of sampled devices (mean should be the pool value)
server_acc = []       # store at each round the server accuracy
mean_client_acc = []  # store at each round the mean of clients' accuracy
server_loss = []      # store at each round the server loss
best_dev = []         # store at each round the client device with best accuracy
tot_masks = torch.zeros(mask_weights.shape)  # store the sum of the weights of the different masks

# Initializing accuracy of the untrained model
server_acc.append(test_out["accuracy"])
mean_client_acc.append(test_out["accuracy"])

if train_loner:
    loner_loss = []  # store at each round the loner loss
    loner_acc = []   # store at each round the loner device accuracy
    # Initializing accuracy of the untrained model
    loner_acc.append(test_out["accuracy"])

for round in range(1, rounds+1):

    round_weights = {}
    round_sampled_devices = []

    # Sampling phase
    for dev in devices_list:
        if sample_prob() <= dev.pk:
            round_sampled_devices.append(dev)
            tot_masks += dev.mask_weights

    sampled_len = len(round_sampled_devices)
    sampled.append(sampled_len)
    update_weights(round_sampled_devices, server.state_dict)  # more efficient: update only the devices working this round

    print("\n##########################################\n")
    sampled_len = len(round_sampled_devices)
    print(f"\n\n## Round {round}/{rounds} | Selected: {sampled_len}\n")

    # Training
    sum_acc = 0
    max_acc = 0
    bdev = None  # best device tag
    client_perform = {}
    for i, dev in enumerate(round_sampled_devices):
        print(f"Training Client {i+1}/{sampled_len}:\n")
        acc, _ = dev.round_fit(server.model)
        client_perform[dev.tag] = acc
        if acc > max_acc:
            max_acc = acc
            bdev = int(dev.tag)
        sum_acc += acc
        print(str(dev) + f"/{round} | Accuracy: {acc} % | Major class: {dev.major_class} | Device hash: {nn_utils.state_hash(dev.state_dict)}\n")
        # print(f"\nDevice hash: {nn_utils.state_hash(dev.state_dict)}\n")
        print("-----------------------------\n")
        round_weights[dev.tag] = dev.state_dict
        seq_runs += 1

    if sampled_len != 0:
        mean_acc = sum_acc/sampled_len
        best_dev.append(bdev)
    else:
        if len(mean_client_acc) != 0:
            mean_acc = mean_client_acc[-1]
        else:
            mean_acc = sum_acc
    mean_client_acc.append(mean_acc)

    if train_loner:
        # Training the loner
        print(f"Training Loner device:\n")
        acc, lon_loss = loner.round_fit(server.model)
        loner_acc.append(acc)
        loner_loss.append(lon_loss.numpy())
        print(str(loner) + f"/{round} | Accuracy: {acc} % | Device hash: {nn_utils.state_hash(loner.state_dict)}\n")
        print("-----------------------------\n")
        seq_runs += 1

    # Updating server weights
    if weights_generator == "average":
        server.update(list(round_weights.values()))
    elif weights_generator == "first":
        if bdev is not None:
            server.update([round_weights[str(bdev)]])
    elif weights_generator == "top-k_avg":
        server.update(round_weights, client_perform, pick_top_k)
    else:
        raise Exception("Unknown weights generation policy")

    # Testing server
    test_out, test_string = server.test()
    server_acc.append(test_out["accuracy"])
    round_server_loss = test_out["loss_averager"](None).detach().numpy()
    server_loss.append(round_server_loss)
    print(f"\n\n** Round {round}/{rounds} completed **\n")
    print("Server " + test_string+"\n")
    print(f"Server hash: {nn_utils.state_hash(server.state_dict)}")          # must be equal
    print(f"Model hash: {nn_utils.state_hash(server.model.state_dict())}\n")  # must be equal

    # Adaptive mu
    if adaptive_mu and round % adaptive_phase == 0:
        if init_loss - round_server_loss > 0:
            update_mu(devices_list, max(0, server.trainer.mu-mu_inc))
            server.set_mu(max(0, server.trainer.mu-mu_inc))
        else:
            update_mu(devices_list, max(0, server.trainer.mu+mu_inc))
            server.set_mu(max(0, server.trainer.mu+mu_inc))

    # Free (None overwrite) the selected devices state_dict to keep memory occupancy low
    # Since every device has its own copy of state_dict we would end with high memory allocated
    free_all(round_sampled_devices)


end_time = timer()

print(f"\n\n###########################################\n")
print(f"\nDevices: {emulated_devices}  [ Alpha: {alpha} | Beta: {beta} ]")
print(f"\nAvg pool per round: {sum(sampled)/rounds}  [sample prob fn: {sample_prob_fn} | Expected pool: {pool}]")
print(f"\nFinal Mu: {server.trainer.mu} | Weights generator: {server.weights_generator}")
if weights_generator == w_generators[2]:
    print(f"Avg of top {pick_top_k} clients")
print(f"\nTraining runs: {seq_runs}  [Rounds: {rounds}]\n")
print(f"Server | Rounds completed: {rounds} | Accuracy: {test_out['accuracy']} % | Device hash: {nn_utils.state_hash(server.state_dict)}")
print(f"Clients avg | Rounds completed: {rounds} | Accuracy: {np.round(mean_client_acc[-1], 2)} % | Device hash: --- ")
if train_loner:
    print(str(loner) + f" | Accuracy: {acc} % | Device hash: {nn_utils.state_hash(loner.state_dict)}\n")
print(f"Elapsed time: {timedelta(seconds=end_time-start_time)}")
print(f"\n\nTraining Dictionary: {train_dict}")

# Sampled devices per round
plt.figure(1)
plt.plot(range(1, rounds+1), sampled, label="sampled clients")
plt.plot(range(1, rounds+1), [pool]*len(sampled), label="expected avg")
plt.title(f"Sampled clients [tot: {np.sum(sampled)}]")
plt.xlabel("round")
plt.ylabel(f"#")
plt.legend()

# Accuracy
plt.figure(2)
plt.plot(server_acc, color="red", label="server")
plt.plot(mean_client_acc, color="blue", linestyle="--", label="clients-avg")
if train_loner:
    plt.plot(loner_acc, color="green", label="loner")
plt.title(f"Accuracy")
plt.xlabel("round")
plt.ylabel(f"%")
plt.legend()

# Test Loss (Server vs loner, clients Loss is not comparable)
plt.figure(3)
plt.plot(server_loss, color="red", label="server")
if train_loner:
    plt.plot(loner_loss, color="green", label="loner")
plt.title(f"Test Loss") plt.xlabel("round") plt.ylabel(f"Loss") plt.legend() # Devices usage histogram plt.figure(4) # hist_data = [] # for dev in devices_list: # hist_data = hist_data + [int(dev.tag)]*dev.trainer.rounds_completed # plt.hist(hist_data, emulated_devices) hist_data = {} for dev in devices_list: hist_data[int(dev.tag)]=dev.trainer.rounds_completed plt.bar(hist_data.keys(), hist_data.values()) plt.title("Devices usage") plt.xlabel("Device Tag") plt.ylabel("Usage") # Best Devices plt.figure(5) plt.hist(best_dev, emulated_devices) plt.title(f"Best Devices per round") plt.xlabel("Device Tag") plt.ylabel(f"Round Winner counter") # Distribution of Data plt.figure(6) #plt.plot(tot_masks.numpy()/np.sum(sampled)) plt.bar(np.arange(len(tot_masks)), tot_masks.numpy()/np.sum(sampled)) plt.title(f"Average clients training data usage for each class") plt.xlabel("Class") plt.ylabel(f"%") major_classes = [] for dev in devices_list: if dev.major_class is not None: major_classes.append(dev.major_class) if len(major_classes)>1: plt.figure(7) plt.title("Major Classes Distribution") plt.bar(np.arange(len(major_classes)), major_classes) plt.ylabel("Class") plt.xlabel("Device Tag") plt.show() ``` #### File: 0xbadc0ffe/FedSimulate/models.py ```python from __future__ import print_function, division from typing import OrderedDict, Union, Optional, Callable, Dict, Tuple import numpy as np import torch import torch.nn as nn import torch.nn.functional as F from torchvision import datasets, transforms import ssl ssl._create_default_https_context = ssl._create_unverified_context def get_CIFARloaders(batch_size=128, batch_size_val=1000, data_transform: Union[str, bool]="RGB", ret_datasets=False) -> Tuple[torch.utils.data.DataLoader, torch.utils.data.DataLoader]: if data_transform: image_transforms_gray = transforms.Compose( [ transforms.Grayscale(num_output_channels=1), transforms.ToTensor(), transforms.Normalize(mean=(0.47,), std=(0.251,)), ] ) image_transforms_RGB = transforms.Compose( [ transforms.ToTensor(), transforms.Normalize(mean=(0.47,), std=(0.251,)), ] ) if data_transform == "RGB": image_transforms = image_transforms_RGB elif data_transform == "GRAY": image_transforms = image_transforms_gray else: image_transforms = None train_dataset = datasets.CIFAR10( "data", train=True, download=True, transform=image_transforms ) train_loader = torch.utils.data.DataLoader( train_dataset, batch_size=batch_size, shuffle=True, ) test_dataset = datasets.CIFAR10( "data", train=False, transform=image_transforms ) test_loader = torch.utils.data.DataLoader( test_dataset, batch_size=batch_size_val, shuffle=True, ) if ret_datasets: return train_loader, test_loader, train_dataset, test_dataset else: return train_loader, test_loader # Specify the number of classes in CIFAR10 CIFAR10_output_size = 10 # there are 10 classes CIFAR10_output_classes = ('plane', 'car', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck') class MLP(nn.Module): def __init__( self, input_size: int, input_channels: int, n_hidden: int, output_size: int ) -> None: """ Simple MLP model :param input_size: number of pixels in the image :param input_channels: number of color channels in the image :param n_hidden: size of the hidden dimension to use :param output_size: expected size of the output """ super().__init__() self.name= "MLP" self.network = nn.Sequential( nn.Linear(input_size * input_channels, n_hidden), nn.ReLU(), nn.Linear(n_hidden, n_hidden), nn.ReLU(), nn.Linear(n_hidden, output_size), ) def forward(self, x: torch.Tensor) -> 
torch.Tensor: """ :param x: batch of images with size [batch, 1, w, h] :returns: predictions with size [batch, output_size] """ x = x.view(x.shape[0], -1) o = self.network(x) return o class CNN(nn.Module): def __init__( self, input_size: int, input_channels: int, n_feature: int, output_size: int ) -> None: """ Simple model that uses convolutions :param input_size: number of pixels in the image :param input_channels: number of color channels in the image :param n_feature: size of the hidden dimensions to use :param output_size: expected size of the output """ super().__init__() self.name="CNN" self.n_feature = n_feature self.conv1 = nn.Conv2d( in_channels=input_channels, out_channels=n_feature, kernel_size=3 ) self.conv2 = nn.Conv2d(n_feature, n_feature, kernel_size=3) self.conv3 = nn.Conv2d(n_feature, n_feature, kernel_size=3) self.conv4 = nn.Conv2d(n_feature, n_feature, kernel_size=2) self.fc1 = nn.Linear(n_feature * 5 * 5, 10) self.fc2 = nn.Linear(10, 10) def forward(self, x: torch.Tensor, return_conv1: bool = False, return_conv2: bool = False, return_conv3: bool = False, return_conv4: bool = False ) -> torch.Tensor: """ :param x: batch of images with size [batch, 1, w, h] :param return_conv1: if True return the feature maps of the first convolution :param return_conv2: if True return the feature maps of the second convolution :param return_conv3: if True return the feature maps of the third convolution :returns: predictions with size [batch, output_size] """ x = self.conv1(x) if return_conv1: return x x = F.relu(x) x = F.max_pool2d(x, kernel_size=2) x = self.conv2(x) if return_conv2: return x x = F.relu(x) # Not so easy to keep track of shapes... right? # An useful trick while debugging is to feed the model a fixed sample batch # and print the shape at each step, just to be sure that they match your expectations. # print(x.shape) x = self.conv3(x) if return_conv3: return x x = F.relu(x) #x = F.max_pool2d(x, kernel_size=2) # comment if add conv4 x = self.conv4(x) if return_conv4: return x x = F.relu(x) x = F.max_pool2d(x, kernel_size=2) x = x.view(x.shape[0], -1) x = self.fc1(x) x = F.relu(x) x = self.fc2(x) return x def permute_pixels(images: torch.Tensor, perm: Optional[torch.Tensor]) -> torch.Tensor: """ Permutes the pixel in each image in the batch :param images: a batch of images with shape [batch, channels, w, h] :param perm: a permutation with shape [w * h] :returns: the batch of images permuted according to perm """ if perm is None: return images batch_size = images.shape[0] n_channels = images.shape[1] w = images.shape[2] h = images.shape[3] images = images.view(batch_size, n_channels, -1) images = images[..., perm] images = images.view(batch_size, n_channels, w, h) return images ```
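The `mu` knob in `train_dict` above is what separates FedProx from plain FedAvg, but the trainer that consumes it (`nn_utils.MFTrainer`) is not part of this excerpt. Below is a minimal sketch of how a FedProx-style proximal term is typically folded into a local update; `local_step` and its arguments are illustrative names, not this repo's API.

```python
import torch

def local_step(model, global_state, batch, loss_fn, optimizer, mu):
    # One local optimization step with a FedProx proximal penalty.
    # With mu = 0 this reduces to a plain FedAvg local step.
    x, y = batch
    optimizer.zero_grad()
    loss = loss_fn(model(x), y)
    prox = 0.0
    for name, param in model.named_parameters():
        # Penalize drift from the last global weights: (mu/2) * ||w - w_t||^2
        prox = prox + torch.sum((param - global_state[name].detach()) ** 2)
    (loss + (mu / 2.0) * prox).backward()
    optimizer.step()
    return loss.item()
```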
{ "source": "0xbadc0ffe/Genetic-Control", "score": 3 }
#### File: 0xbadc0ffe/Genetic-Control/my_gen_alg.py ```python import random import string class genes: def __init__(self, genes): self.genes = genes def __str__(self): return " ".join(str(gene) for gene in self.genes) class bleb: def __init__(self, genes, generation=None, fitness=None, name="generic bleb", data=None, str_type=1): self.genes = genes self.generation = generation self.fitness = fitness self.name = name self.data = data self.str_type = str_type def fit(self, fitfunc): self.fitness = fitfunc(self) # self.genes def mutate(self, mutation): new_genes = [] for gene in self.genes: gene = mutation(gene) new_genes.append(gene) self.genes = new_genes def mutate_cust(self, mutation): self.genes = mutation(self.genes) def __str__(self): if self.fitness is None: fitt = "None" # return self.name+"| Fitness:"+" "*(25-len(str(fitt)))+fitt+ "| " + " ".join(str(gene) for gene in self.genes) else: fitt = str(self.fitness) if self.str_type == 1: return self.name+"| Fitness:"+" "*(25-len(str(fitt)))+fitt+ "| " + " ".join(str(gene)[:min(8,len(str(gene)))] for gene in self.genes) elif self.str_type == 2: zstr = "["+" ".join(str(gene)[:min(8,len(str(gene)))] for gene in self.genes[0])+"]" pstr = "["+" ".join(str(gene)[:min(8,len(str(gene)))] for gene in self.genes[1])+"]" return self.name+"| Fitness:"+" "*(25-len(str(fitt)))+fitt+ "| " + zstr+", "+pstr #return self.name+"| Fitness:"+" "*(25-len(str(fitt)))+fitt+ "| "+str(len(self.genes)) #return self.name+"| Fitness:"+" "*(25-len(str(fitt)))+fitt+ "| " + " ".join(str(self.genes[key]) for key in self.genes) class population: def __init__(self, pop_number, spawn_genes, mating, selection, generation=0, st_population=None, str_type=1): self.population = [] self.generation = generation self.pop_number = pop_number self.spawn_genes = spawn_genes self.str_type=str_type if st_population is None: for i in range(self.pop_number): self.population.append(bleb(spawn_genes(), generation=self.generation, name="bleb"+" "*(8-len(hex(i)[2:])-len(str(self.generation)))+hex(i)[2:]+"or-"+str(self.generation)+" "*6, str_type=self.str_type)) else: self.population = st_population self.selection = selection self.mating = mating def fit_all(self, fitfunc): for bleb in self.population: bleb.fit(fitfunc) def evolve(self, fitfunc, mutation): self.fit_all(fitfunc) self.population = self.selection(self.population) self.mating(self.population, 2, self.generation) self.generation += 1 for bleb in self.population: #bleb.mutate(mutation) bleb.mutate_cust(mutation) bleb.generation += 1 def fill(self): to_fill = self.pop_number - len(self.population) if to_fill < 0: raise Exception("population outgrowing") elif to_fill > 0: for i in range(to_fill): self.population.append(bleb(self.spawn_genes(), generation=self.generation, name="bleb"+" "*(8-len(hex(self.pop_number-i)[2:])-len(str(self.generation)))+str(hex(self.pop_number-i)[2:])+"sp-"+str(self.generation)+" "*6, str_type=self.str_type)) # name =bleb/r{f} hex(i){s/o}-gen(gen_f_b64) def pickbest(self): return selection(self.population, 1)[0] def print(self): print("Generation: "+str(self.generation)+"\n") for b in self.population: print(str(b)) def selection(population, selected_numb): pop = population.copy() def fitness(elem): return elem.fitness # 1 #list.sort(pop, key=fitness, reverse=True) # 2 list.sort(pop, key=fitness) if selected_numb < len(pop): pop = pop[:selected_numb] return pop def mating(population, parents_num, generation=None): # generation matches the third argument passed by population.evolve() ''' if parents_num%2 !=0: parents_num = parents_num-1 parents_list = population[:parents_num] for i in range(0,len(parents_list),2): ''' pass def spawn_genes(): return [random.choice(string.printable), random.choice(string.printable)] #return genes([random.choice(string.printable), random.choice(string.printable)]) def fitfunc(bleb): # 1 #return ord(bleb.genes[0]) + ord(bleb.genes[1]) # 2 return abs(ord(bleb.genes[0]) - ord(bleb.genes[1])) def mutation(genes): new_genes = [] for gene in genes: new_genes.append(chr((ord(gene)+random.randint(-10, 10))%128)) return new_genes if __name__=="__main__": pop = population(10, spawn_genes, mating, lambda x: selection(x,5)) pop.fit_all(fitfunc) pop.print() best_fit = 1 while best_fit != 0: print("\n\n-------------------\n\n") pop.evolve(fitfunc, mutation) pop.fill() pop.fit_all(fitfunc) pop.print() print("\n") best = pop.pickbest() print(best) best_fit = best.fitness print() ```
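`mating` above is left as a stub (its body is commented out), so the demo evolves through selection and mutation alone. Here is a hypothetical single-point crossover the stub could grow into, assuming the best-first (lowest fitness first) ordering that `selection` produces:

```python
def crossover_mating(population, parents_num, generation=None):
    # Hypothetical: pair up the best members and overwrite the worst
    # members' genes with single-point crossovers of their parents.
    if parents_num % 2 != 0:
        parents_num -= 1
    for i in range(0, parents_num, 2):
        a = population[i].genes
        b = population[i + 1].genes
        cut = random.randint(1, max(1, len(a) - 1))
        population[-(i + 1)].genes = a[:cut] + b[cut:]
        population[-(i + 2)].genes = b[:cut] + a[cut:]
```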
{ "source": "0xBADCA7/guppy-proxy", "score": 2 }
#### File: guppy-proxy/guppyproxy/gui.py ```python import random from guppyproxy.reqlist import ReqBrowser, ReqListModel from guppyproxy.repeater import RepeaterWidget from guppyproxy.interceptor import InterceptorWidget from guppyproxy.decoder import DecoderWidget from guppyproxy.settings import SettingsWidget from guppyproxy.shortcuts import GuppyShortcuts from guppyproxy.macros import MacroWidget from PyQt5.QtWidgets import QWidget, QTabWidget, QVBoxLayout, QTableView from PyQt5.QtCore import Qt class GuppyWindow(QWidget): titles = ( "Guppy Proxy", ) def __init__(self, client): QWidget.__init__(self) self.client = client self.initUi() def initUi(self): self.setFocusPolicy(Qt.StrongFocus) self.shortcuts = GuppyShortcuts(self) self.tabWidget = QTabWidget() self.repeaterWidget = RepeaterWidget(self.client) self.interceptorWidget = InterceptorWidget(self.client) self.macroWidget = MacroWidget(self.client) self.historyWidget = ReqBrowser(self.client, repeater_widget=self.repeaterWidget, macro_widget=self.macroWidget, is_client_context=True, update=True) self.decoderWidget = DecoderWidget() self.settingsWidget = SettingsWidget(self.client) self.settingsWidget.datafileLoaded.connect(self.historyWidget.reset_to_scope) self.history_ind = self.tabWidget.count() self.tabWidget.addTab(self.historyWidget, "History") self.repeater_ind = self.tabWidget.count() self.tabWidget.addTab(self.repeaterWidget, "Repeater") self.interceptor_ind = self.tabWidget.count() self.tabWidget.addTab(self.interceptorWidget, "Interceptor") self.decoder_ind = self.tabWidget.count() self.tabWidget.addTab(self.decoderWidget, "Decoder") self.macro_ind = self.tabWidget.count() self.tabWidget.addTab(self.macroWidget, "Macros") self.settings_ind = self.tabWidget.count() self.tabWidget.addTab(self.settingsWidget, "Settings") self.mainLayout = QVBoxLayout(self) self.mainLayout.addWidget(self.tabWidget) self.setWindowTitle(random.choice(GuppyWindow.titles)) self.show() def show_hist_tab(self): self.tabWidget.setCurrentIndex(self.history_ind) def show_repeater_tab(self): self.tabWidget.setCurrentIndex(self.repeater_ind) def show_interceptor_tab(self): self.tabWidget.setCurrentIndex(self.interceptor_ind) def show_decoder_tab(self): self.tabWidget.setCurrentIndex(self.decoder_ind) def show_active_macro_tab(self): self.tabWidget.setCurrentIndex(self.macro_ind) self.macroWidget.show_active() def show_int_macro_tab(self): self.tabWidget.setCurrentIndex(self.macro_ind) self.macroWidget.show_int() def close(self): self.interceptorWidget.close() ``` #### File: guppy-proxy/guppyproxy/reqlist.py ```python import threading import shlex from guppyproxy.util import max_len_str, query_to_str, display_error_box, display_info_box, display_req_context, hostport, method_color, sc_color, DisableUpdates, host_color from guppyproxy.proxy import HTTPRequest, RequestContext, InvalidQuery, SocketClosed, time_to_nsecs, ProxyThread from guppyproxy.reqview import ReqViewWidget from guppyproxy.reqtree import ReqTreeView from PyQt5.QtWidgets import QWidget, QTableWidget, QTableWidgetItem, QGridLayout, QHeaderView, QAbstractItemView, QVBoxLayout, QHBoxLayout, QComboBox, QTabWidget, QPushButton, QLineEdit, QStackedLayout, QToolButton, QCheckBox, QLabel, QTableView, QMenu from PyQt5.QtCore import pyqtSlot, pyqtSignal, QObject, QVariant, Qt, QAbstractTableModel, QModelIndex, QItemSelection, QSortFilterProxyModel from itertools import groupby, count def get_field_entry(): dropdown = QComboBox() dropdown.addItem("Anywhere", "all") dropdown.addItem("Req. 
Body", "reqbody") dropdown.addItem("Rsp. Body", "rspbody") dropdown.addItem("Any Body", "body") # dropdown.addItem("WSMessage", "wsmessage") dropdown.addItem("Req. Header", "reqheader") dropdown.addItem("Rsp. Header", "rspheader") dropdown.addItem("Any Header", "header") dropdown.addItem("Method", "method") dropdown.addItem("Host", "host") dropdown.addItem("Path", "path") dropdown.addItem("URL", "url") dropdown.addItem("Status", "statuscode") dropdown.addItem("Tag", "tag") dropdown.addItem("Any Param", "param") dropdown.addItem("URL Param", "urlparam") dropdown.addItem("Post Param", "postparam") dropdown.addItem("Rsp. Cookie", "rspcookie") dropdown.addItem("Req. Cookie", "reqcookie") dropdown.addItem("Any Cookie", "cookie") # dropdown.addItem("After", "") # dropdown.addItem("Before", "") # dropdown.addItem("TimeRange", "") # dropdown.addItem("Id", "") return dropdown def get_string_cmp_entry(): dropdown = QComboBox() dropdown.addItem("cnt.", "contains") dropdown.addItem("cnt. (rgx)", "containsregexp") dropdown.addItem("is", "is") dropdown.addItem("len. >", "lengt") dropdown.addItem("len. <", "lenlt") dropdown.addItem("len. =", "leneq") return dropdown class StringCmpWidget(QWidget): returnPressed = pyqtSignal() def __init__(self, *args, **kwargs): QWidget.__init__(self, *args, **kwargs) layout = QHBoxLayout() self.cmp_entry = get_string_cmp_entry() self.text_entry = QLineEdit() self.text_entry.returnPressed.connect(self.returnPressed) layout.addWidget(self.cmp_entry) layout.addWidget(self.text_entry) self.setLayout(layout) self.layout().setContentsMargins(0, 0, 0, 0) def get_value(self): str_cmp = self.cmp_entry.itemData(self.cmp_entry.currentIndex()) str_val = self.text_entry.text() return [str_cmp, str_val] def reset(self): self.cmp_entry.setCurrentIndex(0) self.text_entry.setText("") def dt_sort_key(r): if r.time_start: return time_to_nsecs(r.time_start) return 0 class StringKVWidget(QWidget): returnPressed = pyqtSignal() def __init__(self, *args, **kwargs): QWidget.__init__(self, *args, **kwargs) self.str2_shown = False self.str1 = StringCmpWidget() self.str2 = StringCmpWidget() self.str1.returnPressed.connect(self.returnPressed) self.str2.returnPressed.connect(self.returnPressed) self.toggle_button = QToolButton() self.toggle_button.setText("+") self.toggle_button.clicked.connect(self._show_hide_str2) layout = QHBoxLayout() layout.addWidget(self.str1) layout.addWidget(self.str2) layout.addWidget(self.toggle_button) self.str2.setVisible(self.str2_shown) self.setLayout(layout) self.layout().setContentsMargins(0, 0, 0, 0) @pyqtSlot() def _show_hide_str2(self): if self.str2_shown: self.toggle_button.setText("+") self.str2_shown = False else: self.toggle_button.setText("-") self.str2_shown = True self.str2.setVisible(self.str2_shown) def get_value(self): retval = self.str1.get_value() if self.str2_shown: retval += self.str2.get_value() return retval def reset(self): self.str1.reset() self.str2.reset() class DropdownFilterEntry(QWidget): # a widget that lets you enter filters using ezpz dropdowns/text boxes filterEntered = pyqtSignal(list) def __init__(self, *args, **kwargs): QWidget.__init__(self, *args, **kwargs) layout = QHBoxLayout() confirm = QToolButton() confirm.setText("OK") confirm.setToolTip("Apply the entered filter") self.field_entry = get_field_entry() # stack containing widgets for string, k/v, date, daterange self.str_cmp_entry = StringCmpWidget() self.kv_cmp_entry = StringKVWidget() self.inv_entry = QCheckBox("inv") # date # daterange self.entry_layout = QStackedLayout() 
self.entry_layout.setContentsMargins(0, 0, 0, 0) self.current_entry = 0 self.entry_layout.addWidget(self.str_cmp_entry) self.entry_layout.addWidget(self.kv_cmp_entry) # add date # 2 # add daterange # 3 confirm.clicked.connect(self.confirm_entry) self.str_cmp_entry.returnPressed.connect(self.confirm_entry) self.kv_cmp_entry.returnPressed.connect(self.confirm_entry) self.field_entry.currentIndexChanged.connect(self._display_value_widget) layout.addWidget(confirm) layout.addWidget(self.inv_entry) layout.addWidget(self.field_entry) layout.addLayout(self.entry_layout) self.setLayout(layout) self.setContentsMargins(0, 0, 0, 0) self._display_value_widget() @pyqtSlot() def _display_value_widget(self): # show the correct value widget in the value stack layout field = self.field_entry.itemData(self.field_entry.currentIndex()) self.current_entry = 0 if field in ("all", "reqbody", "rspbody", "body", "wsmessage", "method", "host", "path", "url", "statuscode", "tag"): self.current_entry = 0 elif field in ("reqheader", "rspheader", "header", "param", "urlparam", "postparam", "rspcookie", "reqcookie", "cookie"): self.current_entry = 1 # elif for date # elif for daterange self.entry_layout.setCurrentIndex(self.current_entry) def get_value(self): val = [] if self.inv_entry.isChecked(): val.append("inv") field = self.field_entry.itemData(self.field_entry.currentIndex()) val.append(field) if self.current_entry == 0: val += self.str_cmp_entry.get_value() elif self.current_entry == 1: val += self.kv_cmp_entry.get_value() # elif for date # elif for daterange return [val] # no support for OR @pyqtSlot() def confirm_entry(self): phrases = self.get_value() self.filterEntered.emit(phrases) self.str_cmp_entry.reset() self.kv_cmp_entry.reset() # reset date # reset date range class TextFilterEntry(QWidget): # a text box that can be used to enter filters filterEntered = pyqtSignal(list) def __init__(self, *args, **kwargs): QWidget.__init__(self, *args, **kwargs) layout = QHBoxLayout() self.textEntry = QLineEdit() self.textEntry.returnPressed.connect(self.confirm_entry) self.textEntry.setToolTip("Enter the filter here and press return to apply it") layout.addWidget(self.textEntry) self.setLayout(layout) self.layout().setContentsMargins(0, 0, 0, 0) @pyqtSlot() def confirm_entry(self): args = shlex.split(self.textEntry.text()) phrases = [list(group) for k, group in groupby(args, lambda x: x == "OR") if not k] self.filterEntered.emit(phrases) self.textEntry.setText("") class FilterEntry(QWidget): # a widget that lets you switch between filter entries filterEntered = pyqtSignal(list) def __init__(self, *args, **kwargs): QWidget.__init__(self, *args, **kwargs) self.current_entry = 0 self.max_entries = 2 self.text_entry = TextFilterEntry() dropdown_entry = DropdownFilterEntry() self.text_entry.filterEntered.connect(self.filterEntered) dropdown_entry.filterEntered.connect(self.filterEntered) self.entry_layout = QStackedLayout() self.entry_layout.addWidget(dropdown_entry) self.entry_layout.addWidget(self.text_entry) swap_button = QToolButton() swap_button.setText(">") swap_button.setToolTip("Switch between dropdown and text entry") swap_button.clicked.connect(self.next_entry) hlayout = QHBoxLayout() hlayout.addWidget(swap_button) hlayout.addLayout(self.entry_layout) self.setLayout(hlayout) self.layout().setContentsMargins(0, 0, 0, 0) self.layout().setSpacing(0) @pyqtSlot() def next_entry(self): self.current_entry += 1 self.current_entry = self.current_entry % self.max_entries self.entry_layout.setCurrentIndex(self.current_entry)
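# set_entry() below jumps straight to a given entry style (0 = dropdown,
# 1 = text); FilterEditor.set_is_text() uses it via ReqBrowser.set_filter_is_text().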
def set_entry(self, entry): self.current_entry = entry self.current_entry = self.current_entry % self.max_entries self.entry_layout.setCurrentIndex(self.current_entry) class FilterListWidget(QTableWidget): # list part of the filter tab def __init__(self, *args, **kwargs): self.client = kwargs.pop("client") QTableWidget.__init__(self, *args, **kwargs) self.context = RequestContext(self.client) # Set up table self.setColumnCount(1) self.horizontalHeader().hide() self.horizontalHeader().setSectionResizeMode(QHeaderView.Stretch) self.verticalHeader().hide() self.verticalHeader().setSectionResizeMode(QHeaderView.ResizeToContents) #self.setSelectionMode(QAbstractItemView.NoSelection) #self.setEditTriggers(QAbstractItemView.NoEditTriggers) def append_fstr(self, fstr): args = shlex.split(fstr) phrase = [list(group) for k, group in groupby(args, lambda x: x == "OR") if not k] self.context.apply_phrase(phrase) self._append_fstr_row(fstr) def set_query(self, query): self.context.set_query(query) self.redraw_table() def pop_phrase(self): self.context.pop_phrase() self.redraw_table() def clear_phrases(self): self.context.set_query([]) self.redraw_table() def _append_fstr_row(self, fstr): row = self.rowCount() self.insertRow(row) self.setItem(row, 0, QTableWidgetItem(fstr)) def redraw_table(self): self.setRowCount(0) query = self.context.query for p in query: condstrs = [' '.join(l) for l in p] fstr = ' OR '.join(condstrs) self._append_fstr_row(fstr) def get_query(self): return self.context.query class FilterEditor(QWidget): # a widget containing a list of filters and the ability to edit the filters in the list filtersEdited = pyqtSignal(list) builtin_filters = ( ('No Images', ['inv', 'path', 'containsregexp', r'(\.png$|\.jpg$|\.jpeg$|\.gif$|\.ico$|\.bmp$|\.svg$)']), ('No JavaScript/CSS/Fonts', ['inv', 'path', 'containsregexp', r'(\.js$|\.css$|\.woff$)']), ) def __init__(self, *args, **kwargs): self.client = kwargs.pop("client") QWidget.__init__(self, *args, **kwargs) layout = QVBoxLayout() # Manage bar manage_bar = QHBoxLayout() pop_button = QPushButton("Pop") pop_button.setToolTip("Remove the most recently applied filter") clear_button = QPushButton("Clear") clear_button.setToolTip("Remove all active filters") scope_reset_button = QPushButton("Scope") scope_reset_button.setToolTip("Set the active filters to the current scope") scope_save_button = QPushButton("Save Scope") scope_save_button.setToolTip("Set the scope to the current filters. 
Any messages that don't match the active filters will be ignored by the proxy.") self.builtin_combo = QComboBox() self.builtin_combo.addItem("Apply a built-in filter", None) for desc, filt in FilterEditor.builtin_filters: self.builtin_combo.addItem(desc, filt) self.builtin_combo.currentIndexChanged.connect(self._apply_builtin_filter) manage_bar.addWidget(clear_button) manage_bar.addWidget(pop_button) manage_bar.addWidget(scope_reset_button) manage_bar.addWidget(scope_save_button) manage_bar.addWidget(self.builtin_combo) manage_bar.addStretch() mbar_widget = QWidget() mbar_widget.setLayout(manage_bar) pop_button.clicked.connect(self.pop_phrase) clear_button.clicked.connect(self.clear_phrases) scope_reset_button.clicked.connect(self.reset_to_scope) scope_save_button.clicked.connect(self.save_scope) # Filter list self.filter_list = FilterListWidget(client=self.client) # Filter entry self.entry = FilterEntry() self.entry.setMaximumHeight(self.entry.sizeHint().height()) self.entry.filterEntered.connect(self.apply_phrase) layout.addWidget(mbar_widget) layout.addWidget(self.filter_list) layout.addWidget(self.entry) self.setLayout(layout) self.layout().setSpacing(0) self.layout().setContentsMargins(0, 0, 0, 0) @pyqtSlot() def save_scope(self): query = self.filter_list.get_query() self.client.set_scope(query) display_info_box("Scope updated") @pyqtSlot() def reset_to_scope(self): query = self.client.get_scope().filter self.filter_list.set_query(query) self.filtersEdited.emit(self.filter_list.get_query()) @pyqtSlot() def clear_phrases(self): self.filter_list.clear_phrases() self.filtersEdited.emit(self.filter_list.get_query()) @pyqtSlot() def pop_phrase(self): self.filter_list.pop_phrase() self.filtersEdited.emit(self.filter_list.get_query()) @pyqtSlot(list) def apply_phrase(self, phrase): fstr = query_to_str([phrase]) try: self.filter_list.append_fstr(fstr) except InvalidQuery as e: display_error_box("Could not add filter:\n\n%s" % e) return self.filtersEdited.emit(self.filter_list.get_query()) @pyqtSlot(int) def _apply_builtin_filter(self, ind): phrase = self.builtin_combo.itemData(ind) if phrase: self.apply_phrase([phrase]) self.builtin_combo.setCurrentIndex(0) def set_is_text(self, is_text): if is_text: self.entry.set_entry(1) else: self.entry.set_entry(0) class ReqListModel(QAbstractTableModel): requestsLoading = pyqtSignal() requestsLoaded = pyqtSignal() HD_ID = 0 HD_VERB = 1 HD_HOST = 2 HD_PATH = 3 HD_SCODE = 4 HD_REQLEN = 5 HD_RSPLEN = 6 HD_TIME = 7 HD_TAGS = 8 HD_MNGL = 9 def __init__(self, client, *args, **kwargs): QAbstractTableModel.__init__(self, *args, **kwargs) self.client = client self.header_order = [ self.HD_ID, self.HD_VERB, self.HD_HOST, self.HD_PATH, self.HD_SCODE, self.HD_REQLEN, self.HD_RSPLEN, self.HD_TIME, self.HD_TAGS, self.HD_MNGL, ] self.table_headers = { self.HD_ID: "ID", self.HD_VERB: "Method", self.HD_HOST: "Host", self.HD_PATH: "Path", self.HD_SCODE: "S-Code", self.HD_REQLEN: "Req Len", self.HD_RSPLEN: "Rsp Len", self.HD_TIME: "Time", self.HD_TAGS: "Tags", self.HD_MNGL: "Mngl", } self.reqs = [] self.sort_enabled = False self.header_count = len(self.header_order) self.row_count = len(self.reqs) def headerData(self, section, orientation, role): if role == Qt.DisplayRole and orientation == Qt.Horizontal: hd = self.header_order[section] return self.table_headers[hd] return QVariant() def rowCount(self, parent): return self.row_count def columnCount(self, parent): return self.header_count def _gen_req_row(self, req): MAX_PATH_LEN = 60 MAX_TAG_LEN = 40 reqid = 
self.client.get_reqid(req) method = req.method host = hostport(req) path = max_len_str(req.url.path, MAX_PATH_LEN) reqlen = str(req.content_length) tags = max_len_str(', '.join(sorted(req.tags)), MAX_TAG_LEN) if req.response: scode = str(req.response.status_code) + ' ' + req.response.reason rsplen = str(req.response.content_length) else: scode = "--" rsplen = "--" if req.time_start and req.time_end: time_delt = req.time_end - req.time_start reqtime = ("%.2f" % time_delt.total_seconds()) else: reqtime = "--" if req.unmangled and req.response and req.response.unmangled: manglestr = "q/s" elif req.unmangled: manglestr = "q" elif req.response and req.response.unmangled: manglestr = "s" else: manglestr = "N/A" return (req, reqid, method, host, path, scode, reqlen, rsplen, reqtime, tags, manglestr) def data(self, index, role): if role == Qt.BackgroundColorRole: req = self.reqs[index.row()][0] if index.column() == 2: return host_color(hostport(req)) elif index.column() == 4: if req.response: return sc_color(str(req.response.status_code)) elif index.column() == 1: return method_color(req.method) return QVariant() elif role == Qt.DisplayRole: rowdata = self.reqs[index.row()] return rowdata[index.column()+1] return QVariant() def _sort_reqs(self): def skey(rowdata): return dt_sort_key(rowdata[0]) if self.sort_enabled: self.reqs = sorted(self.reqs, key=skey, reverse=True) self.row_count = len(self.reqs) def _req_ind(self, req=None, reqid=None): if not reqid: reqid = self.client.get_reqid(req) for ind, rowdata in zip(count(), self.reqs): req = rowdata[0] if self.client.get_reqid(req) == reqid: return ind return -1 def _emit_all_data(self): self.dataChanged.emit(self.createIndex(0, 0), self.createIndex(len(self.header_order), len(self.reqs))) def _set_requests(self, reqs): self.reqs = [self._gen_req_row(req) for req in reqs] self.row_count = len(self.reqs) def set_requests(self, reqs): self.beginResetModel() self._set_requests(reqs) self._sort_reqs() self._emit_all_data() self.endResetModel() def clear(self): self.beginResetModel() self.reqs = [] self.row_count = len(self.reqs) self._emit_all_data() self.endResetModel() def add_request(self, req): self.beginResetModel() self.reqs.append(self._gen_req_row(req)) self.row_count = len(self.reqs) self._sort_reqs() self._emit_all_data() self.endResetModel() def add_requests(self, reqs): self.beginResetModel() for req in reqs: self.reqs.append(self._gen_req_row(req)) self.row_count = len(self.reqs) self._sort_reqs() self._emit_all_data() self.endResetModel() def update_request(self, req): self.beginResetModel() ind = self._req_ind(req) if ind < 0: return self.reqs[ind] = self._gen_req_row(req) self.row_count = len(self.reqs) self._emit_all_data() self.endResetModel() def delete_request(self, req=None, reqid=None): self.beginResetModel() ind = self._req_ind(req, reqid) if ind < 0: return self.reqs = self.reqs[:ind] + self.reqs[(ind+1):] self.row_count = len(self.reqs) self._emit_all_data() self.endResetModel() def has_request(self, req=None, reqid=None): if self._req_ind(req, reqid) < 0: return False return True def get_requests(self): return [row[0] for row in self.reqs] def disable_sort(self): self.sort_enabled = False def enable_sort(self): self.sort_enabled = True self._sort_reqs() def req_by_ind(self, ind): return self.reqs[ind][0] class ReqTableFilter(QSortFilterProxyModel): def __init__(self, parentView, model): QSortFilterProxyModel.__init__(self) self.view = parentView self.view.verticalScrollBar().valueChanged.connect(self.updateMaxRows) self.minrows 
= 100 self.model = model self.maxrows = self.minrows def filterAcceptsRow(self, sourceRow, sourceParent): # ~~lets just take the performance problems for now~~ we found performance problems if sourceRow > self.maxrows: return False return True @pyqtSlot(int) def updateMaxRows(self, val): vscroll = self.view.verticalScrollBar() maxmaxrows = self.model.rowCount(None) if vscroll.maximum() > 0: viewperc = float(val)/float(vscroll.maximum()) else: viewperc = 0 if viewperc > 0.75: self.maxrows += 50 if self.maxrows > maxmaxrows: self.maxrows = maxmaxrows if viewperc < 0.5: self.maxrows -= 100 if self.maxrows < self.minrows: self.maxrows = self.minrows self.invalidateFilter() class ReqBrowser(QWidget): # Widget containing request viewer, tabs to view list of reqs, filters, and (evevntually) site map # automatically updated with requests as they're saved def __init__(self, client, repeater_widget=None, macro_widget=None, reload_reqs=True, update=False, filter_tab=True, is_client_context=False): QWidget.__init__(self) self.client = client self.filters = [] self.reload_reqs = reload_reqs self.mylayout = QGridLayout() self.mylayout.setSpacing(0) self.mylayout.setContentsMargins(0, 0, 0, 0) # reqtable updater if update: self.updater = ReqListUpdater(self.client) else: self.updater = None # reqtable/search self.listWidg = ReqTableWidget(client, repeater_widget=repeater_widget, macro_widget=macro_widget) if self.updater: self.updater.add_reqlist_widget(self.listWidg) self.listWidg.requestsSelected.connect(self.update_viewer) # Filter widget self.filterWidg = FilterEditor(client=self.client) self.filterWidg.filtersEdited.connect(self.listWidg.set_filter) if is_client_context: self.filterWidg.filtersEdited.connect(self.set_client_context) self.filterWidg.reset_to_scope() # Tree widget self.treeWidg = ReqTreeView() # add tabs self.listTabs = QTabWidget() self.listTabs.addTab(self.listWidg, "List") self.tree_ind = self.listTabs.count() self.listTabs.addTab(self.treeWidg, "Tree") if filter_tab: self.listTabs.addTab(self.filterWidg, "Filters") self.listTabs.currentChanged.connect(self._tab_changed) # reqview self.reqview = ReqViewWidget(info_tab=True, param_tab=True, tag_tab=True) self.reqview.set_tags_read_only(False) self.reqview.tag_widg.tagsUpdated.connect(self._tags_updated) self.listWidg.req_view_widget = self.reqview self.mylayout.addWidget(self.reqview, 0, 0, 3, 1) self.mylayout.addWidget(self.listTabs, 4, 0, 2, 1) self.setLayout(self.mylayout) def show_filters(self): self.listTabs.setCurrentIndex(2) def show_history(self): self.listTabs.setCurrentIndex(0) def show_tree(self): self.listTabs.setCurrentIndex(1) @pyqtSlot(list) def set_client_context(self, query): self.client.context.set_query(query) @pyqtSlot() def reset_to_scope(self): self.filterWidg.reset_to_scope() @pyqtSlot(list) def update_viewer(self, reqs): self.reqview.set_request(None) if len(reqs) > 0: if self.reload_reqs: reqh = reqs[0] req = self.client.req_by_id(reqh.db_id) else: req = reqs[0] self.reqview.set_request(req) @pyqtSlot(list) def update_filters(self, query): self.filters = query @pyqtSlot(HTTPRequest) def add_request_item(self, req): self.listWidg.add_request_item(req) self.treeWidg.add_request_item(req) @pyqtSlot(list) def set_requests(self, reqs): self.listWidg.set_requests(reqs) self.treeWidg.set_requests(reqs) @pyqtSlot(int) def _tab_changed(self, i): if i == self.tree_ind: self.treeWidg.set_requests(self.listWidg.get_requests()) @pyqtSlot(set) def _tags_updated(self, tags): req = self.reqview.req req.tags = tags if 
req.db_id: reqid = self.client.get_reqid(req) self.client.clear_tag(reqid) for tag in tags: self.client.add_tag(reqid, tag) def set_filter_is_text(self, is_text): self.filterWidg.set_is_text(is_text) class ReqListUpdater(QObject): newRequest = pyqtSignal(HTTPRequest) requestUpdated = pyqtSignal(HTTPRequest) requestDeleted = pyqtSignal(str) def __init__(self, client): QObject.__init__(self) self.mtx = threading.Lock() self.client = client self.reqlist_widgets = [] self.t = ProxyThread(target=self.run_updater) self.t.start() def add_reqlist_widget(self, widget): self.mtx.acquire() try: self.newRequest.connect(widget.add_request) self.requestUpdated.connect(widget.update_request) self.requestDeleted.connect(widget.delete_request) self.reqlist_widgets.append(widget) finally: self.mtx.release() def run_updater(self): conn = self.client.new_conn() try: try: for msg in conn.watch_storage(): self.mtx.acquire() try: if msg["Action"] == "NewRequest": self.newRequest.emit(msg["Request"]) elif msg["Action"] == "RequestUpdated": self.requestUpdated.emit(msg["Request"]) elif msg["Action"] == "RequestDeleted": self.requestDeleted.emit(msg["MessageId"]) finally: self.mtx.release() except SocketClosed: return finally: conn.close() def stop(self): self.conn.close() class ReqTableWidget(QWidget): requestsChanged = pyqtSignal(list) requestsSelected = pyqtSignal(list) def __init__(self, client, repeater_widget=None, macro_widget=None, *args, **kwargs): QWidget.__init__(self, *args, **kwargs) self.allow_save = False self.client = client self.repeater_widget = repeater_widget self.macro_widget = macro_widget self.query = [] self.req_view_widget = None self.setLayout(QStackedLayout()) self.layout().setContentsMargins(0, 0, 0, 0) self.tableModel = ReqListModel(self.client) self.tableView = QTableView() self.tableProxy = ReqTableFilter(self.tableView, self.tableModel) self.tableProxy.setSourceModel(self.tableModel) self.tableView.setModel(self.tableProxy) self.tableView.verticalHeader().setSectionResizeMode(QHeaderView.ResizeToContents) self.tableView.horizontalHeader().setSectionResizeMode(QHeaderView.ResizeToContents) self.tableView.verticalHeader().hide() self.tableView.setSelectionBehavior(QAbstractItemView.SelectRows) #self.tableView.setSelectionMode(QAbstractItemView.SingleSelection) self.tableView.horizontalHeader().setStretchLastSection(True) self.tableView.selectionModel().selectionChanged.connect(self.on_select_change) self.tableModel.dataChanged.connect(self._paint_view) self.requestsChanged.connect(self.set_requests) self.requestsSelected.connect(self._updated_selected_request) self.selected_reqs = [] self.layout().addWidget(self.tableView) self.layout().addWidget(QLabel("<b>Loading requests from data file...</b>")) @pyqtSlot(HTTPRequest) def add_request(self, req): with DisableUpdates(self.tableView): if req.db_id != "": reqid = self.client.get_reqid(req) if self.client.check_request(self.query, reqid=reqid): self.tableModel.add_request(req) if req.unmangled and req.unmangled.db_id != "" and self.tableModel.has_request(req.unmangled): self.tableModel.delete_request(req.unmangled) else: if self.client.check_request(self.query, req=req): self.tableModel.add_request(req) @pyqtSlot() def clear(self): self.tableModel.clear() def get_requests(self): return self.tableModel.get_requests() @pyqtSlot(list) def set_requests(self, reqs, check_filter=True): to_add = [] for req in reqs: if req.db_id != "": reqid = self.client.get_reqid(req) if self.client.check_request(self.query, reqid=reqid): to_add.append(req) 
else: if self.client.check_request(self.query, req=req): to_add.append(req) with DisableUpdates(self.tableView): self.clear() self.tableModel.disable_sort() self.tableModel.add_requests(to_add) self.tableModel.enable_sort() self.set_loading(False) @pyqtSlot(HTTPRequest) def update_request(self, req): with DisableUpdates(self.tableView): self.tableModel.update_request(req) if req.db_id != "": if req.unmangled and req.unmangled.db_id != "": self.tableModel.delete_request(reqid=self.client.get_reqid(req.unmangled)) @pyqtSlot(str) def delete_request(self, reqid): with DisableUpdates(self.tableView): self.tableModel.delete_request(reqid=reqid) @pyqtSlot(list) def set_filter(self, query): self.query = query self.set_loading(True) self.client.query_storage_async(self.requestsChanged, self.query, headers_only=True) @pyqtSlot(list) def _updated_selected_request(self, reqs): if len(reqs) > 0: self.selected_reqs = reqs else: self.selected_reqs = [] @pyqtSlot(QItemSelection, QItemSelection) def on_select_change(self, newSelection, oldSelection): reqs = [] added = set() for rowidx in self.tableView.selectionModel().selectedRows(): row = rowidx.row() if row not in added: reqs.append(self.tableModel.req_by_ind(row)) added.add(row) self.requestsSelected.emit(reqs) def get_selected_request(self): # load the full request if len(self.selected_reqs) > 0: return self.client.req_by_id(self.client.get_reqid(self.selected_reqs[0])) else: return None def get_all_requests(self): return [self.client.req_by_id(self.client.get_reqid(req)) for req in self.tableModel.get_requests()] def contextMenuEvent(self, event): if len(self.selected_reqs) > 1: menu = QMenu(self) macroAction = menu.addAction("Add to active macro input") if self.allow_save: saveAction = menu.addAction("Save requests to history") action = menu.exec_(self.mapToGlobal(event.pos())) if action == macroAction: if self.macro_widget: self.macro_widget.add_requests(self.selected_reqs) if self.allow_save and action == saveAction: for req in self.selected_reqs: self.client.save_new(req) elif len(self.selected_reqs) == 1: req = self.get_selected_request() display_req_context(self, self.client, req, event, repeater_widget=self.repeater_widget, req_view_widget=self.req_view_widget, macro_widget=self.macro_widget, save_option=self.allow_save) def set_loading(self, is_loading): with DisableUpdates(self.tableView): if is_loading: self.layout().setCurrentIndex(1) else: self.layout().setCurrentIndex(0) @pyqtSlot(QModelIndex, QModelIndex) def _paint_view(self, indA, indB): self.tableView.repaint() @pyqtSlot() def delete_selected(self): with DisableUpdates(self.tableView): for req in self.selected_reqs: self.tableModel.delete_request(req=req) ```
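Both entry styles emit the same structure: a list of phrases, where each phrase is a list of condition tokens and `OR` separates phrases in text mode. A quick illustration of the parsing done in `TextFilterEntry.confirm_entry` (the filter line itself is made up):

```python
import shlex
from itertools import groupby

line = "host contains example.com OR inv path contains .js"
args = shlex.split(line)
phrases = [list(g) for k, g in groupby(args, lambda x: x == "OR") if not k]
# phrases == [['host', 'contains', 'example.com'],
#             ['inv', 'path', 'contains', '.js']]
```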
{ "source": "0xbadd/breakout", "score": 3 }
#### File: breakout/src/block.py ```python from colors import BLUE, BROWN, GREEN, ORANGE, RED, YELLOW from entity import Entity from wall import WALL_SIZE BLOCK_WIDTH = 80 BLOCK_HEIGHT = 20 NUM_BLOCKS_X = 9 NUM_BLOCKS_Y = 6 class Block(Entity): def __init__(self, x, y, width, height, color, value): super().__init__(x, y, width, height, color) self.value = value def __eq__(self, other): return (self.x, self.y, self.width, self.height) == ( other.x, other.y, other.width, other.height, ) def move(self): pass def update(self): pass def init_blocks(): blocks = [] for row in range(0, NUM_BLOCKS_Y): for col in range(0, NUM_BLOCKS_X): block_x = col * BLOCK_WIDTH block_y = row * BLOCK_HEIGHT block_color = None value = None if row == 0: block_color = RED value = 7 elif row == 1: block_color = ORANGE value = 5 elif row == 2: block_color = BROWN value = 4 elif row == 3: block_color = YELLOW value = 3 elif row == 4: block_color = GREEN value = 2 else: block_color = BLUE value = 1 blocks.append( Block( WALL_SIZE + block_x, WALL_SIZE * 2 + 48 + block_y, BLOCK_WIDTH, BLOCK_HEIGHT, block_color, value, ) ) return blocks ``` #### File: breakout/src/main.py ```python import pygame from game import Game from input_handlers import handle_keys from window import init_screen from game_states import GameStates FPS = 60 def main(): pygame.init() screen = init_screen() game = Game() running = True game_state = GameStates.MAIN_MENU clock = pygame.time.Clock() while running: keys = pygame.key.get_pressed() action = handle_keys(keys, game_state) quit = action.get("quit") if quit: running = False if game_state == GameStates.MAIN_MENU or game_state == GameStates.GAME_OVER: new_game = action.get("new_game") if new_game: if game_state == GameStates.GAME_OVER: game = Game() game_state = GameStates.PLAYING elif game_state == GameStates.PLAYING: results = game.update(action) lost = results.get("lost") if lost: game_state = GameStates.GAME_OVER game.render(screen, game_state) clock.tick(FPS) if __name__ == "__main__": main() ``` #### File: breakout/src/velocity.py ```python class Velocity: def __init__(self, x=0, y=0): self.x = x self.y = y def is_moving(self): return self.x != 0 and self.y != 0 def reverse(self): self.x *= -1 self.y *= -1 def reverse_x(self): self.x *= -1 def reverse_y(self): self.y *= -1 ```
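`Block` carries a point value and a position-based `__eq__`, which is what lets game code remove a specific block from the grid on a hit. A hypothetical collision handler tying the pieces together (the real `Game.update` is not in this excerpt):

```python
def handle_block_hit(ball_velocity, blocks, hit_block):
    # Remove the struck block (matched via Block.__eq__), bounce the
    # ball vertically, and return the points scored for it.
    blocks.remove(hit_block)
    ball_velocity.reverse_y()
    return hit_block.value
```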
{ "source": "0xBADEAFFE/Ice", "score": 3 }
#### File: Ice/ice/emulators.py ```python import os def emulator_rom_exe(emulator): """Generates a command string that will launch `emulator` (using the format provided by the user). The return value of this function should be suitable to use as the `Exe` field of a Steam shortcut""" # We don't know if the user put quotes around the emulator location. If # so, we dont want to add another pair and screw things up. return normalize(emulator.location) def emulator_rom_launch_options(emulator, rom): """Generates the launch options string that will launch `rom` with `emulator` (using the format provided by the user). The return value of this function should be suitable to use as the `LaunchOptions` field of a Steam shortcut""" # The user didn't give us the ROM information, but screw it, I already # have some code to add quotes to a string, might as well use it. quoted_rom = normalize(rom.path) # The format string contains a bunch of specifies that users can use to # substitute values in at runtime. Right now the only supported values are: # %r - The location of the ROM (so the emulator knows what to launch) # %fn - The ROM filename without its extension (for emulators that utilize separate configuration files) # # More may be added in the future, but for now this is what we support return (emulator.format .replace("%r", quoted_rom) .replace("%fn", os.path.splitext(os.path.basename(rom.path))[0]) ) def emulator_startdir(emulator): """Returns the directory which stores the emulator. The return value of this function should be suitable to use as the 'StartDir' field of a Steam shortcut""" return os.path.dirname(emulator.location) def normalize(string): """Normalizing the strings is just removing any leading/trailing quotes. The beautiful thing is that strip does nothing if it doesnt contain quotes, so normalizing it then adding quotes should do what I want 100% of the time """ return "\"%s\"" % string.strip("\"") ``` #### File: Ice/tests/test_communityids.py ```python import sys import os import unittest from nose_parameterized import parameterized from ice import communityids KNOWN_ID_PAIRS = [ # Meris608 (40586375, 76561198000852103), # Jankenking (49642724, 76561198009908452), ] class TestCommunityIds(unittest.TestCase): @parameterized.expand(KNOWN_ID_PAIRS) def test_id64_from_id32(self, id32, id64): """Tests converting an id32 into an id64""" self.assertEqual(communityids.id64_from_id32(id32), id64) @parameterized.expand(KNOWN_ID_PAIRS) def test_id32_from_id64(self, id32, id64): """Tests converting an id64 into an id32""" self.assertEqual(communityids.id32_from_id64(id64), id32) ```
{ "source": "0xBaphy/mediasite-ripper", "score": 3 }
#### File: 0xBaphy/mediasite-ripper/main.py ```python from requests import request from aigpy import download import os import shutil def Download(Name: str, Stream: str): tool = download.DownloadTool(Name, [Stream + '/qualityLevels()']) check, err = tool.start(True) if check: shutil.move(Name, Name+'.mp4') else: print("[!] Download failed. (" + str(err) + ")") class ChildContent(object): def __init__(self, name, Id, url): self.Name = name self.Id = Id self.Url = url class FolderChild(object): def __init__(self, details: dict): self.Name = details['CurrentFolder']['Name'] self.Id = details['CurrentFolder']['Id'] self.ParentId = details['CurrentFolder']['ParentCatalogFolderId'] x = [] for d in details['PresentationDetailsList']: x.append(ChildContent(d['Name'], d['Id'], d['PlayerUrl'])) self.Contents = x class ParentFolder(object): def __init__(self, details: dict): self.Name = details['CurrentFolder']['Name'] self.Id = details['CurrentFolder']['Id'] self.DynamicId = details['CurrentFolder']['DynamicFolderId'] self.Folders = list class CatalogDetails(object): def __init__(self, details: dict): self.Name = details['CatalogDetails']['Name'] self.Id = details['CatalogDetails']['Id'] class MediaSite(object): def __init__(self, catalog): res = request("GET", catalog).text self.CatalogId = res[res.find("CatalogId") + 12:res.find("',", 1)] def CatalogDetails(self) -> dict: endpoint = "https://mediasite.osu.edu/Mediasite/Catalog/Data/GetCatalogDetails" payload = { "IsViewPage": False, "CatalogId": self.CatalogId, "CurrentFolderId": "", "Url": "", "PreviewKey": None, "AuthTicket": None } res = request("POST", endpoint, json=payload) return res.json() def ParentFolderDetails(self, Id: str) -> ParentFolder: endpoint = "https://mediasite.osu.edu/Mediasite/Catalog/Data/GetPresentationsForFolder" payload ={ "IsViewPage": False, "IsNewFolder":True, "AuthTicket": None, "CatalogId": self.CatalogId, "CurrentFolderId": Id, "RootDynamicFolderId": self.CatalogId } res = request("POST", endpoint, json=payload).json() return ParentFolder(res) def PresentationFolder(self, Id: str) -> FolderChild: endpoint = "https://mediasite.osu.edu/Mediasite/Catalog/Data/GetPresentationsForFolder" payload = { "IsViewPage": False, "IsNewFolder": True, "AuthTicket": None, "CatalogId": self.CatalogId, "CurrentFolderId": Id, "RootDynamicFolderId": self.CatalogId, "ItemsPerPage": 40, "PageIndex": 0, "PermissionMask": "Execute", "CatalogSearchType": "SearchInFolder", "SortBy": "Date", "SortDirection": "Ascending", "StartDate": None, "EndDate": None, "StatusFilterList": None, "PreviewKey": None, "Tags": [] } res = request("POST", endpoint, json=payload).json() return FolderChild(res) def GetStream(self, url: str) -> str: stream = "" headers = {'Content-type': "application/json; charset=utf-8"} endpoint = "https://mediasite.osu.edu/Mediasite/PlayerService/PlayerService.svc/json/GetPlayerOptions" payload = { "getPlayerOptionsRequest": { "ResourceId": url[41:-1].split('?')[0], "QueryString":"?" 
+ url[41:-1].split('?')[1], "UseScreenReader": False, "UrlReferrer":"" } } res = request("POST", endpoint, json=payload, headers=headers).json() urls = res['d']['Presentation']['Streams'][0]['VideoUrls'] for u in urls: if u['Location'] != "": stream = u['Location'].split('?')[0] break return stream def main(): Lessons = input("[*] Input the catalog URL >") client = MediaSite(Lessons) catalog = client.CatalogDetails() CurrentPath = catalog['CatalogDetails']['Name'] if not os.path.isdir(CurrentPath): os.mkdir(CurrentPath) for c in catalog['NavigationFolders']: if c['Type'] == 2: pass folder = client.PresentationFolder(c['DynamicFolderId']) Dir = os.path.join(CurrentPath, folder.Name) if not os.path.isdir(Dir): os.mkdir(Dir) for video in folder.Contents: stream = client.GetStream(video.Url) name = os.path.join(Dir, video.Name) if os.path.isfile(name + ".mp4"): print("[i] File already exists, skipping:", name) else: print("[i] Downloading:", name) Download(name, stream) if __name__ == '__main__': main() ```
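For unattended runs, the same flow as `main()` can be scripted directly; the catalog URL below is a placeholder:

```python
client = MediaSite("https://mediasite.osu.edu/Mediasite/Catalog/Full/abc123")
catalog = client.CatalogDetails()
for nav in catalog['NavigationFolders']:
    folder = client.PresentationFolder(nav['DynamicFolderId'])
    for video in folder.Contents:
        stream = client.GetStream(video.Url)
        if stream:
            Download(video.Name, stream)
```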
{ "source": "0xbase12/kazoo", "score": 3 }
#### File: kazoo/recipe/lock.py ```python import sys try: from time import monotonic as now except ImportError: from time import time as now import uuid import six from kazoo.exceptions import ( CancelledError, KazooException, LockTimeout, NoNodeError ) from kazoo.protocol.states import KazooState from kazoo.retry import ( ForceRetryError, KazooRetry, RetryFailedError ) class _Watch(object): def __init__(self, duration=None): self.duration = duration self.started_at = None def start(self): self.started_at = now() def leftover(self): if self.duration is None: return None else: elapsed = now() - self.started_at return max(0, self.duration - elapsed) class Lock(object): """Kazoo Lock Example usage with a :class:`~kazoo.client.KazooClient` instance: .. code-block:: python zk = KazooClient() zk.start() lock = zk.Lock("/lockpath", "my-identifier") with lock: # blocks waiting for lock acquisition # do something with the lock Note: This lock is not *re-entrant*. Repeated calls after already acquired will block. This is an exclusive lock. For a read/write lock, see :class:`WriteLock` and :class:`ReadLock`. """ # Node name, after the contender UUID, before the sequence # number. Involved in read/write locks. _NODE_NAME = "__lock__" # Node names which exclude this contender when present at a lower # sequence number. Involved in read/write locks. _EXCLUDE_NAMES = ["__lock__"] def __init__(self, client, path, identifier=None): """Create a Kazoo lock. :param client: A :class:`~kazoo.client.KazooClient` instance. :param path: The lock path to use. :param identifier: Name to use for this lock contender. This can be useful for querying to see who the current lock contenders are. """ self.client = client self.path = path # some data is written to the node. this can be queried via # contenders() to see who is contending for the lock self.data = str(identifier or "").encode('utf-8') self.node = None self.wake_event = client.handler.event_object() # props to Netflix Curator for this trick. It is possible for our # create request to succeed on the server, but for a failure to # prevent us from getting back the full path name. We prefix our # lock name with a uuid and can check for its presence on retry. self.prefix = uuid.uuid4().hex + self._NODE_NAME self.create_path = self.path + "/" + self.prefix self.create_tried = False self.is_acquired = False self.assured_path = False self.cancelled = False self._retry = KazooRetry(max_tries=None, sleep_func=client.handler.sleep_func) self._lock = client.handler.lock_object() def _ensure_path(self): self.client.ensure_path(self.path) self.assured_path = True def cancel(self): """Cancel a pending lock acquire.""" self.cancelled = True self.wake_event.set() def acquire(self, blocking=True, timeout=None, ephemeral=True): """ Acquire the lock. By default, blocks and waits forever. :param blocking: Block until lock is obtained or return immediately. :type blocking: bool :param timeout: Don't wait forever to acquire the lock. :type timeout: float or None :param ephemeral: Whether to use an ephemeral znode for the lock (default True). :type ephemeral: bool :returns: Was the lock acquired? :rtype: bool :raises: :exc:`~kazoo.exceptions.LockTimeout` if the lock wasn't acquired within `timeout` seconds. .. warning:: When :attr:`ephemeral` is set to False session expiration will not release the lock and must be handled separately. .. versionadded:: 1.1 The timeout option. .. versionadded:: 2.4.1 The ephemeral option.
""" def _acquire_lock(): got_it = self._lock.acquire(False) if not got_it: raise ForceRetryError() return True retry = self._retry.copy() retry.deadline = timeout # Ensure we are locked so that we avoid multiple threads in # this acquistion routine at the same time... locked = self._lock.acquire(False) if not locked and not blocking: return False if not locked: # Lock acquire doesn't take a timeout, so simulate it... try: locked = retry(_acquire_lock) except RetryFailedError: return False already_acquired = self.is_acquired try: gotten = False try: gotten = retry(self._inner_acquire, blocking=blocking, timeout=timeout, ephemeral=ephemeral) except RetryFailedError: pass except KazooException: # if we did ultimately fail, attempt to clean up exc_info = sys.exc_info() if not already_acquired: self._best_effort_cleanup() self.cancelled = False six.reraise(exc_info[0], exc_info[1], exc_info[2]) if gotten: self.is_acquired = gotten if not gotten and not already_acquired: self._best_effort_cleanup() return gotten finally: self._lock.release() def _watch_session(self, state): self.wake_event.set() return True def _inner_acquire(self, blocking, timeout, ephemeral=True): # wait until it's our chance to get it.. if self.is_acquired: if not blocking: return False raise ForceRetryError() # make sure our election parent node exists if not self.assured_path: self._ensure_path() node = None if self.create_tried: node = self._find_node() else: self.create_tried = True if not node: node = self.client.create(self.create_path, self.data, ephemeral=ephemeral, sequence=True) # strip off path to node node = node[len(self.path) + 1:] self.node = node while True: self.wake_event.clear() # bail out with an exception if cancellation has been requested if self.cancelled: raise CancelledError() children = self._get_sorted_children() try: our_index = children.index(node) except ValueError: # pragma: nocover # somehow we aren't in the children -- probably we are # recovering from a session failure and our ephemeral # node was removed raise ForceRetryError() predecessor = self.predecessor(children, our_index) if not predecessor: return True if not blocking: return False # otherwise we are in the mix. watch predecessor and bide our time predecessor = self.path + "/" + predecessor self.client.add_listener(self._watch_session) try: self.client.get(predecessor, self._watch_predecessor) except NoNodeError: pass # predecessor has already been deleted else: self.wake_event.wait(timeout) if not self.wake_event.isSet(): raise LockTimeout("Failed to acquire lock on %s after " "%s seconds" % (self.path, timeout)) finally: self.client.remove_listener(self._watch_session) def predecessor(self, children, index): for c in reversed(children[:index]): if any(n in c for n in self._EXCLUDE_NAMES): return c return None def _watch_predecessor(self, event): self.wake_event.set() def _get_sorted_children(self): children = self.client.get_children(self.path) # Node names are prefixed by a type: strip the prefix first, which may # be one of multiple values in case of a read-write lock, and return # only the sequence number (as a string since it is padded and will # sort correctly anyway). # # In some cases, the lock path may contain nodes with other prefixes # (eg. in case of a lease), just sort them last ('~' sorts after all # ASCII digits). def _seq(c): for name in ["__lock__", "__rlock__"]: idx = c.find(name) if idx != -1: return c[idx + len(name):] # Sort unknown node names eg. "lease_holder" last. 
return '~' children.sort(key=_seq) return children def _find_node(self): children = self.client.get_children(self.path) for child in children: if child.startswith(self.prefix): return child return None def _delete_node(self, node): self.client.delete(self.path + "/" + node) def _best_effort_cleanup(self): try: node = self.node or self._find_node() if node: self._delete_node(node) except KazooException: # pragma: nocover pass def release(self): """Release the lock immediately.""" return self.client.retry(self._inner_release) def _inner_release(self): if not self.is_acquired: return False try: self._delete_node(self.node) except NoNodeError: # pragma: nocover pass self.is_acquired = False self.node = None return True def contenders(self): """Return an ordered list of the current contenders for the lock. .. note:: If the contenders did not set an identifier, it will appear as a blank string. """ # make sure our election parent node exists if not self.assured_path: self._ensure_path() children = self._get_sorted_children() contenders = [] for child in children: try: data, stat = self.client.get(self.path + "/" + child) contenders.append(data.decode('utf-8')) except NoNodeError: # pragma: nocover pass return contenders def __enter__(self): self.acquire() def __exit__(self, exc_type, exc_value, traceback): self.release() class WriteLock(Lock): """Kazoo Write Lock Example usage with a :class:`~kazoo.client.KazooClient` instance: .. code-block:: python zk = KazooClient() zk.start() lock = zk.WriteLock("/lockpath", "my-identifier") with lock: # blocks waiting for lock acquisition # do something with the lock The lock path passed to WriteLock and ReadLock must match for them to communicate. The write lock cannot be acquired if it is held by any readers or writers. Note: This lock is not *re-entrant*. Repeated calls after already acquired will block. This is the write-side of a shared lock. See :class:`Lock` for a standard exclusive lock and :class:`ReadLock` for the read-side of a shared lock. """ _NODE_NAME = "__lock__" _EXCLUDE_NAMES = ["__lock__", "__rlock__"] class ReadLock(Lock): """Kazoo Read Lock Example usage with a :class:`~kazoo.client.KazooClient` instance: .. code-block:: python zk = KazooClient() zk.start() lock = zk.ReadLock("/lockpath", "my-identifier") with lock: # blocks waiting for outstanding writers # do something with the lock The lock path passed to WriteLock and ReadLock must match for them to communicate. The read lock blocks if it is held by any writers, but multiple readers may hold the lock. Note: This lock is not *re-entrant*. Repeated calls after already acquired will block. This is the read-side of a shared lock. See :class:`Lock` for a standard exclusive lock and :class:`WriteLock` for the write-side of a shared lock. """ _NODE_NAME = "__rlock__" _EXCLUDE_NAMES = ["__lock__"] class Semaphore(object): """A Zookeeper-based Semaphore This synchronization primitive operates in the same manner as the Python threading version, except that it uses the concept of leases to indicate how many leases are available for the lock rather than counting. Note: This lock is not meant to be *re-entrant*. Example: .. code-block:: python zk = KazooClient() semaphore = zk.Semaphore("/leasepath", "my-identifier") with semaphore: # blocks waiting for lock acquisition # do something with the semaphore .. warning:: This class stores the allowed max_leases as the data on the top-level semaphore node. The stored value is checked once against the max_leases of each instance.
This check is performed when acquire is called the first time. The semaphore node needs to be deleted to change the allowed leases. .. versionadded:: 0.6 The Semaphore class. .. versionadded:: 1.1 The max_leases check. """ def __init__(self, client, path, identifier=None, max_leases=1): """Create a Kazoo Semaphore :param client: A :class:`~kazoo.client.KazooClient` instance. :param path: The semaphore path to use. :param identifier: Name to use for this lock contender. This can be useful for querying to see who the current lock contenders are. :param max_leases: The maximum number of leases available for the semaphore. """ # Implementation notes about how excessive thundering herd # and watches are avoided # - A node (lease pool) holds children for each lease in use # - A lock is acquired for a process attempting to acquire a # lease. If a lease is available, the ephemeral node is # created in the lease pool and the lock is released. # - Only the lock holder watches for children changes in the # lease pool self.client = client self.path = path # some data is written to the node. this can be queried via # contenders() to see who is contending for the lock self.data = str(identifier or "").encode('utf-8') self.max_leases = max_leases self.wake_event = client.handler.event_object() self.create_path = self.path + "/" + uuid.uuid4().hex self.lock_path = path + '-' + '__lock__' self.is_acquired = False self.assured_path = False self.cancelled = False self._session_expired = False def _ensure_path(self): result = self.client.ensure_path(self.path) self.assured_path = True if result is True: # node did already exist data, _ = self.client.get(self.path) try: leases = int(data.decode('utf-8')) except (ValueError, TypeError): # ignore non-numeric data, maybe the node data is used # for other purposes pass else: if leases != self.max_leases: raise ValueError( "Inconsistent max leases: %s, expected: %s" % (leases, self.max_leases) ) else: self.client.set(self.path, str(self.max_leases).encode('utf-8')) def cancel(self): """Cancel a pending semaphore acquire.""" self.cancelled = True self.wake_event.set() def acquire(self, blocking=True, timeout=None): """Acquire the semaphore. By default, blocks and waits forever. :param blocking: Block until semaphore is obtained or return immediately. :type blocking: bool :param timeout: Don't wait forever to acquire the semaphore. :type timeout: float or None :returns: Was the semaphore acquired? :rtype: bool :raises: ValueError if the max_leases value doesn't match the stored value. :exc:`~kazoo.exceptions.LockTimeout` if the semaphore wasn't acquired within `timeout` seconds. .. versionadded:: 1.1 The blocking, timeout arguments and the max_leases check. """ # If the semaphore had previously been canceled, make sure to # reset that state. self.cancelled = False try: self.is_acquired = self.client.retry( self._inner_acquire, blocking=blocking, timeout=timeout) except KazooException: # if we did ultimately fail, attempt to clean up self._best_effort_cleanup() self.cancelled = False raise return self.is_acquired def _inner_acquire(self, blocking, timeout=None): """Inner loop that runs from the top anytime a command hits a retryable Zookeeper exception.""" self._session_expired = False self.client.add_listener(self._watch_session) if not self.assured_path: self._ensure_path() # Do we already have a lease?
if self.client.exists(self.create_path): return True w = _Watch(duration=timeout) w.start() lock = self.client.Lock(self.lock_path, self.data) try: gotten = lock.acquire(blocking=blocking, timeout=w.leftover()) if not gotten: return False while True: self.wake_event.clear() # Attempt to grab our lease... if self._get_lease(): return True if blocking: # If blocking, wait until self._watch_lease_change() is # called before returning self.wake_event.wait(w.leftover()) if not self.wake_event.isSet(): raise LockTimeout( "Failed to acquire semaphore on %s " "after %s seconds" % (self.path, timeout)) else: return False finally: lock.release() def _watch_lease_change(self, event): self.wake_event.set() def _get_lease(self, data=None): # Make sure the session is still valid if self._session_expired: raise ForceRetryError("Retry on session loss at top") # Make sure that the request hasn't been canceled if self.cancelled: raise CancelledError("Semaphore cancelled") # Get a list of the current potential lock holders. If they change, # notify our wake_event object. This is used to unblock a blocking # self._inner_acquire call. children = self.client.get_children(self.path, self._watch_lease_change) # If there are leases available, acquire one if len(children) < self.max_leases: self.client.create(self.create_path, self.data, ephemeral=True) # Check if our acquisition was successful or not. Update our state. if self.client.exists(self.create_path): self.is_acquired = True else: self.is_acquired = False # Return current state return self.is_acquired def _watch_session(self, state): if state == KazooState.LOST: self._session_expired = True self.wake_event.set() # Return true to de-register return True def _best_effort_cleanup(self): try: self.client.delete(self.create_path) except KazooException: # pragma: nocover pass def release(self): """Release the lease immediately.""" return self.client.retry(self._inner_release) def _inner_release(self): if not self.is_acquired: return False try: self.client.delete(self.create_path) except NoNodeError: # pragma: nocover pass self.is_acquired = False return True def lease_holders(self): """Return an unordered list of the current lease holders. .. note:: If the lease holder did not set an identifier, it will appear as a blank string. """ if not self.client.exists(self.path): return [] children = self.client.get_children(self.path) lease_holders = [] for child in children: try: data, stat = self.client.get(self.path + "/" + child) lease_holders.append(data.decode('utf-8')) except NoNodeError: # pragma: nocover pass return lease_holders def __enter__(self): self.acquire() def __exit__(self, exc_type, exc_value, traceback): self.release() ``` #### File: kazoo/kazoo/retry.py ```python import logging import random import time from kazoo.exceptions import ( ConnectionClosedError, ConnectionLoss, KazooException, OperationTimeoutError, SessionExpiredError, ) log = logging.getLogger(__name__) class ForceRetryError(Exception): """Raised when some recipe logic wants to force a retry.""" class RetryFailedError(KazooException): """Raised when retrying an operation ultimately failed, after retrying the maximum number of attempts. 
""" class InterruptedError(RetryFailedError): """Raised when the retry is forcibly interrupted by the interrupt function""" class KazooRetry(object): """Helper for retrying a method in the face of retry-able exceptions""" RETRY_EXCEPTIONS = ( ConnectionLoss, OperationTimeoutError, ForceRetryError ) EXPIRED_EXCEPTIONS = ( SessionExpiredError, ) def __init__(self, max_tries=1, delay=0.1, backoff=2, max_jitter=0.8, max_delay=60, ignore_expire=True, sleep_func=time.sleep, deadline=None, interrupt=None): """Create a :class:`KazooRetry` instance for retrying function calls :param max_tries: How many times to retry the command. -1 means infinite tries. :param delay: Initial delay between retry attempts. :param backoff: Backoff multiplier between retry attempts. Defaults to 2 for exponential backoff. :param max_jitter: Additional max jitter period to wait between retry attempts to avoid slamming the server. :param max_delay: Maximum delay in seconds, regardless of other backoff settings. Defaults to one minute. :param ignore_expire: Whether a session expiration should be ignored and treated as a retry-able command. :param interrupt: Function that will be called with no args that may return True if the retry should be ceased immediately. This will be called no more than every 0.1 seconds during a wait between retries. """ self.max_tries = max_tries self.delay = delay self.backoff = backoff self.max_jitter = int(max_jitter * 100) self.max_delay = float(max_delay) self._attempts = 0 self._cur_delay = delay self.deadline = deadline self._cur_stoptime = None self.sleep_func = sleep_func self.retry_exceptions = self.RETRY_EXCEPTIONS self.interrupt = interrupt if ignore_expire: self.retry_exceptions += self.EXPIRED_EXCEPTIONS def reset(self): """Reset the attempt counter""" self._attempts = 0 self._cur_delay = self.delay self._cur_stoptime = None def copy(self): """Return a clone of this retry manager""" obj = KazooRetry(max_tries=self.max_tries, delay=self.delay, backoff=self.backoff, max_jitter=self.max_jitter / 100.0, max_delay=self.max_delay, sleep_func=self.sleep_func, deadline=self.deadline, interrupt=self.interrupt) obj.retry_exceptions = self.retry_exceptions return obj def __call__(self, func, *args, **kwargs): """Call a function with arguments until it completes without throwing a Kazoo exception :param func: Function to call :param args: Positional arguments to call the function with :params kwargs: Keyword arguments to call the function with The function will be called until it doesn't throw one of the retryable exceptions (ConnectionLoss, OperationTimeout, or ForceRetryError), and optionally retrying on session expiration. """ self.reset() while True: try: if self.deadline is not None and self._cur_stoptime is None: self._cur_stoptime = time.time() + self.deadline return func(*args, **kwargs) except ConnectionClosedError: raise except self.retry_exceptions: # Note: max_tries == -1 means infinite tries. 
if self._attempts == self.max_tries: raise RetryFailedError("Too many retry attempts") self._attempts += 1 sleeptime = self._cur_delay + ( random.randint(0, self.max_jitter) / 100.0) if self._cur_stoptime is not None and \ time.time() + sleeptime >= self._cur_stoptime: raise RetryFailedError("Exceeded retry deadline") if self.interrupt: while sleeptime > 0: # Break the time period down and sleep for no # longer than 0.1 before calling the interrupt if sleeptime < 0.1: self.sleep_func(sleeptime) sleeptime -= sleeptime else: self.sleep_func(0.1) sleeptime -= 0.1 if self.interrupt(): raise InterruptedError() else: self.sleep_func(sleeptime) self._cur_delay = min(self._cur_delay * self.backoff, self.max_delay) ``` #### File: kazoo/tests/test_cache.py ```python import uuid from mock import patch, call, Mock from nose.tools import eq_, ok_, assert_not_equal, raises from kazoo.testing import KazooTestCase from kazoo.exceptions import KazooException from kazoo.recipe.cache import TreeCache, TreeNode, TreeEvent class KazooTreeCacheTests(KazooTestCase): def setUp(self): super(KazooTreeCacheTests, self).setUp() self._event_queue = self.client.handler.queue_impl() self._error_queue = self.client.handler.queue_impl() self.path = None self.cache = None def tearDown(self): super(KazooTreeCacheTests, self).tearDown() if not self._error_queue.empty(): try: raise self._error_queue.get() except FakeException: pass def make_cache(self): if self.cache is None: self.path = '/' + uuid.uuid4().hex self.cache = TreeCache(self.client, self.path) self.cache.listen(lambda event: self._event_queue.put(event)) self.cache.listen_fault(lambda error: self._error_queue.put(error)) self.cache.start() return self.cache def wait_cache(self, expect=None, since=None, timeout=10): started = since is None while True: event = self._event_queue.get(timeout=timeout) if started: if expect is not None: eq_(event.event_type, expect) return event if event.event_type == since: started = True if expect is None: return def spy_client(self, method_name): method = getattr(self.client, method_name) return patch.object(self.client, method_name, wraps=method) def test_start(self): self.make_cache() self.wait_cache(since=TreeEvent.INITIALIZED) stat = self.client.exists(self.path) eq_(stat.version, 0) eq_(self.cache._state, TreeCache.STATE_STARTED) eq_(self.cache._root._state, TreeNode.STATE_LIVE) @raises(KazooException) def test_start_started(self): self.make_cache() self.cache.start() @raises(KazooException) def test_start_closed(self): self.make_cache() self.cache.start() self.cache.close() self.cache.start() def test_close(self): self.make_cache() self.wait_cache(since=TreeEvent.INITIALIZED) self.client.create(self.path + '/foo/bar/baz', makepath=True) for _ in range(3): self.wait_cache(TreeEvent.NODE_ADDED) self.cache.close() # nothing should be published since tree closed ok_(self._event_queue.empty()) # tree should be empty eq_(self.cache._root._children, {}) eq_(self.cache._root._data, None) eq_(self.cache._state, TreeCache.STATE_CLOSED) # node state should not be changed assert_not_equal(self.cache._root._state, TreeNode.STATE_DEAD) def test_children_operation(self): self.make_cache() self.wait_cache(since=TreeEvent.INITIALIZED) self.client.create(self.path + '/test_children', b'test_children_1') event = self.wait_cache(TreeEvent.NODE_ADDED) eq_(event.event_type, TreeEvent.NODE_ADDED) eq_(event.event_data.path, self.path + '/test_children') eq_(event.event_data.data, b'test_children_1') eq_(event.event_data.stat.version, 0) 
self.client.set(self.path + '/test_children', b'test_children_2') event = self.wait_cache(TreeEvent.NODE_UPDATED) eq_(event.event_type, TreeEvent.NODE_UPDATED) eq_(event.event_data.path, self.path + '/test_children') eq_(event.event_data.data, b'test_children_2') eq_(event.event_data.stat.version, 1) self.client.delete(self.path + '/test_children') event = self.wait_cache(TreeEvent.NODE_REMOVED) eq_(event.event_type, TreeEvent.NODE_REMOVED) eq_(event.event_data.path, self.path + '/test_children') eq_(event.event_data.data, b'test_children_2') eq_(event.event_data.stat.version, 1) def test_subtree_operation(self): self.make_cache() self.wait_cache(since=TreeEvent.INITIALIZED) self.client.create(self.path + '/foo/bar/baz', makepath=True) for relative_path in ('/foo', '/foo/bar', '/foo/bar/baz'): event = self.wait_cache(TreeEvent.NODE_ADDED) eq_(event.event_type, TreeEvent.NODE_ADDED) eq_(event.event_data.path, self.path + relative_path) eq_(event.event_data.data, b'') eq_(event.event_data.stat.version, 0) self.client.delete(self.path + '/foo', recursive=True) for relative_path in ('/foo/bar/baz', '/foo/bar', '/foo'): event = self.wait_cache(TreeEvent.NODE_REMOVED) eq_(event.event_type, TreeEvent.NODE_REMOVED) eq_(event.event_data.path, self.path + relative_path) def test_get_data(self): cache = self.make_cache() self.wait_cache(since=TreeEvent.INITIALIZED) self.client.create(self.path + '/foo/bar/baz', b'@', makepath=True) self.wait_cache(TreeEvent.NODE_ADDED) self.wait_cache(TreeEvent.NODE_ADDED) self.wait_cache(TreeEvent.NODE_ADDED) with patch.object(cache, '_client'): # disable any remote operation eq_(cache.get_data(self.path).data, b'') eq_(cache.get_data(self.path).stat.version, 0) eq_(cache.get_data(self.path + '/foo').data, b'') eq_(cache.get_data(self.path + '/foo').stat.version, 0) eq_(cache.get_data(self.path + '/foo/bar').data, b'') eq_(cache.get_data(self.path + '/foo/bar').stat.version, 0) eq_(cache.get_data(self.path + '/foo/bar/baz').data, b'@') eq_(cache.get_data(self.path + '/foo/bar/baz').stat.version, 0) def test_get_children(self): cache = self.make_cache() self.wait_cache(since=TreeEvent.INITIALIZED) self.client.create(self.path + '/foo/bar/baz', b'@', makepath=True) self.wait_cache(TreeEvent.NODE_ADDED) self.wait_cache(TreeEvent.NODE_ADDED) self.wait_cache(TreeEvent.NODE_ADDED) with patch.object(cache, '_client'): # disable any remote operation eq_(cache.get_children(self.path + '/foo/bar/baz'), frozenset()) eq_(cache.get_children(self.path + '/foo/bar'), frozenset(['baz'])) eq_(cache.get_children(self.path + '/foo'), frozenset(['bar'])) eq_(cache.get_children(self.path), frozenset(['foo'])) @raises(ValueError) def test_get_data_out_of_tree(self): self.make_cache() self.wait_cache(since=TreeEvent.INITIALIZED) self.cache.get_data('/out_of_tree') @raises(ValueError) def test_get_children_out_of_tree(self): self.make_cache() self.wait_cache(since=TreeEvent.INITIALIZED) self.cache.get_children('/out_of_tree') def test_get_data_no_node(self): cache = self.make_cache() self.wait_cache(since=TreeEvent.INITIALIZED) with patch.object(cache, '_client'): # disable any remote operation eq_(cache.get_data(self.path + '/non_exists'), None) def test_get_children_no_node(self): cache = self.make_cache() self.wait_cache(since=TreeEvent.INITIALIZED) with patch.object(cache, '_client'): # disable any remote operation eq_(cache.get_children(self.path + '/non_exists'), None) def test_session_reconnected(self): self.make_cache() self.wait_cache(since=TreeEvent.INITIALIZED) 
self.client.create(self.path + '/foo') event = self.wait_cache(TreeEvent.NODE_ADDED) eq_(event.event_data.path, self.path + '/foo') with self.spy_client('get_async') as get_data: with self.spy_client('get_children_async') as get_children: # session suspended self.lose_connection(self.client.handler.event_object) self.wait_cache(TreeEvent.CONNECTION_SUSPENDED) # There are a serial refreshing operation here. But NODE_ADDED # events will not be raised because the zxid of nodes are the # same during reconnecting. # connection restore self.wait_cache(TreeEvent.CONNECTION_RECONNECTED) # wait for outstanding operations while self.cache._outstanding_ops > 0: self.client.handler.sleep_func(0.1) # inspect in-memory nodes _node_root = self.cache._root _node_foo = self.cache._root._children['foo'] # make sure that all nodes are refreshed get_data.assert_has_calls([ call(self.path, watch=_node_root._process_watch), call(self.path + '/foo', watch=_node_foo._process_watch), ], any_order=True) get_children.assert_has_calls([ call(self.path, watch=_node_root._process_watch), call(self.path + '/foo', watch=_node_foo._process_watch), ], any_order=True) def test_root_recreated(self): self.make_cache() self.wait_cache(since=TreeEvent.INITIALIZED) # remove root node self.client.delete(self.path) event = self.wait_cache(TreeEvent.NODE_REMOVED) eq_(event.event_type, TreeEvent.NODE_REMOVED) eq_(event.event_data.data, b'') eq_(event.event_data.path, self.path) eq_(event.event_data.stat.version, 0) # re-create root node self.client.ensure_path(self.path) event = self.wait_cache(TreeEvent.NODE_ADDED) eq_(event.event_type, TreeEvent.NODE_ADDED) eq_(event.event_data.data, b'') eq_(event.event_data.path, self.path) eq_(event.event_data.stat.version, 0) self.assertTrue( self.cache._outstanding_ops >= 0, 'unexpected outstanding ops %r' % self.cache._outstanding_ops) def test_exception_handler(self): error_value = FakeException() error_handler = Mock() with patch.object(TreeNode, 'on_deleted') as on_deleted: on_deleted.side_effect = [error_value] self.make_cache() self.cache.listen_fault(error_handler) self.cache.close() error_handler.assert_called_once_with(error_value) def test_exception_suppressed(self): self.make_cache() self.wait_cache(since=TreeEvent.INITIALIZED) # stoke up ConnectionClosedError self.client.stop() self.client.close() self.client.handler.start() # keep the async completion self.wait_cache(since=TreeEvent.CONNECTION_LOST) with patch.object(TreeNode, 'on_created') as on_created: self.cache._root._call_client('exists', '/') self.cache._root._call_client('get', '/') self.cache._root._call_client('get_children', '/') self.wait_cache(since=TreeEvent.INITIALIZED) on_created.assert_not_called() eq_(self.cache._outstanding_ops, 0) class FakeException(Exception): pass ```
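A short usage sketch for the lock recipe above, combining the documented `acquire(blocking=..., timeout=...)` signature with the `LockTimeout` it raises; the ZooKeeper address is an assumption:

```python
from kazoo.client import KazooClient
from kazoo.exceptions import LockTimeout

zk = KazooClient(hosts="127.0.0.1:2181")  # assumed local ZooKeeper ensemble
zk.start()

lock = zk.Lock("/lockpath", "my-identifier")
try:
    # blocking acquire with a deadline; may return False or raise LockTimeout
    if lock.acquire(blocking=True, timeout=5):
        try:
            print("holding /lockpath, contenders:", lock.contenders())
        finally:
            lock.release()
except LockTimeout:
    print("could not acquire /lockpath within 5 seconds")
finally:
    zk.stop()
```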
{ "source": "0xbc/chiasm-shell", "score": 2 }
#### File: chiasm-shell/chiasm_shell/assembler.py ```python from __future__ import absolute_import import logging import re from chiasm_shell.backend import Backend l = logging.getLogger('chiasm_shell.assembler') try: import keystone as ks except ImportError as e: l.error("*** KEYSTONE IMPORT FAILURE ***") l.error("If you thought you'd already installed keystone-engine,") l.error("please ensure that you've got CMake and any other") l.error("Keystone dependencies installed on your system and") l.error("then try and build it/pip install it again.") l.error("Consult http://www.keystone-engine.org/docs/ for specifics.") raise e class Assembler(Backend): """ Assembler - uses keystone to print opcodes from assembly input """ def __init__(self): """ Create a new Assembler instance. """ self._ks = None self._last_encoding = None self._arch = None self.mode = None self.modes = None self.valid_archs = None Backend.__init__(self) def _init_backend(self): """ _init_backend is responsible for setting the prompt, custom init stuff. """ self.prompt = 'asm> ' self._build_dicts() self._arch = ('x86', '32') self._set_arch(*self._arch) self._last_encoding = None def _build_dicts(self): """ Build dicts of valid arch and known mode values. """ regex_arch = re.compile(r'^KS_ARCH_\S+$') regex_mode = re.compile(r'^KS_MODE_\S+$') d = ks.__dict__ self.valid_archs = {a: d[a] for a in d.keys() if re.match(regex_arch, a) and ks.ks_arch_supported(d[a])} self.modes = {m: d[m] for m in d.keys() if re.match(regex_mode, m)} def clear_state(self): self._last_encoding = None def _set_arch(self, arch, *modes): """ Try and set the current architecture """ try: a = self.valid_archs[''.join(['KS_ARCH_', arch.upper()])] if a is None: l.error("Invalid architecture selected - run lsarch for valid options") return False ms = [self.modes[''.join(['KS_MODE_', m.upper()])] for m in modes] except KeyError: l.error("ERROR: Invalid architecture or mode string specified") return False try: _ks = ks.Ks(a, sum(ms)) self._arch = (arch, modes) l.debug("Architecture set to %s, mode(s): %s", arch, ', '.join(modes)) self._ks = _ks except ks.KsError as e: l.error("ERROR: %s", e) return False return True def get_arch(self): return "{}, mode(s): {}".format(self._arch[0], ', '.join(self._arch[1])) def default(self, line): """ Default behaviour - if no other commands are detected, try and assemble the current input according to the currently set architecture. :param line: Current line's text to try and assemble. """ try: encoding, dummy_insn_count = self._ks.asm(line) self._last_encoding = encoding l.info("".join('\\x{:02x}'.format(opcode) for opcode in encoding)) except ks.KsError as e: l.error("ERROR: %s", e) def do_lsarch(self, dummy_args): """ Lists the architectures available in the installed version of keystone. """ for a in self.valid_archs: l.info(a[8:].lower()) def do_setarch(self, args): """ Set the current architecture. :param args: Lowercase string representing the requested architecture. """ a = args.split() if len(a) < 2: l.error("Need to specify at least arch and one mode") return arch = a[0] modes = a[1:] if self._set_arch(arch, *modes) is True: l.info("Architecture set to %s, mode(s): %s", arch, ', '.join(modes)) def do_lsmodes(self, dummy_args): """ Lists the known modes across all architectures. Note that not all modes apply to all architectures. 
""" for a in sorted(self.modes): l.info(a[8:].lower()) def do_count(self, dummy_args): """ Prints the number of bytes emitted by the last successful encoding (or nothing if no successful encodings have occurred yet.) """ if self._last_encoding is not None: l.info(len(self._last_encoding)) ``` #### File: chiasm-shell/chiasm_shell/backend.py ```python from __future__ import absolute_import from cmd import Cmd import logging from chiasm_shell.config import get_backends l = logging.getLogger('chiasm_shell.backend') class Backend(Cmd): """ Backend - common functions shared by both assembler and disassembler. """ def __init__(self): """ Create a new Backend instance. """ Cmd.__init__(self) self._init_backend() self.launch_module = None def _init_backend(self): """ _init_backend is responsible for setting the prompt """ raise NotImplementedError("Backends need to implement _init_backend") def clear_state(self): """ Optional interface to reset internal backend state. """ pass def default(self, line): raise NotImplementedError("Backends need to implement default hanlders") def get_arch(self): """ Optional interface to display the current architecture. """ pass def do_quit(self, dummy_args): """ Quits chiasm shell - return to system prompt. """ raise SystemExit def do_exit(self, dummy_args): """ Quits chiasm shell - return to system prompt. """ raise SystemExit def cmdloop(self, intro=None): """ Overridden cmdloop to catch CTRL-Cs """ try: Cmd.cmdloop(self, intro) except KeyboardInterrupt: l.info("type \'quit\' or \'exit\' to exit") self.cmdloop() def do_switch(self, arg): """ Switch to another chiasm backend (type lsbackends to see what's available). """ if arg.strip() == '': l.error("usage: switch <backend>") return False backends = get_backends() if arg in backends: new_backend = backends[arg] new_backend.clear_state() self.launch_module = backends[arg] return True # True = quit this backend's loop else: l.error("backend %s not found", arg) self.launch_module = None def do_lsbackends(self, dummy_args): """ List the chiasm backends currently available. """ l.info(", ".join(get_backends().keys())) def postcmd(self, stop, line): """ Just overridden for debugging purposes. """ l.debug("i'm in postcmd, line is %s", line) return stop ``` #### File: chiasm-shell/chiasm_shell/disassembler.py ```python from __future__ import absolute_import import logging import re import binascii import capstone as cs from chiasm_shell.backend import Backend l = logging.getLogger('chiasm_shell.disassembler') class Disassembler(Backend): """ Disassembler - uses caspstone to print assembly from opcode input """ def __init__(self): """ Create a new Disassembler instance. """ self._last_decoding = None self._cs = None self._firstaddr = None self._arch = None self.valid_archs = None self.modes = None Backend.__init__(self) def _init_backend(self): """ _init_backend is responsible for setting the prompt, custom init stuff. """ self.prompt = 'disasm> ' self._build_dicts() self._arch = ('x86', '32') self._set_arch(*self._arch) self._last_decoding = None self._firstaddr = 0x1000 def _build_dicts(self): """ Build dicts of valid arch and known mode values. 
""" regex_arch = re.compile(r'^CS_ARCH_\S+$') regex_mode = re.compile(r'^CS_MODE_\S+$') d = cs.__dict__ self.valid_archs = {a: d[a] for a in d.keys() if re.match(regex_arch, a) and cs.cs_support(d[a])} self.modes = {m: d[m] for m in d.keys() if re.match(regex_mode, m)} def clear_state(self): self._last_decoding = None def _set_arch(self, arch, *modes): """ Try and set the current architecture """ try: a = self.valid_archs[''.join(['CS_ARCH_', arch.upper()])] if a is None: l.error("Invalid architecture selected - run lsarch for valid options") return False ms = [self.modes[''.join(['CS_MODE_', m.upper()])] for m in modes] except KeyError: l.error("ERROR: Invalid architecture or mode string specified") return False try: _cs = cs.Cs(a, sum(ms)) self._arch = (arch, modes) l.debug("Architecture set to %s, mode(s): %s", arch, ', '.join(modes)) self._cs = _cs except cs.CsError as e: l.error("ERROR: %s", e) return False return True def get_arch(self): return "{}, mode(s): {}".format(self._arch[0], ', '.join(self._arch[1])) def default(self, line): """ Default behaviour - if no other commands are detected, try and disassemble the current input according to the currently set architecture and modes.. :param line: Current line's text to try and disassemble. """ # quick, brittle hack to enforce backslash encoding for now regex = re.compile('^(\\\\x[a-fA-F0-9]{2})+$') if not regex.match(line.strip()): l.error("\\xXX\\xXX... is the only valid input format (XX = hex digits)") return try: self._last_decoding = [] stripped_line = re.sub(r'\\x([0-9a-fA-F]+)', r'\1', line) for (addr, size, mn, op_str) in \ self._cs.disasm_lite(binascii.a2b_hex(stripped_line), self._firstaddr): self._last_decoding.append((addr, size, mn, op_str)) disas_str = "0x{:x}:\t{}\t{}".format(addr, mn, op_str) l.info(disas_str) except cs.CsError as e: l.error("ERROR: %s", e) except ValueError: l.error("\\xXX\\xXX... is the only valid input format (XX = hex digits)") def do_lsarch(self, dummy_args): """ Lists the architectures available in the installed version of keystone. """ for a in self.valid_archs: l.info(a[8:].lower()) def do_setarch(self, args): """ Set the current architecture. :param args: Lowercase string representing the requested architecture. """ a = args.split() if len(a) < 2: l.error("Need to specify at least arch and one mode") return arch = a[0] modes = a[1:] if self._set_arch(arch, *modes) is True: l.info("Architecture set to %s, mode(s): %s", arch, ', '.join(modes)) def do_lsmodes(self, dummy_args): """ Lists the known modes across all architectures. Note that not all modes apply to all architectures. """ for a in sorted(self.modes): l.info(a[8:].lower()) def do_setfirstaddr(self, args): """ Sets the hex address of the first instruction in the buffer to be disassembled. """ a = args.split() if len(a) < 1: return try: addr = int(a[0], 16) self._firstaddr = addr except ValueError: l.error("Input not recognised as a valid hex value - start address not changed") ```
{ "source": "0xbe7a/PFERD", "score": 3 }
#### File: PFERD/PFERD/errors.py ```python import logging from typing import Any, Callable, TypeVar, cast from rich.console import Console from .logging import PrettyLogger LOGGER = logging.getLogger(__name__) PRETTY = PrettyLogger(LOGGER) class FatalException(Exception): """ A fatal exception occurred. Recovery is not possible. """ TFun = TypeVar('TFun', bound=Callable[..., Any]) def swallow_and_print_errors(function: TFun) -> TFun: """ Decorates a function, swallows all errors, logs them and returns None if one occurred. """ def inner(*args: Any, **kwargs: Any) -> Any: # pylint: disable=broad-except try: return function(*args, **kwargs) except FatalException as error: PRETTY.error(str(error)) return None except Exception as error: Console().print_exception() return None return cast(TFun, inner) def retry_on_io_exception(max_retries: int, message: str) -> Callable[[TFun], TFun]: """ Decorates a function and retries it on IO errors until the max retries count is hit. """ def retry(function: TFun) -> TFun: def inner(*args: Any, **kwargs: Any) -> Any: for i in range(0, max_retries): # pylint: disable=broad-except try: return function(*args, **kwargs) except IOError as error: PRETTY.warning(f"Error during operation '{message}': {error}") PRETTY.warning( f"Retrying operation '{message}'. Remaining retries: {max_retries - 1 - i}") return cast(TFun, inner) return retry ``` #### File: PFERD/PFERD/organizer.py ```python import filecmp import logging import os import shutil from enum import Enum from pathlib import Path, PurePath from typing import Callable, List, Optional, Set from .download_summary import DownloadSummary from .location import Location from .logging import PrettyLogger from .utils import prompt_yes_no LOGGER = logging.getLogger(__name__) PRETTY = PrettyLogger(LOGGER) class ConflictType(Enum): """ The type of the conflict. A file might not exist anymore and will be deleted or it might be overwritten with a newer version. FILE_OVERWRITTEN: An existing file will be updated MARKED_FILE_OVERWRITTEN: A file is written for the second+ time in this run FILE_DELETED: The file was deleted """ FILE_OVERWRITTEN = "overwritten" MARKED_FILE_OVERWRITTEN = "marked_file_overwritten" FILE_DELETED = "deleted" class FileConflictResolution(Enum): """ The reaction when confronted with a file conflict: DESTROY_EXISTING: Delete/overwrite the current file KEEP_EXISTING: Keep the current file DEFAULT: Do whatever the PFERD authors thought is sensible PROMPT: Interactively ask the user """ DESTROY_EXISTING = "destroy" KEEP_EXISTING = "keep" DEFAULT = "default" PROMPT = "prompt" FileConflictResolver = Callable[[PurePath, ConflictType], FileConflictResolution] def resolve_prompt_user(_path: PurePath, conflict: ConflictType) -> FileConflictResolution: """ Resolves conflicts by asking the user if a file was written twice or will be deleted.
""" if conflict == ConflictType.FILE_OVERWRITTEN: return FileConflictResolution.DESTROY_EXISTING return FileConflictResolution.PROMPT class FileAcceptException(Exception): """An exception while accepting a file.""" class Organizer(Location): """A helper for managing downloaded files.""" def __init__(self, path: Path, conflict_resolver: FileConflictResolver = resolve_prompt_user): """Create a new organizer for a given path.""" super().__init__(path) self._known_files: Set[Path] = set() # Keep the root dir self._known_files.add(path.resolve()) self.download_summary = DownloadSummary() self.conflict_resolver = conflict_resolver def accept_file(self, src: Path, dst: PurePath) -> Optional[Path]: """ Move a file to this organizer and mark it. Returns the path the file was moved to, to allow the caller to adjust the metadata. As you might still need to adjust the metadata when the file was identical (e.g. update the timestamp), the path is also returned in this case. In all other cases (ignored, not overwritten, etc.) this method returns None. """ # Windows limits the path length to 260 for *some* historical reason # If you want longer paths, you will have to add the "\\?\" prefix in front of # your path... # See: # https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file#maximum-path-length-limitation if os.name == 'nt': src_absolute = Path("\\\\?\\" + str(src.resolve())) dst_absolute = Path("\\\\?\\" + str(self.resolve(dst))) else: src_absolute = src.resolve() dst_absolute = self.resolve(dst) if not src_absolute.exists(): raise FileAcceptException("Source file does not exist") if not src_absolute.is_file(): raise FileAcceptException("Source is a directory") LOGGER.debug("Copying %s to %s", src_absolute, dst_absolute) if self._is_marked(dst): PRETTY.warning(f"File {str(dst_absolute)!r} was already written!") conflict = ConflictType.MARKED_FILE_OVERWRITTEN if self._resolve_conflict("Overwrite file?", dst_absolute, conflict, default=False): PRETTY.ignored_file(dst_absolute, "file was written previously") return None # Destination file is directory if dst_absolute.exists() and dst_absolute.is_dir(): prompt = f"Overwrite folder {dst_absolute} with file?" conflict = ConflictType.FILE_OVERWRITTEN if self._resolve_conflict(prompt, dst_absolute, conflict, default=False): shutil.rmtree(dst_absolute) else: PRETTY.warning(f"Could not add file {str(dst_absolute)!r}") return None # Destination file exists if dst_absolute.exists() and dst_absolute.is_file(): if filecmp.cmp(str(src_absolute), str(dst_absolute), shallow=False): # Bail out, nothing more to do PRETTY.ignored_file(dst_absolute, "same file contents") self.mark(dst) return dst_absolute prompt = f"Overwrite file {dst_absolute}?" 
conflict = ConflictType.FILE_OVERWRITTEN if not self._resolve_conflict(prompt, dst_absolute, conflict, default=True): PRETTY.ignored_file(dst_absolute, "user conflict resolution") return None self.download_summary.add_modified_file(dst_absolute) PRETTY.modified_file(dst_absolute) else: self.download_summary.add_new_file(dst_absolute) PRETTY.new_file(dst_absolute) # Create parent dir if needed dst_parent_dir: Path = dst_absolute.parent dst_parent_dir.mkdir(exist_ok=True, parents=True) # Move file shutil.move(str(src_absolute), str(dst_absolute)) self.mark(dst) return dst_absolute def mark(self, path: PurePath) -> None: """Mark a file as used so it will not get cleaned up.""" absolute_path = self.resolve(path) self._known_files.add(absolute_path) LOGGER.debug("Tracked %s", absolute_path) def _is_marked(self, path: PurePath) -> bool: """ Checks whether a file is marked. """ absolute_path = self.resolve(path) return absolute_path in self._known_files def cleanup(self) -> None: """Remove all untracked files in the organizer's dir.""" LOGGER.debug("Deleting all untracked files...") self._cleanup(self.path) def _cleanup(self, start_dir: Path) -> None: if not start_dir.exists(): return paths: List[Path] = list(start_dir.iterdir()) # Recursively clean paths for path in paths: if path.is_dir(): self._cleanup(path) else: if path.resolve() not in self._known_files: self._delete_file_if_confirmed(path) # Delete dir if it was empty and untracked dir_empty = len(list(start_dir.iterdir())) == 0 if start_dir.resolve() not in self._known_files and dir_empty: start_dir.rmdir() def _delete_file_if_confirmed(self, path: Path) -> None: prompt = f"Do you want to delete {path}" if self._resolve_conflict(prompt, path, ConflictType.FILE_DELETED, default=False): self.download_summary.add_deleted_file(path) path.unlink() else: PRETTY.ignored_file(path, "user conflict resolution") def _resolve_conflict( self, prompt: str, path: Path, conflict: ConflictType, default: bool ) -> bool: if not self.conflict_resolver: return prompt_yes_no(prompt, default=default) result = self.conflict_resolver(path, conflict) if result == FileConflictResolution.DEFAULT: return default if result == FileConflictResolution.KEEP_EXISTING: return False if result == FileConflictResolution.DESTROY_EXISTING: return True return prompt_yes_no(prompt, default=default) ```
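`Organizer` accepts any callable matching `FileConflictResolver`, so callers can script conflict handling instead of prompting. Below is a sketch of a custom resolver built only from the types defined above; the policy itself is a made-up example:

```python
from pathlib import Path, PurePath

def keep_everything(path: PurePath, conflict: ConflictType) -> FileConflictResolution:
    # Never delete files that vanished upstream; defer to PFERD's defaults otherwise.
    if conflict == ConflictType.FILE_DELETED:
        return FileConflictResolution.KEEP_EXISTING
    return FileConflictResolution.DEFAULT

organizer = Organizer(Path("downloads"), conflict_resolver=keep_everything)
```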
{ "source": "0xBEEFCAFE/yamaha_lipsync", "score": 2 }
#### File: rxv-master/rxv/__init__.py ```python from __future__ import division, absolute_import, print_function from .rxv import RXV from . import ssdp __all__ = ['RXV'] def find(): """Find all Yamah receivers on local network using SSDP search""" return [RXV(ctrl_url=ri.ctrl_url, model_name=ri.model_name) for ri in ssdp.discover()] ``` #### File: yamaha_lipsync/rxv-master/setup.py ```python from __future__ import absolute_import, division, print_function import sys from setuptools import setup, find_packages from setuptools.command.test import test as TestCommand class Tox(TestCommand): def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): import tox errno = tox.cmdline(self.test_args) sys.exit(errno) setup( name='rxv', version='0.1.2', description='Automation Library for Yamaha RX-V473, RX-V573, RX-V673, RX-V773 receivers', long_description=open('README.rst').read(), author='<NAME>', url="https://github.com/wuub/rxv", license='MIT', author_email='<EMAIL>', packages=find_packages(), install_requires=['requests'], tests_require=['tox'], zip_safe=False, cmdclass={'test': Tox}, classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Topic :: Software Development :: Libraries", "Topic :: Home Automation", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: Implementation :: PyPy" ] ) ```
{ "source": "0xbenhur/vuln-flask-web-app", "score": 2 }
#### File: vulns/iframe_injection/iframe_injection.py ```python from flask import render_template, send_file def iframe_injection_page(request, app): iframe_url = request.args.get('page') return render_template("iframe_injection.html", iframe_url=iframe_url) ``` #### File: vulns/xssinjection/xss_stored.py ```python from flask import render_template def xss_stored_page(request, app): messages = app.db_helper.execute_read('SELECT * FROM messages', {}) messages = list(map(lambda it: it[0], messages)) return render_template('xss-stored.html', messages=messages) def xss_stored_api(request, app): message = request.form['message'] result = app.db_helper.execute_write('INSERT INTO messages (message) VALUES (:msg)', { 'msg': message }) return xss_stored_page(request, app) ```
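The `:msg` placeholder in the insert above matches Python's DB-API named-parameter style, which keeps the payload out of the SQL text; the stored XSS in this app therefore comes from rendering the saved messages unescaped in the template, not from the query itself. A self-contained sketch of that binding, assuming (as the placeholder syntax suggests) a sqlite3-backed `db_helper`:

```python
import sqlite3

conn = sqlite3.connect(":memory:")  # stand-in for the app's real database
conn.execute("CREATE TABLE messages (message TEXT)")

# Named-parameter binding: the script payload is stored as inert data.
conn.execute("INSERT INTO messages (message) VALUES (:msg)",
             {"msg": "<script>alert(1)</script>"})
print([row[0] for row in conn.execute("SELECT * FROM messages")])
```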
{ "source": "0xBenjamin/proxytools", "score": 2 }
#### File: proxytools/proxytools/cleansocks.py ```python import argparse import concurrent.futures import os import re import sys import time def xrange(x,y): return iter(range(x,y)) sys.dont_write_bytecode = True def is_proxy(proxy): return re.match(r'^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$', proxy) def test_proxy(proxy): global g ip, port = proxy.split(':') sock = socks.socksocket() try: sock.set_proxy(socks.SOCKS5, ip, int(port)) sock.settimeout(args.timeout) sock.connect(('www.google.com', 80)) except Exception: pass # dead proxy, nothing to record else: print('GOOD | ' + proxy) g.append(proxy) finally: sock.close() parser = argparse.ArgumentParser(usage='%(prog)s <input> <output> [options]') parser.add_argument('input', help='file to scan') parser.add_argument('output', help='file to output') parser.add_argument('-t', '--threads', help='number of threads (default: 100)', default=100, type=int) parser.add_argument('-x', '--timeout', help='socket timeout seconds (default: 15)', default=15, type=int) args = parser.parse_args() try: import socks except ImportError: raise SystemExit('missing pysocks module (https://pypi.python.org/pypi/pysocks)') if not os.path.isfile(args.input): raise SystemExit('no such input file') for x in range(1, 40000000, 20000): print('Iteration: {} until {}'.format(str(x), str(x+20000))) with open(args.input) as f: deduped = [] r = f.readlines()[x:x+20000] for u in r: u = u.rstrip()+':1080' deduped.append(u) g = [] with concurrent.futures.ThreadPoolExecutor(max_workers=args.threads) as executor: checks = {executor.submit(test_proxy, proxy): proxy for proxy in deduped} for future in concurrent.futures.as_completed(checks): future.result() # propagate any unexpected worker exception with open(args.output, 'a') as k: ws = str() if len(g) > 0: for i in g: ws = ws+i+'\n' k.write(ws) g = [] time.sleep(0.1) ```
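The scanner's core check can be exercised on a single address without the batching loop; a sketch reusing the same PySocks calls as `test_proxy`, with the proxy address as a placeholder:

```python
import socks  # PySocks

def check_socks5(ip, port, timeout=15):
    """Return True if a SOCKS5 proxy at ip:port can reach www.google.com:80."""
    sock = socks.socksocket()
    sock.set_proxy(socks.SOCKS5, ip, int(port))
    sock.settimeout(timeout)
    try:
        sock.connect(('www.google.com', 80))
        return True
    except (socks.ProxyError, OSError):
        return False
    finally:
        sock.close()

print(check_socks5('127.0.0.1', 1080))  # placeholder address
```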
{ "source": "0xberkay/bilgprogramla-sinavlar", "score": 2 }
### def buyuk_kucuk_harf(s): # our function bos = "" # start with an empty string kosul = str(s) # make sure we work on a string for i in kosul: # for every character i in the string if i == i.upper(): # if i is an uppercase character i = i.lower() # convert i to lowercase bos += i # and append it to the result elif i == i.lower(): # if i is a lowercase character i = i.upper() # convert i to uppercase bos += i # and append it to the result return bos # return the result def soru3(): ilk_girdi = r"" # try an empty input to see what happens, because an empty list always matters ilk = buyuk_kucuk_harf(ilk_girdi) ilk_nedeni = "An empty list always matters" iki_girdi = r"RUBYONRAILS" # test whether it works on uppercase letters iki = buyuk_kucuk_harf(iki_girdi) # no problem iki_nedeni = "Checks it works with uppercase letters" uc_girdi = r"tatarböreği" # test whether it works on lowercase letters uc = buyuk_kucuk_harf(uc_girdi) # no problem uc_nedeni = "Checks it works with lowercase letters" dort_girdi = r"01100001" # test whether it works on digits dort = buyuk_kucuk_harf(dort_girdi) # no problem dort_nedeni = "Checks it works with digits" bes_girdi = r"¯\_(ツ)_/¯" # test whether it works on symbols bes = buyuk_kucuk_harf(bes_girdi) # no problem bes_nedeni = "Checks it works with symbols" alti_girdi = r"\n\t" # check escape characters, i.e. whether it uppercases the n and t alti = buyuk_kucuk_harf(alti_girdi) # no problem alti_nedeni = "Checks it works with special escapes" # the library I used to build the table # build a table with the library's function, each element becomes a new cell # the first row holds the headers table_data = [ ["no"," Input"," Output"," Reason"], ["1.",ilk_girdi,ilk,ilk_nedeni], ["2.",iki_girdi,iki,iki_nedeni], ["3.",uc_girdi,uc,uc_nedeni], ["4.",dort_girdi,dort,dort_nedeni], ["5.",bes_girdi,bes,bes_nedeni], ["6.",alti_girdi,alti,alti_nedeni] ] table = DoubleTable(table_data) # assign our table to table print(table.table) #testetme() runs question 3 # In[4]: ### ### Problem 4 Objects and Terminology (10 Points) ### # Using the definitions of classes A and B below, answer the questions that follow # by writing a line number in the blank. There may be several correct answers; # write ONLY 1 of them. #1. class A(): #2. x = 1 #3. def __init__(self, n): #4. self.y = n #5. A.x += 1 #6. def p(self): #7. print(self.y) #8. self.y += 3 #9. self.r() #10. def r(self): #11. self.y += 2 #12. print(self.y) #13. class B(A): #14. x = 10 #15. def __init__(self, n): #16. super().__init__(n) #17. sum = self.y + B.x #18. self.m = sum #19. def r(self): #20. self.y += self.x #21. print(self.m) #22. a = A(1) #23. b = B(2) #24. a.p() #25. b.p() def soru4(): print( f"""Answers {pmavi} -------- a. An instance attribute is created at line 3 b. A class attribute is created at line 14 c. A superclass definition begins at line 1 d. A class method begins at line 6 e. Overriding a method definition begins at line 16 """) # soru4() # In[5]: ### ### Problem 5 Creating Objects and for Loops (25 Points) ### # Considering the Menu and Kahvalti class code below.
### def buyuk_kucuk_harf(s): #fonksiyonumuz bos = "" #boş bir str atıyorum kosul = str(s) #koşulumuz str olması for i in kosul: # koşuldaki her i için if i == i.upper(): # eğer i büyük karakterse i = i.lower() # i yi büyük karakter yapıyoruz bos += i # boşada onu ekliyoruz elif i == i.lower(): # eğer i küçük karakterse i = i.upper() #i yi büyük karakter yapıyoruz bos += i # boşada onu ekliyoruz return bos # boşu return ediyoruz def soru3(): ilk_girdi = r"" #boş girdi girip nolduğuna bakıyoruz çünkü Bos liste her zaman önemlidir ilk = buyuk_kucuk_harf(ilk_girdi) ilk_nedeni = "Bos liste her zaman önemlidir" iki_girdi = r"RUBYONRAILS" #Büyük harflerde çalışıyor mu diye test ediyoruz iki = buyuk_kucuk_harf(iki_girdi) #bi sorun yok iki_nedeni = "Büyük harflerle çalışıyor mu kontrolü" uc_girdi = r"tatarböreği" #küçük harflerde çalışıyor mu diye test ediyoruz uc = buyuk_kucuk_harf(uc_girdi)#bi sorun yok uc_nedeni = "Küçük harflerle çalışıyor mu kontrolü" dort_girdi = r"01100001" # sayılarda çalışıyor mu diye test ediyoruz dort = buyuk_kucuk_harf(dort_girdi)#bi sorun yok dort_nedeni = "Sayılarla çalşıyor mu kontrolü" bes_girdi = r"¯\_(ツ)_/¯" # karakterlerde çalışıyor mu diye test ediyoruz bes = buyuk_kucuk_harf(bes_girdi)#bi sorun yok bes_nedeni = "Karakterlerle çalışıyor mu kontrolü" alti_girdi = r"\n\t" # kaçış karakterlerine bakıyoruz n ve t yi büyültecek mi diye alti = buyuk_kucuk_harf(alti_girdi)#bi sorun yok alti_nedeni = "Özel kaçışlarda çalışıyor mu" # tablo yapmak için kullandığım kütüphane #bir tablo oluşturuyorum kütüphanin fonksiyonuyla her eleman yeni hücre yapıyor # ilk sıra başlıklar oluyor table_data = [ ["sıra"," Girdi"," Çıktı"," Nedeni"], ["1.",ilk_girdi,ilk,ilk_nedeni], ["2.",iki_girdi,iki,iki_nedeni], ["3.",uc_girdi,uc,uc_nedeni], ["4.",dort_girdi,dort,dort_nedeni], ["5.",bes_girdi,bes,bes_nedeni], ["6.",alti_girdi,alti,alti_nedeni] ] table = DoubleTable(table_data) # tableye tablomuzu atıyorum print(table.table) #testetme() 3.soruyu çalıştırıyorum # In[4]: ### ### Problem 4 Nesneler ve Terminoloji (10 Puan) ### # Aşağıdaki A ve B sınıflarının tanımını kullanarak, devamındaki soruları bos # bırakılan yere satır numarasını yazarak cevaplandırın. Bir kaç doğru cevap # olabilir, SADECE 1 tanesini yazın. #1. class A(): #2. x = 1 #3. def __init__(self, n): #4. self.y = n #5. A.x += 1 #6. def p(self): #7. print(self.y) #8. self.y += 3 #9. self.r() #10. def r(self): #11. self.y += 2 #12. print(self.y) #13. class B(A): #14. x = 10 #15. def __init__(self, n): #16. super().__init__(n) #17. sum = self.y + B.x #18. self.m = sum #19. def r(self): #20. self.y += self.x #21. print(self.m) #22. a = A(1) #23. b = B(2) #24. a.p() #25. b.p() def soru4(): print( f"""Cevaplar {pmavi} -------- a. Satır 3 de/da bir nesne niteligi yaratılır b. Satır 14 de/da bir sınıf niteliği yaratılır c. Satır 1 de/da bir ustsınıf tanımı baslar d. Satır 6 de/da bir sınıf metodu baslar e. Satır 16 de/da bir metot tanımı ustu yazılmaya/override başlanır """) # soru4() # In[5]: ### ### Problem 5 Nesne yaratma ve for döngüleri (25 Puan) ### # Aşağıdaki Menu ve Kahvalti sınıf kodlarını göz önüne alarak. 
class Menu(): """Bir örnek nesne/instance menüde bir yemeği temsil eder.""" def __init__(self, isim, vejetaryen_mi, fiyat): self.isim = isim self.vejetaryen_mi = vejetaryen_mi assert fiyat > 0 self.fiyat = fiyat class Kahvalti(Menu): def __init__(self, isim, vejetaryen_mi, fiyat, kahvalti_fiyati): Menu.__init__(self, isim, vejetaryen_mi, fiyat) assert kahvalti_fiyati > 0 assert kahvalti_fiyati <= 10 self.kahvalti_fiyati = kahvalti_fiyati # a). # Cevap # animasyon oluşturmak için 2 tane ascii döneri yaptım .formatla renk ekledim a = ("""{} ▐██▌ ▐███ ▐███ ████, {} ╓████████████████████████████▄ ██████████████████████████████ ▐█████████████████████████████ └▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀████████████▌ ╓███████████` ████████████████████████████ ▐███████████████████████████ ]██████████████████████████▌ ██████████████████████████` █████████████ ▐████████████████████████▌ ▐████████████████████████▌ ████████████████████████` ████████████████████████ ▀█████████ ┌▄▄▄▄▄▄▄▄▄▄▄▄▄█████████▌ ██████████████████████` ██████████████████████ ▓█████████████████████ ▐██████████▀▀ ██████████▄ ████████████████████ ▐███████████████████ ╙██████████████████▌ ▀▀▀▀▀▀████▀▀▀▀▀▀ {} ▐███ ▐███ ▐███""".format(siyah,kahverengi,siyah)) a1 = ("""{} ▐██▌ ▐███ ▐███ ████, {} ╓████████████████████████████▄ ██████████████████████████████ ▐█████████████████████████████ ▀█████████████████████ ╓███████████████████ ████████████████████████████ ▐███████████████████████████ ]██████████████████████████▌ ██████████████████████████` █████████████ ▐████████████████████████▌ ▐████████████████████████▌ ████████████████████████` ████████████████████████ ▀█████████████████▌ ▄██████████████████ ██████████████████████` ██████████████████████ ▓█████████████████████ ███████████████ ███████████████ ████████████████████ ▐███████████████████ ╙██████████████████▌ ▀▀▀▀▀▀████▀▀▀▀▀▀ {} ▐███ ▐███ ▐███""".format(siyah,kahverengi,siyah)) def donerye():#animasyon bar = [ #bir bar listesi oluşturp 2 döneride ekledim a, a1 ] i = 0 # sayaç while i<20: # döngü yaptım sil() # yukardakı sil fonkumu çağırdımki önceki print gözükmesin print(bar[i % len(bar)], end="\r") #aynı satır başı şeklinde barı printliyorum time.sleep(0.3) #sleep fonksiyonu 0.3 delay için i += 1 #sayaç def besAsikki(): donerye() # döner pişti :) yemek1 = Menu("doner",False,24) #yemek menüsünü örneklendiriyorum ve onu yemek1 e atıyorum print(f"{kirmizi}Seçilen yemek:{sari} {yemek1.isim},{yesil} Müşteri Vejetaryan mı ?{kirmizi} {yemek1.vejetaryen_mi},{siyah} Ücret: {yemek1.fiyat}{sifirla}") #örneklendirilmiş yemek1 i print ediyorum # Ve renk kütüphanemi kullanıyorum #besAsikki() # b). # Cevap def besBsikki(): yemek2 = Kahvalti("Mantarlı Omlet",True,12,8) #kahvaltı menüsünü örneklendiriyorum print(f"{kirmizi}Seçilen yemek: {sari}{yemek2.isim},{yesil} Müşteri Vejetaryan mı ?{kirmizi} {yemek2.vejetaryen_mi},{siyah} Ücret normal: {yemek2.fiyat} {beyaz}Ücret Kahvaltı: {yemek2.kahvalti_fiyati}{sifirla}") #kendi renk kütüphanemle değerleri print ediyorum print(f"""{sari} .......__ ." ". : : : : `.._________..' : : : : : : `...' ,'"`. / \ : : ___ : : _-"_-" `.___,' _-_-" _-_-" _______________________ -"-"_ \ / \ / .--_\______________________/_--. ""--------------------------"" {sifirla} """ )#tava ve yumurta resmi print ediyorum # c). 
# Cevap # def besCsikki(): yemek2 = Kahvalti("Mantarlı Omlet",True,12,8)#kahvaltı menüsünü örneklendiriyorum yemek2.kahvalti_fiyati = 50 #kahvaltı_fiyatı 50 ye eşitliyorum ki assetesi aşabileyim print(f"{kirmizi}yemek2'nin yeni kahvaltı fiyatı : {yemek2.kahvalti_fiyati},{sifirla}") # ahvaltı_fiyatı print ediyorum print(f""" {sari} %@@. %@@. %@@, @@@, %@@@@@@@@@@ .@@@@@@@@ .* @@@@. %@@, @@@@@@@, @@@@@@@@ @@@@@@@@@. ( %@@. @@@ %@@, @@@ %@@, @@@. %@@. @@@ %@@, /@@@@ %@@, &@@@@@ %@@@@@@@@ {sifirla} """) #besCsikki() # d). def menu_denetle(ornek_menu): #metotot tanımladık for x in range(len(ornek_menu)): # ornek_menu deki elaman sayısı kadar print ettiriyoruz if type(ornek_menu[x]) is Kahvalti: # eğer örnek menü kahvaltı classındansa print(f"{mavi}örnek menünün{yesil} {x}.{mavi} elemanı kahvaltı menüsü") #örnek menün x elamanın kahvaltı olduğunu print ettiriyorum #x + 1 yazmamın sebebi 0 dan başlamasında 1 den başlasın print etmeye if ornek_menu[x].kahvalti_fiyati==11:# eğer örnek 1 in kahvaltı_fiyatı 11 e eşitse print(f"{mavi}örnek menünün {yesil}{x}.{mavi} elemanının kahvaltı fiyatı 11'e eşit")#örnek menün x elemanı 11 eşit olduğunu print ettiriyorum ornek_menu[x].kahvalti_fiyati = 9 # örnek menünün x elamının kahvalti fiyatını 9 yapıyorum print(f"{mavi}örnek menünün{yesil} {x}.{mavi} elemanının kahvaltı fiyatı 9 yapıldı ") # yapılan şeyi print ediyorum elif ornek_menu[x].kahvalti_fiyati>11: # eğer kahvaltı fiyatı 11 den büyükse print(f"{mavi}örnek menünün{yesil} {x}.{mavi} elamının kahvaltı fiyatı{yesil} {ornek_menu[x].kahvalti_fiyati}{mavi} ve 11 den büyük") # kahvaltı fiyatının 11den büyük olduğunu print ediyorum adi = "sunulmayan_yemek" # yeni ad beliledim vejataryan_kontrol=ornek_menu[x].vejetaryen_mi #vejataryanın olup olmadığını aynen alıyorum parasi = ornek_menu[x].kahvalti_fiyati #parisi değerini kahvalti_fiyarı değerine eşitliyorum sunulmayan = Menu(adi,vejataryan_kontrol,parasi) #örneklediriyorum fiyatı kahvaltı_fiyatı olarak belirledim değişmek için yukarıdaki yeri fiyat olarak almalıyız ornek_menu[x] = ornek_menu.append(sunulmayan) #Ve önceki örneklendirmeyle bunu değişiyorum istersek ek elaman diyede ekliyebilriz print(f"{mor}yeni bir yemek menüsü oluşturuldu") # ve olaylırın olduğunu print ediyorum else: # kahvaltı classından değilse print(f"{mavi}örnek menünün{yesil} {x}.{mavi} elemanı kahvaltı sınıfından değil{sifirla}") # kahvalti sınıfından olmadığını print ediyorum def besDsikki(): yemek3 = Kahvalti("<NAME>",True,14,7)#yemek3 diye yeni bir örneklendirme oluşturdum yemek3.kahvalti_fiyati = 11 #kahvaltı_fiyatı 11 ye eşitliyorum ki assetesi aşabileyim yemek4 = Kahvalti("pide",True,12,8) #yemek4 diye yeni bir örneklendirme oluşturdum yemek4.kahvalti_fiyati = 30 #kahvaltı_fiyatı 30 a eşitliyorum ki assetesi aşabileyim yemek5 = Menu("doner",False,24)#yemek5 diye yeni bir örneklendirme oluşturdum ornek_menu = [yemek3,yemek4,yemek5] #Listeye örneklendirmeleri ekliyorum menu_denetle(ornek_menu)#fonksiyona örnek mneüyü veriyoruz #besDsikki() #çalıştırmak için # In[6]: ### ### Problem 6 Stringler ve Sayaçlar (10 Puan) ### def altiSoru(): sil() #yukardaki sil fonksiyonu çağırdım liste = [] # boş bir liste tanımladım cift_sayısı = 0 # çift sayısını sıfır yapıyoruz # girdi olarak eleman sayısı tanımlıyoruz n = int(input(f"{kirmizi}Kaç tane sayıyı test etmek istiyorsunuz :{turuncu} ")) print(f"\n{pkirmizi}test etmek istedğiniz sayıları enterlayın") # n aralığa kadar yineliyorum for i in range(0, n): #input alıyorum her input rastgele renk oluyor eleman = 
int(input(f"{rastgeleRenkler(mavi,mor,yesil,sari,beyaz,turuncu,pembe,gri)}")) #kütüphanamdeki rastgele renk yazdırma fonksiyonu liste.append(eleman) #eleman ekliyor for num in liste: #listede her num öğesi için if num %2 == 0: # eğer 2 ile kalanı 0 ise cift_sayısı += 1 # çift sayı yerine bir ekliyorum print(f"{yesil}toplam çift sayınız:",pyesil,cift_sayısı,sifirla) # çift sayı sayısını yazdırıyorum #altiSoru() # In[7]: # Cevaplar # class cember: #çember sınıfı def __init__(self,k, x, y, r): # nesne niteliklerini giriyoruz self.r = r #bu şekilde sınıfımızın başka yerlerindede bunları kullanabilir hale getiriyoruz self.x = x self.y = y self.k = k print(f"çizgi kalınlığı: {k} X Konumu: {x} Y Konumu {y} YarıÇapı {r}") #nesne niteliklerini print ediyoruz fig = plt.figure() #pylotdaki figure fonksiyonu kullanıyoruz ax = fig.add_subplot(111, xlim=(-100, 100), ylim=(-100, 100))# gösterilicek alan yerini ayarlıyoruz c = ax.add_patch(plt.Circle((x, y), radius=r)) #Çemberin özelliklerini giriyoruz transform = mpl.transforms.Affine2D().translate(-5,-5) #TransformNode ağacına göre ölçüleri Affine2Dye ayarlıyoruz transform += mpl.transforms.Affine2D().translate(10,10) # affinde2d = https://au.mathworks.com/help/images/ref/affine2d.html c.set_transform(transform+ax.transData) # dönüşümü setliyoruz ax.set_aspect('equal') #y nin xe göre oranını setliyoruz plt.title(f"çizgi kalınlığı: {k} X Konumu: {x} Y Konumu {y} YarıÇapı {r}") # başlık giriyoruz plt.grid(True) # kareli olmayı açıyorum plt.show() # gösteriyorum def yaricap(self):# yarıçapı döndürmek için getter methodu return self.r # r yi return ediyoruz def yaricap(self, val): # setterla yarıçapı değişiyoruz self.r = val #r yi val yapıyoruz print(f"yeni yarıçapı: {val}") # yeni yarı çapı print ediyoruz fig = plt.figure() #pylotdaki figure fonksiyonu kullanıyoruz ax = fig.add_subplot(111, xlim=(-100, 100), ylim=(-100, 100)) # gösterilicek alan yerini ayarlıyoruz c = ax.add_patch(plt.Circle((10, 15), radius=val)) #Çemberin özelliklerini giriyoruz transform = mpl.transforms.Affine2D().translate(-5,-5) #TransformNode ağacına göre ölçüleri Affine2Dye ayarlıyoruz transform += mpl.transforms.Affine2D().translate(10,10)# affinde2d = https://au.mathworks.com/help/images/ref/affine2d.html c.set_transform(transform+ax.transData) # dönüşümü setliyoruz ax.set_aspect('equal')#y nin xe göre oranını setliyoruz plt.title(f"Yeni YarıÇapı {val}") # başlık giriyoruz plt.grid(True) # kareli olmayı açıyorum plt.show() # gösteriyorum def yediSoru():#yedinci soruyu çağırmak için t = cember(2,10, 15 ,50) #nesne niteliklerini ekliyruz t.yaricap(20) # yeni yarıçapı setterla ekliyoruz #yediSoru() def basla(): #küçük bir giriş animasyonu sil() isim=[ "::::::::: :::::::::: ::::::::: ::: ::: ::: ::: ::: ", ":+: :+: :+: :+: :+: :+: :+: :+: :+: :+: :+: ", "+:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+ ", "+#++:++#+ +#++:++# +#++:++#: +#++:++ +#++:++#++: +#++: ", "+#+ +#+ +#+ +#+ +#+ +#+ +#+ +#+ +#+ +#+ ", "#+# #+# #+# #+# #+# #+# #+# #+# #+# #+# ", "######### ########## ### ### ### ### ### ### ### ", "::: ::: ::: ::: :::::::: ::: ::: ::: :::", ":+: :+: :+: :+: :+: :+: :+: :+: :+: :+: ", "+:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+ +:+ ", "+#++:++ +#+ +:+ +#+ +#+ +:+ +#++:++ ", "+#+ +#+ +#+ +#+ +#+ +#+ +#+ +#+ +#+ ", "#+# #+# #+# #+# #+# #+# #+# #+# #+# #+# ", "### ### ######## ######## ######## ### ###", ] don = 0 while don < 6: # kaç kere dönceğini ayarlıyoruz sil() print(" ") time.sleep(0.1) a = rastgeleRenkler(mavi,turuncu,sari,beyaz,yesil,mor,kirmizi,yesil,pembe) # renk kütüphanem for i in isim: 
#isim listesini döndürüyor print(a,i) time.sleep(0.09) don += 1 #sayaç print(sifirla) time.sleep(0.03) def konsol(): #konsol fonksiyonumuz küçük bir cui # kullanıcıdan değer alıp üsteki soruları çalıştırıyorum a = """ {} +==========================================================[-][o][x] | | | 1.Soru için == 1 2.soru için == 2 | | | | 3.soru için == 3 4.soru için == 4 | | | | 5.soru için | | | | a şıkkı == 5a b şıkkı == 5b | | | | c şıkkı == 5c d şıkkı == 5d | | | | 6.soru için == 6 7.soru için == 7 | | | | vidyoyu açmak için == v | | _____________________| |____________________________ | | ,--. ,--. ,--. ,--. | | |oo | _ \ `. | oo | | oo| | | o o|~~ |(_) / ; | ~~ | | ~~|o o o o o o | | |/\/\| '._,' |/\/\| |/\/\| | | _____________________ _____________________________ | | | | | | Menüyü tekrar görmek için = m Çıkmak için q | +==================================================================+{} """.format(mor,sifirla) print(a) dongu = True while dongu: cevap = str(input(f"{mavi}Hangi soruyu çalıştırmak istiyorsunuz ? ")) if cevap == "1": sil() print("") print(f"{kirmizi}1.soru birazdan başlatılacak") time.sleep(1) birSoru() elif cevap == "2": sil() print("") print(f"{kirmizi}2.soru birazdan başlatılacak") time.sleep(1) sil() ikiSoru() elif cevap == "3": sil() print("") print(f"{kirmizi}3.soru birazdan başlatılacak") time.sleep(1) sil() soru3() elif cevap == "4": sil() print("") print(f"{kirmizi}4.soru birazdan başlatılacak") time.sleep(1) sil() soru4() elif cevap == "5a": sil() print("") print(f"{kirmizi}5.soru a şıkkı birazdan başlatılacak") time.sleep(1) sil() besAsikki() elif cevap == "5b": sil() print("") print(f"{kirmizi}5.soru b şıkkı birazdan başlatılacak") time.sleep(1) sil() besBsikki() elif cevap == "5c": sil() print("") print(f"{kirmizi}5.soru c şıkkı birazdan başlatılacak") time.sleep(1) sil() besCsikki() elif cevap == "5d": sil() print("") print(f"{kirmizi}5.soru d şıkkı birazdan başlatılacak") time.sleep(1) sil() besDsikki() elif cevap == "6": sil() print("") print(f"{kirmizi}6.soru birazdan başlatılacak") time.sleep(1) sil() altiSoru() elif cevap == "7": sil() print("") print(f"{kirmizi}7.soru birazdan başlatılacak") time.sleep(1) sil() yediSoru() elif cevap == ("m"): sil() print(a) time.sleep(1) elif cevap == ("v"): #hocam console olarak cmd kullanıyorsanız çalışcaktır sadece os.system("start https://youtu.be/R0tqoGqHHo8") elif cevap == "q": dongu = False sil() print(f"""{siyah} ooOOOO {pmavi} tren kaltı yine bekleriz {siyah} oo _____ _I__n_n__||_|| ________ >(_________|_7_|-|______| /o ()() ()() o oo oo """) else: print(kahverengi) print(""" __________________ __________________ .-/| \ / |\-. |||| | |||| |||| Aradığınız | ~~*~~ |||| |||| Soruyu | |||| |||| Bulamadık | |||| |||| | |||| |||| | --==*==-- |||| |||| | |||| |||| ~~*~~ | Belki başka |||| |||| | sayfalarda |||| |||| | Aradğınız cevabı |||| |||| | Bulursun |||| ||||__________________ | __________________|||| ||/===================\|/===================\|| `--------------------~___~-------------------'' """) print(sifirla) print(sifirla) basla() konsol() ```
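Problem 1's merge walks both dictionaries by hand. The same record merge, distilled into a standalone sketch that can be sanity-checked without the exam's color and animation helpers — the function name `merge_records` is illustrative, not from the file:
```python
# Standalone sketch of the problem-1 record merge.
def merge_records(d1, d2):
    merged = dict(d1)  # start from a copy of d1
    for key, value in d2.items():
        if key in merged:
            # element-wise sum of the two record lists
            merged[key] = [a + b for a, b in zip(merged[key], value)]
        else:
            merged[key] = value
    return merged

assert merge_records({"GS": [8, 1], "FB": [6, 3]}, {"GS": [2, 0], "BJK": [5, 4]}) == \
    {"GS": [10, 1], "FB": [6, 3], "BJK": [5, 4]}
```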
{ "source": "0xbhoori/ethtx", "score": 2 }
#### File: ethtx/ethtx/exceptions.py ```python __all__ = [ "NodeConnectionException", "ProcessingException", "InvalidTransactionHash", "InvalidEtherscanReturnCodeException", "FourByteConnectionException", "FourByteContentException", "FourByteException", ] import json from typing import Dict class NodeConnectionException(Exception): """Node Connection Exception.""" def __init__(self): super().__init__("Couldn't connect to node(s)") class ProcessingException(Exception): """Processing Exception.""" def __init__(self, msg): super().__init__("Exception processing: " + msg) class InvalidTransactionHash(Exception): """Invalid Transaction Hash.""" def __init__(self, tx_hash): super().__init__("Invalid transaction hash provided: " + tx_hash) class InvalidEtherscanReturnCodeException(Exception): def __init__(self, returned_code: int, params: Dict = None): params_msg = " with params: " + json.dumps(params) if params else "" msg = f"Invalid status code for etherscan request: {returned_code} {params_msg}" super().__init__(msg) class FourByteException(Exception): """4byte base exception class.""" class FourByteConnectionException(FourByteException): """4byte directory connection error.""" def __init__(self, msg: str): super().__init__(f"Couldn't connect to 4byte.directory: {msg}") class FourByteContentException(FourByteException): """4byte content exception. Missing output.""" def __init__(self, status_code: int, content: bytes): super().__init__( f"Wrong response from 4byte.directory. Status code:{status_code}, content: {content}" ) ``` #### File: providers/semantic_providers/database.py ```python import logging from typing import Dict, Optional import bson from pymongo.cursor import Cursor from pymongo.database import Database as MongoDatabase from .base import ISemanticsDatabase from .const import MongoCollections from ...utils.cache_tools import cache log = logging.getLogger(__name__) class MongoSemanticsDatabase(ISemanticsDatabase): _db: MongoDatabase def __init__(self, db: MongoDatabase): self._db = db self._addresses = None self._contracts = None self._signatures = None self._init_collections() def get_collection_count(self) -> int: return len(self._db.list_collection_names()) @cache def get_address_semantics(self, chain_id, address) -> Optional[Dict]: _id = f"{chain_id}-{address}" return self._addresses.find_one({"_id": _id}, {"_id": 0}) @cache def get_signature_semantics(self, signature_hash: str) -> Cursor: return self._signatures.find({"signature_hash": signature_hash}) def insert_signature( self, signature: dict, update_if_exist=False ) -> Optional[bson.ObjectId]: if update_if_exist: updated_signature = self._signatures.replace_one( {"_id": signature["_id"]}, signature, upsert=True ) return ( None if updated_signature.modified_count else updated_signature.upserted_id ) inserted_signature = self._signatures.insert_one(signature) return inserted_signature.inserted_id @cache def get_contract_semantics(self, code_hash): """Contract hashes are always the same, no mather what chain we use, so there is no need to use chain_id""" return self._contracts.find_one({"_id": code_hash}, {"_id": 0}) def insert_contract( self, contract, update_if_exist=False ) -> Optional[bson.ObjectId]: contract_with_id = {"_id": contract["code_hash"], **contract} if update_if_exist: updated_contract = self._contracts.replace_one( {"_id": contract_with_id["_id"]}, contract_with_id, upsert=True ) return ( None if updated_contract.modified_count else updated_contract.upserted_id ) inserted_contract = 
self._contracts.insert_one(contract_with_id)
        return inserted_contract.inserted_id

    def insert_address(self, address, update_if_exist=False) -> Optional[bson.ObjectId]:
        address_with_id = {
            "_id": f"{address['chain_id']}-{address['address']}",
            **address,
        }
        if update_if_exist:
            updated_address = self._addresses.replace_one(
                {"_id": address_with_id["_id"]}, address_with_id, upsert=True
            )
            return (
                None if updated_address.modified_count else updated_address.upserted_id
            )

        inserted_address = self._addresses.insert_one(address_with_id)
        return inserted_address.inserted_id

    def _init_collections(self) -> None:
        for mongo_collection in MongoCollections:
            self.__setattr__(f"_{mongo_collection}", self._db[mongo_collection])
```
#### File: ethtx/utils/cache_tools.py
```python
import os
from functools import WRAPPER_ASSIGNMENTS, wraps, lru_cache

CACHE_SIZE = int(os.environ.get("CACHE_SIZE", 256))


def cache(func, cache_size: int = CACHE_SIZE):
    @lru_cache(maxsize=cache_size)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)

    return wrapper


def ignore_unhashable(func):
    uncached = func.__wrapped__
    attributes = WRAPPER_ASSIGNMENTS + ("cache_info", "cache_clear")

    # copy cache_info/cache_clear from the cached function onto the wrapper
    @wraps(func, assigned=attributes)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError as error:
            if "unhashable type" in str(error):
                return uncached(*args, **kwargs)
            raise

    wrapper.__uncached__ = uncached
    return wrapper
```
#### File: tests/models/semantics_model_test.py
```python
from ethtx.models.semantics_model import (
    TransformationSemantics,
    ParameterSemantics,
    EventSemantics,
    FunctionSemantics,
    SignatureArg,
    Signature,
    ERC20Semantics,
    ContractSemantics,
    AddressSemantics,
)
from tests.models.mock import SemanticModelMock


class TestSemanticsModels:
    def test_transformation_semantics(self):
        ts = TransformationSemantics()
        assert ts.transformed_name is None
        assert ts.transformed_type is None
        assert ts.transformation == ""

    def test_parameter_semantics(self):
        ps = ParameterSemantics(parameter_name="name", parameter_type="type")
        assert ps.parameter_name == "name"
        assert ps.parameter_type == "type"
        assert ps.components == []
        assert not ps.indexed
        assert not ps.dynamic

    def test_event_semantics(self):
        es = EventSemantics(
            signature="0x",
            anonymous=True,
            name="name",
            parameters=[SemanticModelMock.PARAMETER_SEMANTICS],
        )
        assert es.signature == "0x"
        assert es.anonymous
        assert es.name == "name"
        assert es.parameters == [SemanticModelMock.PARAMETER_SEMANTICS]

    def test_function_semantics(self):
        fs = FunctionSemantics(
            signature="0x",
            name="name",
            inputs=[SemanticModelMock.PARAMETER_SEMANTICS],
            outputs=[SemanticModelMock.PARAMETER_SEMANTICS],
        )
        assert fs.signature == "0x"
        assert fs.name == "name"
        assert fs.inputs == [SemanticModelMock.PARAMETER_SEMANTICS]
        assert fs.outputs == [SemanticModelMock.PARAMETER_SEMANTICS]

    def test_signature_args(self):
        sa = SignatureArg(name="name", type="type")
        assert sa.name == "name"
        assert sa.type == "type"

    def test_signature(self):
        sa = SignatureArg(name="name", type="type")
        s = Signature(signature_hash="0x", name="name", args=[sa])
        assert s.signature_hash == "0x"
        assert s.name == "name"
        assert s.args == [sa]
        assert s.count == 1
        assert not s.tuple
        assert not s.guessed

    def test_erc20_semantics(self):
        es = ERC20Semantics(name="name", symbol="symbol", decimals=18)
        assert es.name == "name"
        assert es.symbol == "symbol"
        assert es.decimals == 18

    def test_contract_semantics(self):
        cs = ContractSemantics(code_hash="0x", name="name")
        assert cs.code_hash == "0x"
        assert cs.name == "name"
        assert cs.events == {}
        assert 
cs.functions == {} assert cs.transformations == {} def test_address_semantics(self): ads = AddressSemantics( chain_id="mainnet", address="0x", name="name", is_contract=True, contract=SemanticModelMock.CONTRACT_SEMANTICS, ) assert ads.chain_id == "mainnet" assert ads.address == "0x" assert ads.name == "name" assert ads.is_contract assert ads.contract == SemanticModelMock.CONTRACT_SEMANTICS assert ads.standard is None assert ads.erc20 is None ```
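The `cache`/`ignore_unhashable` pair in `cache_tools.py` is designed to be stacked: `lru_cache` raises `TypeError` for unhashable arguments, and `ignore_unhashable` then falls back to the uncached function it finds via `__wrapped__`. A minimal sketch of that stacking, assuming the `ethtx` package is importable:
```python
from ethtx.utils.cache_tools import cache, ignore_unhashable

@ignore_unhashable
@cache
def first_item(seq):
    # Hashable args (tuples) hit the LRU cache; unhashable ones (lists)
    # raise TypeError inside lru_cache and fall through to the uncached call.
    return seq[0]

print(first_item((1, 2, 3)))  # cached
print(first_item([1, 2, 3]))  # uncached fallback, still returns 1
```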
{ "source": "0xboz/get_binance_tickers", "score": 3 }
#### File: 0xboz/get_binance_tickers/get_binance_tickers.py ```python import bs4 as bs import requests import pickle def save_ticker_pairs(): cmc_binance_url = 'https://coinmarketcap.com/exchanges/binance/' response = requests.get(cmc_binance_url) if response.ok: soup = bs.BeautifulSoup(response.text, 'html.parser') table = soup.find('table', {'id': 'exchange-markets'}) ticker_pairs = [] for row in table.findAll('tr')[1:]: ticker_pair = row.findAll('td')[2].text ticker_pairs.append(ticker_pair.strip().replace('/', '')) with open('binance_ticker_pairs.pickle', 'wb') as f: pickle.dump(ticker_pairs, f) if __name__ == '__main__': save_ticker_pairs() ```
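`save_ticker_pairs` writes its pickle only when the CoinMarketCap request succeeds and the `exchange-markets` table still exists in the page markup, so a caller should expect the file to be missing otherwise. A quick way to exercise it and inspect the output — the module name follows the file path above:
```python
import pickle
from get_binance_tickers import save_ticker_pairs

save_ticker_pairs()  # writes binance_ticker_pairs.pickle only on a successful scrape
with open('binance_ticker_pairs.pickle', 'rb') as f:
    pairs = pickle.load(f)
print(len(pairs), pairs[:5])
```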
{ "source": "0xboz/selenium_chrome_proxy_authorization", "score": 2 }
#### File: 0xboz/selenium_chrome_proxy_authorization/main.py
```python
from settings import USERNAME, PASSWORD, CHROME_WEBDRIVER
from pyvirtualdisplay import Display
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
import logging


def create_proxyauth_extension(proxy_host, proxy_port, proxy_username, proxy_password,
                               scheme='http', plugin_path=None):
    """
    Proxy Auth Extension

    args:
        proxy_host (str): domain or ip address, ie proxy.domain.com
        proxy_port (int): port
        proxy_username (str): auth username
        proxy_password (str): auth password
    kwargs:
        scheme (str): proxy scheme, default http
        plugin_path (str): absolute path of the extension
    return str -> plugin_path
    """
    import string
    import zipfile

    if plugin_path is None:
        plugin_path = '/tmp/chrome_proxyauth_plugin.zip'

    manifest_json = """
    {
        "version": "1.0.0",
        "manifest_version": 2,
        "name": "Chrome Proxy",
        "permissions": [
            "proxy",
            "tabs",
            "unlimitedStorage",
            "storage",
            "<all_urls>",
            "webRequest",
            "webRequestBlocking"
        ],
        "background": {
            "scripts": ["background.js"]
        },
        "minimum_chrome_version":"22.0.0"
    }
    """

    background_js = string.Template(
        """
        var config = {
            mode: "fixed_servers",
            rules: {
                singleProxy: {
                    scheme: "${scheme}",
                    host: "${host}",
                    port: parseInt(${port})
                },
                bypassList: ["foobar.com"]
            }
        };

        chrome.proxy.settings.set({value: config, scope: "regular"}, function() {});

        function callbackFn(details) {
            return {
                authCredentials: {
                    username: "${username}",
                    password: "${password}"
                }
            };
        }

        chrome.webRequest.onAuthRequired.addListener(
            callbackFn,
            {urls: ["<all_urls>"]},
            ['blocking']
        );
        """
    ).substitute(
        host=proxy_host,
        port=proxy_port,
        username=proxy_username,
        password=proxy_password,
        scheme=scheme,
    )
    with zipfile.ZipFile(plugin_path, 'w') as zp:
        zp.writestr("manifest.json", manifest_json)
        zp.writestr("background.js", background_js)
    return plugin_path


if __name__ == "__main__":
    display = Display(visible=0, size=(1920, 1080))
    display.start()
    proxyauth_plugin_path = create_proxyauth_extension(
        proxy_host="us3651.nordvpn.com",
        proxy_port=80,
        proxy_username=USERNAME,
        proxy_password=PASSWORD
    )
    chrome_options = Options()
    chrome_options.add_argument("--start-maximized")
    chrome_options.add_extension(proxyauth_plugin_path)
    driver = webdriver.Chrome(options=chrome_options, executable_path=CHROME_WEBDRIVER)
    try:
        driver.get("http://ifconfig.me/ip")
        ip_address = driver.find_element(By.XPATH, '//html/body/pre').text
        print(ip_address)
    except NoSuchElementException as e:
        logging.error(e.__repr__())

    if display:
        display.stop()
    if driver:
        driver.quit()
```
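The script pulls its credentials and driver path from a `settings` module that is not included in this entry. A minimal sketch of what that module needs to provide — every value below is a placeholder:
```python
# settings.py -- hypothetical companion module; replace the placeholders
# with real proxy credentials and a local chromedriver path.
USERNAME = "proxy-user@example.com"
PASSWORD = "proxy-password"
CHROME_WEBDRIVER = "/usr/local/bin/chromedriver"
```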
{ "source": "0xboz/stationarity_on_crypto_trading_data", "score": 3 }
#### File: 0xboz/stationarity_on_crypto_trading_data/catalyst_test.py ```python from catalyst.api import symbol, record from catalyst import run_algorithm import numpy as np import pandas as pd import stationarity_test def initialize(context): context.asset = symbol('btc_usdt') def handle_data(context, data): # The last known prices of current date and the day before yesterday_price, current_price = data.history( context.asset, 'price', 2, '1T') # Calculate return simple_return = current_price / yesterday_price # Calculate log return log_return = np.log(current_price) - np.log(yesterday_price) record(price=current_price, simple_return=simple_return, log_return=log_return) def analyze(context, perf): sTest = stationarity_test.StationarityTests() # print(perf[['price', 'simple_return', 'log_return']].head()) print('# Price Stationarity Testing') sTest.ADF_Test(perf.price) sTest.PP_Test(perf.price) sTest.KPSS_Test(perf.price) print('# Simple Return Stationarity Testing') sTest.ADF_Test(perf.simple_return) sTest.PP_Test(perf.simple_return) sTest.KPSS_Test(perf.simple_return) print('# Log Return Stationarity Testing') sTest.ADF_Test(perf.log_return) sTest.PP_Test(perf.log_return) sTest.KPSS_Test(perf.log_return) if __name__ == '__main__': run_algorithm(capital_base=1000, data_frequency='minute', initialize=initialize, handle_data=handle_data, analyze=analyze, exchange_name='poloniex', quote_currency='usdt', start=pd.to_datetime('2018-9-1', utc=True), end=pd.to_datetime('2018-9-3', utc=True)) ```
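`analyze` depends on a local `stationarity_test` module that is not part of this entry. A plausible minimal sketch of its `StationarityTests` class, built on statsmodels (ADF, KPSS) and the `arch` package (Phillips-Perron); only the three method names are taken from the script above, everything else is an assumption:
```python
# stationarity_test.py -- hypothetical sketch of the imported helper.
from statsmodels.tsa.stattools import adfuller, kpss
from arch.unitroot import PhillipsPerron


class StationarityTests:
    def __init__(self, significance=0.05):
        self.significance = significance

    def ADF_Test(self, series):
        # Augmented Dickey-Fuller: H0 = unit root (non-stationary)
        stat, pvalue = adfuller(series.dropna())[:2]
        print('ADF: stat=%.4f p=%.4f -> %s' % (
            stat, pvalue,
            'stationary' if pvalue < self.significance else 'non-stationary'))

    def PP_Test(self, series):
        # Phillips-Perron: H0 = unit root (non-stationary)
        pp = PhillipsPerron(series.dropna())
        print('PP: stat=%.4f p=%.4f -> %s' % (
            pp.stat, pp.pvalue,
            'stationary' if pp.pvalue < self.significance else 'non-stationary'))

    def KPSS_Test(self, series):
        # Note the inverted null: KPSS assumes stationarity under H0.
        stat, pvalue = kpss(series.dropna(), nlags='auto')[:2]
        print('KPSS: stat=%.4f p=%.4f -> %s' % (
            stat, pvalue,
            'stationary' if pvalue >= self.significance else 'non-stationary'))
```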
{ "source": "0xbs0d/figures", "score": 3 }
#### File: figures/patches/plugin.py ```python from glob import glob import os from .__about__ import __version__ HERE = os.path.abspath(os.path.dirname(__file__)) def patches(): all_patches = {} for path in glob(os.path.join(HERE, "patches", "*")): with open(path) as patch_file: name = os.path.basename(path) content = patch_file.read() all_patches[name] = content return all_patches ```
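`patches` simply maps each file name under the package's `patches/` directory to that file's contents. A quick check of the returned mapping — the dotted import path is inferred from the file's location and may differ in the installed package:
```python
# Hypothetical usage; the import path is an assumption based on the
# repository layout (figures/patches/plugin.py).
from figures.patches.plugin import patches

all_patches = patches()
for name, content in sorted(all_patches.items()):
    print(name, len(content))  # one entry per file in the patches/ directory
```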
{ "source": "0xC000005/MineTube", "score": 2 }
#### File: 0xC000005/MineTube/Configuration.py
```python
import yaml
import sys
import codecs

input_path = None
output_path = None
ending = None
starting = None
remove = None


# --------------------------- Common module ------------------------------
# [Common] - print messages to stderr, with support for use in a pipeline
def write_msg(text):
    sys.stderr.write(text + '\n')


def init(name='Configuration.yml'):
    write_msg("NOTICE: Initialize Configuration")
    global input_path, output_path, ending, starting, remove
    try:
        with codecs.open(name, 'r', errors='ignore') as doc:
            config = yaml.safe_load(doc)
            input_path = config['input']['path']
            output_path = config['output']['path']
            ending = config['input']['sentence']['ending']['re']
            starting = config['input']['sentence']['starting']['re']
            remove = config['input']['sentence']['remove']['re']
    except IOError:
        write_msg("ERROR: Configuration file not found.")
        exit()


if __name__ == '__main__':
    init()
```
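`init` expects `Configuration.yml` to contain a specific nested layout, which can be read straight off the `config[...]` lookups. A sketch that writes a matching file and loads it — the regex values are placeholders:
```python
import yaml
import Configuration

# Shape inferred from the config[...] lookups in init().
doc = {
    'input': {
        'path': 'input.txt',
        'sentence': {
            'ending': {'re': r'[.!?]'},
            'starting': {'re': r'[A-Z]'},
            'remove': {'re': r'\[.*?\]'},
        },
    },
    'output': {'path': 'output.txt'},
}
with open('Configuration.yml', 'w') as f:
    yaml.safe_dump(doc, f)

Configuration.init()
print(Configuration.input_path, Configuration.ending)
```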
{ "source": "0xc0170/pyOCD", "score": 2 }
#### File: pyOCD/pyOCD/core/coresight_target.py
```python
from .target import Target
from ..coresight import (dap, ap, cortex_m)
from ..debug.svd import (SVDFile, SVDLoader)
import threading
import logging
from xml.etree.ElementTree import (Element, SubElement, tostring)

##
# @brief Debug target that uses CoreSight classes.
class CoreSightTarget(Target):

    def __init__(self, link, memoryMap=None):
        super(CoreSightTarget, self).__init__(link, memoryMap)
        self.part_number = self.__class__.__name__
        self.cores = {}
        self.aps = {}
        self.dp = dap.DebugPort(link)
        self._selected_core = 0
        self._svd_load_thread = None

    @property
    def selected_core(self):
        return self.cores[self._selected_core]

    def select_core(self, num):
        if num not in self.cores:
            raise ValueError("invalid core number")
        logging.debug("selected core #%d" % num)
        self._selected_core = num

    @property
    ## @brief Waits for SVD file to complete loading before returning.
    def svd_device(self):
        if not self._svd_device and self._svd_load_thread:
            logging.debug("Waiting for SVD load to complete")
            self._svd_device = self._svd_load_thread.device
        return self._svd_device

    def loadSVD(self):
        def svdLoadCompleted(svdDevice):
            logging.debug("Completed loading SVD")
            self._svd_device = svdDevice
            self._svd_load_thread = None

        if not self._svd_device and self._svd_location:
            logging.debug("Started loading SVD")
            # Spawn thread to load SVD in background.
            self._svd_load_thread = SVDLoader(self._svd_location, svdLoadCompleted)
            self._svd_load_thread.load()

    def init(self, bus_accessible=True):
        # Start loading the SVD file
        self.loadSVD()

        # Create the DP and turn on debug.
        self.dp.init()
        self.dp.power_up_debug()

        # Create an AHB-AP for the CPU.
        self.aps[0] = ap.AHB_AP(self.dp, 0)
        self.aps[0].init(bus_accessible)

        # Create CortexM core.
        self.cores[0] = cortex_m.CortexM(self.link, self.dp, self.aps[0], self.memory_map)
        if bus_accessible:
            self.cores[0].init()

    def disconnect(self):
        for core in self.cores.values():
            core.disconnect()
        self.dp.power_down_debug()

    def readIDCode(self):
        return self.dp.dpidr

    def flush(self):
        self.dp.flush()

    def halt(self):
        return self.selected_core.halt()

    def step(self, disable_interrupts=True):
        return self.selected_core.step(disable_interrupts)

    def resume(self):
        return self.selected_core.resume()

    def writeMemory(self, addr, value, transfer_size=32):
        return self.selected_core.writeMemory(addr, value, transfer_size)

    def readMemory(self, addr, transfer_size=32, now=True):
        return self.selected_core.readMemory(addr, transfer_size, now)

    def writeBlockMemoryUnaligned8(self, addr, value):
        return self.selected_core.writeBlockMemoryUnaligned8(addr, value)

    def writeBlockMemoryAligned32(self, addr, data):
        return self.selected_core.writeBlockMemoryAligned32(addr, data)

    def readBlockMemoryUnaligned8(self, addr, size):
        return self.selected_core.readBlockMemoryUnaligned8(addr, size)

    def readBlockMemoryAligned32(self, addr, size):
        return self.selected_core.readBlockMemoryAligned32(addr, size)

    def readCoreRegister(self, id):
        return self.selected_core.readCoreRegister(id)

    def writeCoreRegister(self, id, data):
        return self.selected_core.writeCoreRegister(id, data)

    def readCoreRegisterRaw(self, reg):
        return self.selected_core.readCoreRegisterRaw(reg)

    def readCoreRegistersRaw(self, reg_list):
        return self.selected_core.readCoreRegistersRaw(reg_list)

    def writeCoreRegisterRaw(self, reg, data):
        self.selected_core.writeCoreRegisterRaw(reg, data)

    def writeCoreRegistersRaw(self, reg_list, data_list):
        self.selected_core.writeCoreRegistersRaw(reg_list, data_list)

    def findBreakpoint(self, addr):
        return self.selected_core.findBreakpoint(addr)

    def setBreakpoint(self, addr, type=Target.BREAKPOINT_AUTO):
        return self.selected_core.setBreakpoint(addr, type)

    def getBreakpointType(self, addr):
        return self.selected_core.getBreakpointType(addr)

    def removeBreakpoint(self, addr):
        return self.selected_core.removeBreakpoint(addr)

    def setWatchpoint(self, addr, size, type):
        return self.selected_core.setWatchpoint(addr, size, type)

    def removeWatchpoint(self, addr, size, type):
        return self.selected_core.removeWatchpoint(addr, size, type)

    def reset(self, software_reset=None):
        return self.selected_core.reset(software_reset=software_reset)

    def resetStopOnReset(self, software_reset=None):
        return self.selected_core.resetStopOnReset(software_reset)

    def setTargetState(self, state):
        return self.selected_core.setTargetState(state)

    def getState(self):
        return self.selected_core.getState()

    def getMemoryMap(self):
        return self.memory_map

    def setVectorCatch(self, enableMask):
        return self.selected_core.setVectorCatch(enableMask)

    def getVectorCatch(self):
        return self.selected_core.getVectorCatch()

    # GDB functions
    def getTargetXML(self):
        return self.selected_core.getTargetXML()

    def getRegisterContext(self):
        return self.selected_core.getRegisterContext()

    def setRegisterContext(self, data):
        return self.selected_core.setRegisterContext(data)

    def setRegister(self, reg, data):
        return self.selected_core.setRegister(reg, data)

    def getTResponse(self, forceSignal=None):
        return self.selected_core.getTResponse(forceSignal)

    def getSignalValue(self):
        return self.selected_core.getSignalValue()

    def getThreadsXML(self):
        root = Element('threads')
        t = SubElement(root, 'thread', id="1", core="0")
        t.text = "Thread mode"
        return '<?xml version="1.0"?><!DOCTYPE feature SYSTEM "threads.dtd">' + tostring(root, encoding="unicode")
```
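Every debug call on `CoreSightTarget` is a thin delegation to `cores[self._selected_core]`, so the class acts as a facade over one or more Cortex-M cores behind a single debug port. A hedged lifecycle sketch, where `link` stands for an already-connected probe transport that is constructed elsewhere:
```python
# Hypothetical driver code; `link` must be a connected probe transport,
# which is outside the scope of this file.
from pyOCD.core.coresight_target import CoreSightTarget

def debug_session(link):
    target = CoreSightTarget(link)
    target.init()       # powers up the DP, creates AHB-AP 0 and core 0
    target.halt()       # delegated to the selected core
    print(hex(target.readIDCode()))
    target.select_core(0)  # raises ValueError for unknown core numbers
    target.resume()
    target.disconnect()
```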
{ "source": "0xc0decafe/dizzy", "score": 2 }
#### File: dizzy/functions/encryption.py
```python
from dizzy.config import CONFIG

if CONFIG["DEPS"]["Crypto"]:
    from Crypto.Cipher import AES
    from . import BOTH

    def aes_encrypt(start, stop, key, mode=AES.MODE_CBC, mode_param=None, when=BOTH):
        def func(dizzy_iterator):
            enc = AES.new(key, mode, mode_param)
            dizzy_iterator[start:stop] = enc.encrypt(dizzy_iterator[start:stop].byte)
        return (func, when)

    def aes_decrypt(start, stop, key, mode=AES.MODE_CBC, mode_param=None, when=BOTH):
        def func(dizzy_iterator):
            enc = AES.new(key, mode, mode_param)
            dizzy_iterator[start:stop] = enc.decrypt(dizzy_iterator[start:stop].byte)
        return (func, when)
else:
    from dizzy import DizzyParseException
    from . import BOTH

    # AES is not importable here, so the stubs must not reference
    # AES.MODE_CBC in their default arguments; they just raise when used.
    def aes_encrypt(start, stop, key, mode=None, mode_param=None, when=BOTH):
        raise DizzyParseException("python Crypto module not available.")

    def aes_decrypt(start, stop, key, mode=None, mode_param=None, when=BOTH):
        raise DizzyParseException("python Crypto module not available.")
```
#### File: dizzy/objects/regex.py
```python
from dizzy.value import Value
from dizzy.config import CONFIG
from dizzy import DizzyParseException

if CONFIG["DEPS"]["exrex"]:
    from exrex import generate, count

    class Regex:
        def __init__(self, name, regex, limit=20):
            if isinstance(name, str) and name:
                self.name = name
            else:
                raise DizzyParseException("Name must be str and not empty.")
            if isinstance(regex, str):
                self.regex = regex
            else:
                raise DizzyParseException("regex must be str.")
            if isinstance(limit, int):
                self.limit = limit
            else:
                raise DizzyParseException("limit must be int.")
            self.len = count(self.regex, self.limit)

        def __iter__(self):
            for string in generate(self.regex, self.limit):
                value = bytes(string, encoding=CONFIG["GLOBALS"]["CODEC"])
                yield Value(value, len(value) * 8)

        def length(self):
            return self.len
else:
    class Regex:
        def __init__(self, _1, _2, _3=None):
            raise DizzyParseException("python exrex module not available.")
```
#### File: dizzy/session/http.py
```python
from . 
import SessionException, SessionParseException from http.client import HTTPConnection from http.server import HTTPServer, BaseHTTPRequestHandler from threading import Thread, Lock from time import sleep from dizzy.log import print_dizzy, DEBUG, VERBOSE_1, VERBOSE_2 class DizzyHTTPServerThread(Thread): def __init__(self, server): Thread.__init__(self) self.server = server def run(self): self.server.serve_forever() class DizzySession(object): def __init__(self, section_proxy): self.dest = section_proxy.get('target_host') self.dport = section_proxy.getint('target_port', 80) self.src = section_proxy.get('source_host', '') self.src_str = self.src if self.src == '': self.src = None self.sport = section_proxy.getint('source_port', 80) self.method = section_proxy.get('method', 'GET') self.url = section_proxy.get('url') self.headers = {} headers = section_proxy.get('headers', '') if not headers == '': for l in headers.split(";"): r = l.split(":") self.headers[r[0]] = ":".join(r[1:]) self.cookies = {} self.timeout = section_proxy.getfloat('timeout', 1) self.auto_reopen = section_proxy.getboolean('auto_reopen', True) self.retry = section_proxy.getint('retry', 3) self.server_side = section_proxy.getboolean('server', False) self.read_first = self.server_side self.read_first = section_proxy.getboolean('read_first', self.read_first) self.is_open = False self.res = None self.thread = None self.send_lock = Lock() self.recv_lock = Lock() self.rlist = [] self.slist = [] class DizzyHTTPRequestHandler(BaseHTTPRequestHandler): dest = self.dest dport = self.dport send_lock = self.send_lock recv_lock = self.recv_lock rlist = self.rlist slist = self.slist set_headers = self.headers protocol_version = "HTTP/1.1" def all_methods(self): (addr, port) = self.client_address if addr == self.dest: print_dizzy("http/request_handler: handling request from %s" % addr, VERBOSE_2) while True and not self.server._BaseServer__shutdown_request: with self.recv_lock: if len(self.rlist) == 0: break print_dizzy("http/request_handler: rlist not empty", DEBUG) sleep(0.1) with self.recv_lock: length = int(self.headers['content-length']) self.rlist.append(self.rfile.read(length)) while True and not self.server._BaseServer__shutdown_request: with self.send_lock: if len(self.slist) == 1: break print_dizzy("http/request_handler: slist empty", DEBUG) sleep(0.1) with self.send_lock: data = self.slist.pop() self.send_response(200) for i in self.set_headers: self.send_header(i, self.set_headers[i]) self.send_header('Content-Length', len(data)) self.end_headers() self.wfile.write(data) else: print_dizzy("http/request_handler: denied access for %s" % addr, VERBOSE_2) def log_message(self, format, *args): return do_GET = all_methods do_HEAD = all_methods do_POST = all_methods do_PUT = all_methods do_DELETE = all_methods do_CONNECT = all_methods do_OPTIONS = all_methods do_TRACE = all_methods do_PATCH = all_methods self.request_handler = DizzyHTTPRequestHandler def open(self): try: if not self.server_side: self.connection = HTTPConnection(self.dest, self.dport, timeout=self.timeout, source_address=self.src) else: attempt = 0 while attempt < self.retry: try: self.connection = HTTPServer((self.src_str, self.sport), self.request_handler) except OSError: attempt += 1 sleep(1) continue else: break self.thread = DizzyHTTPServerThread(self.connection) self.thread.start() except Exception as e: raise SessionException("http/open: cant open session: %s" % e) else: self.is_open = True def close(self): if not self.is_open: return if not self.server_side: 
self.connection.close() self.res = None else: self.connection.shutdown() self.is_open = False def send(self, data): try: if not self.server_side: headers = self.headers cookies = ";".join(["%s=%s" % (n, v) for (n, v) in self.cookies.items()]) if len(cookies) > 0: headers.update({"Cookie" : cookies}) self.connection.request(self.method, self.url, body=data, headers=headers) self.res = self.connection.getresponse() headers = dict(self.res.getheaders()) for h in headers: if h == "Set-Cookie": try: nv = headers[h].split(";")[0].split("=") self.cookies[nv[0]] = nv[1] except: print_dizzy("http/send: failed to parse set-cookie: %s" % headers[h], VERBOSE_1) else: while True: with self.send_lock: if len(self.slist) == 0: break print_dizzy("http/send: slist not empty", DEBUG) sleep(0.1) with self.send_lock: self.slist.append(data) print_dizzy("http/send: pushed %s" % data, DEBUG) except Exception as e: if self.auto_reopen: print_dizzy("http/send: session got closed '%s', auto reopening..." % e, DEBUG) print_dizzy(e, DEBUG) self.close() self.open() else: self.close() raise SessionException("http/send: error on sending '%s', connection closed." % e) def recv(self): #from traceback import print_stack #print_stack() if not self.server_side: if not self.res is None: return self.res.read() else: while True: with self.recv_lock: if len(self.rlist) == 1: break print_dizzy("http/recv: rlist empty", DEBUG) sleep(0.1) with self.recv_lock: data = self.rlist.pop() print_dizzy("http/recv: poped %s" % data, DEBUG) return data ``` #### File: dizzy/tests/test_dizzy.py ```python from unittest import TestCase, main from dizzy.tests import first from dizzy.dizz import Dizz, load_dizz from dizzy.value import Value from dizzy.objects import START, END from dizzy.objects.field import Field from dizzy.functions.length import length from dizzy.functions import BOTH class TestDizzy(TestCase): def test_init(self): objects = list() objects.append(Field("test0", b"\x01\xff", 10, "std")) objects.append(Field("test1", b"\xab", 8, "std")) objects.append(Field("test2", b"\x00\xff", 12, "std")) d = Dizz("test", objects, fuzz="none") self.assertEqual(first(d), Value(b'\x1f\xfa\xb0\xff', 30)) def test_iter(self): expected = [Value(b'\x00""w3\x00!', 49), Value(b'\x00\x00\x00w3\x00!', 49), Value(b'\x00\x00\x02w3\x00!', 49), Value(b'\x00\x00\x04w3\x00!', 49), Value(b'\x00\x00\x06w3\x00!', 49), Value(b'\x00\x00\x08w3\x00!', 49), Value(b'\x01\xff\xf6w3\x00!', 49), Value(b'\x01\xff\xf8w3\x00!', 49), Value(b'\x01\xff\xfaw3\x00!', 49), Value(b'\x01\xff\xfcw3\x00!', 49), Value(b'\x01\xff\xfew3\x00!', 49), Value(b'\x00\xff\xf8w3\x00!', 49), Value(b'\x00\xff\xfaw3\x00!', 49), Value(b'\x00\xff\xfcw3\x00!', 49), Value(b'\x00\xff\xfew3\x00!', 49), Value(b'\x00\x02\x00w3\x00!', 49), Value(b'\x00\x04\x00w3\x00!', 49), Value(b'\x00\x06\x00w3\x00!', 49), Value(b'\x00\x08\x00w3\x00!', 49), Value(b'\x00""33\x00!', 49), Value(b'\x00""33\x00!', 49), Value(b'\x00""73\x00!', 49), Value(b'\x00""73\x00!', 49), Value(b'\x00"";3\x00!', 49), Value(b'\x00"#\xf73\x00!', 49), Value(b'\x00"#\xfb3\x00!', 49), Value(b'\x00"#\xfb3\x00!', 49), Value(b'\x00"#\xff3\x00!', 49), Value(b'\x00"#\xff3\x00!', 49), Value(b'\x00""\xfb3\x00!', 49), Value(b'\x00""\xfb3\x00!', 49), Value(b'\x00""\xff3\x00!', 49), Value(b'\x00""\xff3\x00!', 49), Value(b'\x00""D\x00\x00!', 49), Value(b'\x00""D\x01\x00!', 49), Value(b'\x00""D\x02\x00!', 49), Value(b'\x00""D\x03\x00!', 49), Value(b'\x00""D\x04\x00!', 49), Value(b'\x00""E\xfb\x00!', 49), Value(b'\x00""E\xfc\x00!', 49), 
Value(b'\x00""E\xfd\x00!', 49), Value(b'\x00""E\xfe\x00!', 49), Value(b'\x00""E\xff\x00!', 49), Value(b'\x00""D\xfc\x00!', 49), Value(b'\x00""D\xfd\x00!', 49), Value(b'\x00""D\xfe\x00!', 49), Value(b'\x00""D\xff\x00!', 49), Value(b'\x00""E\x00\x00!', 49), Value(b'\x00""F\x00\x00!', 49), Value(b'\x00""G\x00\x00!', 49), Value(b'\x00""D\x00\x00!', 49), Value(b'\x00DD\x88\x00\x00"', 50), Value(b'\x00DD\x88\x01\x00"', 50), Value(b'\x00DD\x88\x02\x00"', 50), Value(b'\x00DD\x88\x03\x00"', 50), Value(b'\x00DD\x88\x04\x00"', 50), Value(b'\x00DD\x8b\xfb\x00"', 50), Value(b'\x00DD\x8b\xfc\x00"', 50), Value(b'\x00DD\x8b\xfd\x00"', 50), Value(b'\x00DD\x8b\xfe\x00"', 50), Value(b'\x00DD\x8b\xff\x00"', 50), Value(b'\x00DD\x89\xfc\x00"', 50), Value(b'\x00DD\x89\xfd\x00"', 50), Value(b'\x00DD\x89\xfe\x00"', 50), Value(b'\x00DD\x89\xff\x00"', 50), Value(b'\x00DD\x89\x00\x00"', 50), Value(b'\x00DD\x8a\x00\x00"', 50), Value(b'\x00DD\x8b\x00\x00"', 50), Value(b'\x00DD\x8c\x00\x00"', 50), Value(b'\x00\x88\x89\x10\x00\x00#', 51), Value(b'\x00\x88\x89\x10\x01\x00#', 51), Value(b'\x00\x88\x89\x10\x02\x00#', 51), Value(b'\x00\x88\x89\x10\x03\x00#', 51), Value(b'\x00\x88\x89\x10\x04\x00#', 51), Value(b'\x00\x88\x89\x17\xfb\x00#', 51), Value(b'\x00\x88\x89\x17\xfc\x00#', 51), Value(b'\x00\x88\x89\x17\xfd\x00#', 51), Value(b'\x00\x88\x89\x17\xfe\x00#', 51), Value(b'\x00\x88\x89\x17\xff\x00#', 51), Value(b'\x00\x88\x89\x13\xfc\x00#', 51), Value(b'\x00\x88\x89\x13\xfd\x00#', 51), Value(b'\x00\x88\x89\x13\xfe\x00#', 51), Value(b'\x00\x88\x89\x13\xff\x00#', 51), Value(b'\x00\x88\x89\x11\x00\x00#', 51), Value(b'\x00\x88\x89\x12\x00\x00#', 51), Value(b'\x00\x88\x89\x13\x00\x00#', 51), Value(b'\x00\x88\x89\x14\x00\x00#', 51), Value(b'\x01\x11\x12 \x00\x00$', 52), Value(b'\x01\x11\x12 \x01\x00$', 52), Value(b'\x01\x11\x12 \x02\x00$', 52), Value(b'\x01\x11\x12 \x03\x00$', 52), Value(b'\x01\x11\x12 \x04\x00$', 52), Value(b'\x01\x11\x12/\xfb\x00$', 52), Value(b'\x01\x11\x12/\xfc\x00$', 52), Value(b'\x01\x11\x12/\xfd\x00$', 52), Value(b'\x01\x11\x12/\xfe\x00$', 52), Value(b'\x01\x11\x12/\xff\x00$', 52), Value(b"\x01\x11\x12'\xfc\x00$", 52), Value(b"\x01\x11\x12'\xfd\x00$", 52), Value(b"\x01\x11\x12'\xfe\x00$", 52), Value(b"\x01\x11\x12'\xff\x00$", 52), Value(b'\x01\x11\x12!\x00\x00$', 52), Value(b'\x01\x11\x12"\x00\x00$', 52), Value(b'\x01\x11\x12#\x00\x00$', 52), Value(b'\x01\x11\x12$\x00\x00$', 52), Value(b'\x02"$@\x00\x00%', 53), Value(b'\x02"$@\x01\x00%', 53), Value(b'\x02"$@\x02\x00%', 53), Value(b'\x02"$@\x03\x00%', 53), Value(b'\x02"$@\x04\x00%', 53), Value(b'\x02"$_\xfb\x00%', 53), Value(b'\x02"$_\xfc\x00%', 53), Value(b'\x02"$_\xfd\x00%', 53), Value(b'\x02"$_\xfe\x00%', 53), Value(b'\x02"$_\xff\x00%', 53), Value(b'\x02"$O\xfc\x00%', 53), Value(b'\x02"$O\xfd\x00%', 53), Value(b'\x02"$O\xfe\x00%', 53), Value(b'\x02"$O\xff\x00%', 53), Value(b'\x02"$A\x00\x00%', 53), Value(b'\x02"$B\x00\x00%', 53), Value(b'\x02"$C\x00\x00%', 53), Value(b'\x02"$D\x00\x00%', 53), Value(b'\x04DH\x80\x00\x00&', 54), Value(b'\x04DH\x80\x01\x00&', 54), Value(b'\x04DH\x80\x02\x00&', 54), Value(b'\x04DH\x80\x03\x00&', 54), Value(b'\x04DH\x80\x04\x00&', 54), Value(b'\x04DH\xbf\xfb\x00&', 54), Value(b'\x04DH\xbf\xfc\x00&', 54), Value(b'\x04DH\xbf\xfd\x00&', 54), Value(b'\x04DH\xbf\xfe\x00&', 54), Value(b'\x04DH\xbf\xff\x00&', 54), Value(b'\x04DH\x9f\xfc\x00&', 54), Value(b'\x04DH\x9f\xfd\x00&', 54), Value(b'\x04DH\x9f\xfe\x00&', 54), Value(b'\x04DH\x9f\xff\x00&', 54), Value(b'\x04DH\x81\x00\x00&', 54), Value(b'\x04DH\x82\x00\x00&', 54), 
Value(b'\x04DH\x83\x00\x00&', 54), Value(b'\x04DH\x84\x00\x00&', 54), Value(b"\x08\x88\x91\x00\x00\x00'", 55), Value(b"\x08\x88\x91\x00\x01\x00'", 55), Value(b"\x08\x88\x91\x00\x02\x00'", 55), Value(b"\x08\x88\x91\x00\x03\x00'", 55), Value(b"\x08\x88\x91\x00\x04\x00'", 55), Value(b"\x08\x88\x91\x7f\xfb\x00'", 55), Value(b"\x08\x88\x91\x7f\xfc\x00'", 55), Value(b"\x08\x88\x91\x7f\xfd\x00'", 55), Value(b"\x08\x88\x91\x7f\xfe\x00'", 55), Value(b"\x08\x88\x91\x7f\xff\x00'", 55), Value(b"\x08\x88\x91?\xfc\x00'", 55), Value(b"\x08\x88\x91?\xfd\x00'", 55), Value(b"\x08\x88\x91?\xfe\x00'", 55), Value(b"\x08\x88\x91?\xff\x00'", 55), Value(b"\x08\x88\x91\x01\x00\x00'", 55), Value(b"\x08\x88\x91\x02\x00\x00'", 55), Value(b"\x08\x88\x91\x03\x00\x00'", 55), Value(b"\x08\x88\x91\x04\x00\x00'", 55), Value(b'\x11\x11"\x00\x00\x00(', 56), Value(b'\x11\x11"\x00\x01\x00(', 56), Value(b'\x11\x11"\x00\x02\x00(', 56), Value(b'\x11\x11"\x00\x03\x00(', 56), Value(b'\x11\x11"\x00\x04\x00(', 56), Value(b'\x11\x11"\xff\xfb\x00(', 56), Value(b'\x11\x11"\xff\xfc\x00(', 56), Value(b'\x11\x11"\xff\xfd\x00(', 56), Value(b'\x11\x11"\xff\xfe\x00(', 56), Value(b'\x11\x11"\xff\xff\x00(', 56), Value(b'\x11\x11"\x7f\xfc\x00(', 56), Value(b'\x11\x11"\x7f\xfd\x00(', 56), Value(b'\x11\x11"\x7f\xfe\x00(', 56), Value(b'\x11\x11"\x7f\xff\x00(', 56), Value(b'\x11\x11"\x01\x00\x00(', 56), Value(b'\x11\x11"\x02\x00\x00(', 56), Value(b'\x11\x11"\x03\x00\x00(', 56), Value(b'\x11\x11"\x04\x00\x00(', 56), Value(b'\x00""w3\x00\x00', 49), Value(b'\x00""w3\x00\x01', 49), Value(b'\x00""w3\x00\x02', 49), Value(b'\x00""w3\x00\x03', 49), Value(b'\x00""w3\x00\x04', 49), Value(b'\x00""w3\xff\xfb', 49), Value(b'\x00""w3\xff\xfc', 49), Value(b'\x00""w3\xff\xfd', 49), Value(b'\x00""w3\xff\xfe', 49), Value(b'\x00""w3\xff\xff', 49), Value(b'\x00""w3\x7f\xfc', 49), Value(b'\x00""w3\x7f\xfd', 49), Value(b'\x00""w3\x7f\xfe', 49), Value(b'\x00""w3\x7f\xff', 49), Value(b'\x00""w3\x01\x00', 49), Value(b'\x00""w3\x02\x00', 49), Value(b'\x00""w3\x03\x00', 49), Value(b'\x00""w3\x04\x00', 49)] objects = list() objects.append(Field("test0", b"\x11\x11", fuzz="std")) objects.append(Field("test1", b"\x22", fuzz="std")) objects.append(Field("test2", b"\x33\x33", slice(9, 17), fuzz="std")) objects.append(Field("length", b"\x00\x00", fuzz="std")) functions = list() functions.append(length("length", "test0", "test2")) d = Dizz("test", objects, functions, fuzz="std") self.assertEqual([i for i in d], expected) def test_length(self): objects = list() objects.append(Field("test0", b"\x01\xff", 10, "std")) objects.append(Field("test1", b"\xab", 8, "std")) objects.append(Field("test2", b"\x00\xff", 12, "std")) d = Dizz("test", objects, fuzz="std") self.assertEqual(len(list(d)), d.length()) def test_start_at_std(self): objects = list() objects.append(Field("test0", b"\x01\xff", 10, "std")) objects.append(Field("test1", b"\xab", 8, "std")) objects.append(Field("test2", b"\x00\xff", 12, "std")) objects.append(Field("length", b"\x00\x00", fuzz="std")) functions = list() functions.append(length("length")) excepted = list(Dizz("test", objects, functions, fuzz="std")) for i in range(len(excepted)): got = list(Dizz("test", objects, functions, fuzz="std", start_at=i)) self.assertEqual(excepted[i:], got) def test_start_at_full(self): objects = list() objects.append(Field("test0", b"\x01\xff", 10, "std")) objects.append(Field("test1", b"\xab", 8, "std")) objects.append(Field("test2", b"\x00\xff", 12, "std")) excepted = list(Dizz("test", objects, fuzz="full")) for i in 
range(len(excepted), 4): self.assertEqual(excepted[i:], list(Dizz("test", objects, fuzz="full", start_at=i))) def test_load(self): expected = Value(b'\x00\x00\x00\x00\x00\n\x18\x00\x00\x00', 80) d = load_dizz("test", "modules_src/demo/demo/dizz/demo.dizz") self.assertEqual(first(d), expected) def test_import(self): expected = [Value(b'\n\xed\xcc', 20), Value(b'\x02\xed\xcc', 20), Value(b'\x06\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\x0e\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xec\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xee\xcc', 20), Value(b'\n\xef\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20), Value(b'\n\xed\xcc', 20)] objects = list() objects.append(Field("test0", b"\x01", 2, "full")) objects.append(Field("test1", b"\xff", 8, "std")) def func1(dizzy_iterator): dizzy_iterator["test1"] = b"\xaa" d1 = Dizz("test_import", objects, [(func1, BOTH)], fuzz="std") objects = list() objects.append(Field("test0", b"\x02", 2, "full")) objects.append(Field("test1", b"\xff", 8, "std")) objects.append(d1) def func0(dizzy_iterator): dizzy_iterator["test1"] = b"\xbb" dizzy_iterator["test_import"]["test1"] = b"\xcc" d0 = Dizz("test", objects, [(func0, BOTH)], fuzz="std") self.assertEqual(list(d0), expected) def test_int_assignment(self): objects = list() objects.append(Field("test0", b"\xaa", 10, "full", endian="<")) objects.append(Field("test1", b"\xff", 8, "std")) d0 = Dizz("test", objects, fuzz="std") d0_iter = iter(d0) d0_iter["test0"] = 1337 self.assertEqual(d0_iter["test0"].byte, b'9\x05') def test_START_END(self): objects = list() objects.append(Field("test0", b"\x00")) objects.append(Field("test1", b"\xff\xff")) objects.append(Field("test2", b"\xaa")) d = Dizz("test", objects, [length("test1", endian="<")], fuzz="std") d_iter = iter(d) next(d_iter) self.assertEqual(d_iter[START], Value(b"\x00")) self.assertEqual(d_iter["test1"], Value(b"\x20\x00")) self.assertEqual(d_iter[END], Value(b"\xaa")) if __name__ == '__main__': main() ```
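The tests above double as API documentation: a `Dizz` is a named list of `Field` objects plus optional functions such as `length`, and iterating it yields fuzzed `Value` objects. Distilled into a minimal usage sketch, with the imports taken from the test file:
```python
from dizzy.dizz import Dizz
from dizzy.objects.field import Field
from dizzy.functions.length import length

objects = [
    Field("test0", b"\x01\xff", 10, "std"),
    Field("test1", b"\xab", 8, "std"),
    Field("test2", b"\x00\xff", 12, "std"),
]
d = Dizz("demo", objects, [length("test1")], fuzz="std")
for value in d:
    print(value)  # each iteration yields one fuzzed Value
    break
```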
{ "source": "0xC0ncord/TURRPG2", "score": 2 }
#### File: 0xC0ncord/TURRPG2/simple_redirect.py ```python import http.server, socketserver import urllib.parse import os import signal PORT = 8000 ALLOWED_EXTENSIONS = ('ukx', 'ut2', 'uax', 'usx', 'u', 'utx') ASSET_DIRS = ('Animations', 'Maps', 'Sounds', 'StaticMeshes', 'System', 'Textures') USE_COMPRESSION = True TEMP_COMPRESSED_DIR = ".redirect" import subprocess #TODO get rid of this when we use zlib UCC_PATH = "System/ucc.exe" #COMPRESS_OBJ = None PROCESSING_FILES = [] class UT2K4RedirectHandler(http.server.SimpleHTTPRequestHandler): # Compress a file, 1Gb at a time def compress_file(self, infilepath: str, outfilepath: str) -> None: global PROCESSING_FILES print("Compressing file {} to {}".format(infilepath, outfilepath)) # Append this file to files that we are currently compressing # This makes any further requests for it wait until it is complete PROCESSING_FILES.append(infilepath) # FIXME temporary workaround for zlib not cooperating subprocess.run(["wine", UCC_PATH, "compress", "../{}".format(infilepath).replace("/", "\\")], stdout=subprocess.PIPE, stderr=subprocess.PIPE) subprocess.run(["mv", "{}.uz2".format(infilepath), outfilepath], stdout=subprocess.PIPE, stderr=subprocess.PIPE) # TODO # Open this file #infile = open(infilepath, 'rb') # Open the output file #outfile = open(outfilepath, 'wb+') # Get input file's total size #infile.seek(0, 2) #size_total = infile.tell() # Seek back to beginning #infile.seek(0) # Start compressing and writing data, 1Gb at a time #data = bytes() #read = 0 #size = 1073741824 # 1Gb #while read < size_total: # outfile.write(COMPRESS_OBJ.compress(infile.read(size))) # read = read + size #outfile.write(COMPRESS_OBJ.flush()) # Close handles #infile.close() #outfile.close() # Remove this file from the processing list PROCESSING_FILES.remove(infilepath) def do_GET(self) -> None: # Parse query data to find out what was requested parsedParams = urllib.parse.urlparse(self.path) # Only respond if the requested file ends with an allowed file extension if not parsedParams.path.endswith(ALLOWED_EXTENSIONS) and not parsedParams.path.endswith('uz2'): print("Request did not end with an allowed file extension; sending 404 response.") self.send_response(404) self.end_headers() return # Only respond to the 'Unreal' user-agent if self.headers.get('User-Agent') != "Unreal": print("Request did not contain 'User-Agent: Unreal' header; sending 404 response.") self.send_response(404) self.end_headers() return # Only respond if the client sends a 'Connection: close' if self.headers.get('Connection') != "close": print("Request did not contain 'Connection: close' header; sending 404 response.") self.send_response(404) self.end_headers() return # Unquote the requested path before looking in the filesystem filepath = urllib.parse.unquote(parsedParams.path) if (filepath.endswith('.uz2') and not USE_COMPRESSION) or (not filepath.endswith('.uz2') and USE_COMPRESSION): # Can't do it print("Request for compressed/uncompressed file did not match configured mode.") self.send_response(404) self.end_headers() return if not USE_COMPRESSION: # See if the file requested exists if os.access('.' + os.sep + filepath, os.R_OK): # File exists, serve it up http.server.SimpleHTTPRequestHandler.do_GET(self); else: # Try to find out where this file lives foundIt = False for asset_dir in ASSET_DIRS: if os.access('.' + os.sep + asset_dir + os.sep + filepath, os.R_OK): foundIt = True self.send_response(200) self.end_headers() # Found it, serve it up with open('.' 
+ os.sep + asset_dir + os.sep + filepath, 'rb') as fp:
                        self.copyfile(fp, self.wfile)
                    break
            if not foundIt:
                # Didn't find it
                self.send_response(404)
                self.end_headers()
        else:
            # Check our temporary directory for the file in its compressed form if it exists
            if os.access(TEMP_COMPRESSED_DIR + os.sep + filepath, os.R_OK):
                self.send_response(200)
                self.end_headers()
                # Serve it up
                with open(TEMP_COMPRESSED_DIR + os.sep + filepath, 'rb') as fp:
                    self.copyfile(fp, self.wfile)
            else:
                # Need to find the uncompressed file first
                uncomp_filepath = filepath[:-len('.uz2')]
                # See if this file is already being compressed at this time
                if filepath in PROCESSING_FILES:
                    # Wait for it and then serve it up
                    while filepath in PROCESSING_FILES:
                        time.sleep(1)
                    # It's done
                    self.send_response(200)
                    self.end_headers()
                    # Serve it up
                    try:
                        with open(TEMP_COMPRESSED_DIR + os.sep + filepath, 'rb') as fp:
                            self.copyfile(fp, self.wfile)
                    finally:
                        return
                foundIt = False
                for asset_dir in ASSET_DIRS:
                    if os.access('.' + os.sep + asset_dir + os.sep + uncomp_filepath, os.R_OK):
                        foundIt = True
                        # Found it, so let's compress it
                        self.compress_file('.' + os.sep + asset_dir + os.sep + uncomp_filepath,
                                           TEMP_COMPRESSED_DIR + os.sep + filepath)
                        self.send_response(200)
                        self.end_headers()
                        # Serve it up
                        try:
                            with open(TEMP_COMPRESSED_DIR + os.sep + filepath, 'rb') as fp:
                                self.copyfile(fp, self.wfile)
                        finally:
                            return
                if not foundIt:
                    # Didn't find it
                    self.send_response(404)
                    self.end_headers()


# Ready, set, go!
def run() -> None:
    global USE_COMPRESSION
    global TEMP_COMPRESSED_DIR
    #global COMPRESS_OBJ

    # Signal handlers receive (signum, frame); default them so the function
    # can also be called directly from the except block below.
    def clean_up(signum=None, frame=None) -> None:
        httpd.shutdown()
        if USE_COMPRESSION:
            shutil.rmtree(TEMP_COMPRESSED_DIR)

    signal.signal(signal.SIGTERM, clean_up)
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)

    try:
        # Setup and bind
        Handler = UT2K4RedirectHandler
        httpd = socketserver.TCPServer(("", PORT), Handler)
    except OSError:
        return

    try:
        # Set up things needed for compression if enabled
        if USE_COMPRESSION:
            try:
                if not os.path.exists(TEMP_COMPRESSED_DIR):
                    # Make our temporary holding area for compressed files
                    os.mkdir(TEMP_COMPRESSED_DIR)
                # TODO
                # Import zlib and get ourselves a compression object for working with them
                #import zlib
                #COMPRESS_OBJ = zlib.compressobj()
                # time is needed to sleep while waiting for compressed files
                import time
                # shutil needed to remove the temporary directory tree containing compressed files on cleanup
                import shutil
            except (OSError, ImportError):
                # Whatever, just disable compression
                USE_COMPRESSION = False
        # Start
        httpd.serve_forever()
    except:
        # Clean up
        clean_up()


# Main
if __name__ == "__main__":
    import argparse
    # Setup arguments
    parser = argparse.ArgumentParser(description="A simple UT2004 redirect server.")
    parser.add_argument('-d', action='store', dest='serve_dir', help="Set the base directory to serve files.")
    parser.add_argument('-c', action='store_true', dest='compression', help="Compress files to '.uz2' before serving.")
    parser.add_argument('--no-daemonize', '-n', action='store_true', dest='no_daemonize', help="Run in the foreground.")
    args = parser.parse_args()

    # If running in foreground, go ahead
    if args.no_daemonize:
        if args.serve_dir is not None:
            os.chdir(args.serve_dir + os.sep)
        USE_COMPRESSION = args.compression
        run()
    # Else fork ourselves running in the background
    else:
        import subprocess
        cmd = ['python3', __file__]
        if args.serve_dir is not None:
            cmd += ['-d', args.serve_dir]
        if args.compression:
            cmd += ['-c']
        cmd += ['-n']
        proc = subprocess.Popen(cmd)
        # Return the PID
        print(proc.pid)
```
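`do_GET` rejects anything that does not look like the UT2004 client: the path must end in an allowed extension, and the request must carry `User-Agent: Unreal` and `Connection: close`. A request that passes all three checks (with compression disabled) can be reproduced with `http.client`:
```python
import http.client

conn = http.client.HTTPConnection('localhost', 8000)
conn.request('GET', '/MyMap.ut2', headers={
    'User-Agent': 'Unreal',   # required by the handler
    'Connection': 'close',    # required by the handler
})
resp = conn.getresponse()
print(resp.status)  # 200 if the file is found under one of the asset dirs
```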
{ "source": "0xC70FF3/geotools", "score": 2 }
#### File: geotools/geotools/__main__.py ```python from geotools.utils.polygon import Polygon import time def main(): p = Polygon([ [2.3957061767578125, 48.89812957181126], [2.399139404296875, 48.890906639609454], [2.3996543884277344, 48.88413419286922], [2.4090957641601562, 48.8801831753449], [2.412700653076172, 48.876570543321755], [2.414073944091797, 48.8712640169951], [2.414073944091797, 48.86358549323598], [2.4164772033691406, 48.849354525964365], [2.4125289916992188, 48.83466754148594], [2.4109840393066406, 48.833989576686], [2.4151039123535156, 48.83342459901093], [2.4224853515625, 48.83591045312373], [2.4199104309082027, 48.8414466848806], [2.4199104309082027, 48.84359322235957], [2.422313690185547, 48.84460997116046], [2.4247169494628906, 48.84189859515306], [2.4372482299804688, 48.84099477053062], [2.4381065368652344, 48.84483591253515], [2.440166473388672, 48.844497000090826], [2.4406814575195312, 48.845965604118284], [2.4465179443359375, 48.84585263610676], [2.4468612670898438, 48.844948882840264], [2.4631690979003906, 48.842689428318415], [2.466602325439453, 48.83997794833464], [2.4702072143554688, 48.83647540276501], [2.469348907470703, 48.833876581660704], [2.4657440185546875, 48.83184262762493], [2.4647140502929688, 48.827774471831894], [2.466602325439453, 48.827322434132746], [2.4631690979003906, 48.81884597223549], [2.4587059020996094, 48.81681140805428], [2.4494361877441406, 48.81805476264432], [2.441883087158203, 48.81794173168324], [2.4339866638183594, 48.81941111429733], [2.4302101135253906, 48.823140892101684], [2.4199104309082027, 48.82415805606007], [2.4114990234375, 48.82483615389669], [2.4025726318359375, 48.829695586560575], [2.364120483398437, 48.81590713080018], [2.3557090759277344, 48.81579409499648], [2.351932525634765, 48.81828082380189], [2.346954345703125, 48.81579409499648], [2.33184814453125, 48.816924441564105], [2.332019805908203, 48.818393853998344], [2.291507720947265, 48.826983403182346], [2.2789764404296875, 48.832407623139915], [2.272796630859375, 48.82788748061953], [2.267303466796875, 48.827774471831894], [2.2667884826660156, 48.83161662763493], [2.270050048828125, 48.832972612283456], [2.267475128173828, 48.83466754148594], [2.2630119323730464, 48.833876581660704], [2.25494384765625, 48.83466754148594], [2.2513389587402344, 48.838961105496054], [2.2508239746093746, 48.84291537835776], [2.252025604248047, 48.84517482268593], [2.2420692443847656, 48.847773057644694], [2.2394943237304688, 48.850145241393776], [2.2235298156738277, 48.85342092943525], [2.2279930114746094, 48.86584400488787], [2.2322845458984375, 48.87024780944447], [2.239837646484375, 48.87171565817035], [2.245502471923828, 48.876570543321755], [2.255115509033203, 48.87408670745326], [2.2585487365722656, 48.88063473600221], [2.2774314880371094, 48.87815110193676], [2.279834747314453, 48.87894136251639], [2.2806930541992188, 48.883005362568866], [2.2848129272460938, 48.886617529842795], [2.2930526733398438, 48.890455171696374], [2.294769287109375, 48.889890831072385], [2.310047149658203, 48.897113910028416], [2.3186302185058594, 48.89982229558958], [2.3201751708984375, 48.90106358992757], [2.384033203125, 48.902417694046676], [2.3919296264648438, 48.90106358992757], [2.3957061767578125, 48.89812957181126] ]) t = time.time() print((p.hashcodes(min_precision=2, max_precision=7, cover=False))) print("elapsed time: {0:.2f}s".format(time.time() - t)) if __name__ == "__main__": main() ``` #### File: geotools/utils/path.py ```python from geotools.utils import * class Path: def 
__init__(self, path):
        if not path or len(path) < 2:
            raise ValueError("must have at least two way points on the path")
        self._path = path

    def hashes(self, precision=DEFAULT_PRECISION):
        """Return the list of geohashes along the path at the specified geohash length."""
        return Path._hashes(self._path, precision)

    @staticmethod
    def _hashes(path, precision=DEFAULT_PRECISION):
        if len(path) == 2:
            line = path
            curr_hash = geohash.encode(longitude=line[0][0], latitude=line[0][1], precision=precision)
            dest_hash = geohash.encode(longitude=line[1][0], latitude=line[1][1], precision=precision)
            hashes = {dest_hash: None}
            n, e, w, s = True, True, True, True
            while curr_hash != dest_hash:
                hashes[curr_hash] = None
                bbox0 = geohash.bbox(curr_hash)
                # south, east and north edges of the current geohash cell
                line0 = ((bbox0["w"], bbox0["s"]), (bbox0["e"], bbox0["s"]))
                line1 = ((bbox0["e"], bbox0["s"]), (bbox0["e"], bbox0["n"]))
                line2 = ((bbox0["e"], bbox0["n"]), (bbox0["w"], bbox0["n"]))
                if s and intersects(line, line0):
                    curr_hash, n = south(curr_hash), False
                elif e and intersects(line, line1):
                    curr_hash, w = east(curr_hash), False
                elif n and intersects(line, line2):
                    curr_hash, s = north(curr_hash), False
                else:
                    curr_hash, e = west(curr_hash), False
        else:
            # recurse via the static helper; calling the instance method
            # (Path.hashes) here would pass the list in as `self` and crash
            hashes = dict((key, None) for key in Path._hashes(path[0:2], precision))
            hashes.update(dict((key, None) for key in Path._hashes(path[1:], precision)))
        return list(hashes.keys())
```
#### File: utils/tests/test_path.py
```python
from unittest import TestCase
from unittest import main

from geotools.utils.path import Path


class GeoHashTestCase(TestCase):

    def test_hashes(self):
        london = (-0.123656, 51.51283)
        west_molesey = (-0.373535, 51.394043)
        hashes = Path([london, west_molesey]).hashes(5)
        self.assertTrue({'gcpu9', 'gcpuf', 'gcpug', 'gcpu2', 'gcpuu', 'gcpud',
                         'gcpu8', 'gcpsr', 'gcpvh', 'gcpvj'}.issubset(hashes))
        self.assertEqual(10, len(hashes))


if __name__ == '__main__':
    main()
```
#### File: utils/tests/test_polygon.py
```python
import geohash

from unittest import TestCase
from unittest import main

from geotools.utils.polygon import Polygon


class GeoHashTestCase(TestCase):
    """polygon: [(2.32378006, 48.86403720), (2.35691071, 48.88616602), (2.37905502, 48.85816465)]
    | out     | u09wj5 | ((2.3291015625, 48.8836669921875), (2.340087890625, 48.88916015625)) |
    | partial | u09tvy | ((2.362060546875, 48.856201171875), (2.373046875, 48.8616943359375)) |
    | "       | u09tvz | ((2.362060546875, 48.8616943359375), (2.373046875, 48.8671875)) |
    | "       | u09wj0 | ((2.3291015625, 48.8671875), (2.340087890625, 48.8726806640625)) |
    | in      | u09wj2 | ((2.340087890625, 48.8671875), (2.35107421875, 48.8726806640625)) |
    | edge    | u09wje | ((2.362060546875, 48.88916015625), (2.35107421875, 48.8836669921875)) |
    """

    def setUp(self):
        self.polygon = Polygon([
            (2.32378006, 48.86403720),
            (2.35691071, 48.88616602),
            (2.37905502, 48.85816465)])

    def test_bbox(self):
        self.assertDictEqual({
            "w": 2.32378006,
            "s": 48.85816465,
            "e": 2.37905502,
            "n": 48.88616602,
        }, self.polygon.bbox()
        )

    def test_contains(self):
        u09wj5 = geohash.bbox("u09wj5")
        u09wj2 = geohash.bbox("u09wj2")
        self.assertFalse(self.polygon.contains((u09wj5["w"], u09wj5["s"])))  # sw of u09wj5
        self.assertTrue(self.polygon.contains((u09wj2["w"], u09wj2["s"])))  # sw of u09wj2
        self.assertTrue(self.polygon.contains(self.polygon._coordinates[0]))  # first point of polygon

    def test_intersects(self):
        bboxes = {
            "u09wj5": False,  # out
            "u09tvy": True,  # partially in
            "u09tvz": True,  # partially in
            "u09wj0": True,  # partially in
            "u09wj2": False,  # in
            "u09wje": True,  # edge crosses
        }
        for hashcode, expected in bboxes.items():
            bbox
= geohash.bbox(hashcode) self.assertEqual(expected, self.polygon.intersects(bbox)) def test_filter(self): hashes_to_be_checked = [ "u09wj5", # out "u09tvy", # partially in "u09tvz", # partially in "u09wj0", # partially in "u09wj2", # in "u09wje", # edge crosses ] inside, partially_inside = self.polygon._filter(hashes_to_be_checked) self.assertEqual(["u09wj2"], inside) self.assertEqual(128, len(partially_inside)) for hashcode in partially_inside: self.assertTrue( hashcode.startswith("u09tvy") or hashcode.startswith("u09tvz") or hashcode.startswith("u09wj0") or hashcode.startswith("u09wje") ) def test_filter_with_englobing_bbox(self): inside, partially_inside = self.polygon._filter(["u09"]) self.assertEqual([], inside) self.assertEqual(32, len(partially_inside)) for hashcode in partially_inside: self.assertTrue(hashcode.startswith("u09")) def test_hashes(self): self.assertListEqual( ['u09tvx', 'u09wj2', 'u09wj8', 'u09wj9'], self.polygon.hashcodes(min_precision=2, max_precision=6)) self.assertListEqual( ['u09tvx', 'u09wj2', 'u09wj8', 'u09wj9'], self.polygon.hashcodes(min_precision=6, max_precision=6)) hashcodes = self.polygon.hashcodes(min_precision=3, max_precision=7) self.assertEqual(190, len(hashcodes)) self.assertTrue({'u09tvx', 'u09wj2', 'u09wj8', 'u09wj9'}.issubset(hashcodes)) for hashcode in hashcodes: self.assertTrue(hashcode.startswith("u09")) hashcodes = self.polygon.hashcodes(min_precision=3, max_precision=9) self.assertEqual(10149, len(hashcodes)) self.assertTrue({'u09tvx', 'u09wj2', 'u09wj8', 'u09wj9'}.issubset(hashcodes)) for hashcode in hashcodes: self.assertTrue(hashcode.startswith("u09")) if __name__ == '__main__': main() ```
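The classes above lean on a handful of geohash primitives (`geohash.encode`, `geohash.bbox`). A quick sanity check of those primitives, assuming the `python-geohash` package that the imports suggest:

```python
# Encode a point and inspect its cell bounds; London at precision 5 lands
# in 'gcpvj', which also appears in the test_path expectations above.
import geohash

code = geohash.encode(longitude=-0.123656, latitude=51.51283, precision=5)
print(code)                # 'gcpvj'
print(geohash.bbox(code))  # {'s': ..., 'w': ..., 'n': ..., 'e': ...}
```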
{ "source": "0xC70FF3/maps-playground", "score": 3 }
#### File: maps-playground/geojson/__geojson2geohashes.py
```python
import sys
import json
import os

import geojson

from pygeotools.utils.polygon import Polygon

PRECISION = 5
PROGRESSBAR_LEN = 50


def main(args=None):
    # jsondir = args.jsondir if args and args.jsondir else "."
    errors = list()
    step = 1
    file = args[1] if len(args) > 1 else "data.geojson"
    output_dir = args[2] if len(args) > 2 else "json"
    with open(file) as data_file:
        data = data_file.read()
    print("Step {0:d} : IMPORT {1:s}".format(step, os.path.abspath(file)), flush=True)
    features = geojson.loads(data).features
    # offset/length are read from argv as well; the previous attribute-style
    # access (args.offset) crashed because args is a plain list here
    offset = int(args[3]) if len(args) > 3 else 0
    length = int(args[4]) if len(args) > 4 else len(features)
    total, errs, count = min(offset + length, len(features)) - offset, 0, 0
    for feature in features[offset:min(offset + length, len(features))]:
        outfile_name = os.path.join(output_dir, '{0:s}.json'.format(feature.properties['NAME']))
        count += 1
        progress = int(float(count) / total * PROGRESSBAR_LEN)
        print("Importing [{0:s}>{1:s}] {2:d}/{3:d} - {4:s} {5:s}".format(
            "=" * progress,
            " " * (PROGRESSBAR_LEN - progress),
            count,
            total,
            feature.properties['NAME'],
            "({0:d} error{1:s} found)".format(errs, "s" if errs > 1 else "") if errs else ""
        ), end="\r", flush=True)
        try:
            if feature.geometry.type == "Polygon":
                p = Polygon(feature.geometry.coordinates[0])
                hashcodes = p.fcover(precision=PRECISION)
                with open(outfile_name, 'w') as outfile:
                    json.dump(hashcodes, outfile)
            elif feature.geometry.type == "MultiPolygon":
                hashcodes = list()
                for polygon in feature.geometry.coordinates:
                    p = Polygon(polygon[0])
                    hashcodes.extend(p.fcover(precision=PRECISION))
                with open(outfile_name, 'w') as outfile:
                    json.dump(hashcodes, outfile)
        except Exception:
            errs += 1
            errors.append(feature.properties["NAME"])
    total += count
    step += 1
    print(flush=True)
    return total, len(errors), errors


if __name__ == "__main__":
    main(sys.argv)
```
#### File: maps-playground/www.geofabrik.de/__poly2geojson.py
```python
import json
import os


def main():
    features = {
        "type": "FeatureCollection",
        "features": []
    }
    directory = "poly/"
    for file in os.listdir(directory):
        if file.endswith(".poly"):
            name = os.path.basename(file).split(".")[0]
            with open(os.path.join(directory, file)) as poly_file:
                polygon = list()
                polygons = list()
                line = poly_file.readline()
                while line:
                    line = poly_file.readline()
                    if line.startswith(" "):
                        coordinates = line.split()
                        polygon.append([float(coordinates[0]), float(coordinates[1])])
                    elif len(polygon) > 0:
                        polygons.append(polygon)
                        polygon = list()
            feature = {
                "properties": {"NAME": name},
                "type": "Feature",
                "geometry": {
                    "type": "MultiPolygon" if len(polygons) > 1 else "Polygon",
                    "coordinates": [polygons] if len(polygons) > 1 else polygons
                }
            }
            features["features"].append(feature)
    with open('countries.geojson', 'w') as outfile:
        json.dump(features, outfile)


if __name__ == "__main__":
    main()
```
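The feature layout `__poly2geojson.py` emits can be illustrated without any `.poly` input at all. A self-contained sketch, using a made-up triangle instead of a real boundary file:

```python
# Build one GeoJSON feature the way the converter above does, then print
# the resulting FeatureCollection.
import json

ring = [[2.32, 48.86], [2.35, 48.88], [2.37, 48.85]]
feature = {
    "properties": {"NAME": "example"},
    "type": "Feature",
    "geometry": {"type": "Polygon", "coordinates": [ring]},
}
print(json.dumps({"type": "FeatureCollection", "features": [feature]}, indent=2))
```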
{ "source": "0xcabrex/pesuDetailsSniper", "score": 4 }
#### File: 0xcabrex/pesuDetailsSniper/details_sniper.py ```python import requests import os from datetime import date def input_variables(): campuses = [] year = [] branches = [] name = input("Enter the name of the student: ").strip() if name == '': print("No name entered, exiting...") exit(0) campus_str = input("Enter the campus number (1=RR, 2=EC): ") if campus_str.find(','): for campus_str in campus_str.split(','): if campus_str == '1' or campus_str == '2': campuses.append(campus_str) elif campus_str == '': campuses = ["1", "2"] else: print(f"Error, {campus_str} not found!") exit(-1) try: year = input("Enter the batch year: ") year = int(year) num = year year_iter = 0 while num > 0: num = int(num / 10) year_iter += 1 if year_iter == 4: if int(year/100) == 20: curr_date = str(date.today()).split('-') curr_date.remove(curr_date[2]) if int(year%100) < int(curr_date[0])%100: year = year % 100 elif int(year%100) == int(curr_date[0])%100 and int(curr_date[1]) > 9: year = year % 100 else: print(f"{year} batch doesnt exist yet, exiting...") exit(-1) else: print("Enter an year in this decade, exiting...") exit(-1) elif year_iter == 2: curr_date = str(date.today()).split('-') curr_date.remove(curr_date[2]) if (year == int(curr_date[0])%100 and int(curr_date[1]) < 9) or (year > int(curr_date[0])%100): print(f"20{year} batch does not exist yet, exiting...") exit(0) else: print("Enter a year in this millenia at least :|") exit(-1) except ValueError: print("Please enter only numbers, exiting...") exit(-1) branch_str = input("Enter branch(EC, CS, EE): ").strip() if branch_str.find(','): for branch_str in branch_str.split(','): if len(branch_str) > 2 or branch_str.isdigit(): print("Please pick a valid branch, exiting...") exit(-1) elif branch_str == '': branches = ["CS", "EC", "EE"] else: branches.append(branch_str.upper()) return name, campuses, year, branches def attack_vector(name, campuses, year, branches): counter = 0 disconnect_counter = 0 disconnect_status_array = [] print("\nAttack in progress...") print("\nDetails: ") print(f"Name: {name}") print(f"Campus: {campuses}") print(f"Year: 20{year}") print(f"Branch: {branches}\n") print() print() for campus in campuses: print ("\033[A \033[A") print(f"searching {campus} campus") print() for branch in branches: print ("\033[A \033[A") print(f"Serching {branch} branch") while 1000 > counter: SRN = f"PES{campus}UG{year}{branch.upper()}{counter:03}" payload = {"loginId": f"{SRN}"} response = requests.post("https://www.pesuacademy.com/Academy/getStudentClassInfo", data=payload) print(f"Trying SRN number: {SRN}", end="\r") if response.status_code == 200: if response.text.lower().find(name.lower()) != -1: print(response.text.strip()) print() print("FOUND IT!") print(f"SRN: {SRN}") if not os.path.isdir("./logs"): os.mkdir("./logs") if not os.path.isdir(f"./logs/{campus}"): os.mkdir(f"./logs/{campus}") if not os.path.isdir(f"./logs/{campus}/20{year}"): os.mkdir(f"./logs/{campus}/20{year}") if not os.path.isdir(f"./logs/{campus}/20{year}/{branch.upper()}"): os.mkdir(f"./logs/{campus}/20{year}/{branch.upper()}") with open(f"./logs/{campus}/20{year}/{branch.upper()}/{name.lower().replace(' ', '_')}_details.html", 'w', encoding='utf-8') as file_handle: file_handle.write(response.text.strip()) print(f"written to file \"./logs/{campus}/20{year}/{branch.upper()}/{name.lower().replace(' ', '_')}_details.html") exit(1) if disconnect_counter != 0: disconnect_status_array.remove(disconnect_status_array[disconnect_counter]) disconnect_counter -= 1 else: 
disconnect_counter += 1 disconnect_status_array.append(response.status_code) if disconnect_counter > 3: print() print("Session timeout") print("ATTACK FAILED") print(f"List of status codes: {disconnect_status_array}") exit(-1) counter = counter + 1 counter = 0 print ("\033[A \033[A") print ("\033[A \033[A") print(f"Could not find details of student \"{name}\" with details provided") if __name__ == "__main__": print("PES University Student Details Sniper - CABREX") name, campuses, year, branches = input_variables() attack_vector(name, campuses, year, branches) ```
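The SRN pattern the script enumerates is easier to see in isolation. A sketch with placeholder values only (these are not real student records):

```python
# The candidate-SRN format from attack_vector(), shown standalone:
# campus digit, "UG", two-digit year, branch code, zero-padded counter.
campus, year, branch = 1, 18, "CS"
for counter in range(3):
    print(f"PES{campus}UG{year}{branch}{counter:03}")
# PES1UG18CS000
# PES1UG18CS001
# PES1UG18CS002
```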
{ "source": "0xcabrex/Rexbot", "score": 2 }
#### File: 0xcabrex/Rexbot/bot.py ```python import discord from discord import Intents import random import time import os from discord.ext import commands from cogs.usefullTools.dbIntegration import * # Get prefix def get_prefix(bot, message): return fetch_prefix(message.guild.id)["prefix"] intents = Intents.default() intents.members = True bot = commands.Bot(command_prefix=get_prefix, case_insensitive=True, intents=intents) bot.remove_command('help') working_directory = os.getcwd() try: for filename in os.listdir('./cogs'): if filename.endswith('.py'): bot.load_extension(f"cogs.{filename[:-3]}") except Exception as e: print("Cogs error: Cannot load cogs") print("\033[5;37;40m\033[1;33;40mWARNING\033[1;33;40m\033[0;37;40m", end=' ') print("Functionality limited!\n") print(f"exception thrown:\n{e}") # Shows command prefix if asked @bot.event async def on_message(message): reply_choices = [ "Hi", "Hi there", "Hey", "Hey there", "Whatsup", "Waddup", "Whats going on", "Hello", "Hello!", "Sup", "Howdy" ] message_var = message.content if message.author.bot: return elif (bot.user in message.mentions) and message_var.lower().find('prefix') != -1: await message.channel.send(f'My command prefix is `{fetch_prefix(message.guild.id)["prefix"]}`, **{message.author.display_name}**') elif bot.user in message.mentions: if message_var.lower().find('awesome') != -1 or message_var.lower().find('cool') != -1 or message_var.lower().find('good') != -1 or message_var.lower().find('nice') != -1 : await message.channel.send(f'Thanks bro 😁') elif message_var.lower().find('bad') != -1 or message_var.lower().find('horrible') != -1 or message_var.lower().find('suck') != -1 or message_var.lower().find('terrible') != -1 or message_var.lower().find('waste') != -1 or message_var.lower().find('fk') != -1 or message_var.lower().find('fuck') != -1: await message.channel.send(f'No you\nI do the basic functions okay I aint dyno or mee6\n\nJesus christ.') elif message_var.lower().find('how are you') != -1 : await message.channel.send(f'I am fine, {message.author.display_name}') else: await message.channel.send(f'{random.choice(reply_choices)}, **{message.author.display_name}**!') if str(message.channel.type) == 'private': if len(message.content.split()) > 20: bugs_channel1 = bot.get_channel(769510637486997514) bugs_channel2 = bot.get_channel(769490617771884565) bugs_channel3 = bot.get_channel(782252610874638347) embed = discord.Embed( title='BUG REPORTED', colour = 0x008000 ) embed.add_field(name='Username', value=message.author) embed.add_field(name='User id', value=message.author.id) embed.add_field(name='Bug: ', value=message.content) if bugs_channel1 is not None: await bugs_channel1.send(embed=embed) await bugs_channel2.send(embed=embed) await bugs_channel3.send(embed=embed) elif bugs_channel2 is not None: await bugs_channel2.send(embed=embed) await bugs_channel3.send(embed=embed) await message.channel.send("Your bug has been reported") else: await message.channel.send("Please enter your bug in more than 20 words, try describing everything") await bot.process_commands(message) # Basic stuff @bot.event async def on_ready(): await bot.change_presence(status=discord.Status.online, activity=discord.Game('with your lives')) print("+[ONLINE] Rexbot is online") @bot.event async def on_member_join(member): try: print(f'+[NEW_MEMBER] {member} has joined the server: {member.guild.name}') channel = None if fetch_join_log_channel(int(member.guild.id)) is not None: channel = 
bot.get_channel(fetch_join_log_channel(int(member.guild.id))["channel_id"]) if channel is not None: embed = discord.Embed( title = 'Member joined the server', description=f'Member **{member.name}** joined the server!', colour=0x008000 ) members = await member.guild.fetch_members().flatten() bot_count = 0 for people in members: if people.bot is True: bot_count += 1 embed.set_thumbnail(url=member.avatar_url) embed.add_field(name='Number of members', value=len(members) - bot_count) embed.add_field(name='Number of bots', value=bot_count) embed.set_footer(text=f'id: {member.id}') await channel.send(embed=embed) else: pass except Exception as e: raise Exception @bot.event async def on_member_remove(member): try: print(f'+[REMOVE_MEMBER] {member} has left the server: {member.guild.name}') delete_warns(member.guild.id, member.id) channel = None if fetch_leave_log_channel(int(member.guild.id)): channel = bot.get_channel(fetch_leave_log_channel(int(member.guild.id))["channel_id"]) if channel is not None: embed = discord.Embed( title = 'Member left the server', description=f'Member **{member.name}** has left the server!', colour=0xFF0000 ) try: members = await member.guild.fetch_members().flatten() bot_count = 0 for people in members: if people.bot is True: bot_count += 1 embed.set_thumbnail(url=member.avatar_url) embed.add_field(name='Number of members', value=len(members) - bot_count) embed.add_field(name='Number of bots', value=bot_count) embed.set_footer(text=f'id: {member.id}') await channel.send(embed=embed) except: pass else: pass except Exception as e: raise Exception @bot.event async def on_guild_channel_delete(channel): join_channel = None if fetch_join_log_channel(int(channel.guild.id)) is not None: join_channel = fetch_join_log_channel(int(channel.guild.id))["channel_id"] if channel.id == join_channel: delete_join_log_channel(int(channel.guild.id)) leave_channel = None if fetch_leave_log_channel(int(channel.guild.id)) is not None: leave_channel = fetch_leave_log_channel(int(channel.guild.id))["channel_id"] if channel.id == leave_channel: delete_leave_log_channel(int(channel.guild.id)) log_channel = None if fetch_mod_log_channel(int(channel.guild.id)) is not None: mod_channel = fetch_mod_log_channel(int(channel.guild.id))["channel_id"] if channel.id == mod_channel: delete_mod_log_channel(int(channel.guild.id)) @bot.event async def on_guild_join(guild): insert_prefix(guild.id, "r$") @bot.event async def on_guild_remove(guild): clear_server_data(guild.id) @bot.event async def on_bulk_message_delete(messages): message_channel = fetch_message_edit_log_channel(int(messages[0].guild.id)) if message_channel is not None: message_channel = fetch_message_edit_log_channel(int(messages[0].guild.id))["channel_id"] message_channel = bot.get_channel(message_channel) embed = discord.Embed( title='Bulk message delete', description=f'{len(messages)} messages deleted in {messages[0].channel.mention}', color=0xff0000 ) if message_channel.id != messages[0].channel.id: await message_channel.send(embed=embed) @bot.event async def on_message_delete(message): message_channel = fetch_message_edit_log_channel(int(message.guild.id)) if message_channel is not None: message_channel = fetch_message_edit_log_channel(int(message.guild.id))["channel_id"] message_channel = bot.get_channel(message_channel) embed = discord.Embed( title='Message deleted', description=f'Message deleted in {message.channel.mention}\nContents:\n```\n{message.content}\n```\n' f'Author of the message:\n{message.author.mention}', color=0xff0000 ) if 
message_channel.id != message.channel.id:
                await message_channel.send(embed=embed)


@bot.event
async def on_message_edit(before, after):
    if not after.author.bot:
        if before.content != after.content:
            message_channel = fetch_message_edit_log_channel(int(before.guild.id))
            if message_channel is not None:
                message_channel = fetch_message_edit_log_channel(int(before.guild.id))["channel_id"]
                message_channel = bot.get_channel(message_channel)
                embed = discord.Embed(
                    title='Message edited',
                    description=f'Message edited in {before.channel.mention}\nBefore:\n```\n{before.content}\n```\n\nAfter:\n```\n{after.content}\n```\n'
                                f'Author of the message:\n{after.author.mention}\n'
                                f'[jump](https://discordapp.com/channels/{after.guild.id}/{after.channel.id}/{after.id}) to the message',
                    color=0xff0000
                )
                if message_channel.id != before.channel.id:
                    await message_channel.send(embed=embed)


# Ping
@bot.command()
async def ping(ctx):
    await ctx.send(f'Ping: {round(bot.latency * 1000)}ms')


# If the user enters something bonkers
@bot.event
async def on_command_error(ctx, error):
    if isinstance(error, commands.CommandNotFound):
        prefix = fetch_prefix(ctx.guild.id)["prefix"]
        await ctx.send(f"Command not found\nPlease use `{prefix}help` to see all commands")


TOKEN = os.getenv("REXBOT_TOKEN")
try:
    if TOKEN is None:
        try:
            with open('./token.0', 'r', encoding='utf-8') as file_handle:
                TOKEN = file_handle.read()
            if TOKEN is not None:
                print('Using token found in token file...')
                bot.run(TOKEN)
            else:
                print("Token error: Token not found")
        except FileNotFoundError:
            print("No token file or environment variable found; quitting.")
    else:
        print('Using token found in environment variable...')
        bot.run(TOKEN)
except discord.errors.LoginFailure:
    print("\033[1;31;40mFATAL ERROR\033[0m\nToken is malformed; invalid token")
```
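The per-guild prefix resolution that `get_prefix()` performs via `fetch_prefix()` boils down to a keyed lookup with a default. A framework-free sketch; the dict below stands in for Rexbot's database layer, which is an assumption, not the bot's actual storage:

```python
# Resolve a command prefix per guild, falling back to the default "r$".
PREFIXES = {1234: "!", 5678: "r$"}  # guild_id -> prefix (placeholder data)

def resolve_prefix(guild_id: int, default: str = "r$") -> str:
    return PREFIXES.get(guild_id, default)

print(resolve_prefix(1234))  # "!"
print(resolve_prefix(9999))  # "r$" (fallback for unknown guilds)
```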
{ "source": "0xCAF2/calcium-py", "score": 3 }
#### File: 0xCAF2/calcium-py/calcium.py ```python class Engine: def __init__(self, code_list): self.env = Environment(code_list) self.breakpoints = [] self.parser = Parser() def run(self): while True: result = self.step() if result == RESULT_EXECUTED: continue elif result == RESULT_BREAKPOINT: return False elif result == RESULT_TERMINATED: return True def step(self): last_index = len(self.env.code) last_index -= 1 if self.env.address.indent == 0: end_of_code = self.parser.parse(self.env.code[last_index]) is_end_of_code = isinstance(end_of_code, EndOfCode) if not is_end_of_code: raise InvalidEndOfCodeError() end_of_code.execute(self.env) return RESULT_TERMINATED else: if self.env.address.line == last_index: return RESULT_TERMINATED line = self.env.code[self.env.address.line] command = self.parser.parse(line) command.execute(self.env) is_end_of_code = isinstance(command, EndOfCode) if is_end_of_code: return RESULT_TERMINATED self.env.skip_to_next_line() next_line = self.env.code[self.env.address.line] keyword = next_line[INDEX_KEYWORD] while keyword == KEYWORD_COMMENT or keyword == KEYWORD_IFS: command = self.parser.parse(next_line) command.execute(self.env) self.env.skip_to_next_line() next_line = self.env.code[self.env.address.line] keyword = next_line[INDEX_KEYWORD] if self.env.address.line in self.breakpoints: return RESULT_BREAKPOINT else: return RESULT_EXECUTED # Keyword KEYWORD_ASSIGNMENT = '=' KEYWORD_ADDITION = '+' KEYWORD_SUBTRACTION = '-' KEYWORD_MULTIPLICATION = '*' KEYWORD_EXPONENTIATION = '**' KEYWORD_DIVISION = '/' KEYWORD_FLOOR_DIVISION = '//' KEYWORD_REMAINDER = '%' KEYWORD_COMPOUND_ADDITION = '+=' KEYWORD_COMPOUND_SUBTRACTION = '-=' KEYWORD_COMPOUND_MULTIPLICATION = '*=' KEYWORD_EQUAL = '==' KEYWORD_NOT_EQUAL = '!=' KEYWORD_LESS_THAN = '<' KEYWORD_LESS_THAN_OR_EQUAL = '<=' KEYWORD_GREATER_THAN = '>' KEYWORD_GREATER_THAN_OR_EQUAL = '>=' KEYWORD_AND = 'and' KEYWORD_OR = 'or' KEYWORD_IS = 'is' KEYWORD_IS_NOT = 'is not' KEYWORD_IN = 'in' KEYWORD_NOT_IN = 'not in' KEYWORD_BIT_AND = '&' KEYWORD_BIT_OR = '|' KEYWORD_BIT_XOR = '^' KEYWORD_LEFT_SHIFT = '<<' KEYWORD_RIGHT_SHIFT = '>>' KEYWORD_NOT = 'not' KEYWORD_NEGATIVE = '-_' KEYWORD_BIT_NOT = '~' KEYWORD_IFS = 'ifs' KEYWORD_IF = 'if' KEYWORD_ELIF = 'elif' KEYWORD_ELSE = 'else' KEYWORD_FOR_RANGE = 'for range' KEYWORD_FOR_EACH = 'for each' KEYWORD_WHILE = 'while' KEYWORD_BREAK = 'break' KEYWORD_CONTINUE = 'continue' KEYWORD_FUNC_DEF = 'def' KEYWORD_CALL = 'call' KEYWORD_RETURN = 'return' KEYWORD_CLASS_DEF = 'class' KEYWORD_TRY = 'try' KEYWORD_EXCEPT = 'except' KEYWORD_RAISE = 'raise' KEYWORD_VARIABLE = 'var' KEYWORD_ATTRIBUTE = 'attr' KEYWORD_SUBSCRIPT = 'sub' KEYWORD_COMMENT = '#' KEYWORD_PASS = 'pass' KEYWORD_END_OF_CODE = 'end' # Index INDEX_INDENT = 0 INDEX_OPTIONS = 1 INDEX_KEYWORD = 2 INDEX_ASSIGNMENT_LHS = 3 # Left Hand Side INDEX_ASSIGNMENT_RHS = 4 # Right Hand Side INDEX_CONDITION = 3 INDEX_FOR_RANGE_VARIABLE_NAME = 3 INDEX_FOR_RANGE_VALUES = 4 INDEX_FOR_EACH_ELEMENT_NAME = 3 INDEX_FOR_EACH_ITERABLE_NAME = 4 INDEX_FUNC_DEF_FUNC_NAME = 3 INDEX_FUNC_DEF_PARAMETERS = 4 INDEX_CALL_LHS = 3 INDEX_CALL_REFERENCE = 4 INDEX_CALL_ARGS = 5 # Arguments INDEX_RETURN_VALUE = 3 INDEX_CLASS_DEF_CLASS_NAME = 3 INDEX_CLASS_DEF_SUPERCLASS_NAME = 4 INDEX_EXCEPT_TYPE_NAME = 3 INDEX_EXCEPT_OBJ_NAME = 4 INDEX_RAISE_EXCEPTION = 3 INDEX_RAISE_ARGS = 4 INDEX_EXPRESSION_KEYWORD = 0 INDEX_VARIABLE_NAME = 1 INDEX_ATTRIBUTE_OBJECT_NAME = 1 INDEX_ATTRIBUTE_PROPERTY_NAMES = 2 INDEX_SUBSCRIPT_REFERENCED_OBJECT = 1 INDEX_SUBSCRIPT_INDEX_EXPR = 2 
INDEX_LEFT_OPERAND = 1 INDEX_RIGHT_OPERAND = 2 INDEX_UNARY_OPERAND = 1 # Result RESULT_TERMINATED = 0 RESULT_EXECUTED = 1 RESULT_BREAKPOINT = 2 class Address: def __init__(self, indent, line): self.indent = indent self.line = line def _copy_address(point): address = Address(point.indent, point.line) return address # BlockKind BLOCK_KIND_IFS = 0 BLOCK_KIND_IF_ELIF_ELSE = 1 BLOCK_KIND_FOR_RANGE = 2 BLOCK_KIND_FOR_EACH = 3 BLOCK_KIND_WHILE = 4 BLOCK_KIND_FUNC_CALL = 5 BLOCK_KIND_CLASS_DEF = 6 BLOCK_KIND_TRY = 7 BLOCK_KIND_EXCEPT = 8 class Block: def __init__(self, kind, address, begin, end): self.kind = kind self.address = _copy_address(address) self.begin = begin self.end = end class Namespace: def __init__(self, nesting_scope, dictobj): self.nesting_scope = nesting_scope # None is allowed self.dictobj = dictobj def register(self, name, obj): self.dictobj[name] = obj def lookup(self, name): if name in self.dictobj: return self.dictobj[name] else: raise NameNotFoundError(name) class GlobalScope(Namespace): pass class FuncScope(Namespace): pass class ClassScope(Namespace): def get_attr(self): return self.dictobj class Inaccessible: def evaluate(self, env): return self class BuiltinFuncObj(Inaccessible): def __init__(self, name, body): self.name = name self.body = body self.selfclass = builtin_type.builtin_function_or_method class Accessible: def evaluate(self, env): return self class FuncObj(Accessible): def __init__(self, name, params, nesting_scope, address): self.name = name self.params = params self.nesting_scope = nesting_scope self.address = _copy_address(address) self.attributes = {} self.selfclass = builtin_type.function def get_attr(self, name): return self.attributes[name] def set_attr(self, name, value): self.attributes[name] = value return True class MethodObj(Inaccessible): def __init__(self, instance, funcobj): self.instance = instance self.funcobj = funcobj self.selfclass = builtin_type.instance_method class ClassObj(Accessible): def __init__(self, name, superclass, attributes): self.name = name self.superclass= superclass self.attributes = attributes def get_attr(self, name): if name in self.attributes: return self.attributes[name] else: attr = self.superclass.get_attr(name) return attr def set_attr(self, name, value): if self.attributes != None: self.attributes[name] = value return True else: return False def get_description(self): return '<class ' + self.name + '>' class Instance(Accessible): def __init__(self, selfclass): self.selfclass= selfclass self.attributes = {} def get_attr(self, name): try: attr = self.attributes[name] return attr except: classattr = self.selfclass.get_attr(name) is_funcobj = isinstance(classattr, FuncObj) if is_funcobj: methodobj = MethodObj(self, classattr) return methodobj else: return classattr def set_attr(self, name, value): self.attributes[name] = value return True class Super(Accessible): def __init__(self, classobj, instance): self.classobj = classobj self.instance = instance self.selfclass = builtin_type.super def get_attr(self, name): currentclass = self.instance.selfclass while True: if currentclass == None: raise SuperCallFailedError() if self.classobj.name != currentclass.name: currentclass = currentclass.superclass continue else: superclass = currentclass.superclass if superclass == None: raise SuperCallFailedError() funcobj = superclass.get_attr(name) is_funcobj = isinstance(funcobj, FuncObj) if funcobj == None or not is_funcobj: raise SuperCallFailedError() methodobj = MethodObj(self.instance, funcobj) return methodobj def 
set_attr(self, name, value):
        return False


class Variable:
    def __init__(self, name):
        self.name = name

    def assign(self, obj, env):
        env.register(self.name, obj)

    def evaluate(self, env):
        value = env.lookup(self.name)
        return value


class Attribute:
    def __init__(self, objname, propertynames):
        self.objname = objname
        self.propertynames = propertynames

    def assign(self, value, env):
        instance = env.lookup(self.objname)
        target = instance
        length = len(self.propertynames)
        for i in range(length - 1):
            target = _get_attribute(target, self.propertynames[i])
        target.set_attr(self.propertynames[length - 1], value)

    def evaluate(self, env):
        instance = env.lookup(self.objname)
        prop = None
        try:
            target = instance
            for prop in self.propertynames:
                target = _get_attribute(target, prop)
            return target
        except:
            raise AttributeNotExistError(prop)


class Subscript:
    def __init__(self, objref, indexexpr):
        self.objref = objref
        self.indexexpr = indexexpr

    def assign(self, value, env):
        obj = self.lookup(env)
        is_str = isinstance(obj, str)
        if is_str:
            # strings are immutable, so item assignment is not allowed
            raise SubscriptNotAllowedError()
        index = env.evaluate(self.indexexpr)
        obj[index] = value

    def evaluate(self, env):
        obj = self.lookup(env)
        index = env.evaluate(self.indexexpr)
        try:
            value = obj[index]
            return value
        except:
            raise ValueNotFoundError()

    def lookup(self, env):
        obj = env.evaluate(self.objref)
        is_list = isinstance(obj, list)
        is_str = isinstance(obj, str)
        is_dict = isinstance(obj, dict)
        if is_list or is_str or is_dict:
            return obj
        else:
            raise SubscriptNotAllowedError()


class BuiltinType:
    def __init__(self):
        self.object = ClassObj('object', None, None)
        self.function = ClassObj('function', self.object, None)
        self.instance_method = ClassObj(
            'instancemethod', self.object, None)
        self.super = ClassObj('super', self.object, None)
        self.builtin_function_or_method = ClassObj(
            'builtin_function_or_method', self.object, None)


builtin_type = BuiltinType()


class BinaryOperation:
    def __init__(self, operator, left, right):
        self.operator = operator
        self.left = left
        self.right = right

    def operate(self, env):
        l = env.evaluate(self.left)
        r = env.evaluate(self.right)
        op = self.operator
        try:
            if op == KEYWORD_ADDITION:
                return l + r
            elif op == KEYWORD_SUBTRACTION:
                return l - r
            elif op == KEYWORD_MULTIPLICATION:
                return l * r
            elif op == KEYWORD_EXPONENTIATION:
                return l ** r
            elif op == KEYWORD_DIVISION:
                return l / r
            elif op == KEYWORD_FLOOR_DIVISION:
                return l // r
            elif op == KEYWORD_REMAINDER:
                return l % r
            elif op == KEYWORD_EQUAL:
                return l == r
            elif op == KEYWORD_NOT_EQUAL:
                return l != r
            elif op == KEYWORD_LESS_THAN:
                return l < r
            elif op == KEYWORD_LESS_THAN_OR_EQUAL:
                return l <= r
            elif op == KEYWORD_GREATER_THAN:
                return l > r
            elif op == KEYWORD_GREATER_THAN_OR_EQUAL:
                return l >= r
            elif op == KEYWORD_AND:
                return l and r
            elif op == KEYWORD_OR:
                return l or r
            elif op == KEYWORD_IS:
                return l is r
            elif op == KEYWORD_IS_NOT:
                return l is not r
            elif op == KEYWORD_IN:
                return l in r
            elif op == KEYWORD_NOT_IN:
                return l not in r
            elif op == KEYWORD_BIT_AND:
                return l & r
            elif op == KEYWORD_BIT_OR:
                return l | r
            elif op == KEYWORD_BIT_XOR:
                return l ^ r
            elif op == KEYWORD_LEFT_SHIFT:
                return l << r
            elif op == KEYWORD_RIGHT_SHIFT:
                return l >> r
            else:
                raise Exception()
        except:
            raise InvalidOperationError()


class UnaryOperation:
    def __init__(self, operator, operand):
        self.operator = operator
        self.operand = operand

    def operate(self, env):
        # evaluate the operand (not the operator keyword) before applying it
        v = env.evaluate(self.operand)
        op = self.operator
        try:
            if op == KEYWORD_NOT:
                return not v
            elif op == KEYWORD_NEGATIVE:
                return -v
            elif op == KEYWORD_BIT_NOT:
                return ~v
            else:
                raise Exception()
        except:
            raise
InvalidOperationError() method_names = { 'append': {}, 'pop': {}, 'insert': {}, 'find': {}, 'replace': {}, 'keys': {} } def _get_attribute(obj, name): is_list = isinstance(obj, list) if is_list: if name == 'append' or name in method_names['append']: def append(args, env): elem = env.evaluate(args[0]) obj.append(elem) builtin_append = BuiltinFuncObj(name, append) return builtin_append elif name == 'pop' or name in method_names['pop']: def pop(args, env): length = len(args) try: if length == 0: value = obj.pop() else: index = env.evaluate(args[0]) value = obj.pop(index) return value except: raise CannotPopFromListError() builtin_pop = BuiltinFuncObj(name, pop) return builtin_pop elif name == 'insert' or name in method_names['insert']: def insert(args, env): index = env.evaluate(args[0]) elem = env.evaluate(args[1]) obj.insert(index, elem) builtin_insert = BuiltinFuncObj(name, insert) return builtin_insert else: raise MethodNotFoundError(name) is_str = isinstance(obj, str) if is_str: if name == 'find' or name in method_names['find']: def find(args, env): substr = env.evaluate(args[0]) result = obj.find(substr) return result builtin_find = BuiltinFuncObj(name, find) return builtin_find elif name == 'replace' or name in method_names['replace']: def replace(args, env): from_str = env.evaluate(args[0]) to_str = env.evaluate(args[1]) new_str = obj.replace(from_str, to_str) return new_str builtin_replace = BuiltinFuncObj(name, replace) return builtin_replace else: raise MethodNotFoundError(name) is_dict = isinstance(obj, dict) if is_dict: if name == 'keys' or name in method_names['keys']: def keys(args, env): keys_list = obj.keys() return keys_list builtin_keys = BuiltinFuncObj(name, keys) return builtin_keys else: raise MethodNotFoundError(name) # ClassObj or Instance object attr = obj.get_attr(name) return attr # Built-in function's name BUILTIN_FUNC_NAME_PRINT = 'print' BUILTIN_FUNC_NAME_DICT = 'dict' BUILTIN_FUNC_NAME_INT = 'int' BUILTIN_FUNC_NAME_LEN = 'len' BUILTIN_FUNC_NAME_LIST = 'list' BUILTIN_FUNC_NAME_STR = 'str' BUILTIN_FUNC_NAME_SUPER = 'super' BUILTIN_FUNC_NAME_HASATTR = 'hasattr' BUILTIN_FUNC_NAME_ISINSTANCE = 'isinstance' BUILTIN_FUNC_NAME_ISSUBCLASS = 'issubclass' class Environment: def __init__(self, codelist): self.code = codelist self.global_context = GlobalScope(None, {}) self.context = self.global_context self.address = Address(1, 0) # (indent, line) self.callstack = [] self.returned_value = None self.blocks = [] self.exception = None self.parser = Parser() self.builtin_print = print def _dict(args, env): return {} builtin_dict = BuiltinFuncObj(BUILTIN_FUNC_NAME_DICT, _dict) self.global_context.register(BUILTIN_FUNC_NAME_DICT, builtin_dict) def _hasattr(args, env): obj = env.evaluate(args[0]) attr_name = env.evaluate(args[1]) has_attr = hasattr(obj, attr_name) return has_attr builtin_hasattr = BuiltinFuncObj( BUILTIN_FUNC_NAME_HASATTR, _hasattr) self.global_context.register( BUILTIN_FUNC_NAME_HASATTR, builtin_hasattr) def _int(args, env): value = env.evaluate(args[0]) try: num = int(value) return num except: raise CannotParseInt() builtin_int = BuiltinFuncObj(BUILTIN_FUNC_NAME_INT, _int) self.global_context.register(BUILTIN_FUNC_NAME_INT, builtin_int) def _isinstance(args, env): instance = env.evaluate(args[0]) classobj = env.evaluate(args[1]) is_instance = isinstance(instance, Instance) is_classobj = isinstance(classobj, ClassObj) if is_classobj and is_instance: currentclass = instance.selfclass while True: if currentclass == None: return False elif currentclass is classobj: 
return True else: currentclass = currentclass.superclass else: is_builtin = isinstance(classobj, BuiltinFuncObj) if is_builtin: builtin_funcobj = classobj if builtin_funcobj.name == BUILTIN_FUNC_NAME_LIST: is_list = isinstance(instance, list) return is_list elif builtin_funcobj.name == BUILTIN_FUNC_NAME_STR: is_str = isinstance(instance, str) return is_str elif builtin_funcobj.name == BUILTIN_FUNC_NAME_DICT: is_dict = isinstance(instance, dict) return is_dict else: return False else: return False builtin_isinstance = BuiltinFuncObj( BUILTIN_FUNC_NAME_ISINSTANCE, _isinstance) self.global_context.register( BUILTIN_FUNC_NAME_ISINSTANCE, builtin_isinstance) def _issubclass(args, env): classtype = env.evaluate(args[0]) superclass = env.evaluate(args[1]) is_subclass = issubclass(classtype, superclass) return is_subclass builtin_issubclass = BuiltinFuncObj( BUILTIN_FUNC_NAME_ISSUBCLASS, _issubclass) self.global_context.register( BUILTIN_FUNC_NAME_ISSUBCLASS, builtin_issubclass) def _len(args, env): value = env.evaluate(args[0]) try: length = len(value) return length except: raise NotIterableError() builtin_len = BuiltinFuncObj(BUILTIN_FUNC_NAME_LEN, _len) self.global_context.register(BUILTIN_FUNC_NAME_LEN, builtin_len) def _list(args, env): iterable = env.evaluate(args[0]) try: new_list = list(iterable) return new_list except: raise NotIterableError() builtin_list = BuiltinFuncObj(BUILTIN_FUNC_NAME_LIST, _list) self.global_context.register(BUILTIN_FUNC_NAME_LIST, builtin_list) def _print(args, env): description = '' length = len(args) count = 0 for arg in args: value = env.evaluate(arg) desc = _get_description(value) description += desc if count < length - 1: description += ' ' self.builtin_print(description) return None builtin_print = BuiltinFuncObj(BUILTIN_FUNC_NAME_PRINT, _print) self.global_context.register(BUILTIN_FUNC_NAME_PRINT, builtin_print) def _str(args, env): value = env.evaluate(args[0]) new_str = str(value) return new_str builtin_str = BuiltinFuncObj(BUILTIN_FUNC_NAME_STR, _str) self.global_context.register(BUILTIN_FUNC_NAME_STR, builtin_str) def _super(args, env): try: classtype = env.evaluate(args[0]) obj = env.evaluate(args[1]) is_classtype = isinstance(classtype, ClassObj) is_instance = isinstance(obj, Instance) if is_classtype and is_instance: s = Super(classtype, obj) return s else: # Caught by except below raise Exception() except: raise InvalidArgumentsForSuperError() builtin_super = BuiltinFuncObj(BUILTIN_FUNC_NAME_SUPER, _super) self.global_context.register(BUILTIN_FUNC_NAME_SUPER, builtin_super) def begin_block(self, block): self.address = _copy_address(block.address) should_begin = block.begin(self) if should_begin: self.shift_indent(1) self.blocks.append(block) def end_block(self): block = self.pop_block() block.end(self) if block.kind == BLOCK_KIND_IFS \ or block.kind == BLOCK_KIND_CLASS_DEF \ or block.kind == BLOCK_KIND_TRY \ or block.kind == BLOCK_KIND_EXCEPT: return False elif block.kind == BLOCK_KIND_IF_ELIF_ELSE \ or block.kind == BLOCK_KIND_FUNC_CALL: return True else: self.begin_block(block) return True def evaluate(self, expr): is_variable = isinstance(expr, Variable) is_attribute = isinstance(expr, Attribute) is_subscript = isinstance(expr, Subscript) if is_variable or is_attribute or is_subscript: evaluated_value = expr.evaluate(self) return evaluated_value is_binop = isinstance(expr, BinaryOperation) is_unaryop = isinstance(expr, UnaryOperation) if is_binop or is_unaryop: evaluated_value = expr.operate(self) return evaluated_value is_list = isinstance(expr, 
list) if is_list: evaluated_list = [] for elem in expr: value = self.evaluate(elem) evaluated_list.append(value) return evaluated_list is_accessible = isinstance(expr, Accessible) if is_accessible: return expr is_dict = isinstance(expr, dict) if is_dict: evaluated_dict = {} for key in expr: evaluated_dict[key] = self.evaluate(expr[key]) return evaluated_dict return expr def jump_to(self, address): self.address = _copy_address(address) def lookup(self, name): current_scope = self.context while True: try: value = current_scope.lookup(name) return value except: current_scope = current_scope.nesting_scope if current_scope == None: raise NameNotFoundError(name) else: continue def pop_block(self): b = self.blocks.pop() return b def pop_stack(self): previous_context = self.callstack.pop() self.context = previous_context def push_stack(self, new_context): self.callstack.append(self.context) self.context = new_context def register(self, name, obj): self.context.register(name, obj) def retrieve_nesting_scope(self): current_scope = self.context while True: is_classscope = isinstance(current_scope, ClassScope) if is_classscope: current_scope = current_scope.nesting_scope continue else: return current_scope def shift_indent(self, delta): self.address.indent += delta def skip_to_next_line(self): next_line_index = self.address.line + 1 while True: next_line = self.code[next_line_index] next_indent = next_line[INDEX_INDENT] delta = next_indent - self.address.indent if delta <= 0: for _ in range(0, delta, -1): is_address_jumped = self.end_block() if is_address_jumped: self.skip_to_next_line() return break next_line_index += 1 self.address.line = next_line_index def step_line(self, delta): self.address.line += delta def switch_context(self, next_context): self.context = next_context def _get_description(value): desc = str(value) return desc # Commands class Assignment: def __init__(self, lhs, rhs): self.lhs = lhs self.rhs = rhs def execute(self, env): value = env.evaluate(self.rhs) self.lhs.assign(value, env) class Ifs: def execute(self, env): def begin(env): return True def end(env): env.shift_indent(-1) block = Block(BLOCK_KIND_IFS, env.address, begin, end) env.begin_block(block) def _execute_conditional_block(env): def begin(env): return True def end(env): env.shift_indent(-2) env.pop_block() block = Block( BLOCK_KIND_IF_ELIF_ELSE, env.address, begin, end) env.begin_block(block) class If: def __init__(self, condition): self.condition = condition def execute(self, env): is_satisfied = env.evaluate(self.condition) if is_satisfied: _execute_conditional_block(env) class Elif: def __init__(self, condition): self.condition = condition def execute(self, env): is_satisfied = env.evaluate(self.condition) if is_satisfied: _execute_conditional_block(env) class Else: def execute(self, env): _execute_conditional_block(env) class LoopCounter: def __init__(self, start, stop, step): self.start = start self.stop = stop self.step = step self.now = None def next(self): if self.step > 0 and self.start >= self.stop \ or self.step < 0 and self.start <= self.stop: return None elif self.now == None: self.now = self.start return self.start else: self.now += self.step if self.step > 0: if self.now >= self.stop: return None else: return self.now elif self.step < 0: if self.now <= self.stop: return None else: return self.now else: return None class ForRange: def __init__(self, varname, start, stop, step): self.varname = varname self.start = start self.stop = stop self.step = step def execute(self, env): stop_value = 
env.evaluate(self.stop) if self.start == None and self.step == None: # eg. for i in range(n): loopcounter = LoopCounter(0, stop_value, 1) elif self.start != None and self.step == None: # eg. for i in range(m, n): start_value = env.evaluate(self.start) loopcounter = LoopCounter(start_value, stop_value, 1) else: # eg. for i in range(a, b, c): start_value = env.evaluate(self.start) step_value = env.evaluate(self.step) loopcounter = LoopCounter(start_value, stop_value, step_value) def begin(env): next_value = loopcounter.next() if next_value != None: env.register(self.varname, next_value) return True else: return False def end(env): pass block = Block( BLOCK_KIND_FOR_RANGE, env.address, begin, end) env.begin_block(block) class ForEach: def __init__(self, elemname, iterable): self.elemname = elemname self.iterable = iterable def execute(self, env): iterableobj = env.evaluate(self.iterable) is_list = isinstance(iterableobj, list) is_str = isinstance(iterableobj, str) is_dict = isinstance(iterableobj, dict) if not (is_list or is_str or is_dict): raise NotIterableError() if not is_dict: length = len(iterableobj) loopcounter = LoopCounter(0, length, 1) def begin(env): next_index = loopcounter.next() if next_index != None: env.register(self.elemname, iterableobj[next_index]) return True else: return False else: keys = iterableobj.keys() length = len(keys) loopcounter = LoopCounter(0, length, 1) def begin(env): next_index = loopcounter.next() if next_index != None: env.register(self.elemname, iterableobj[keys[next_index]]) return True else: return False def end(env): pass block = Block( BLOCK_KIND_FOR_EACH, env.address, begin, end) env.begin_block(block) class While: def __init__(self, condition): self.condition = condition def execute(self, env): def begin(env): condition_value = env.evaluate(self.condition) if condition_value: return True else: return False def end(env): pass block = Block(BLOCK_KIND_WHILE, env.address, begin, end) env.begin_block(block) class Break: def execute(self, env): while True: block = env.pop_block() if block.kind == BLOCK_KIND_IFS \ or block.kind == BLOCK_KIND_IF_ELIF_ELSE \ or block.kind == BLOCK_KIND_TRY \ or block.kind == BLOCK_KIND_EXCEPT: env.shift_indent(-1) continue elif block.kind == BLOCK_KIND_WHILE \ or block.kind == BLOCK_KIND_FOR_RANGE \ or block.kind == BLOCK_KIND_FOR_EACH: env.shift_indent(-1) break else: raise InvalidBreakError() class Continue: def execute(self, env): while True: block = env.pop_block() if block.kind == BLOCK_KIND_IFS \ or block.kind == BLOCK_KIND_IF_ELIF_ELSE \ or block.kind == BLOCK_KIND_TRY \ or block.kind == BLOCK_KIND_EXCEPT: env.shift_indent(-1) continue elif block.kind == BLOCK_KIND_WHILE \ or block.kind == BLOCK_KIND_FOR_RANGE \ or block.kind == BLOCK_KIND_FOR_EACH: env.begin_block(block) break else: raise InvalidContinueError() class FuncDef: def __init__(self, name, params): self.name = name self.params = params def execute(self, env): defined_address = env.address nesting_scope = env.retrieve_nesting_scope() funcobj = FuncObj(self.name, self.params, nesting_scope, defined_address) env.register(self.name, funcobj) def _invoke(calledobj, args, lhs, env): funcobj = calledobj def get_returned_value(env): return env.returned_value is_methodobj = isinstance(calledobj, MethodObj) if is_methodobj: funcobj = calledobj.funcobj if funcobj.name == '__init__': def _get_returned_value(env): return calledobj.instance get_returned_value = _get_returned_value args.insert(0, calledobj.instance) # insert self caller_address = 
_copy_address(env.address) args_dict = {} length = len(args) for i in range(length): paramname = funcobj.params[i] argvalue = env.evaluate(args[i]) args_dict[paramname] = argvalue new_context = FuncScope(funcobj.nesting_scope, args_dict) env.push_stack(new_context) def begin(env): return True def end(env): env.pop_stack() if lhs != None: returned_value = get_returned_value(env) lhs.assign(returned_value, env) env.returned_value = None env.jump_to(caller_address) block = Block(BLOCK_KIND_FUNC_CALL, funcobj.address, begin, end) env.begin_block(block) class Call: def __init__(self, lhs, funcref, args): self.lhs = lhs self.funcref = funcref self.args = args def execute(self, env): calledobj = env.evaluate(self.funcref) is_funcobj = isinstance(calledobj, FuncObj) is_methodobj = isinstance(calledobj, MethodObj) is_builtin = isinstance(calledobj, BuiltinFuncObj) is_classtype = isinstance(calledobj, ClassObj) if is_funcobj or is_methodobj: _invoke(calledobj, self.args, self.lhs, env) elif is_builtin: returned_value = calledobj.body(self.args, env) if self.lhs != None: self.lhs.assign(returned_value, env) elif is_classtype: instance = Instance(calledobj) try: init = instance.get_attr('__init__') _invoke(init, self.args, self.lhs, env) except: self.lhs.assign(instance, env) return # Successful else: raise CannotInvokeFunctionError() class Return: def __init__(self, expression): self.expression = expression def execute(self, env): env.returned_value = env.evaluate(self.expression) while True: try: block = env.pop_block() if block.kind == BLOCK_KIND_FUNC_CALL: block.end(env) return elif block.kind == BLOCK_KIND_IFS \ or block.kind == BLOCK_KIND_IF_ELIF_ELSE \ or block.kind == BLOCK_KIND_TRY \ or block.kind == BLOCK_KIND_EXCEPT \ or block.kind == BLOCK_KIND_WHILE \ or block.kind == BLOCK_KIND_FOR_RANGE \ or block.kind == BLOCK_KIND_FOR_EACH: continue else: raise Exception() except: raise InvalidReturnError() class ClassDef: def __init__(self, classname, superclassname): self.classname = classname self.superclassname = superclassname def execute(self, env): if self.superclassname == None \ or self.superclassname == builtin_type.object.name: superclass = builtin_type.object else: superclass = env.lookup(self.superclassname) is_classtype = isinstance(superclass, ClassObj) if not is_classtype: raise InvalidSuperclassError() def begin(env): nesting_scope = env.retrieve_nesting_scope() new_context = ClassScope(nesting_scope, {}) env.switch_context(new_context) return True current_context = env.context def end(env): attributes = env.context.dictobj env.switch_context(current_context) classtype = ClassObj( self.classname, superclass, attributes) env.register(self.classname, classtype) env.shift_indent(-1) block = Block( BLOCK_KIND_CLASS_DEF, env.address, begin, end) env.begin_block(block) class Comment: def __init__(self, options): self.options = options def execute(self, env): pass # Do nothing class Pass: def execute(self, env): pass # Do nothing class EndOfCode: def execute(self, env): pass # Do nothing class Parser: def __init__(self): t = {} def _assignment(line): lhs = self.parse_ref(line[INDEX_ASSIGNMENT_LHS]) rhs = self.parse_expr(line[INDEX_ASSIGNMENT_RHS]) cmd = Assignment(lhs, rhs) return cmd t[KEYWORD_ASSIGNMENT] = _assignment def _make_compound_assignment(keyword, line): lhs = self.parse_ref(line[INDEX_ASSIGNMENT_LHS]) rhs = self.parse_expr(line[INDEX_ASSIGNMENT_RHS]) op = BinaryOperation(keyword, lhs, rhs) cmd = Assignment(lhs, op) return cmd def _compound_addition(line): cmd = 
_make_compound_assignment(KEYWORD_ADDITION, line) return cmd t[KEYWORD_COMPOUND_ADDITION] = _compound_addition def _compound_subtraction(line): cmd = _make_compound_assignment(KEYWORD_SUBTRACTION, line) return cmd t[KEYWORD_COMPOUND_SUBTRACTION] = _compound_subtraction def _compound_multiplication(line): cmd = _make_compound_assignment(KEYWORD_MULTIPLICATION, line) return cmd t[KEYWORD_MULTIPLICATION] = _compound_multiplication def _ifs(line): cmd = Ifs() return cmd t[KEYWORD_IFS] = _ifs def _if(line): condition = self.parse_expr(line[INDEX_CONDITION]) cmd = If(condition) return cmd t[KEYWORD_IF] = _if def _elif(line): condition = self.parse_expr(line[INDEX_CONDITION]) cmd = Elif(condition) return cmd t[KEYWORD_ELIF] = _elif def _else(line): cmd = Else() return cmd t[KEYWORD_ELSE] = _else def _while(line): condition = self.parse_expr(line[INDEX_CONDITION]) cmd = While(condition) return cmd t[KEYWORD_WHILE] = _while def _for_range(line): varname = line[INDEX_FOR_RANGE_VARIABLE_NAME] values = line[INDEX_FOR_RANGE_VALUES] length = len(values) if length == 1: stop = self.parse_expr(values[0]) cmd = ForRange(varname, None, stop, None) return cmd elif length >= 2: start = self.parse_expr(values[0]) stop = self.parse_expr(values[1]) if length == 2: cmd = ForRange(varname, start, stop, None) return cmd else: step = self.parse_expr(values[2]) cmd = ForRange(varname, start, stop, step) return cmd t[KEYWORD_FOR_RANGE] = _for_range def _for_each(line): elemname = line[INDEX_FOR_EACH_ELEMENT_NAME] iterable = self.parse_expr(line[INDEX_FOR_EACH_ITERABLE_NAME]) cmd = ForEach(elemname, iterable) return cmd t[KEYWORD_FOR_EACH] = _for_each def _break(line): cmd = Break() return cmd t[KEYWORD_BREAK] = _break def _continue(line): cmd = Continue() return cmd t[KEYWORD_CONTINUE] = _continue def _func_def(line): funcname = line[INDEX_FUNC_DEF_FUNC_NAME] params = line[INDEX_FUNC_DEF_PARAMETERS] cmd = FuncDef(funcname, params) return cmd t[KEYWORD_FUNC_DEF] = _func_def def _call(line): lhs = line[INDEX_CALL_LHS] if lhs != None: lhs = self.parse_ref(lhs) calledref = self.parse_ref(line[INDEX_CALL_REFERENCE]) args = self.parse_args(line, INDEX_CALL_ARGS) cmd = Call(lhs, calledref, args) return cmd t[KEYWORD_CALL] = _call def _return(line): length = len(line) if length - 1 < INDEX_RETURN_VALUE: cmd = Return(None) return cmd else: expr = self.parse_expr(line[INDEX_RETURN_VALUE]) cmd = Return(expr) return cmd t[KEYWORD_RETURN] = _return def _class_def(line): classname = line[INDEX_CLASS_DEF_CLASS_NAME] superclassname = line[INDEX_CLASS_DEF_SUPERCLASS_NAME] cmd = ClassDef(classname, superclassname) return cmd t[KEYWORD_CLASS_DEF] = _class_def def _comment(line): options = line[INDEX_OPTIONS] cmd = Comment(options) return cmd t[KEYWORD_COMMENT] = _comment def _pass(line): cmd = Pass() return cmd t[KEYWORD_PASS] = _pass def _end_of_code(line): cmd = EndOfCode() return cmd t[KEYWORD_END_OF_CODE] = _end_of_code self.table = t def parse(self, line): keyword = line[INDEX_KEYWORD] parserfunc = self.table[keyword] command = parserfunc(line) return command def parse_args(self, listobj, index): args_list = listobj[index] parsed_args = [] for elem in args_list: arg = self.parse_expr(elem) parsed_args.append(arg) return parsed_args def parse_expr(self, obj): is_list = isinstance(obj, list) if is_list: is_nested_list = isinstance(obj[0], list) if is_nested_list: parsed_list = [] for elem in obj[0]: value = self.parse_expr(elem) parsed_list.append(value) return parsed_list keyword = obj[INDEX_EXPRESSION_KEYWORD] if keyword == 
KEYWORD_VARIABLE \ or keyword == KEYWORD_ATTRIBUTE \ or keyword == KEYWORD_SUBSCRIPT: ref = self.parse_ref(obj) return ref elif keyword == KEYWORD_NOT \ or keyword == KEYWORD_NEGATIVE \ or keyword == KEYWORD_BIT_NOT: operand = self.parse_expr(obj[INDEX_UNARY_OPERAND]) unary_op = UnaryOperation(keyword, operand) return unary_op else: left = self.parse_expr(obj[INDEX_LEFT_OPERAND]) right = self.parse_expr(obj[INDEX_RIGHT_OPERAND]) bin_op = BinaryOperation(keyword, left, right) return bin_op is_dict = isinstance(obj, dict) if is_dict: parsed_dict = {} for key in obj: parsed_dict[key] = self.parse_expr(obj[key]) return parsed_dict else: return obj def parse_ref(self, listobj): keyword = listobj[INDEX_EXPRESSION_KEYWORD] if keyword == KEYWORD_VARIABLE: name = listobj[INDEX_VARIABLE_NAME] var = Variable(name) return var elif keyword == KEYWORD_ATTRIBUTE: objname = listobj[INDEX_ATTRIBUTE_OBJECT_NAME] propertynames = [] length = len(listobj) for i in range(INDEX_ATTRIBUTE_PROPERTY_NAMES, length): propertynames.append(listobj[i]) attr = Attribute(objname, propertynames) return attr elif keyword == KEYWORD_SUBSCRIPT: objref = self.parse_ref( listobj[INDEX_SUBSCRIPT_REFERENCED_OBJECT]) indexexpr = self.parse_expr( listobj[INDEX_SUBSCRIPT_INDEX_EXPR]) sub = Subscript(objref, indexexpr) return sub class AttributeNotExistError(Exception): def __init__(self, attrname): s = super(AttributeNotExistError, self) s.__init__() self.attrname = attrname class CannotInvokeFunctionError(Exception): pass class CannotParseInt(Exception): pass class CannotPopFromListError(Exception): pass class InconsistentBlockError(Exception): pass class InvalidArgumentsForSuperError(Exception): pass class InvalidBreakError(Exception): pass class InvalidContinueError(Exception): pass class InvalidEndOfCodeError(Exception): pass class InvalidExceptionError(Exception): pass class InvalidOperationError(Exception): pass class InvalidReturnError(Exception): pass class InvalidSuperclassError(Exception): pass class MethodNotFoundError(Exception): def __init__(self, name): s = super(MethodNotFoundError, self) s.__init__() self.name = name class NameNotFoundError(Exception): def __init__(self, name): s = super(NameNotFoundError, self) s.__init__() self.name = name class NotIterableError(Exception): pass class SubscriptNotAllowedError(Exception): pass class SuperCallFailedError(Exception): pass class UnhandledExceptionError(Exception): pass class ValueNotFoundError(Exception): pass ```
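A hand-assembled Calcium program can be run straight through the `Engine` above. The `[indent, options, keyword, ...]` line layout is inferred from the `INDEX_*` constants, so treat this as a sketch rather than the canonical code format:

```python
from calcium import Engine

# x = 40; y = x + 2; print(y)
code = [
    [1, [], "#", "sample"],                                   # leading comment line
    [1, [], "=", ["var", "x"], 40],                           # x = 40
    [1, [], "=", ["var", "y"], ["+", ["var", "x"], 2]],       # y = x + 2
    [1, [], "call", None, ["var", "print"], [["var", "y"]]],  # print(y)
    [1, [], "end"],
]

engine = Engine(code)
engine.run()  # prints: 42
```

The `call` line passes `None` as the left-hand side because `print`'s return value is discarded; a variable reference like `["var", "result"]` in that slot would capture it instead.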
{ "source": "0xcaffebabe/distributed-message-push-system", "score": 3 }
#### File: push-clients/push-client-python/bio_thread_io_manager.py
```python
import socket
from bio_client import *
from socket_channel import *
import time
import threading


class BioThreadIoManager:
    def __init__(self, s: socket.socket, client):
        self.client = client
        self.socketChannel = SocketChannel(s)
        self.running = False

    def send(self, msg):
        self.socketChannel.writeAndFlush(msg)

    def close(self):
        self.running = False
        self.socketChannel.close()

    def startThread(self):
        self.running = True
        self.heartbeatThread = threading.Thread(target=self.__heartbeat__)
        self.ioloopThread = threading.Thread(target=self.__ioloop__)
        self.heartbeatThread.start()
        self.ioloopThread.start()

    def __heartbeat__(self):
        while self.running:
            try:
                self.send('heartbeat-' + self.client.userId)
            except Exception:
                print('failed to send heartbeat')
            time.sleep(10)
        print('heartbeat thread stopped!')
        self.close()

    def __ioloop__(self):
        while self.running:
            try:
                line = self.socketChannel.readLine()
                self.client.onMessage(line)
            except Exception:
                print('connection error, reconnecting in 5s')
                time.sleep(5)
                self.client.reconnect()
                break
        print("ioloop thread stopped!")
        self.close()
```
#### File: push-clients/push-client-python/client.py
```python
class Client:
    def __init__(self, messageHandler):
        self.messageHandler = messageHandler

    def onMessage(self, msg):
        if self.messageHandler is not None:
            self.messageHandler.handle(msg)
```
#### File: push-clients/push-client-python/connector.py
```python
from http_template import *
import requests


class Connector:
    def __init__(self, lookup_address):
        self.lookup_address = lookup_address
        self.host = ''
        self.port = 0

    def lookupConnector(self):
        response = requests.get(self.lookup_address).text
        splitResult = response.split(':')
        if len(splitResult) != 2:
            raise IOError('failed to look up connector')
        self.host = splitResult[0]
        self.port = int(splitResult[1])

    def isAvailable(self):
        return self.host != '' and self.port > 0
```
#### File: push-client-python/test/connector_test.py
```python
from connector import *
import unittest


class ConnectorTest(unittest.TestCase):
    def test_is_available_expect_false(self):
        # the lookup address is a placeholder; the test only checks the
        # initial (unresolved) state of the connector
        connector = Connector('http://localhost/lookup')
        self.assertEqual(False, connector.isAvailable())
```
#### File: push-client-python/test/http_template_test.py
```python
import sys
sys.path.append("..")
from http_template import *
import unittest
import mock


class HttpTemplateTest(unittest.TestCase):
    @mock.patch('http_template.requests')
    def test_get(self, requests):
        requests.get.return_value = self
        self.text = 'test html'
        t = HttpTemplate()
        result = t.get('http://baidu.com')
        requests.get.assert_called_with('http://baidu.com')
        self.assertEqual('test html', result)
    ...
```
{ "source": "0xcaff/nos-stock", "score": 3 }
#### File: nos-stock/infer/infer.py ```python import time from sys import argv import cv2 import tensorflow as tf MODEL_PATH = argv[1] LABELS_PATH = argv[2] VIDEO_PATH = argv[3] LABEL_LINES = [line.rstrip() for line in tf.gfile.GFile(LABELS_PATH)] def main(_): # Import Model with tf.gfile.FastGFile(MODEL_PATH, 'rb') as model_file: graph_def = tf.GraphDef() graph_def.ParseFromString(model_file.read()) _ = tf.import_graph_def(graph_def, name='') # Initialize Session sess = tf.Session() # Setup Network output_tensor = sess.graph.get_tensor_by_name('final_result:0') input_tensor = sess.graph.get_tensor_by_name("Cast:0") # Initialize Video Processing capture = cv2.VideoCapture(VIDEO_PATH) frame_number = 0 while capture.isOpened(): start_time = time.perf_counter() # Read Frame ret, frame = capture.read() frame_number = frame_number + 1 if not ret: print("Failed to read file.") break print("Frame Number: {}".format(frame_number)) print(frame) # Infer Frame predictions = sess.run(output_tensor, {input_tensor: frame}) # Print Prediction prediction = predictions[0] for idx, label in enumerate(LABEL_LINES): print("{}: {:.2f}".format(label, prediction[idx])) end_time = time.perf_counter() print("Took {:.2f} seconds".format(end_time - start_time)) print("") if frame_number == 0: print("Failed to read file.") capture.release() sess.close() if __name__ == "__main__": tf.app.run(main=main, argv=[argv[0]] + argv[2:]) ```
{ "source": "0xcan/myers-briggs-tweet-classification", "score": 3 }
#### File: 0xcan/myers-briggs-tweet-classification/test_utils.py ```python import pickle from nltk.corpus import stopwords from nltk.stem import WordNetLemmatizer import contractions import re from nltk.tokenize import word_tokenize import pandas as pd import snscrape.modules.twitter as sntwitter def get_tweets(username): tweets = [] for i, tweet in enumerate(sntwitter.TwitterSearchScraper(f'from:{username}').get_items()): tweets.append(tweet.content) if i == 50: break return tweets def load_files(): try: with open("saved-models/RandomForest_E-I.sav", "rb") as file: ei_classifier = pickle.load(file) with open("saved-models/RandomForest_N-S.sav", "rb") as file: ns_classifier = pickle.load(file) with open("saved-models/SVM_F-T.sav", "rb") as file: ft_classifier = pickle.load(file) with open("saved-models/Xgboost_J-P.sav", "rb") as file: jp_classifier = pickle.load(file) except FileNotFoundError: print("Model not found!") try: with open("vectorizer/vectorizer.pkl", "rb") as file: vectorizer = pickle.load(file) except FileNotFoundError: print("Tokenizer not found!") return ei_classifier, ns_classifier, ft_classifier, jp_classifier, vectorizer def preprocessing(text): stopword_list = stopwords.words("english") lemmatizer = WordNetLemmatizer() text = contractions.fix(text) text = text.lower() text = re.sub(r'@([a-zA-Z0-9_]{1,50})', '', text) text = re.sub(r'#([a-zA-Z0-9_]{1,50})', '', text) text = re.sub(r'http[s]?://\S+', '', text) text = re.sub(r'[^A-Za-z0-9]+', ' ', text) text = re.sub(r' +', ' ', text) text = " ".join([word for word in text.split() if not len(word) <3]) text = word_tokenize(text) text = [word for word in text if not word in stopword_list] text = [lemmatizer.lemmatize(word) for word in text] text = " ".join(text) return text def get_prediction(username): ei_classifier, ns_classifier, ft_classifier, jp_classifier, vectorizer = load_files() tweets = get_tweets(username) text = " ".join(tweets) text = preprocessing(text) text = vectorizer.transform([text]) prediction = "" e_or_i = "E" if ei_classifier.predict(text)[0] == 1 else "I" n_or_s = "N" if ns_classifier.predict(text)[0] == 1 else "S" f_or_t = "F" if ft_classifier.predict(text)[0] == 1 else "T" j_or_p = "J" if jp_classifier.predict(text)[0] == 1 else "P" prediction = e_or_i + n_or_s + f_or_t + j_or_p return prediction, tweets ```
{ "source": "0xcf/decal-util", "score": 2 }
#### File: 0xcf/decal-util/fabfile.py
```python
import configparser
import crypt
import os
import random
import string
from textwrap import dedent

from fabric.api import env
from fabric.api import execute
from fabric.api import parallel
from fabric.api import run
from fabric.api import settings
from fabric.api import task
from ocflib.infra.db import get_connection
from ocflib.misc.mail import send_mail

env.use_ssh_config = True

MYSQL_DEFAULT_CONFIG = 'mysql.conf'
PW_LENGTH = 16
CURRENT_SEMESTER_FKID = '4'


def _db():
    conf = configparser.ConfigParser()
    if os.path.exists('mysql.conf'):
        conf.read('mysql.conf')
    else:
        conf.read(MYSQL_DEFAULT_CONFIG)
    return get_connection(
        user=conf.get('mysql', 'user'),
        password=conf.get('mysql', 'password'),
        db=conf.get('mysql', 'db'),
    )


def _get_students(track):
    assert track in ('basic', 'advanced'), 'invalid track: %s' % track
    with _db() as c:
        c.execute('SELECT `username` FROM `students` WHERE `track` = %s AND `semester` = %s ORDER BY `username`',
                  (track, CURRENT_SEMESTER_FKID,))
        for i in c.fetchall():
            yield i['username']


def _fqdnify(users):
    for user in users:
        yield '{}.decal.xcf.sh'.format(user)


def restart():
    return run('reboot now')


@task
def powercycle(group):
    hosts = _fqdnify(_get_students(group))
    with settings(user='root'):
        execute(restart, hosts=hosts)


@parallel
def hostname():
    return run('hostname')


@task
def list(group):
    hosts = _fqdnify(_get_students(group))
    with settings(user='root', key_filename='~/.ssh/id_decal'):
        execute(hostname, hosts=hosts)


def create_user():
    username = env.host.split('.')[0]

    # Generate a random temporary password to be emailed out to each student
    rand = random.SystemRandom()
    password = ''.join(rand.choice(string.ascii_letters + string.digits) for _ in range(PW_LENGTH))

    # Create a new user account in the sudo group so they have root access
    run('useradd -m -g sudo -s /bin/bash {}'.format(username))

    # Set their password to the temporary password previously generated
    run("echo '{}:{}' | chpasswd -e".format(username, crypt.crypt(password)))

    # Set password expiration for the user so that they have to change their
    # password immediately upon login
    run('chage -d 0 {}'.format(username))

    # make sure password authentication is on so students can log in
    # it appears it gets turned off now if you supply a root ssh key
    run("sed -i '/^PasswordAuthentication no/d' /etc/ssh/sshd_config && systemctl reload ssh")

    # Send an email out to the user with their new password
    message = dedent("""
        Hello {username},

        We have created a virtual machine for you for the Linux SysAdmin
        DeCal! You should be able to connect to it at {hostname} by running
        'ssh {username}@{hostname}' and entering your temporary password
        {password}. You should see a prompt to change your temporary password
        to something more secure after your first login.

        Let us know if you have any questions or issues,
        DeCal Staff
    """).strip()

    send_mail(
        '<EMAIL>',  # recipient address redacted in the source
        '[Linux SysAdmin DeCal] Virtual Machine Information',
        message.format(
            username=username,
            hostname=env.host,
            password=password,
        ),
        cc='<EMAIL>',
        sender='<EMAIL>',
    )


@task
def create_users(group):
    hosts = _fqdnify(_get_students(group))
    with settings(user='root'):
        execute(create_user, hosts=hosts)


def enable_ssh_password_auth():
    # turn on password authentication again
    run("sed -i '/^PasswordAuthentication no/d' /etc/ssh/sshd_config")
    run('systemctl reload ssh')


@task
def fix_ssh_password_auth(group):
    hosts = _fqdnify(_get_students(group))
    with settings(user='root'):
        execute(enable_ssh_password_auth, hosts=hosts)
```
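`create_user` builds the temporary password with `random.SystemRandom`. On Python 3.6+ the stdlib `secrets` module expresses the same intent more directly; a sketch, reusing the alphabet and `PW_LENGTH` from the task above:

```python
import secrets
import string

PW_LENGTH = 16

def temp_password(length: int = PW_LENGTH) -> str:
    # secrets draws from the OS CSPRNG, like random.SystemRandom,
    # but makes the security intent explicit
    alphabet = string.ascii_letters + string.digits
    return ''.join(secrets.choice(alphabet) for _ in range(length))
```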
{ "source": "0xcharly/qmk_cli", "score": 3 }
#### File: qmk_cli/subcommands/env.py ```python import os from pathlib import Path from milc import cli from qmk_cli.helpers import is_qmk_firmware @cli.argument('var', default=None, nargs='?', help='Optional variable to query') @cli.subcommand('Prints environment information.') def env(cli): home = os.environ.get('QMK_HOME', "") data = { 'QMK_HOME': home, 'QMK_FIRMWARE': home if is_qmk_firmware(Path(home)) else "" } # Now munge the current cli config for key, val in cli.config.general.items(): converted_key = 'QMK_' + key.upper() data[converted_key] = val if cli.args.var: # dump out requested arg print(data[cli.args.var]) else: # dump out everything for key, val in data.items(): print(f'{key}="{val}"') ```
{ "source": "0xCh-Devil/shodanfy", "score": 2 }
#### File: 0xCh-Devil/shodanfy/shodanfy.py
```python
import requests
from lxml import html
import sys
import re


def getContentFromShodan(ip: str, proxy: str) -> requests.Response:
    try:
        # route non-Shodan targets (the ipinfo.io self-check) straight to
        # the given URL; this test keys off the target, not the proxy
        if "ipinfo" in ip:
            return requests.get(
                '{}'.format(ip),
                headers={'User-Agent': 'Mozilla/5.0'},
                proxies={
                    'http': proxy,
                    'https': proxy,
                }
            )
        return requests.get(
            'https://www.shodan.io/host/{}'.format(ip),
            headers={'User-Agent': 'Mozilla/5.0'},
            proxies={
                'http': proxy,
                'https': proxy,
            }
        )
    except Exception as e:
        sys.exit(print(e))


def main(args: dict) -> None:
    ip = args.get('ip')
    v = {}
    d = []
    proxy = args.get('proxy')
    socks5 = args.get('socks5')
    # only use a socks5 proxy when one was actually supplied, so an empty
    # "socks5://" prefix never clobbers a configured plain proxy
    if socks5:
        proxy = "socks5://{}".format(socks5)
    getports = bool(args.get('getports'))
    getinfo = bool(args.get('getinfo'))
    getvuln = bool(args.get('getvulns'))
    getmoreinfo = bool(args.get('getmoreinfo'))
    getbanner = bool(args.get('getbanner'))
    getall = not any([getports, getinfo, getvuln, getmoreinfo, getbanner])
    if getall:
        getports = getinfo = getvuln = getmoreinfo = getbanner = True
    # ---
    r = getContentFromShodan(ip, proxy=proxy)
    r2 = getContentFromShodan('http://ipinfo.io', proxy=proxy)
    ip_candidates = re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", r2.text)
    if ip_candidates:
        print("[*] YOUR IP: " + ip_candidates[0])
    if r.status_code == 200:
        tree = html.fromstring(r.content)
        if tree.xpath('//ul[@class="ports"]/li/a/text()') != []:
            if getports:
                print('[+] Get ports.. ')
                for p in tree.xpath('//ul[@class="ports"]/li/a/text()'):
                    print('\t' + p)
        if tree.xpath('//table[@class="table"]/tbody/tr/td/text()') != []:
            co = tree.xpath('//table[@class="table"]/tbody/tr/td/text()')
            ro = tree.xpath('//table[@class="table"]/tbody/tr/th/text()')
            di = dict(zip(co, ro))
            if getinfo:
                print('[+] Get info...')
            # the loop always runs so that v (the CVE map) is populated
            # even when basic info printing is disabled
            for i in di.items():
                if 'CVE-' in str(i[1]):
                    v[i[1]] = i[0]
                else:
                    if getinfo:
                        print('\t' + i[0] + ' -> ' + i[1])
        if v != {}:
            if getvuln:
                print('[+] Get vulns...')
                print('-' * 40)
                for i in v.items():
                    print('CVE: ' + i[0])
                    print('Description:\n' + i[1])
                    print('-' * 40)
        if tree.xpath('//ul[@class="services"]//div[@class="service-details"]'):
            title = []
            banner = []
            port = tree.xpath('//ul[@class="services"]//div[@class="service-details"]//div[@class="port"]/text()')
            proto = tree.xpath('//ul[@class="services"]//div[@class="service-details"]//div[@class="protocol"]/text()')
            state = tree.xpath('//ul[@class="services"]//div[@class="service-details"]//div[@class="state"]/text()')
            if tree.xpath('//ul[@class="services"]//div[@class="service-main"]'):
                title = tree.xpath('//ul[@class="services"]//div[@class="service-main"]/h3/text()')
                if tree.xpath('//ul[@class="services"]//div[@class="service-main"]/h3/small'):
                    versions = tree.xpath('//ul[@class="services"]//div[@class="service-main"]/h3/small/text()')
                if tree.xpath('//ul[@class="services"]//div[@class="service-main"]/pre'):
                    banner = tree.xpath('//ul[@class="services"]//div[@class="service-main"]/pre/text()')
            for i in range(len(port)):
                po = port[i]
                pr = proto[i]
                st = state[i]
                try:
                    vv = versions[i]
                except (IndexError, NameError):
                    vv = "None"
                try:
                    tt = title[i]
                except IndexError:
                    tt = "None"
                bb = banner[i]
                d.append((po, pr, st, tt, vv, bb))
            # proto
            if getbanner or getmoreinfo:
                if getbanner:
                    print('[+] Get banner..')
                else:
                    print('[+] Get moreinfo..')
                for i in d:
                    if getmoreinfo:
                        print('\nPort: %s/%s\t%s\t%s (%s) ' % (i[0], i[1], i[2], i[3], i[4]))
                    if getbanner:
                        print('Banner:\n%s' % i[5])
    else:
        print('No information found for this ip..')


args = {
    'getports': False,
    'getbanner': False,
    'getvulns': False,
    'getinfo': False,
    'getmoreinfo': False,
    'getall': True,
    'ip': "",
    'stdin': False,
    'proxy': "",
    'socks5': ""
}

if __name__ == "__main__":
    if len(sys.argv) == 1:
        print('Usage: python3 shodanfy.py <ip> [OPTIONS]\n')
        print('\t--stdin\t\tGet ips from stdin (required)')
        print('\t--proxy\t\tSet proxy (host:port)')
        print('\t--socks5\t\tSet proxy socks5 (host:port)')
        print('\t--getall\tGet all informations,vulns,.. (Default)')
        print('\t--getvuln\tGet vulnerabilities for this ip (CVEs)')
        print('\t--getinfo\tGet basic info (hostname,ports,country..)')
        print('\t--getmoreinfo\tGet more info (port,protocol,state,version..)')
        print('\t--getports\tGet all ip ports..')
        print('\nCoded by @m4ll0k (github.com/m4ll0k)\n')
        sys.exit(0)

    # dotted-quad matcher: four 1-3 digit octets separated by literal dots
    IP_PATTERN = r'\d{1,3}(?:\.\d{1,3}){3}'

    for arg in sys.argv:
        if arg == '--getvuln':
            args['getvulns'] = True
        if arg == '--getinfo':
            args['getinfo'] = True
        if arg == '--getbanner':
            args['getbanner'] = True
        if arg == '--getmoreinfo':
            args['getmoreinfo'] = True
        if arg == '--proxy':
            proxy = sys.argv[sys.argv.index('--proxy') + 1]
            if '--' in proxy:
                sys.exit(print('Please check your proxy, (host:port)'))
            args.update({"proxy": str(proxy)})
        if arg == '--socks5':
            socks5 = sys.argv[sys.argv.index('--socks5') + 1]
            if '--' in socks5:
                sys.exit(print('Please check your proxy socks5, (host:port)'))
            args.update({"socks5": str(socks5)})
        if arg == '--getports':
            args['getports'] = True
        if arg == '--stdin':
            args['stdin'] = True
        if re.search(IP_PATTERN, arg):
            args['ip'] = re.search(IP_PATTERN, arg).group(0)
            # ip checker
            for i in args['ip'].split('.'):
                if int(i) > 255:
                    sys.exit(print('Wrong ip!'))
            if len(args['ip'].split('.')) != 4:
                sys.exit(print('Wrong ip!'))

    if args['ip'] == "" and args['stdin'] == False:
        sys.exit(print('Wrong ip or ip not specified!!'))
    else:
        if args['stdin']:
            for i in sys.stdin.readlines():
                i = i.strip()
                if i != "":
                    print('[+] IP: %s' % i)
                    if re.search(IP_PATTERN, i):
                        args['ip'] = re.search(IP_PATTERN, i).group(0)
                    if args['ip'] == "":
                        sys.exit(print('Wrong ip or ip not specified!!'))
                    main(args)
        else:
            print("[+] IP: %s" % args['ip'])
            main(args)
```
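The manual octet loop above can be delegated to the standard library. A small sketch using `ipaddress`, which also rejects wrong field counts and out-of-range octets in one call:

```python
import ipaddress

def is_valid_ipv4(candidate: str) -> bool:
    # ValueError covers out-of-range octets, wrong field counts, etc.
    try:
        ipaddress.IPv4Address(candidate)
        return True
    except ValueError:
        return False

assert is_valid_ipv4("8.8.8.8")
assert not is_valid_ipv4("999.1.1.1")
```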
{ "source": "0xChief/staketaxcsv", "score": 2 }
#### File: terra/col4/handle_mirror_borrow.py ```python from common.make_tx import ( make_borrow_tx, make_deposit_collateral_tx, make_liquidate_tx, make_repay_tx, make_swap_tx, make_withdraw_collateral_tx ) from terra import util_terra from terra.constants import CUR_UST def handle_deposit_borrow(exporter, elem, txinfo): # Query data = elem from_contract = data["logs"][0]["events_by_type"]["from_contract"] # Extract deposit collateral_amount_string = from_contract["collateral_amount"][0] deposit_amount, deposit_currency = util_terra._amount(collateral_amount_string) # Extract borrow mint_amount_string = from_contract["mint_amount"][0] borrow_amount, borrow_currency = util_terra._amount(mint_amount_string) row = make_deposit_collateral_tx(txinfo, deposit_amount, deposit_currency, z_index=0) exporter.ingest_row(row) row = make_borrow_tx(txinfo, borrow_amount, borrow_currency, empty_fee=True, z_index=1) exporter.ingest_row(row) try: if(from_contract["is_short"][0] == 'true'): short_amount_string = data["logs"][0]["events_by_type"]["from_contract"]["return_amount"][0] short_amount = util_terra._float_amount(short_amount_string, CUR_UST) row = make_swap_tx(txinfo, borrow_amount, borrow_currency, short_amount, CUR_UST) exporter.ingest_row(row) except Exception: pass def handle_repay_withdraw(exporter, elem, txinfo): # Query data = elem # Extract repay from_contract = data["logs"][0]["events_by_type"]["from_contract"] burn_amount_string = from_contract["burn_amount"][0] repay_amount, repay_currency = util_terra._amount(burn_amount_string) row = make_repay_tx(txinfo, repay_amount, repay_currency, z_index=0) exporter.ingest_row(row) if len(data["logs"]) > 1: # Extract withdraw from_contract = data["logs"][1]["events_by_type"]["from_contract"] withdraw_amount_string = from_contract["withdraw_amount"][0] withdraw_amount, withdraw_currency = util_terra._amount(withdraw_amount_string) row = make_withdraw_collateral_tx(txinfo, withdraw_amount, withdraw_currency, empty_fee=True, z_index=1) exporter.ingest_row(row) def handle_auction(exporter, elem, txinfo): # Query data = elem # Extract auction from_contract = data["logs"][0]["events_by_type"]["from_contract"] liquidated_amount_string = from_contract["liquidated_amount"][0] liquidated_amount, liquidated_currency = util_terra._amount(liquidated_amount_string) # Extract withdraw collateral_amount_string = from_contract["return_collateral_amount"][0] collateral_amount, collateral_currency = util_terra._amount(collateral_amount_string) row = make_liquidate_tx(txinfo, liquidated_amount, liquidated_currency, collateral_amount, collateral_currency) exporter.ingest_row(row) ```
{ "source": "0xChromic/CHROMIC", "score": 3 }
#### File: 0xChromic/CHROMIC/sql_queries.py
```python
import mysql.connector as conn

# geckos is a list of token public keys. None if it is not used.


class Helper(object):
    def __init__(self, conn_obj, db_name):
        self.cursor = conn_obj.cursor()
        self.conn = conn_obj
        self.cursor.execute("USE {}".format(db_name))

    def close(self):
        self.cursor.close()
        self.conn.close()

    def getTokenByQrId(self, qr_id):
        _query = "SELECT * FROM `prodtable` WHERE qr_id='{0}'".format(qr_id)
        self.cursor.execute(_query)
        print(self.cursor.rowcount)
        token_id, qr_id, ipfs_link, is_minted, owner_address, is_being_minted, tx_id = self.cursor.fetchall()[0]
        return (token_id, qr_id, ipfs_link, is_minted, owner_address, is_being_minted, tx_id)

    def mintTokenByQrID(self, qr_id, owner_address, tx_hash):
        _queryMINTED = "UPDATE `prodtable` SET `is_being_minted`='TRUE',`owner_address`='{0}',`tx_id`='{2}' WHERE `qr_id`='{1}'".format(owner_address, qr_id, tx_hash)
        # _queryOWNER = "UPDATE FROM `prodtable` SET `owner_address`='{0}' WHERE `qr_id`='{1}'".format(owner_address, qr_id)
        self.cursor.execute(_queryMINTED)
        # self.cursor.execute(_queryOWNER)
        self.conn.commit()

    def mintToken(self, qr_id):
        _queryMINTED = "UPDATE `prodtable` SET `is_minted`='TRUE' WHERE `qr_id`='{0}'".format(qr_id)
        # _queryOWNER = "UPDATE FROM `prodtable` SET `owner_address`='{0}' WHERE `qr_id`='{1}'".format(owner_address, qr_id)
        self.cursor.execute(_queryMINTED)
        # self.cursor.execute(_queryOWNER)
        self.conn.commit()

    def setTxHashByQrID(self, qr_id, tx_hash):
        _queryTx = "UPDATE `prodtable` SET `tx_id`='{0}' WHERE `qr_id`='{1}'".format(tx_hash, qr_id)
        self.cursor.execute(_queryTx)
        self.conn.commit()
```
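The queries above splice `qr_id`, `owner_address`, and `tx_hash` into SQL with `str.format`, which breaks (or injects) on any value containing a quote. mysql-connector accepts `%s` placeholders, so the same update can let the driver do the escaping; a sketch of that style, not the project's code:

```python
def mint_token_by_qr_id(cursor, connection, qr_id, owner_address, tx_hash):
    # %s placeholders are filled by the driver, so values are escaped
    # instead of being interpolated into the SQL string
    query = ("UPDATE `prodtable` "
             "SET `is_being_minted`='TRUE', `owner_address`=%s, `tx_id`=%s "
             "WHERE `qr_id`=%s")
    cursor.execute(query, (owner_address, tx_hash, qr_id))
    connection.commit()
```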
{ "source": "0xcite/fingerping", "score": 3 }
#### File: 0xcite/fingerping/fingerping.py ```python import sys import os.path from xpng import Xpng from tests import Tests from fingerprints import Fingerprints class Fingerping: def __init__(self): self.all_tests = sorted(Tests.all_tests, key=lambda test: test.name) self.all_fingerprints = Fingerprints.all_fingerprints def do_tests(self, get_image_cont_func, warn): "Test all the images in a directory (don't print warnings when generating fingerprints)" results = {} fingerprintScores = {} # Initialite the count of matching tests to zero for each fingerprint for fingerprint in self.all_fingerprints: fingerprintScores[fingerprint.name] = 0 # Execute each test for test in self.all_tests: content = get_image_cont_func(test.filename) image = Xpng(content) if not image.valid == 0: # Only execute the test if there is an image to test result = test.function(image) else: result = 0 # Save the result of the test results[test.name] = result # Check if the result matches some of the fingeprints and if so, increment the match counter for fingerprint in self.all_fingerprints: if not test.name in fingerprint.results: # warn if a fingerprint is missing the result for the test being run if warn: print "warning, missing key", test.name, "in", fingerprint.name elif fingerprint.results[test.name] == result: fingerprintScores[fingerprint.name] += 1 return results, fingerprintScores def generate_csv(self): 'Generate a csv table with all the test results for each fingerprint (which you can then import in LibreOffice or whatever)' header = "/" for test in self.all_tests: header = header + "\t" + test.name print header for fingerprint in self.all_fingerprints: row = fingerprint.name for test in self.all_tests: if not test.name in fingerprint.results: row += "\t\"\"" else: row += "\t" + str(fingerprint.results[test.name]) print row def show_results(self, scores): 'Show the fingerprinting result with the most likely library match at the bottom' nb = len(self.all_tests) ordered = sorted(scores.iteritems(), key=lambda x: x[1]) for result in ordered: print '{:20s} {:3d}/{:3d}'.format(result[0], result[1], nb) if __name__ == "__main__": 'Means this script is directly executed with "python fingerping.py"' f = Fingerping() def read_image(file_name): 'reads the image in memory from file' file_name = directory + file_name + ".png" if os.path.exists(file_name): with open(file_name, 'rb') as f: try: return f.read() except: pass # TODO: replace with argparse def check_command_line(line): 'Check if the command line has valid options' if len(line) == 3: if not line[1] == "-gen": return False else: return True if len(line) == 2: if (line[1][0] == "-") and not (line[1] == "-csv"): return False return True return False if not check_command_line(sys.argv): print "usage:" print "" print "fingerping.py path # Matches the images in the path folder with the fingerprint of known PNG libraries" print "fingerping.py -gen path # Generates a new library fingerprint from the images in the path folder" print "fingerping.py -csv # prints all the known fingerprints as a CSV table" sys.exit(0) # Generate a csv output with all the test results for each library fingerprint known to the tool if sys.argv[1] == "-csv": f.generate_csv() sys.exit(0) # last command line argument is the directory with all the images to use in a fingerprint test directory = sys.argv[len(sys.argv) - 1] + "/" warn = sys.argv[1] != "-gen" results, fingerprintScores = f.do_tests(read_image, warn) # If the -gen parameter is given on the command line, don't give the 
    # fingerprinting results, but instead generate a new fingerprint
    if sys.argv[1] == "-gen":
        print results
    else:
        f.show_results(fingerprintScores)
```
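The `# TODO: replace with argparse` note above invites a sketch. The flag names below mirror the existing CLI, but the wiring is an assumption, not the project's code:

```python
import argparse

def build_parser():
    parser = argparse.ArgumentParser(description="fingerprint PNG libraries")
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-gen", action="store_true",
                       help="generate a new fingerprint from the images")
    group.add_argument("-csv", action="store_true",
                       help="print known fingerprints as a CSV table")
    parser.add_argument("path", nargs="?",
                        help="folder with the test images")
    return parser
```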
{ "source": "0xCleetusEth/Web3.Py_Proto", "score": 2 }
#### File: Web3.Py_Proto/environment/getenv.py
```python
from dotenv import load_dotenv
import os

load_dotenv()


def publicKey():
    farmAccountPublic = os.getenv('ACCOUNT_PUBLIC')
    return farmAccountPublic


def privateKey():
    farmAccountPrivate = os.getenv('ACCOUNT_PRIVATE')
    return farmAccountPrivate


def ethereum_API():
    API = os.getenv('ETH_API')
    return API


def ftm_API():
    API = os.getenv('FTM_API')
    return API


def matic_API():
    API = os.getenv('MATIC_API')
    return API
```
{ "source": "0xCN/workaholic", "score": 2 }
#### File: 0xCN/workaholic/main.py ```python from threading import Thread, Event from gevent.pywsgi import WSGIServer import os from server import app from data import db import atexit import shelve import datetime import httpagentparser import colorama from utils import ( exit_handler, watcher, clear, invalid, cursor, help_message, info, play_audio, ) colorama.init() colors = [ colorama.Fore.LIGHTBLUE_EX, colorama.Fore.GREEN, colorama.Fore.LIGHTCYAN_EX, ] threads = [] # on program end atexit.register(exit_handler) # on program start with shelve.open('saved/state') as state: if {"status", "datetime"} <= state.keys(): db.internal_data["status"] = state["status"] if state["status"] == "work": play_audio() db.internal_data["datetime"] = datetime.datetime.now() else: db.internal_data["datetime"] = state["datetime"] def main(): clear() while True: cursor() x = input() if x.lower() == 'exit': clear() exit_handler() os._exit(1) break options(x) def set_command(options=[]): if len(options) <= 1: print("\nError: there must be over 2 set args\n") elif options[0] == "command": db.data["command"] = options[1] def set_status(options=[]): current = db.internal_data["status"] statuses = db.config["breaks"].keys() if options[0] not in statuses or len(options) < 1: print("\nInvalid status, list of statuses:") for i in statuses: print( "{blue}-{green} {stat}".format(blue=colors[0], green=colors[1], stat=i) ) print(colors[2]) elif db.config["breaks"][options[0]]["limit"] < 1: print( "\n{blue}{stat}{normal} limit reached {green}0{normal}, you are not allowed \n".format( stat=options[0], blue=colors[0], green=colors[1], normal=colors[2] ) ) elif current == options[0]: print("your current status is already: " + current + "\n") else: db.internal_data["datetime"] = datetime.datetime.now() db.internal_data["status"] = options[0] db.config["breaks"][options[0]]["limit"] -= 1 print("\nstatus set to: " + colors[0] + options[0] + colors[2]) print( "new-limit: {green}{limit}{normal}, time: {green}{time}{normal} minutes\n".format( limit=db.config["breaks"][options[0]]["limit"], time=db.config["breaks"][options[0]]["minutes"], green=colors[1], normal=colors[2], ) ) def get_clients(options=[]): uas = db.internal_data["userAgents"] print( "\n{green}Info:{normal} you have {blue}{n}{normal} client connected\n".format( green=colors[1], normal=colors[2], blue=colors[0], n=len(uas) ) ) for i in range(0, len(uas)): info = httpagentparser.simple_detect(uas[i]) print( "{blue}{num}{normal}: [{green}{os}{normal}] {blue}{browser}{normal}".format( num=i + 1, os=info[0], browser=info[1], green=colors[1], normal=colors[2], blue=colors[0], ) ) print() def options(op): options = op.split() option = options[0] switcher = { "clear": clear, "set": set_status, "clients": get_clients, "help": help_message, "info": info, } try: switcher.get(option.lower(), invalid)(options[1:]) except Exception as e: invalid() print(e) if __name__ == "__main__": try: # running the main and watcher loops in different threads # so they don't interfere with our web server Thread(target=main, daemon=True).start() Thread(target=watcher, daemon=True).start() # server in production mode with gevent http_server = WSGIServer( (db.config["host"], db.config["port"]), app, log=app.logger ) http_server.serve_forever() except KeyboardInterrupt: # definitely not doing this to hide errors or anything clear() ``` #### File: 0xCN/workaholic/utils.py ```python import os import time from data import db from ctypes import Structure, windll, c_uint, sizeof, byref from playsound 
import playsound import random import datetime import shelve import colorama colorama.init() colors = [ colorama.Fore.LIGHTBLUE_EX, colorama.Fore.GREEN, colorama.Fore.LIGHTCYAN_EX, ] help_str = """ {blue}Help Message{normal} - {green}help{normal}: show this message - {green}clients{normal}: show connected browser extensions - {green}clear{normal}: clear the screen - {green}set{normal}: set status, syntax: $: set {blue}[status]{normal} - {green}info{normal}: show current states - {green}exit{normal}: exit the program # do {blue}info{normal} to see list of statuses # failure to get back on time and set mode to work will cause sounds to go off # you can configure time limits at {green}'chrome-extension/config.json'{normal} """.format( break_minutes=db.config["breaks"]["break"]["minutes"], break_limit=db.config["breaks"]["break"]["limit"], eating_minutes=db.config["breaks"]["eating"]["minutes"], eating_limit=db.config["breaks"]["eating"]["limit"], study_minutes=db.config["breaks"]["studying"]["minutes"], study_limit=db.config["breaks"]["studying"]["limit"], chores_minutes=db.config["breaks"]["chores"]["minutes"], chores_limit=db.config["breaks"]["chores"]["limit"], bathroom_minutes=db.config["breaks"]["bathroom"]["minutes"], bathroom_limit=db.config["breaks"]["bathroom"]["limit"], sleep_time=db.config["breaks"]["sleep"]["minutes"] / 60.0, blue=colors[0], green=colors[1], normal=colors[2] ) def help_message(options=[]): print(help_str) def exit_handler(): if db.internal_data["status"] == "work": print("[SUFFER] ENDED BEFORE GOING TO SLEEP") play_audio() print("- program ended, saving states", end="") with shelve.open("saved/state") as state: print(".", end="") state["status"] = db.internal_data["status"] print(".", end="") state["datetime"] = db.internal_data["datetime"] print(".") print("- states saved"+colors[2]) def cursor(): print(colorama.Fore.RED + "$" + colorama.Fore.LIGHTCYAN_EX + ": ", end="") def print_banner(): print(" " + colorama.Fore.LIGHTCYAN_EX + ",") print( "{coffee}c[_] {workaholic}--=Workaholic=-- {version}{v}\n{n}".format( coffee=colorama.Fore.LIGHTBLUE_EX, workaholic=colorama.Fore.LIGHTGREEN_EX, version=colorama.Fore.LIGHTBLUE_EX, v=db.config["version"], n=colorama.Fore.LIGHTCYAN_EX ) ) def clear(options=[]): if db.platform == "nt": os.system("cls") else: os.system("clear") print_banner() def invalid(options=[]): print("Error: invalid command\n") def info(options=[]): colors = [ colorama.Fore.LIGHTBLUE_EX, colorama.Fore.GREEN, colorama.Fore.LIGHTCYAN_EX, ] current = db.internal_data["status"] start_time = db.internal_data["datetime"].strftime("%m/%d/%Y, %H:%M:%S") clients = len(db.internal_data["userAgents"]) stats = db.config["breaks"] idle = db.config["idle_limit"] / 60.0 blocked = db.config["blocked_domains"] process = db.config["process"] blocked_str = "[" for i in range(len(blocked)): blocked_str += colors[1] + '"' + blocked[i] + '"' + colors[2] if i != len(blocked) - 1: blocked_str += ", " blocked_str += "]" server = ( colors[1] + "http://" + db.config["host"] + colors[1] + ":" + colors[0] + str(db.config["port"]) + colors[1] + "/" + colors[2] ) print( "\nstatus: {}, start-time: {}\nconnected-clients: {}, idle-time-limit: {} minutes".format( colors[0] + current + colors[2], colors[1] + start_time + colors[2], colors[1] + str(clients) + colors[2], colors[1] + str(idle) + colors[2], ) ) print("process-to-be-monitored: {}".format(colors[0] + process + colors[2])) print("blocked-domains: {}".format(blocked_str)) print("web-server: {}\n".format(server)) for i in stats: 
print( "{:<18} | {}{:<19}| {}{}".format( colors[0] + i + colors[2], "time: ", colors[1] + str(stats[i]["minutes"]) + colors[2], "limit: ", colors[1] + str(stats[i]["limit"]) + colors[2], ) ) print("\n{:>8}[time is in minutes] \n".format("")) def get_task(): name = db.config["process"] check = os.popen('tasklist /FI "IMAGENAME eq {}"'.format(name)).read().strip()[0:4] if check.lower() == "info": return True else: return False class LASTINPUTINFO(Structure): _fields_ = [ ("cbSize", c_uint), ("dwTime", c_uint), ] def get_idle_duration(): lastInputInfo = LASTINPUTINFO() lastInputInfo.cbSize = sizeof(lastInputInfo) windll.user32.GetLastInputInfo(byref(lastInputInfo)) millis = windll.kernel32.GetTickCount() - lastInputInfo.dwTime return millis / 1000.0 def play_audio(type="suffer"): file_name = random.choice(db.audio[type]) path = "audio/{type}/{file_name}".format(type=type, file_name=file_name) playsound(path) def watcher(): i = 0 n = 0 while True: status = db.internal_data["status"] if status == "work": idle = get_idle_duration() i += 1 if i % 5 == 0 and get_task(): play_audio("idle") i = 0 elif idle > db.config["idle_limit"]: play_audio("idle") play_audio("suffer") db.internal_data["status"] = "idle" i = 0 elif status == "idle": idle = get_idle_duration() # when user gets back if idle < 2: play_audio("idle") play_audio("suffer") db.internal_data["status"] = "work" elif status in db.config["breaks"].keys(): date = db.internal_data["datetime"] now = datetime.datetime.now() difference = now - date difference = difference.total_seconds() if difference > db.config["breaks"][status]["minutes"] * 60: if get_idle_duration() < 2: play_audio("time") # mental torture play_audio("suffer") db.internal_data["status"] = "work" time.sleep(5) elif n > 60: # emptying list of user-agents # this helps keep track of connected clients db.internal_data["userAgents"] = [] n = 0 n += 1 time.sleep(1) ```
{ "source": "0xCoeus/DiscordPepeEtcBot", "score": 2 }
#### File: 0xCoeus/DiscordPepeEtcBot/DiscordBot.py
```python
import os
import discord
import random
from dotenv.main import load_dotenv
from discord.ext import commands


def main():
    bot = commands.Bot(command_prefix='$')
    load_dotenv()

    @bot.event
    async def on_ready():
        print('We have logged in as {0.user}'.format(bot))

    @bot.command()
    async def pepe(ctx):
        path = "C:/Users/nkman/Crypto_Twitter/pepes"
        files = os.listdir(path)
        d = random.choice(files)
        # reuse the directory we just listed instead of repeating the literal
        await ctx.channel.send(file=discord.File(os.path.join(path, d)))

    bot.run(os.getenv("DISCORD_TOKEN_ID"))


if __name__ == '__main__':
    main()
```
{ "source": "0xCompyler/obsidian-server", "score": 2 }
#### File: src/scripts/file_handler.py ```python import os import ibm_boto3 from ibm_botocore.client import Config, ClientError import secrets BASE_DIR = os.getcwd() DUMP_DIR = os.path.join(BASE_DIR, 'DUMP') def create_bucket(bucket_name: str): if bucket_name == '': return {"error": "Bucket name cannot be empty"} try: cos = ibm_boto3.resource("s3", ibm_api_key_id=secrets.api_key, ibm_service_instance_id=secrets.instance_crn, config=Config(signature_version="oauth"), endpoint_url=secrets.endpoint ) except: return {"error": "connection error"} try: cos.Bucket(bucket_name).create( CreateBucketConfiguration={ "LocationConstraint":secrets.location } ) return {"status": "ok"} except Exception as e: return {"error": e} def list_files(bucket_name: str): cos = ibm_boto3.resource("s3", ibm_api_key_id=secrets.api_key, ibm_service_instance_id=secrets.instance_crn, config=Config(signature_version="oauth"), endpoint_url=secrets.endpoint ) files = cos.Bucket(bucket_name).objects.all() return files def upload_file(bucket_name: str, item_name: str, file_path: str): try: cos = ibm_boto3.resource("s3", ibm_api_key_id=secrets.api_key, ibm_service_instance_id=secrets.instance_crn, config=Config(signature_version="oauth"), endpoint_url=secrets.endpoint ) except Exception as e: return {"error": e} part_size = 1024 * 1024 * 5 file_threshold = 1024 * 1024 * 15 transfer_config = ibm_boto3.s3.transfer.TransferConfig( multipart_threshold=file_threshold, multipart_chunksize=part_size ) with open(file_path, "rb") as file_data: cos.Object(bucket_name, item_name).upload_fileobj( Fileobj=file_data, Config=transfer_config ) return {"url": f"https://{bucket_name}.{secrets.endpoint[8: ]}/{item_name}"} def download_files(bucket_name: str, item_name: str): cos = ibm_boto3.resource("s3", ibm_api_key_id=secrets.api_key, ibm_service_instance_id=secrets.instance_crn, config=Config(signature_version="oauth"), endpoint_url=secrets.endpoint ) try: file = cos.Object(bucket_name, item_name).get() content = file['Body'].read() with open(os.path.join(DUMP_DIR, item_name.split('/')[-1]), 'wb+') as f: f.write(content) return True except Exception as e: print(f"[ERROR] Downloading error {e}") return False ``` #### File: src/scripts/video.py ```python import os import json from ibm_cloud_sdk_core.authenticators import IAMAuthenticator from ibm_watson import SpeechToTextV1 from moviepy.editor import VideoFileClip from ibm_watson.websocket import RecognizeCallback #! 
Initialize STT Model try: with open('speechtotext.json', 'r') as credentialsFile: credentials1 = json.loads(credentialsFile.read()) STT_API_KEY_ID = credentials1.get('apikey') STT_URL = credentials1.get('url') except json.decoder.JSONDecodeError: print("Speech to text credentials file is empty, please enter the credentials and try again.") exit() STT_language_model = "Earnings call language model" STT_acoustic_model = "Earnings call acoustic model" authenticator = IAMAuthenticator(STT_API_KEY_ID) speech_to_text_client = SpeechToTextV1( authenticator=authenticator ) speech_to_text_client.set_service_url(STT_URL) def video_to_audio(video_path: str): video = VideoFileClip(video_path) audio = video.audio audio.write_audiofile("DUMP/temp.mp3") myFlag = {"flag": 1} return json.dumps(myFlag, indent=2) class MyRecognizeCallback(RecognizeCallback): def __init__(self): RecognizeCallback.__init__(self) def on_data(self, data): print(json.dumps(data, indent=2)) def on_error(self, error): print('Error received: {0}'.format(error)) def on_inactivity_timeout(self, error): print('Inactivity timeout: {0}'.format(error)) myRecognizeCallback = MyRecognizeCallback() def speech_to_text(): fileName = "DUMP/temp.mp3" filename_converted = fileName.replace( " ", "-").replace("'", "").lower() print("Processing ...\n") with open("DUMP/temp.mp3", 'rb') as audio_file: speech_recognition_results = speech_to_text_client.recognize( audio=audio_file, content_type='audio/mp3', recognize_callback=myRecognizeCallback, model='en-US_BroadbandModel', keywords=['redhat', 'data and AI', 'Linux', 'Kubernetes'], keywords_threshold=0.5, timestamps=True, speaker_labels=True, word_alternatives_threshold=0.9 ).get_result() transcript = '' for chunks in speech_recognition_results['results']: if 'alternatives' in chunks.keys(): alternatives = chunks['alternatives'][0] if 'transcript' in alternatives: transcript = transcript + \ alternatives['transcript'] transcript += '\n' with open('transcribe.txt', "w") as text_file: text_file.write(transcript.replace("%HESITATION", "")) speakerLabels = speech_recognition_results["speaker_labels"] print("Done Processing ...\n") extractedData = [] for i in speech_recognition_results["results"]: if i["word_alternatives"]: mydict = {'from': i["word_alternatives"][0]["start_time"], 'transcript': i["alternatives"] [0]["transcript"].replace("%HESITATION", ""), 'to': i["word_alternatives"][0]["end_time"]} extractedData.append(mydict) finalOutput = [] finalOutput.append({"filename": filename_converted.split('.')[0] +'.txt'}) for i in extractedData: for j in speakerLabels: if i["from"] == j["from"] and i["to"] == j["to"]: mydictTemp = {"from": i["from"], "to": i["to"], "transcript": i["transcript"], "speaker": j["speaker"], "confidence": j["confidence"], "final": j["final"], } finalOutput.append(mydictTemp) os.remove("DUMP/temp.mp3") return finalOutput if __name__ == '__main__': video_to_audio("/Users/arijitroy/Projects/quartz/DUMP/Amethyst.mp4") print(speech_to_text()) ```
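A note on `download_files` in file_handler.py above: it buffers the whole object in memory with `Body.read()` before writing to disk. `ibm_boto3` tracks the boto3 resource API, which also exposes a streaming download helper; a sketch, assuming a `cos` resource built the same way as in that file:

```python
def download_file_streamed(cos, bucket_name: str, item_name: str, dest_path: str):
    # streams the object to disk in chunks instead of buffering it whole
    with open(dest_path, "wb") as f:
        cos.Object(bucket_name, item_name).download_fileobj(f)
```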
{ "source": "0xCompyler/shulker", "score": 2 }
#### File: v1/endpoints/comments.py ```python from fastapi import APIRouter from app.core.sentiment_scores import SentimentScore from app.schema.comments import CommentsAnalysis router = APIRouter() @router.post("/analyze") def _analyse_commens(request_body: CommentsAnalysis): sc = SentimentScore(request_body.comment_url) return sc.analyze() ``` #### File: app/core/azure_utils.py ```python import logging import azure.cognitiveservices.speech as speechsdk from azure.storage.blob import ( BlobServiceClient, BlobClient, ContainerClient, generate_blob_sas, ) import json import dotenv import os import youtube_dl class SpeechUtils: _current_dir: str = os.getcwd() _storage_connection_string: str = "" _audio_path_wav: str _audio_path_webm: str _region: str = "" _key: str = "" _video_url: str _video_id: str def _audio_path_setter(self): self._audio_path_wav: str = os.path.join( self._current_dir, f"dump/{self._video_id}.wav" ) self._audio_path_webm: str = os.path.join( self._current_dir, f"dump/{self._video_id}.webm" ) def __init__(self) -> None: dot_env_path = os.path.join(self._current_dir, ".env") dotenv.load_dotenv() self._region = os.getenv("REGION") self._key = os.getenv("API_KEY") self._storage_connection_string = os.getenv("CONNECTION_STRING") def download_audio(self, youtube_url: str): self._video_url = youtube_url self._video_id = self._video_url.split("=")[-1] self._audio_path_setter() ydl_opts = { "format": "worstaudio", "extractaudio": True, "audioformat": "mp3", "outtmpl": self._audio_path_webm, "postprocessors": [ { "key": "FFmpegExtractAudio", "preferredcodec": "wav", "preferredquality": "96", } ], } with youtube_dl.YoutubeDL(ydl_opts) as ytdl: ytdl.cache.remove() ytdl.download([youtube_url]) def audio_to_blob(self): print("[azure-blob] Uploading") blob_service_client = BlobServiceClient.from_connection_string( self._storage_connection_string ) blob_client = blob_service_client.get_blob_client( container="shulker", blob=f"audio/{self._video_id}.wav" ) with open(self._audio_path_wav, "rb") as data: blob_client.upload_blob(data) sas = generate_blob_sas( account_name="aquafilestorage", account_key=self._key, container_name="shulker", blob_name=f"audio/{self._video_id}.wav", ) print(sas) return blob_client.url def transcribe(self): speech_config = speechsdk.SpeechConfig( subscription=self._key, region=self._region ) speech_config.output_format = speechsdk.OutputFormat.Detailed speech_config.request_word_level_timestamps() audio_config = speechsdk.audio.AudioConfig(filename=self._audio_path_wav) speech_recognizer = speechsdk.SpeechRecognizer( speech_config=speech_config, language="en-US", audio_config=audio_config ) result = speech_recognizer.recognize_once() transcription = {} if result.reason == speechsdk.ResultReason.RecognizedSpeech: json_result = json.loads(result.json) best_element = sorted( json_result["NBest"], key=lambda x: x["Confidence"], reverse=True )[0] speech = [] for word in best_element["Words"]: start = word["Offset"] / 10000 duration = word["Duration"] / 100000 word_dict = { "text": word["Word"], "token": word["Word"], "start": start, "end": start + duration, } speech.append(word_dict) word_level = { "speaker": "main", "sentences": [best_element["Lexical"]], "speech": speech, } transcription["media"] = "" transcription["speakers"] = ["main"] transcription["turns"] = [word_level] return transcription elif result.reason == speechsdk.ResultReason.NoMatch: return { "error": "No speech could be recognized: {}".format( result.no_match_details ) } elif result.reason == 
speechsdk.ResultReason.Canceled: cancellation_details = result.cancellation_details return { "error": "Speech Recognition canceled: {}".format( cancellation_details.reason ) } # su = SpeechUtils() # su.download_audio("https://www.youtube.com/watch?v=I4EWvMFj37g&t=24s") # su.transcribe() speech_utils = SpeechUtils() if __name__ == "__main__": speech_utils = SpeechUtils() speech_utils.download_audio("https://www.youtube.com/watch?v=iqlH4okiQqg") url = speech_utils.audio_to_blob() print(url) ``` #### File: app/core/sentiment_scores.py ```python import os from typing import List from azure.ai.textanalytics import TextAnalyticsClient from azure.core.credentials import AzureKeyCredential from dotenv import load_dotenv import requests class SentimentScore: _key: str = "" _endpoint: str = "" _client: str = "" _comments: List = [] _id: str = "" def __init__(self, aid) -> None: load_dotenv() self._key = os.getenv("TEXT_KEY") self._endpoint = os.getenv("TEXT_ENDPOINT") self._id = aid ta_credential = AzureKeyCredential(self._key) text_analytics_client = TextAnalyticsClient( endpoint=self._endpoint, credential=ta_credential ) self._client = text_analytics_client def _fetch_comments(self): res = requests.get(self._id) if res.status_code != 200: return {"error": f"{res.status_code}"} results_json = res.json() for item in results_json["items"]: doc = item["snippet"]["topLevelComment"]["snippet"] self._comments.append(doc["textOriginal"]) def analyze(self): self._fetch_comments() result = self._client.analyze_sentiment( self._comments[:10], show_opinion_mining=True ) doc_result = [doc for doc in result if not doc.is_error] positive_reviews = [doc for doc in doc_result if doc.sentiment == "positive"] negative_reviews = [doc for doc in doc_result if doc.sentiment == "negative"] positive_mined_opinions = [] mixed_mined_opinions = [] negative_mined_opinions = [] result_dict = {} result_dict["positive_score"] = [] result_dict["neutral_score"] = [] result_dict["negative_score"] = [] result_dict["keywords"] = [] for document in doc_result: result_dict["positive_score"].append(document.confidence_scores.positive) result_dict["neutral_score"].append(document.confidence_scores.neutral) result_dict["negative_score"].append(document.confidence_scores.negative) for sentence in document.sentences: for mined_opinion in sentence.mined_opinions: target = mined_opinion.target for assessment in mined_opinion.assessments: result_dict["keywords"].append(assessment.text) result_dict["positive_score"] = sum(result_dict["positive_score"]) / len( result_dict["positive_score"] ) result_dict["neutral_score"] = sum(result_dict["neutral_score"]) / len( result_dict["neutral_score"] ) result_dict["negative_score"] = sum(result_dict["negative_score"]) / len( result_dict["negative_score"] ) return result_dict if __name__ == "__main__": url = "https://www.googleapis.com/youtube/v3/commentThreads?key=AIzaSyCr01yrmzx3WDqfWUsv-4aRtoYRxd5mDYw&textFormat=plainText&part=snippet&videoId=I4EWvMFj37g&maxResults=100&" sc = SentimentScore(url) print(sc.analyze()) ```
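A caveat on `transcribe()` above: `recognize_once()` returns after the first recognized utterance, so long recordings are truncated. The same SDK supports continuous recognition; a sketch reusing a `speech_config`/`audio_config` pair built as in `transcribe()`:

```python
import time
import azure.cognitiveservices.speech as speechsdk

def transcribe_continuous(speech_config, audio_config):
    # continuous recognition keeps emitting `recognized` events until
    # the audio stream ends, instead of stopping after one utterance
    recognizer = speechsdk.SpeechRecognizer(
        speech_config=speech_config, audio_config=audio_config)
    pieces, done = [], [False]

    recognizer.recognized.connect(lambda evt: pieces.append(evt.result.text))
    recognizer.session_stopped.connect(lambda evt: done.__setitem__(0, True))
    recognizer.canceled.connect(lambda evt: done.__setitem__(0, True))

    recognizer.start_continuous_recognition()
    while not done[0]:
        time.sleep(0.5)
    recognizer.stop_continuous_recognition()
    return " ".join(pieces)
```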