text
stringlengths
2
1.04M
meta
dict
package com.orientechnologies.orient.core.record;

/**
 * Generic record representation without a schema definition. The object can be reused across call
 * to the database.
 */
public interface ORecordStringable {

  /**
   * Returns the record's content rendered as a string.
   */
  public String value();

  /**
   * Replaces the record's content with the given string.
   *
   * @param iValue the new string content
   * @return this record (the interface return type suggests fluent chaining — presumably
   *         {@code this}; confirm against implementations)
   */
  public ORecordStringable value(String iValue);
}
{ "content_hash": "c40f1a86b87daa487b4cb76c0ee90146", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 115, "avg_line_length": 25.083333333333332, "alnum_prop": 0.7508305647840532, "repo_name": "MaDaPHaKa/Orient-object", "id": "00a506b684d48918a81f821450aa8a1081ab9598", "size": "954", "binary": false, "copies": "6", "ref": "refs/heads/ObjectLazyProxyProject", "path": "core/src/main/java/com/orientechnologies/orient/core/record/ORecordStringable.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "4750208" }, { "name": "JavaScript", "bytes": "459297" }, { "name": "PHP", "bytes": "9168" }, { "name": "Shell", "bytes": "10533" } ], "symlink_target": "" }
// Copyright (c) 2011-2014 The Bitcoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.

#ifndef RECENTREQUESTSTABLEMODEL_H
#define RECENTREQUESTSTABLEMODEL_H

#include "walletmodel.h"

#include <QAbstractTableModel>
#include <QStringList>
#include <QDateTime>

class CWallet;

/** A single recently-generated payment request: a versioned, id-stamped
 *  SendCoinsRecipient together with its creation date.
 */
class RecentRequestEntry
{
public:
    RecentRequestEntry() : nVersion(RecentRequestEntry::CURRENT_VERSION), id(0) { }

    static const int CURRENT_VERSION = 1;

    int nVersion;
    int64_t id;
    QDateTime date;
    SendCoinsRecipient recipient;

    IMPLEMENT_SERIALIZE
    (
        // const_cast so the same serialize body can also deserialize (fRead path below).
        RecentRequestEntry* pthis = const_cast<RecentRequestEntry*>(this);

        // The date is serialized as a unix timestamp (seconds), not as a QDateTime.
        unsigned int nDate = date.toTime_t();

        READWRITE(pthis->nVersion);
        nVersion = pthis->nVersion;
        READWRITE(id);
        READWRITE(nDate);
        READWRITE(recipient);

        // When reading, rebuild the QDateTime from the deserialized timestamp.
        if (fRead)
            pthis->date = QDateTime::fromTime_t(nDate);
    )
};

/** Comparison functor used to sort RecentRequestEntry items by a given
 *  column and sort order (see operator() implementation in the .cpp).
 */
class RecentRequestEntryLessThan
{
public:
    RecentRequestEntryLessThan(int nColumn, Qt::SortOrder fOrder):
        column(nColumn), order(fOrder) {}
    bool operator()(RecentRequestEntry &left, RecentRequestEntry &right) const;

private:
    int column;
    Qt::SortOrder order;
};

/** Model for list of recently generated payment requests / clustercoin: URIs.
 * Part of wallet model.
 */
class RecentRequestsTableModel: public QAbstractTableModel
{
    Q_OBJECT

public:
    explicit RecentRequestsTableModel(CWallet *wallet, WalletModel *parent);
    ~RecentRequestsTableModel();

    enum ColumnIndex {
        Date = 0,
        Label = 1,
        Message = 2,
        Amount = 3,
        NUMBER_OF_COLUMNS
    };

    /** @name Methods overridden from QAbstractTableModel
        @{*/
    int rowCount(const QModelIndex &parent) const;
    int columnCount(const QModelIndex &parent) const;
    QVariant data(const QModelIndex &index, int role) const;
    bool setData(const QModelIndex &index, const QVariant &value, int role);
    QVariant headerData(int section, Qt::Orientation orientation, int role) const;
    QModelIndex index(int row, int column, const QModelIndex &parent) const;
    bool removeRows(int row, int count, const QModelIndex &parent = QModelIndex());
    Qt::ItemFlags flags(const QModelIndex &index) const;
    /*@}*/

    // Direct access to the row'th stored entry (no bounds check — caller's duty).
    const RecentRequestEntry &entry(int row) const { return list[row]; }
    void addNewRequest(const SendCoinsRecipient &recipient);
    void addNewRequest(const std::string &recipient);
    void addNewRequest(RecentRequestEntry &recipient);

public slots:
    void sort(int column, Qt::SortOrder order = Qt::AscendingOrder);
    void updateDisplayUnit();

private:
    WalletModel *walletModel;
    QStringList columns;
    QList<RecentRequestEntry> list;
    // Highest id handed out so far; presumably used to assign unique ids to
    // new requests — confirm against the .cpp.
    int64_t nReceiveRequestsMaxId;

    /** Updates the column title to "Amount (DisplayUnit)" and emits headerDataChanged() signal for table headers to react. */
    void updateAmountColumnTitle();
    /** Gets title for amount column including current display unit if optionsModel reference available. */
    QString getAmountTitle();
};

#endif
{ "content_hash": "79902cdbd2ee8d895eb7bdb9d9e9a31b", "timestamp": "", "source": "github", "line_count": 108, "max_line_length": 126, "avg_line_length": 29.453703703703702, "alnum_prop": 0.7060672744419993, "repo_name": "ClusterCoin/ClusterCoin", "id": "56ea820d8db641927804e3e03339617081c31f6a", "size": "3181", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/qt/recentrequeststablemodel.h", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "171070" }, { "name": "C++", "bytes": "3105221" }, { "name": "CSS", "bytes": "1127" }, { "name": "Objective-C++", "bytes": "6330" }, { "name": "Python", "bytes": "146135" }, { "name": "Shell", "bytes": "46374" }, { "name": "TypeScript", "bytes": "9011165" } ], "symlink_target": "" }
package opamp // DO NOT EDIT THIS FILE. GENERATED BY stm32xgen.
{ "content_hash": "f71c30bd207127b9e5903d9f3b91c962", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 49, "avg_line_length": 21.666666666666668, "alnum_prop": 0.7538461538461538, "repo_name": "ziutek/emgo", "id": "8f7056b2fff4c07cc54adc886d582e39dddfcbbf", "size": "154", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "egpath/src/stm32/hal/raw/opamp/l1xx_md--0_doc.go", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Assembly", "bytes": "10051" }, { "name": "C", "bytes": "81187703" }, { "name": "C++", "bytes": "8879" }, { "name": "Go", "bytes": "17751655" }, { "name": "Shell", "bytes": "14801" } ], "symlink_target": "" }
<html>
<body>
<div id=result></div>
<script>
// Test page: hands the echo WebSocket URL to a SharedWorker and reports the
// worker's verdict via the document title ("OK" on DONE, "FAIL" otherwise).

// Append a message line to the result div so the harness can inspect output.
function log(message) {
  document.getElementById("result").innerHTML += message + "<br>";
}

var worker = new SharedWorker("websocket_worker_simple.js");

// Build the ws:// URL on the same host/port the page was served from.
// window.location.host already contains "host:port" (or just "host" when the
// port is implicit), which replaces the original fragile indexOf/slice
// parsing of href — that parsing latched onto the wrong ":" when no explicit
// port was present.
var url = "ws://" + window.location.host + "/echo";

worker.port.onmessage = function (evt) {
  log(evt.data);
  // The worker posts "DONE" on success; any other message marks failure.
  if (evt.data == "DONE") {
    document.title = "OK";
  } else {
    document.title = "FAIL";
  }
};

worker.port.postMessage(url);
</script>
</body>
</html>
{ "content_hash": "f726baa60d1f9f4bf04bd47869e02204", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 66, "avg_line_length": 23.451612903225808, "alnum_prop": 0.6451169188445667, "repo_name": "junmin-zhu/chromium-rivertrail", "id": "ed990e2d0d92584a7ef7d7e9f27fa59ee54a7e0a", "size": "727", "binary": false, "copies": "1", "ref": "refs/heads/v8-binding", "path": "net/data/websocket/websocket_shared_worker.html", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "ASP", "bytes": "853" }, { "name": "AppleScript", "bytes": "6973" }, { "name": "Arduino", "bytes": "464" }, { "name": "Assembly", "bytes": "1172794" }, { "name": "Awk", "bytes": "9519" }, { "name": "C", "bytes": "75806807" }, { "name": "C#", "bytes": "1132" }, { "name": "C++", "bytes": "145161929" }, { "name": "DOT", "bytes": "1559" }, { "name": "F#", "bytes": "381" }, { "name": "Java", "bytes": "1546515" }, { "name": "JavaScript", "bytes": "18675242" }, { "name": "Logos", "bytes": "4517" }, { "name": "Matlab", "bytes": "5234" }, { "name": "Objective-C", "bytes": "6981387" }, { "name": "PHP", "bytes": "97817" }, { "name": "Perl", "bytes": "926245" }, { "name": "Python", "bytes": "8088373" }, { "name": "R", "bytes": "262" }, { "name": "Ragel in Ruby Host", "bytes": "3239" }, { "name": "Shell", "bytes": "1513486" }, { "name": "Tcl", "bytes": "277077" }, { "name": "XML", "bytes": "13493" } ], "symlink_target": "" }
package feo

// Feopolicybinding models a front-end-optimization (FEO) policy binding in
// the NITRO API configuration payloads.
type Feopolicybinding struct {
	// Name of the bound FEO policy; omitted from the JSON payload when empty.
	Name string `json:"name,omitempty"`
}
{ "content_hash": "20ec702f6f2c7ab122c7d80d721a40e2", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 36, "avg_line_length": 16.6, "alnum_prop": 0.7590361445783133, "repo_name": "chiradeep/go-nitro", "id": "ee6af0bd14f21b909a7b7dfd969c81888e24c020", "size": "83", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "config/feo/feopolicy_binding.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Go", "bytes": "870653" }, { "name": "Makefile", "bytes": "975" }, { "name": "Shell", "bytes": "865" } ], "symlink_target": "" }
# C sources (none for this build)
C_SRC=

# C++ sources
CPP_SRC = input.cpp output.cpp delta.cpp bf.cpp contact.cpp forces.cpp dynamics.cpp

# ISPC files
ISPC_SRC= delta.ispc

# ISPC targets (instruction sets to generate)
ISPC_TARGETS= avx

# Compilers
CC=gcc
CXX=g++

# Hull library (ext/hul) include and link flags
HULLINC = -Iext/hul
HULLLIB = -Lext/hul -lhul

# Python paths
PYTHONINC=-I/usr/include/python2.7
PYTHONLIB=-L/usr/lib -lpython2.7

# Program name
EXE=delta

# Floating point type
REAL=double

# Debug version
DEBUG=yes

# Do the rest (shared build rules live in common.mk)
include common.mk
{ "content_hash": "c114b8d9cad1180d26cf1286b62817ea", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 83, "avg_line_length": 14.3, "alnum_prop": 0.7342657342657343, "repo_name": "KonstantinosKr/deltaserial", "id": "1a7e3622398f0a8349c02f9ed39e21895c1e6f00", "size": "439", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "light/Makefile", "mode": "33261", "license": "mit", "language": [ { "name": "C", "bytes": "506319" }, { "name": "C++", "bytes": "282183" }, { "name": "Makefile", "bytes": "11851" }, { "name": "Objective-C", "bytes": "155466" }, { "name": "Python", "bytes": "6206" } ], "symlink_target": "" }
module Stick

  # = Unit Prefix
  #
  # A metric-style unit prefix (e.g. "mega"/"M", factor 10e6) belonging to a
  # measurement system. Unknown method names are looked up in the system's
  # unit types, so e.g. +prefix.meter+ yields a prefixed Measure.
  class Prefix

    # The system object to which this prefix belongs.
    attr :system

    # Long form of prefix, eg. "mega".
    attr :name

    # Short form of prefix, eg. "M".
    attr :symbol

    # Mulitplicative factor, eg 10e6.
    attr :factor

    # Store the owning system and the prefix's name, symbol and factor.
    def initialize(system, name, symbol, factor)
      @system = system
      @name   = name
      @symbol = symbol
      @factor = factor #BigDecimal.new(factor.to_s)
    end

    # Treat any message matching a unit type registered in the system as a
    # request for a Measure of that type carrying this prefix; anything else
    # falls through to the normal NoMethodError behavior.
    def method_missing(type)
      if system.types.key?(type)
        Measure.new(Unit.new(system.types[type], 1, self))
      else
        super
      end
    end

    # Keep respond_to? consistent with method_missing: without this, code
    # probing `prefix.respond_to?(:meter)` would wrongly get false even though
    # the call succeeds.
    def respond_to_missing?(type, include_private = false)
      system.types.key?(type) || super
    end

  end

end
{ "content_hash": "cfb9759300a8ff266f33a9cea059b7c7", "timestamp": "", "source": "github", "line_count": 39, "max_line_length": 58, "avg_line_length": 16.82051282051282, "alnum_prop": 0.5777439024390244, "repo_name": "rubyworks/stick", "id": "9a6707a5c844cfc84bd645320e1797cd6f8b7b3f", "size": "656", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/stick/prefix.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "775456" }, { "name": "Shell", "bytes": "39" } ], "symlink_target": "" }
/**
 * Settings page for Find (IDOL edition).
 *
 * Declares the widget groups shown on the admin settings screen. Each inner
 * array of `widgetGroups` is one group of configuration widgets; every widget
 * maps to a `configItem` key in the server-side configuration and pulls its
 * localised labels from the i18n bundle.
 *
 * NOTE(review): `_` (Underscore) is used below but is not declared in the
 * define() dependency list — presumably available as a global; confirm.
 */
define([
    'find/app/page/abstract-find-settings-page',
    'find/app/page/settings/aci-widget',
    'find/app/page/settings/community-widget',
    'find/app/page/settings/map-widget',
    'find/app/page/settings/mmap-widget',
    'find/app/page/settings/query-manipulation-widget',
    'find/app/page/settings/saved-search-widget',
    'find/app/page/settings/stats-server-widget',
    'find/app/page/settings/view-widget',
    'i18n!find/nls/bundle'
], function(SettingsPage, AciWidget, CommunityWidget, MapWidget, MmapWidget, QueryManipulationWidget, SavedSearchWidget, StatsServerWidget, ViewWidget, i18n) {

    return SettingsPage.extend({
        // Builds the widget instances, grouped into the columns rendered by
        // the abstract settings page.
        initializeWidgets: function() {
            this.widgetGroups = [
                // Group 1: core ACI servers (Content and Community login).
                [
                    new AciWidget({
                        configItem: 'content',
                        description: i18n['settings.content.description'],
                        isOpened: true,
                        title: i18n['settings.content.title'],
                        strings: this.serverStrings()
                    }),
                    new CommunityWidget({
                        configItem: 'login',
                        description: i18n['settings.community.description'],
                        isOpened: true,
                        securityTypesUrl: this.urlRoot + 'securitytypes',
                        title: i18n['settings.community.title'],
                        strings: _.extend(this.serverStrings(), {
                            fetchSecurityTypes: i18n['settings.community.login.fetchTypes'],
                            iconClass: '',
                            invalidSecurityType: i18n['settings.community.login.invalidType'],
                            loginTypeLabel: i18n['settings.community.login.type'],
                            validateFailed: i18n['settings.test.failed']
                        })
                    })
                ],
                // Group 2: query-time features (query manipulation, document viewing).
                [
                    new QueryManipulationWidget({
                        configItem: 'queryManipulation',
                        description: i18n['settings.queryManipulation.description'],
                        isOpened: true,
                        title: i18n['settings.queryManipulation'],
                        strings: _.extend(this.serverStrings(), {
                            blacklist: i18n['settings.queryManipulation.blacklist'],
                            disable: i18n['settings.queryManipulation.disable'],
                            disabled: i18n['settings.queryManipulation.disabled'],
                            dictionary: i18n['settings.queryManipulation.dictionary'],
                            expandQuery: i18n['settings.queryManipulation.expandQuery'],
                            enable: i18n['settings.queryManipulation.enable'],
                            enabled: i18n['settings.queryManipulation.enabled'],
                            index: i18n['settings.queryManipulation.index'],
                            loading: i18n['settings.queryManipulation.loading'],
                            typeaheadMode: i18n['settings.queryManipulation.typeaheadMode']
                        })
                    }),
                    new ViewWidget({
                        configItem: 'view',
                        description: i18n['settings.view.description'],
                        isOpened: true,
                        title: i18n['settings.view'],
                        strings: _.extend(this.serverStrings(), {
                            connector: i18n['settings.view.connector'],
                            referenceFieldLabel: i18n['settings.view.referenceFieldLabel'],
                            referenceFieldBlank: i18n['settings.view.referenceFieldBlank'],
                            referenceFieldPlaceholder: i18n['settings.view.referenceFieldPlaceholder'],
                            viewingMode: i18n['settings.view.viewingMode']
                        })
                    })
                ],
                // Group 3: optional integrations (StatsServer, saved searches, MMAP, maps).
                [
                    new StatsServerWidget({
                        configItem: 'statsServer',
                        description: i18n['settings.statsserver.description'],
                        isOpened: true,
                        title: i18n['settings.statsserver.title'],
                        strings: _.extend(this.serverStrings(), {
                            disable: i18n['settings.statsserver.disable'],
                            disabled: i18n['settings.statsserver.disabled'],
                            enable: i18n['settings.statsserver.enable'],
                            enabled: i18n['settings.statsserver.enabled'],
                            loading: i18n['settings.statsserver.loading']
                        })
                    }),
                    new SavedSearchWidget({
                        configItem: 'savedSearches',
                        description: i18n['settings.savedSearches.description'],
                        isOpened: true,
                        title: i18n['settings.savedSearches'],
                        strings: _.extend(this.serverStrings(), {
                            // NOTE(review): reuses the mmap loading string — confirm intentional.
                            loading: i18n['settings.mmap.loading'],
                            disablePolling: i18n['settings.savedSearches.polling.disable'],
                            enablePolling: i18n['settings.savedSearches.polling.enable'],
                            pollingDisabled: i18n['settings.savedSearches.polling.disabled'],
                            pollingEnabled: i18n['settings.savedSearches.polling.enabled'],
                            pollingInterval: i18n['settings.savedSearches.polling.interval']
                        })
                    }),
                    new MmapWidget({
                        configItem: 'mmap',
                        description: i18n['settings.mmap.description'],
                        isOpened: true,
                        title: i18n['settings.mmap'],
                        strings: _.extend(this.serverStrings(), {
                            disable: i18n['settings.mmap.disable'],
                            disabled: i18n['settings.mmap.disabled'],
                            enable: i18n['settings.mmap.enable'],
                            enabled: i18n['settings.mmap.enabled'],
                            loading: i18n['settings.mmap.loading'],
                            url: i18n['settings.mmap.url']
                        })
                    }),
                    new MapWidget({
                        configItem: 'map',
                        description: i18n['settings.map.description'],
                        isOpened: true,
                        title: i18n['settings.map'],
                        strings: _.extend(this.serverStrings(), {
                            attribution: i18n['settings.map.attribution'],
                            disable: i18n['settings.map.disable'],
                            disabled: i18n['settings.map.disabled'],
                            enable: i18n['settings.map.enable'],
                            enabled: i18n['settings.map.enabled'],
                            loading: i18n['settings.map.loading'],
                            url: i18n['settings.map.url'],
                            resultsstep: i18n['settings.map.results.step']
                        })
                    })
                ]
            ];
        }
    });
});
{ "content_hash": "f25ee6ed9aa121b6ed92bc5cc517d4a3", "timestamp": "", "source": "github", "line_count": 136, "max_line_length": 159, "avg_line_length": 54.705882352941174, "alnum_prop": 0.473252688172043, "repo_name": "LinkPowerHK/find", "id": "7a38d803845c1d9bb94a0a665f6675dfb81e14d7", "size": "7627", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "idol/src/main/public/static/js/find/app/page/find-settings-page.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "249082" }, { "name": "CoffeeScript", "bytes": "5997" }, { "name": "HTML", "bytes": "52621" }, { "name": "Java", "bytes": "562660" }, { "name": "JavaScript", "bytes": "817396" }, { "name": "Ruby", "bytes": "206" }, { "name": "Shell", "bytes": "4427" } ], "symlink_target": "" }
package io.sqooba.oss.timeseries.window

import io.sqooba.oss.timeseries.immutable.TSEntry

import scala.collection.immutable.Queue

/** Extension to the Aggregator that also supports removing entries from the
  * aggregated value. Assuming we want to aggregate the content of a window, and to
  * do so for each different window returned by WindowSlider, many iterations will
  * be required.
  *
  * Depending on the aggregation function, this is however not required: For simple
  * cases like addition or multiplication and any situation where the contributions
  * of a single entry to the aggregated value may be reversed, we can compute an
  * aggregated value for each window in linear time.
  *
  * The reversible aggregator will be applied sequentially, so it may keep track of
  * any state from one addition or removal to the next.
  *
  * Some aggregations depend on the duration of the entries like integration or
  * averaging, others like min max don't. To keep those types of aggregations well
  * separated, implementations need to extend either the time-aware or the
  * time-unaware subtrait. This allows us to use different windowing functions for
  * the two types.
  *
  * @tparam T the type of the entries being aggregated over
  * @tparam A the type of the aggregated value
  */
sealed trait ReversibleAggregator[T, A] extends Aggregator[T, A] {

  /** Updates the aggregated value according to the fact that
    * the head of the currentWindow is being removed.
    *
    * @param currentWindow the current content of the window. It still
    *                      contains the entry that has to be removed
    */
  // TODO: consider returning the resulting aggregated value?
  def dropHead(currentWindow: Queue[TSEntry[T]]): Unit =
    dropEntry(currentWindow.head)

  /** Updates the aggregated value according to the fact that
    * this entry is being removed.
    *
    * @param entry to remove from the head of the window
    */
  // TODO: consider returning the resulting aggregated value?
  def dropEntry(entry: TSEntry[T]): Unit

  /** Combine the addition and the removal of entries from the aggregated value.
    *
    * @param add the value that will be added
    * @param currentWindow the current window, from which we will drop the first entry.
    *                      Note that it does not yet contain 'add'
    */
  def addAndDrop(add: TSEntry[T], currentWindow: Queue[TSEntry[T]]): Unit = {
    // drop first: addEntry needs to work on the updated window
    dropHead(currentWindow)
    addEntry(add, currentWindow.tail)
  }

  /** Combine the addition and the removal of entries from the aggregated value.
    *
    * @param add the entry that will be added at the tail
    * @param remove the entry that will be removed at the head
    */
  def addAndDrop(add: TSEntry[T], remove: TSEntry[T]): Unit = {
    // mirror the queue-based addAndDrop above: remove before adding
    dropEntry(remove)
    addEntry(add)
  }
}

/** This trait should be extended by all aggregators that depend on the time/duration
  * in their calculation like integration, averaging over time etc.
  */
trait TimeAwareReversibleAggregator[T, A] extends ReversibleAggregator[T, A]

/** This trait should be extended by all aggregators that don't depend on the
  * duration in their calculation like min, max, median.
  */
trait TimeUnawareReversibleAggregator[T, A] extends ReversibleAggregator[T, A]
{ "content_hash": "ec53ab19f9935de3218b0469b4135703", "timestamp": "", "source": "github", "line_count": 80, "max_line_length": 87, "avg_line_length": 41.8, "alnum_prop": 0.7251794258373205, "repo_name": "Shastick/tslib", "id": "92b474fc4c6045cee19e57fad801a40392f4896c", "size": "3344", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/main/scala/io/sqooba/oss/timeseries/window/ReversibleAggregator.scala", "mode": "33188", "license": "mit", "language": [ { "name": "Scala", "bytes": "69421" } ], "symlink_target": "" }
#if LZHAM_USE_PTHREADS_API

#if LZHAM_NO_ATOMICS
#error No atomic operations defined in lzham_platform.h!
#endif

#include <pthread.h>
#include <semaphore.h>
#include <unistd.h>
// time.h for clock_gettime()/timespec, errno.h for errno/ETIMEDOUT, both of
// which were previously only available through transitive includes.
#include <time.h>
#include <errno.h>

namespace lzham
{
   // Counting semaphore wrapped around POSIX sem_t.
   class semaphore
   {
      LZHAM_NO_COPY_OR_ASSIGNMENT_OP(semaphore);

   public:
      inline semaphore(long initialCount = 0, long maximumCount = 1, const char* pName = NULL)
      {
         LZHAM_NOTE_UNUSED(maximumCount), LZHAM_NOTE_UNUSED(pName);
         LZHAM_ASSERT(maximumCount >= initialCount);
         if (sem_init(&m_sem, 0, initialCount))
         {
            LZHAM_FAIL("semaphore: sem_init() failed");
         }
      }

      inline ~semaphore()
      {
         sem_destroy(&m_sem);
      }

      // Increments the semaphore count releaseCount times.
      inline void release(long releaseCount = 1)
      {
         LZHAM_ASSERT(releaseCount >= 1);

         int status = 0;
#ifdef WIN32
         // pthreads-win32 provides sem_post_multiple() as an extension.
         if (1 == releaseCount)
            status = sem_post(&m_sem);
         else
            status = sem_post_multiple(&m_sem, releaseCount);
#else
         while (releaseCount > 0)
         {
            status = sem_post(&m_sem);
            if (status)
               break;
            releaseCount--;
         }
#endif

         if (status)
         {
            LZHAM_FAIL("semaphore: sem_post() or sem_post_multiple() failed");
         }
      }

      // Waits for the semaphore. Returns true if acquired, false on timeout.
      // milliseconds == UINT32_MAX means wait forever.
      inline bool wait(uint32 milliseconds = UINT32_MAX)
      {
         int status;
         if (milliseconds == UINT32_MAX)
         {
            status = sem_wait(&m_sem);
         }
         else
         {
            // BUGFIX: sem_timedwait() takes an *absolute* CLOCK_REALTIME
            // deadline, not a relative interval. The previous code passed
            // "milliseconds from epoch", i.e. a deadline far in the past, so
            // every timed wait timed out immediately.
            struct timespec deadline;
            clock_gettime(CLOCK_REALTIME, &deadline);
            deadline.tv_sec += milliseconds / 1000;
            deadline.tv_nsec += (milliseconds % 1000) * 1000000L;
            if (deadline.tv_nsec >= 1000000000L)
            {
               deadline.tv_sec++;
               deadline.tv_nsec -= 1000000000L;
            }
            status = sem_timedwait(&m_sem, &deadline);
         }

         if (status)
         {
            if (errno != ETIMEDOUT)
            {
               LZHAM_FAIL("semaphore: sem_wait() or sem_timedwait() failed");
            }
            return false;
         }

         return true;
      }

   private:
      sem_t m_sem;
   };

   // Thin RAII-free wrapper around pthread_spinlock_t. Deallocation cannot
   // fail; lock/unlock failures are fatal.
   class spinlock
   {
   public:
      inline spinlock()
      {
         if (pthread_spin_init(&m_spinlock, 0))
         {
            LZHAM_FAIL("spinlock: pthread_spin_init() failed");
         }
      }

      inline ~spinlock()
      {
         pthread_spin_destroy(&m_spinlock);
      }

      inline void lock()
      {
         if (pthread_spin_lock(&m_spinlock))
         {
            LZHAM_FAIL("spinlock: pthread_spin_lock() failed");
         }
      }

      inline void unlock()
      {
         if (pthread_spin_unlock(&m_spinlock))
         {
            LZHAM_FAIL("spinlock: pthread_spin_unlock() failed");
         }
      }

   private:
      pthread_spinlock_t m_spinlock;
   };

   // Fixed-capacity thread-safe stack guarded by a spinlock.
   template<typename T, uint cMaxSize>
   class tsstack
   {
   public:
      inline tsstack() : m_top(0)
      {
      }

      inline ~tsstack()
      {
      }

      inline void clear()
      {
         m_spinlock.lock();
         m_top = 0;
         m_spinlock.unlock();
      }

      // Returns false when the stack is full.
      inline bool try_push(const T& obj)
      {
         bool result = false;
         m_spinlock.lock();
         if (m_top < (int)cMaxSize)
         {
            m_stack[m_top++] = obj;
            result = true;
         }
         m_spinlock.unlock();
         return result;
      }

      // Returns false when the stack is empty.
      inline bool pop(T& obj)
      {
         bool result = false;
         m_spinlock.lock();
         if (m_top > 0)
         {
            obj = m_stack[--m_top];
            result = true;
         }
         m_spinlock.unlock();
         return result;
      }

   private:
      spinlock m_spinlock;
      T m_stack[cMaxSize];
      int m_top;
   };

   // Pool of helper threads consuming tasks from a shared stack. Tasks are
   // either C callbacks or executable_task objects.
   class task_pool
   {
   public:
      task_pool();
      task_pool(uint num_threads);
      ~task_pool();

      enum { cMaxThreads = LZHAM_MAX_HELPER_THREADS };
      bool init(uint num_threads);
      void deinit();

      inline uint get_num_threads() const { return m_num_threads; }
      inline uint get_num_outstanding_tasks() const { return m_num_outstanding_tasks; }

      // C-style task callback
      typedef void (*task_callback_func)(uint64 data, void* pData_ptr);
      bool queue_task(task_callback_func pFunc, uint64 data = 0, void* pData_ptr = NULL);

      class executable_task
      {
      public:
         virtual void execute_task(uint64 data, void* pData_ptr) = 0;
      };

      // It's the caller's responsibility to delete pObj within the execute_task() method, if needed!
      bool queue_task(executable_task* pObj, uint64 data = 0, void* pData_ptr = NULL);

      template<typename S, typename T>
      inline bool queue_object_task(S* pObject, T pObject_method, uint64 data = 0, void* pData_ptr = NULL);

      template<typename S, typename T>
      inline bool queue_multiple_object_tasks(S* pObject, T pObject_method, uint64 first_data, uint num_tasks, void* pData_ptr = NULL);

      // Blocks until all queued tasks have completed.
      void join();

   private:
      struct task
      {
         inline task() : m_data(0), m_pData_ptr(NULL), m_pObj(NULL), m_flags(0) { }

         uint64 m_data;
         void* m_pData_ptr;

         union
         {
            task_callback_func m_callback;
            executable_task* m_pObj;
         };

         uint m_flags;
      };

      tsstack<task, cMaxThreads> m_task_stack;

      uint m_num_threads;
      pthread_t m_threads[cMaxThreads];

      // Signaled once per queued task; helper threads block on it.
      semaphore m_tasks_available;

      enum task_flags
      {
         cTaskFlagObject = 1
      };

      volatile atomic32_t m_num_outstanding_tasks;
      volatile atomic32_t m_exit_flag;

      void process_task(task& tsk);

      static void* thread_func(void *pContext);
   };

   enum object_task_flags
   {
      cObjectTaskFlagDefault = 0,
      cObjectTaskFlagDeleteAfterExecution = 1
   };

   // Adapts a pointer-to-member-function into an executable_task; can delete
   // itself after execution when cObjectTaskFlagDeleteAfterExecution is set.
   template<typename T>
   class object_task : public task_pool::executable_task
   {
   public:
      object_task(uint flags = cObjectTaskFlagDefault) :
         m_pObject(NULL),
         m_pMethod(NULL),
         m_flags(flags)
      {
      }

      typedef void (T::*object_method_ptr)(uint64 data, void* pData_ptr);

      object_task(T* pObject, object_method_ptr pMethod, uint flags = cObjectTaskFlagDefault) :
         m_pObject(pObject),
         m_pMethod(pMethod),
         m_flags(flags)
      {
         LZHAM_ASSERT(pObject && pMethod);
      }

      void init(T* pObject, object_method_ptr pMethod, uint flags = cObjectTaskFlagDefault)
      {
         LZHAM_ASSERT(pObject && pMethod);
         m_pObject = pObject;
         m_pMethod = pMethod;
         m_flags = flags;
      }

      T* get_object() const { return m_pObject; }
      object_method_ptr get_method() const { return m_pMethod; }

      virtual void execute_task(uint64 data, void* pData_ptr)
      {
         (m_pObject->*m_pMethod)(data, pData_ptr);

         if (m_flags & cObjectTaskFlagDeleteAfterExecution)
            lzham_delete(this);
      }

   protected:
      T* m_pObject;
      object_method_ptr m_pMethod;
      uint m_flags;
   };

   template<typename S, typename T>
   inline bool task_pool::queue_object_task(S* pObject, T pObject_method, uint64 data, void* pData_ptr)
   {
      object_task<S> *pTask = lzham_new< object_task<S> >(pObject, pObject_method, cObjectTaskFlagDeleteAfterExecution);
      if (!pTask)
         return false;
      return queue_task(pTask, data, pData_ptr);
   }

   template<typename S, typename T>
   inline bool task_pool::queue_multiple_object_tasks(S* pObject, T pObject_method, uint64 first_data, uint num_tasks, void* pData_ptr)
   {
      LZHAM_ASSERT(m_num_threads);
      LZHAM_ASSERT(pObject);
      LZHAM_ASSERT(num_tasks);
      if (!num_tasks)
         return true;

      bool status = true;

      uint i;
      for (i = 0; i < num_tasks; i++)
      {
         task tsk;

         tsk.m_pObj = lzham_new< object_task<S> >(pObject, pObject_method, cObjectTaskFlagDeleteAfterExecution);
         if (!tsk.m_pObj)
         {
            status = false;
            break;
         }

         tsk.m_data = first_data + i;
         tsk.m_pData_ptr = pData_ptr;
         tsk.m_flags = cTaskFlagObject;

         if (!m_task_stack.try_push(tsk))
         {
            // BUGFIX: the freshly allocated task object was leaked here when
            // the stack was full; it was never queued so it is safe to free.
            lzham_delete(tsk.m_pObj);
            status = false;
            break;
         }
      }

      // i == number of tasks successfully pushed (loop breaks before ++).
      if (i)
      {
         atomic_add32(&m_num_outstanding_tasks, i);

         m_tasks_available.release(i);
      }

      return status;
   }

   inline void lzham_sleep(unsigned int milliseconds)
   {
#ifdef WIN32
      // pthreads-win32 extension: relative-interval sleep.
      struct timespec interval;
      interval.tv_sec = milliseconds / 1000;
      interval.tv_nsec = (milliseconds % 1000) * 1000000L;
      pthread_delay_np(&interval);
#else
      // usleep() takes microseconds and may be limited to < 1s per call on
      // some systems, hence the 1000 ms chunks.
      while (milliseconds)
      {
         int msecs_to_sleep = LZHAM_MIN(milliseconds, 1000);
         usleep(msecs_to_sleep * 1000);
         milliseconds -= msecs_to_sleep;
      }
#endif
   }

   // TODO: Implement
   uint lzham_get_max_helper_threads();

} // namespace lzham

#endif // LZHAM_USE_PTHREADS_API
{ "content_hash": "0187f2c021037b4357b2b0b1f6093610", "timestamp": "", "source": "github", "line_count": 380, "max_line_length": 135, "avg_line_length": 24.510526315789473, "alnum_prop": 0.5219025123470045, "repo_name": "f297260/lzham", "id": "69c8232aaa0da817096ae41e8a1de0ac9f032df6", "size": "9432", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "lzhamcomp/lzham_pthreads_threading.h", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "16955" }, { "name": "C++", "bytes": "680767" }, { "name": "CMake", "bytes": "8936" } ], "symlink_target": "" }
using System;
using System.IO;
using System.Linq;
using System.Text;
using System.Collections.Generic;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Mozu.Api.Test.Factories;
using Mozu.Api.Test.Helpers;

namespace Mozu.Api.Test.MsTestCases
{
    /// <summary>
    /// MSTest cases exercising the Mozu document API: listing the documents in
    /// the "files@mozu" document list and downloading their content to disk.
    /// </summary>
    [TestClass]
    public class DocumentTest : MozuApiTestBase
    {
        #region NonTestCaseCode

        #region Initializers

        /// <summary>
        /// This will run once before each test: resolves the configured test
        /// tenant and rebuilds the API message handler scoped to the tenant's
        /// first master catalog and catalog.
        /// </summary>
        [TestInitialize]
        public void TestMethodInit()
        {
            tenantId = Convert.ToInt32(Mozu.Api.Test.Helpers.Environment.GetConfigValueByEnvironment("TenantId"));
            ApiMsgHandler = ServiceClientMessageFactory.GetTestClientMessage();
            TestBaseTenant = TenantFactory.GetTenant(handler: ApiMsgHandler, tenantId: tenantId);
            masterCatalogId = TestBaseTenant.MasterCatalogs.First().Id;
            catalogId = TestBaseTenant.MasterCatalogs.First().Catalogs.First().Id;
            // Rebuild the handler now that the catalog scope is known.
            ApiMsgHandler = ServiceClientMessageFactory.GetTestClientMessage(tenantId, masterCatalogId: masterCatalogId, catalogId: catalogId);
        }

        /// <summary>
        /// Runs once before any test is run.
        /// </summary>
        /// <param name="testContext"></param>
        [ClassInitialize]
        public static void InitializeBeforeRun(TestContext testContext)
        {
            //Call the base class's static initializer.
            MozuApiTestBase.TestClassInitialize(testContext);

            // Populate Test Data.
            //Generator.PopulateProductsToCatalog(TestBaseTenant.Id, TestBaseTenant.MasterCatalogs.First().Id,
            //    TestBaseTenant.MasterCatalogs.First().Catalogs.First().Id);
        }

        #endregion

        #region CleanupMethods

        /// <summary>
        /// This will run once after each test.
        /// </summary>
        [TestCleanup]
        public void TestMethodCleanup()
        {
            //Calls the base class's Test Cleanup
            base.TestCleanup();
        }

        /// <summary>
        /// Runs once after all of the tests have run.
        /// </summary>
        [ClassCleanup]
        public static void TestsCleanup()
        {
            //Calls the Base class's static cleanup.
            MozuApiTestBase.TestClassCleanup();
        }

        #endregion

        #endregion

        /// <summary>
        /// Lists all documents in the "files@mozu" list, downloads each one's
        /// content and writes it to disk, asserting a non-null MemoryStream is
        /// returned for every document.
        /// </summary>
        [TestMethod]
        [TestCategory("Mozu SDK Sample")]
        public void GetDocumentsTest1()
        {
            var documentfactory = DocumentFactory.GetDocuments(ApiMsgHandler, "files@mozu");
            Assert.IsNotNull(documentfactory.Items);
            Assert.IsTrue(documentfactory.Items.Count > 0);
            foreach (var item in documentfactory.Items)
            {
                var s = DocumentFactory.GetDocumentContent(ApiMsgHandler, "files@mozu", item.Id);
                Assert.IsNotNull(s);
                Assert.IsTrue(s.GetType() == typeof(System.IO.MemoryStream));
                // NOTE(review): the target drive letter is environment-specific;
                // confirm D:\ exists on the test agents or make it configurable.
                // The using-blocks dispose both streams even when the copy throws;
                // previously the content stream leaked if FileStream.Write failed.
                using (s)
                using (FileStream file = new FileStream(Path.Combine(@"D:\", item.Name), FileMode.Create, FileAccess.Write))
                {
                    // Stream.CopyTo handles partial reads; the previous single
                    // Read() call assumed the entire stream arrives in one read.
                    s.CopyTo(file);
                }
            }
        }

        /// <summary>
        /// Filters the "files@mozu" list by an exact name and expects no match.
        /// </summary>
        [TestMethod]
        [TestCategory("Mozu SDK Sample")]
        public void GetDocumentsTest2()
        {
            var documentfactory = DocumentFactory.GetDocuments(ApiMsgHandler, "files@mozu", filter: "name eq 'Diamine-Syrah 1.jpg'");
            Assert.IsTrue(documentfactory.Items.Count == 0);
        }
    }
}
{ "content_hash": "a72adf78bc9f838314b0b79148676b5d", "timestamp": "", "source": "github", "line_count": 110, "max_line_length": 141, "avg_line_length": 34.263636363636365, "alnum_prop": 0.6001591934200053, "repo_name": "ezekielthao/mozu-dotnet", "id": "b97486c67565b032103e6f3b12fe722806662aa7", "size": "3771", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Mozu.Api.Test/MsTestCases/DocumentTest.cs", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "279" }, { "name": "C#", "bytes": "6490384" }, { "name": "F#", "bytes": "1750" }, { "name": "PowerShell", "bytes": "3305" } ], "symlink_target": "" }
package org.motechproject.mobile.core.model;

import java.util.Date;

/**
 * Represents information returned by telco gateway to be persisted in the database.
 * Corresponds to a single {@link org.motechproject.mobile.core.model.GatewayRequest }
 *
 * Date: Jul 24, 2009
 * @author Joseph ([email protected])
 */
public interface GatewayResponse {

    /**
     * Sets the database identifier of this response.
     *
     * @param id id to set
     */
    public void setId(Long id);

    /**
     * @return the database identifier of this response
     */
    public Long getId();

    /**
     * @return the message id assigned by the gateway
     */
    public String getGatewayMessageId();

    /**
     * @return the {@link GatewayRequest} this response corresponds to
     */
    public GatewayRequest getGatewayRequest();

    /**
     * @return the delivery status reported for the message
     */
    public MStatus getMessageStatus();

    /**
     * @return the recipient phone number
     */
    public String getRecipientNumber();

    /**
     * @return the raw response text returned by the gateway
     */
    public String getResponseText();

    /**
     * @return the id of the originating request
     */
    public String getRequestId();

    /**
     * @return the date this response was created
     */
    public Date getDateCreated();

    /**
     * @return the date this response was last modified
     */
    public Date getLastModified();

    /**
     * @param gatewayMessageId the gatewayMessageId to set
     */
    public void setGatewayMessageId(String gatewayMessageId);

    /**
     * @param messageId the {@link GatewayRequest} to associate with this response
     */
    public void setGatewayRequest(GatewayRequest messageId);

    /**
     * @param messageStatus the messageStatus to set
     */
    public void setMessageStatus(MStatus messageStatus);

    /**
     * @param recipientNumber the recipientNumber to set
     */
    public void setRecipientNumber(String recipientNumber);

    /**
     * @param responseText the responseText to set
     */
    public void setResponseText(String responseText);

    /**
     * @param lastModified the lastModified to set
     */
    public void setLastModified(Date lastModified);

    /**
     * @param requestId the requestId to set
     */
    public void setRequestId(String requestId);

    /**
     * @param date the creation date to set
     */
    public void setDateCreated(Date date);

    /**
     * Helper method to display string value of all properties of the object.
     *
     * @return formated string value of all properties
     */
    @Override
    public String toString();

    /**
     * @return the version (used for optimistic locking — presumably mapped by
     * the persistence layer; confirm against the implementing entity)
     */
    int getVersion();

    /**
     * @param version the version to set
     */
    void setVersion(int version);
}
{ "content_hash": "5214d0bb363a3ff23dd2ad74ae653fcc", "timestamp": "", "source": "github", "line_count": 127, "max_line_length": 86, "avg_line_length": 20.866141732283463, "alnum_prop": 0.5920754716981133, "repo_name": "motech/MOTECH-Mobile", "id": "936e002a4da4a66ada61b46161049253ba29dcde", "size": "4448", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "motech-mobile-core/src/main/java/org/motechproject/mobile/core/model/GatewayResponse.java", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Java", "bytes": "1556200" }, { "name": "JavaScript", "bytes": "4648" } ], "symlink_target": "" }
package com.orange.clara.cloud.servicedbdumper.acceptance; import com.orange.clara.cloud.servicedbdumper.Application; import com.orange.clara.cloud.servicedbdumper.exception.CannotFindDatabaseDumperException; import com.orange.clara.cloud.servicedbdumper.exception.DatabaseExtractionException; import com.orange.clara.cloud.servicedbdumper.exception.ServiceKeyException; import com.orange.clara.cloud.servicedbdumper.helper.ByteFormat; import com.orange.clara.cloud.servicedbdumper.integrations.AbstractIntegrationWithRealCfClientTest; import com.orange.clara.cloud.servicedbdumper.model.DatabaseRef; import com.orange.clara.cloud.servicedbdumper.model.DatabaseType; import org.cloudfoundry.community.servicebroker.exception.*; import org.junit.Before; import org.junit.Ignore; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.test.SpringApplicationConfiguration; import org.springframework.boot.test.WebIntegrationTest; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.annotation.IfProfileValue; import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import java.io.File; import java.io.IOException; import static org.junit.Assume.assumeTrue; @RunWith(SpringJUnit4ClassRunner.class) @SpringApplicationConfiguration(Application.class) @WebIntegrationTest(randomPort = true) @ActiveProfiles({"local", "integrationrealcf", "s3"}) @DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS) @IfProfileValue(name = "test.groups", values = {"local-acceptance-tests"}) public class AcceptanceLocalTest extends AbstractIntegrationWithRealCfClientTest { private final static String fileNameTemplate = "fakedata_%s.sql"; @Value("${accept.cf.service.name.mysql:cleardb}") protected String serviceNameAcceptMysql; @Value("${accept.cf.service.plan.mysql:spark}") protected String servicePlanAcceptMysql; 
@Value("${accept.cf.service.instance.source.mysql:mysql-db-dumper-src-int}") protected String serviceSourceInstanceAcceptMysql; @Value("${accept.cf.service.instance.target.mysql:mysql-db-dumper-dest-int}") protected String serviceTargetInstanceAcceptMysql; @Value("${accept.cf.service.name.postgresql:elephantsql}") protected String serviceNameAcceptPostgres; @Value("${accept.cf.service.plan.postgresql:turtle}") protected String servicePlanAcceptPostgres; @Value("${accept.cf.service.instance.source.postgresql:postgres-db-dumper-src-int}") protected String serviceSourceInstanceAcceptPostgres; @Value("${accept.cf.service.instance.target.postgresql:postgres-db-dumper-dest-int}") protected String serviceTargetInstanceAcceptPostgres; @Value("${accept.cf.service.name.mongodb:mongolab}") protected String serviceNameAcceptMongo; @Value("${accept.cf.service.plan.mongodb:sandbox}") protected String servicePlanAcceptMongo; @Value("${accept.cf.service.instance.source.mongodb:mongodb-db-dumper-src-int}") protected String serviceSourceInstanceAcceptMongo; @Value("${accept.cf.service.instance.target.mongodb:mongodb-db-dumper-dest-int}") protected String serviceTargetInstanceAcceptMongo; @Value("${accept.cf.service.name.redis:rediscloud}") protected String serviceNameAcceptRedis; @Value("${accept.cf.service.plan.redis:30mb}") protected String servicePlanAcceptRedis; @Value("${accept.cf.service.instance.source.redis:redis-db-dumper-src-int}") protected String serviceSourceInstanceAcceptRedis; @Value("${accept.cf.service.instance.target.redis:redis-db-dumper-dest-int}") protected String serviceTargetInstanceAcceptRedis; @Value("${user.dir}/bin/create_fake_data") protected File scriptCreateFakeData; @Value("${user.dir}") protected File userDir; @Value("${test.accept.file.size:#{null}}") protected String fileSize; @Override @Before public void init() throws DatabaseExtractionException { if (this.fileSize == null) { String skipMessage = "You must set property test.accept.file.size (e.g. 
test.accept.file.size=100mb"; this.reportIntegration.setSkipped(true); this.reportIntegration.setSkippedReason(skipMessage); assumeTrue(skipMessage, false); } this.serviceNameMongo = this.serviceNameAcceptMongo; this.serviceNameMysql = this.serviceNameAcceptMysql; this.serviceNameRedis = this.serviceNameAcceptRedis; this.serviceNamePostgres = this.serviceNameAcceptPostgres; this.servicePlanMongo = this.servicePlanAcceptMongo; this.servicePlanMysql = this.servicePlanAcceptMysql; this.servicePlanPostgres = this.servicePlanAcceptPostgres; this.servicePlanRedis = this.servicePlanAcceptRedis; this.serviceSourceInstanceMongo = serviceSourceInstanceAcceptMongo; this.serviceSourceInstanceMysql = serviceSourceInstanceAcceptMysql; this.serviceSourceInstancePostgres = serviceSourceInstanceAcceptPostgres; this.serviceSourceInstanceRedis = serviceSourceInstanceAcceptRedis; this.serviceTargetInstanceMongo = serviceTargetInstanceAcceptMongo; this.serviceTargetInstanceMysql = serviceTargetInstanceAcceptMysql; this.serviceTargetInstancePostgres = serviceTargetInstanceAcceptPostgres; this.serviceTargetInstanceRedis = serviceTargetInstanceAcceptRedis; super.init(); this.prefixReportName = this.prefixReportName + " for " + this.fileSize; } @Override public void doBeforeTest(DatabaseType databaseType) throws DatabaseExtractionException, CannotFindDatabaseDumperException, InterruptedException, IOException { boolean isS3urlExists = System.getenv("S3_URL") != null && System.getenv("DYNO") != null; if (!isS3urlExists) { this.skipCleaning = true; String skipMessage = "No s3 server found, please set env var S3_URL and DYNO=true"; this.reportIntegration.setSkipped(true); this.reportIntegration.setSkippedReason(skipMessage); assumeTrue(skipMessage, false); } super.doBeforeTest(databaseType); } @Override public String getDbParamsForDump(DatabaseType databaseType) { return this.databaseAccessMap.get(databaseType).getServiceSourceInstanceName(); } @Override public String 
getDbParamsForRestore(DatabaseType databaseType) { return this.databaseAccessMap.get(databaseType).getServiceTargetInstanceName(); } @Override protected void dumpAndRestoreTest(DatabaseType databaseType) throws ServiceBrokerException, InterruptedException, ServiceBrokerAsyncRequiredException, IOException, DatabaseExtractionException, CannotFindDatabaseDumperException, ServiceKeyException, ServiceInstanceExistsException, ServiceInstanceUpdateNotSupportedException, ServiceInstanceDoesNotExistException { super.dumpAndRestoreTest(databaseType); logger.info("\u001b[0;32mTest for dump and restore for type {} with data for {} (real size of the file) succeeded.\u001B[0;0m", databaseType.toString(), humanize.Humanize.binaryPrefix(getGeneratedFile().length())); } @Override @Ignore public void when_binding_to_a_db_dumper_i_should_have_correct_information_about_my_dumps() throws InterruptedException, CannotFindDatabaseDumperException, DatabaseExtractionException, IOException, ServiceBrokerException, ServiceInstanceExistsException, ServiceBrokerAsyncRequiredException, ServiceInstanceDoesNotExistException, ServiceInstanceUpdateNotSupportedException, ServiceInstanceBindingExistsException { super.when_binding_to_a_db_dumper_i_should_have_correct_information_about_my_dumps(); } @Override public void populateDataToDatabaseRefFromFile(File fakeData, DatabaseRef databaseServer) throws CannotFindDatabaseDumperException, IOException, InterruptedException { File fakeDataGenerated = getGeneratedFile(); String[] command = new String[]{ this.scriptCreateFakeData.getAbsolutePath(), getFileSize().toString(), fakeDataGenerated.getAbsolutePath() }; long currentTime = System.currentTimeMillis(); this.runCommand(command); this.reportIntegration.setPopulateFakeDataTime(System.currentTimeMillis() - currentTime); logger.info("Time duration to create fake data from command line: {}", humanize.Humanize.duration(this.reportIntegration.getPopulateFakeDataTime())); 
super.populateDataToDatabaseRefFromFile(fakeDataGenerated, databaseServer); } protected Long getFileSize() { Long size = ByteFormat.parse(this.fileSize); return size; } protected File getGeneratedFile() { return new File(this.userDir.getAbsolutePath() + "/" + String.format(this.fileNameTemplate, getFileSize().toString())); } }
{ "content_hash": "de7fa453207ecc7ec3aabf8bc9a4c2e3", "timestamp": "", "source": "github", "line_count": 174, "max_line_length": 415, "avg_line_length": 51.91954022988506, "alnum_prop": 0.76654859419969, "repo_name": "orange-cloudfoundry/db-dumper-service", "id": "f32fecbede875220f9579410dedbdbbeaad53317", "size": "9347", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "src/test/java/com/orange/clara/cloud/servicedbdumper/acceptance/AcceptanceLocalTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "3660" }, { "name": "Dockerfile", "bytes": "1813" }, { "name": "HTML", "bytes": "23155" }, { "name": "Java", "bytes": "671687" }, { "name": "JavaScript", "bytes": "19737" }, { "name": "Shell", "bytes": "14176" } ], "symlink_target": "" }
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SuperSocket.Common;
using SuperSocket.ProtoBase;
using SuperSocket.SocketBase;
using SuperSocket.SocketBase.Command;
using SuperSocket.SocketBase.Config;
using SuperSocket.SocketBase.Protocol;

namespace SuperSocket.Facility.PolicyServer
{
    /// <summary>
    /// Base class for cross-domain policy servers. On setup it loads a policy
    /// file from disk (configuration option "policyFile") and then replies with
    /// its content whenever a client sends the expected policy request string.
    /// </summary>
    public abstract class PolicyServer : AppServer<PolicySession, StringPackageInfo>
    {
        // Absolute path of the policy file, resolved during Setup.
        private string m_PolicyFilePath;

        // The request text clients must send; overridable via the
        // "policyRequest" configuration option.
        private string m_RequestText = "<policy-file-request/>";

        /// <summary>
        /// Gets the policy response.
        /// </summary>
        protected byte[] PolicyResponse { get; private set; }

        // UTF-8 byte length of the expected request, used to size the receive filter.
        private int m_ExpectedRequestLength;

        /// <summary>
        /// Initializes a new instance of the <see cref="PolicyServer"/> class.
        /// </summary>
        public PolicyServer()
        {
        }

        /// <summary>
        /// Setups the specified root config.
        /// </summary>
        /// <param name="rootConfig">The root config.</param>
        /// <param name="config">The config.</param>
        /// <returns>true when setup succeeded; false when the policy file is missing or not configured.</returns>
        protected override bool Setup(IRootConfig rootConfig, IServerConfig config)
        {
            var configuredRequest = config.Options.GetValue("policyRequest");

            if (!string.IsNullOrEmpty(configuredRequest))
                m_RequestText = configuredRequest;

            m_ExpectedRequestLength = Encoding.UTF8.GetByteCount(m_RequestText);
            ReceiveFilterFactory = new PolicyReceiveFilterFactory(m_ExpectedRequestLength);

            m_PolicyFilePath = config.Options.GetValue("policyFile");

            if (string.IsNullOrEmpty(m_PolicyFilePath))
            {
                if (Logger.IsErrorEnabled)
                    Logger.Error("Configuration option policyFile is required!");

                return false;
            }

            // Resolve a relative path against the server's file root.
            if (!Path.IsPathRooted(m_PolicyFilePath))
                m_PolicyFilePath = GetFilePath(m_PolicyFilePath);

            if (!File.Exists(m_PolicyFilePath))
            {
                if (Logger.IsErrorEnabled)
                    Logger.Error("The specified policyFile doesn't exist! " + m_PolicyFilePath);

                return false;
            }

            PolicyResponse = SetupPolicyResponse(File.ReadAllBytes(m_PolicyFilePath));

            NewRequestReceived += OnPolicyRequestReceived;

            return true;
        }

        /// <summary>
        /// Setups the policy response; subclasses may transform the raw file bytes.
        /// </summary>
        /// <param name="policyFileData">The policy file data.</param>
        /// <returns>The bytes to send back to clients.</returns>
        protected virtual byte[] SetupPolicyResponse(byte[] policyFileData)
        {
            return policyFileData;
        }

        /// <summary>
        /// Gets the policy file response for a specific client.
        /// </summary>
        /// <param name="clientEndPoint">The client end point.</param>
        /// <returns>The policy response bytes.</returns>
        protected virtual byte[] GetPolicyFileResponse(IPEndPoint clientEndPoint)
        {
            return PolicyResponse;
        }

        private void OnPolicyRequestReceived(PolicySession session, StringPackageInfo requestInfo)
        {
            ProcessRequest(session, requestInfo.Body);
        }

        /// <summary>
        /// Processes the request: sends the policy file for a matching request,
        /// otherwise closes the session.
        /// </summary>
        /// <param name="session">The session.</param>
        /// <param name="request">The request.</param>
        protected virtual void ProcessRequest(PolicySession session, string request)
        {
            bool isPolicyRequest =
                string.Compare(request, m_RequestText, StringComparison.InvariantCultureIgnoreCase) == 0;

            if (!isPolicyRequest)
            {
                session.Close();
                return;
            }

            var response = GetPolicyFileResponse(session.RemoteEndPoint);
            session.Send(response, 0, response.Length);
        }
    }
}
{ "content_hash": "97ed81d2c87e75322cc7d3b68f6ab457", "timestamp": "", "source": "github", "line_count": 125, "max_line_length": 125, "avg_line_length": 33.24, "alnum_prop": 0.6072202166064982, "repo_name": "mdavid/SuperSocket", "id": "3deb72f06d9a116f55b72ffe4ce745f569781f90", "size": "4157", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Facility/PolicyServer/PolicyServer.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "7409" }, { "name": "C#", "bytes": "2918319" }, { "name": "Python", "bytes": "1111" }, { "name": "Shell", "bytes": "55" } ], "symlink_target": "" }
import { callback } from './core';
import { Client } from './client';
import { GooglePaymentDetails } from './google-payment';
import { HostedFieldsAccountDetails } from './hosted-fields';
import { PayPalAccountDetails } from './paypal';
import { ThreeDSecureAccountDetails } from './three-d-secure';
import { UnionPayAccountDetails } from './unionpay';
import { VenmoAccountDetails } from './venmo';

/**
 * Manages customer's payment methods.
 * @see https://braintree.github.io/braintree-web/3.75.0/module-braintree-web_vault-manager.html
 */
export class VaultManager {
    /**
     * Creates a VaultManager instance from either an existing `client` or an
     * `authorization` string. Returns a promise when no callback is supplied.
     */
    static create(options: { client?: Client | undefined; authorization?: string | undefined; }): Promise<VaultManager>;
    static create(options: { client?: Client | undefined; authorization?: string | undefined; }, callback: callback<VaultManager>): void;

    /**
     * Fetches payment methods owned by the customer whose id was used to generate the client token used to create the client.
     * The `defaultFirst` option presumably orders the default payment method first — see the linked reference.
     * @see https://braintree.github.io/braintree-web/3.75.0/VaultManager.html#fetchPaymentMethods
     */
    fetchPaymentMethods(options?: { defaultFirst: boolean }): Promise<FetchPaymentMethodsPayload[]>;
    fetchPaymentMethods(options: { defaultFirst: boolean }, callback: callback<FetchPaymentMethodsPayload[]>): void;
    fetchPaymentMethods(callback: callback<FetchPaymentMethodsPayload[]>): void;

    /**
     * Cleanly tear down anything set up by create.
     * @see https://braintree.github.io/braintree-web/3.75.0/VaultManager.html#teardown
     */
    teardown(): Promise<void>;
    teardown(callback: callback<void>): void;
}

/**
 * The customer's payment methods.
 * @see https://braintree.github.io/braintree-web/3.75.0/VaultManager.html#~fetchPaymentMethodsPayload
 */
export interface FetchPaymentMethodsPayload {
    /** Payment method nonce. */
    nonce: string;
    /** True when this is the customer's default payment method. */
    default: boolean;
    /** Payment-method-specific details; shape depends on `type`. */
    details?: HostedFieldsAccountDetails | ThreeDSecureAccountDetails | GooglePaymentDetails | PayPalAccountDetails | UnionPayAccountDetails | VenmoAccountDetails | Record<string, any> | undefined;
    /** Payment method type identifier. */
    type: string;
    description: string | null;
    binData?: Record<string, any> | undefined;
}
{ "content_hash": "7849bc591ed7208af10e46db1d05e3b0", "timestamp": "", "source": "github", "line_count": 45, "max_line_length": 197, "avg_line_length": 47.6, "alnum_prop": 0.7324929971988795, "repo_name": "markogresak/DefinitelyTyped", "id": "81589c7c28ee8535a68643bd8b64d295892664a8", "size": "2142", "binary": false, "copies": "24", "ref": "refs/heads/master", "path": "types/braintree-web/modules/vault-manager.d.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CoffeeScript", "bytes": "15" }, { "name": "Protocol Buffer", "bytes": "678" }, { "name": "TypeScript", "bytes": "17426898" } ], "symlink_target": "" }
require 'spec_helper'

describe Grape::Endpoint do
  subject do
    described_class.new(Grape::Util::InheritableSetting.new, path: '/', method: :get)
  end

  describe '.content_types_for' do
    describe 'defined on target_class' do
      let(:own_json) { 'text/own-json' }
      let(:own_xml) { 'text/own-xml' }
      let(:content_types) do
        {
          own_json: own_json,
          own_xml: own_xml
        }
      end
      let(:target_class) { OpenStruct.new(content_types: content_types) }
      let(:object) { subject.content_types_for(target_class) }
      specify do
        expect(object).to eql [own_json, own_xml]
      end
    end

    describe 'not defined' do
      describe 'format given' do
        let(:format) { :json }
        let(:target_class) { OpenStruct.new(format: format) }
        let(:object) { subject.content_types_for(target_class) }
        specify do
          expect(object).to eql ['application/json']
        end
      end

      # Fixed: this group was previously nested inside 'format given', which
      # misrepresented the structure; it is a sibling case of 'not defined'.
      describe 'format not given' do
        let(:target_class) { OpenStruct.new }
        let(:object) { subject.content_types_for(target_class) }
        specify do
          expect(object).to eql %w[application/xml application/json text/plain]
        end
      end
    end
  end

  describe 'parse_request_params' do
    let(:subject) { GrapeSwagger::Endpoint::ParamsParser }

    before do
      subject.send(:parse_request_params, params, {}, nil)
    end

    context 'when params do not contain an array' do
      let(:params) do
        [
          ['id', { required: true, type: 'String' }],
          ['description', { required: false, type: 'String' }]
        ]
      end

      let(:expected_params) do
        [
          ['id', { required: true, type: 'String' }],
          ['description', { required: false, type: 'String' }]
        ]
      end

      it 'parses params correctly' do
        expect(params).to eq expected_params
      end
    end

    context 'when params contain a simple array' do
      let(:params) do
        [
          ['id', { required: true, type: 'String' }],
          ['description', { required: false, type: 'String' }],
          ['stuffs', { required: true, type: 'Array[String]' }]
        ]
      end

      let(:expected_params) do
        [
          ['id', { required: true, type: 'String' }],
          ['description', { required: false, type: 'String' }],
          ['stuffs', { required: true, type: 'Array[String]', is_array: true }]
        ]
      end

      it 'parses params correctly and adds is_array to the array' do
        expect(params).to eq expected_params
      end
    end

    context 'when params contain a complex array' do
      let(:params) do
        [
          ['id', { required: true, type: 'String' }],
          ['description', { required: false, type: 'String' }],
          ['stuffs', { required: true, type: 'Array' }],
          ['stuffs[id]', { required: true, type: 'String' }]
        ]
      end

      let(:expected_params) do
        [
          ['id', { required: true, type: 'String' }],
          ['description', { required: false, type: 'String' }],
          ['stuffs', { required: true, type: 'Array', is_array: true }],
          ['stuffs[id]', { required: true, type: 'String' }]
        ]
      end

      it 'parses params correctly and adds is_array to the array and all elements' do
        expect(params).to eq expected_params
      end

      context 'when array params are not contiguous with parent array' do
        let(:params) do
          [
            ['id', { required: true, type: 'String' }],
            ['description', { required: false, type: 'String' }],
            ['stuffs', { required: true, type: 'Array' }],
            ['stuffs[owners]', { required: true, type: 'Array' }],
            ['stuffs[creators]', { required: true, type: 'Array' }],
            ['stuffs[owners][id]', { required: true, type: 'String' }],
            ['stuffs[creators][id]', { required: true, type: 'String' }],
            ['stuffs_and_things', { required: true, type: 'String' }]
          ]
        end

        let(:expected_params) do
          [
            ['id', { required: true, type: 'String' }],
            ['description', { required: false, type: 'String' }],
            ['stuffs', { required: true, type: 'Array', is_array: true }],
            ['stuffs[owners]', { required: true, type: 'Array', is_array: true }],
            ['stuffs[creators]', { required: true, type: 'Array', is_array: true }],
            ['stuffs[owners][id]', { required: true, type: 'String' }],
            ['stuffs[creators][id]', { required: true, type: 'String' }],
            ['stuffs_and_things', { required: true, type: 'String' }]
          ]
        end

        it 'parses params correctly and adds is_array to the array and all elements' do
          expect(params).to eq expected_params
        end
      end
    end
  end
end
{ "content_hash": "8e8f9995d684126124e301a414f70cc0", "timestamp": "", "source": "github", "line_count": 151, "max_line_length": 87, "avg_line_length": 32.602649006622514, "alnum_prop": 0.5356489945155393, "repo_name": "LeFnord/grape-swagger", "id": "a577285fd54d4c85e0b60c8cb76c5294afdb83d4", "size": "4954", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "spec/lib/endpoint_spec.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "422842" } ], "symlink_target": "" }
/* ESP HTTP Client Example

   This example code is in the Public Domain (or CC0 licensed, at your option.)

   Unless required by applicable law or agreed to in writing, this
   software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
   CONDITIONS OF ANY KIND, either express or implied.
*/
#ifndef _APP_WIFI_H_
#define _APP_WIFI_H_

/* Initialise the example's Wi-Fi support (implementation in app_wifi.c). */
void app_wifi_initialise(void);

/* Wait for the Wi-Fi connection — presumably blocks the calling task until
 * connected; confirm against the implementation in app_wifi.c.
 * Declared with (void): the original empty parameter list is a non-prototype
 * declaration in C and disables compile-time argument checking. */
void app_wifi_wait_connected(void);

#endif
{ "content_hash": "21907e432d3f030019e127abfa7809e7", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 79, "avg_line_length": 24.705882352941178, "alnum_prop": 0.7309523809523809, "repo_name": "armada-ai/esp-idf", "id": "91c886d5b88362dde587a7f8bad3d6dfa464835c", "size": "420", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "examples/protocols/esp_http_client/main/app_wifi.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "140730" }, { "name": "C", "bytes": "24847639" }, { "name": "C++", "bytes": "952167" }, { "name": "Makefile", "bytes": "73805" }, { "name": "Objective-C", "bytes": "45515" }, { "name": "Python", "bytes": "280411" }, { "name": "Shell", "bytes": "850" } ], "symlink_target": "" }
ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name Cycas basaltica C.A. Gardner ### Remarks null
{ "content_hash": "db2d0bdc03b9bec10b1e622249e23b58", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 31, "avg_line_length": 11.538461538461538, "alnum_prop": 0.72, "repo_name": "mdoering/backbone", "id": "5db64cd10c297d41ad864f160bf4df90405b345f", "size": "222", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Cycadophyta/Cycadopsida/Cycadales/Cycadaceae/Cycas/Cycas media/Cycas media basaltica/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
package com.kk.muse.perftuning;

/**
 * Empty marker interface. It declares no methods; implementing types are
 * presumably tagged as application components of the perf-tuning sample —
 * confirm against its usages elsewhere in the project.
 */
public interface Application {}
{ "content_hash": "2d12b5c505948a74866f7d456eecb682", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 31, "avg_line_length": 21.666666666666668, "alnum_prop": 0.8, "repo_name": "xylan2004/AciSnippets", "id": "91d310d5c73217bb3367f59368940f8fe06f96e1", "size": "65", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "PerfTuning/perftuning/src/main/java/com/kk/muse/perftuning/Application.java", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "CSS", "bytes": "677" }, { "name": "HTML", "bytes": "30605" }, { "name": "Java", "bytes": "36814" }, { "name": "JavaScript", "bytes": "5140" }, { "name": "PHP", "bytes": "1541" }, { "name": "Python", "bytes": "1674173" } ], "symlink_target": "" }
# View helpers for rendering links related to an imprint's order.
module ImprintsHelper
  # Renders a link to the imprint's order, or just the order name as plain
  # text when the imprint has no associated order.
  def imprint_order_link(imprint)
    order = imprint.order
    if order.blank?
      imprint.order_name
    else
      link_to imprint.order_name, order_path(order)
    end
  end

  # Renders an edit button for the imprint's order; returns nil when the
  # imprint has no associated order.
  def imprint_edit_order_link(imprint)
    bootstrap_edit_button(imprint.order) unless imprint.order.blank?
  end
end
{ "content_hash": "4dd1f6519ce865962cf8a29befa58641", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 66, "avg_line_length": 21.75, "alnum_prop": 0.7040229885057471, "repo_name": "AnnArborTees/softwear-production", "id": "4079cde5209fa30fa53b1a87728d907590078c98", "size": "348", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/helpers/imprints_helper.rb", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "10240" }, { "name": "CoffeeScript", "bytes": "2132" }, { "name": "HTML", "bytes": "216434" }, { "name": "JavaScript", "bytes": "28146" }, { "name": "Ruby", "bytes": "594910" }, { "name": "Shell", "bytes": "1062" } ], "symlink_target": "" }
ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
{ "content_hash": "29aafe19eb1e0bc9c4c0f2382bf71b23", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 10.307692307692308, "alnum_prop": 0.6940298507462687, "repo_name": "mdoering/backbone", "id": "4fb20136cbeb5f1e0d7b92be6f91bc54884bf0df", "size": "185", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Fabales/Fabaceae/Aspalathus/Aspalathus costulata/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Android.App;
using Android.Content;
using Android.OS;
using Android.Runtime;
using Android.Views;
using Android.Widget;
using Mode = Com.Handmark.PullToRefresh.Library.PtrMode;
using Java.Lang;
using Com.Handmark.PullToRefresh.Library;
using Com.Handmark.PullToRefresh.Library.Extras;

namespace PullToRefresh.Net.Example
{
    /// <summary>
    /// Sample activity demonstrating a pull-to-refresh horizontal scroll view.
    /// A refresh gesture kicks off a background task that simulates work and
    /// then marks the refresh as complete.
    /// (Ported from the original Java sample; dead commented-out Java code removed.)
    /// </summary>
    [Activity(Label = "PullToRefreshHorizontalScrollViewActivity")]
    public sealed class PullToRefreshHorizontalScrollViewActivity : Activity, OnRefreshListener<HorizontalScrollView>
    {
        PullToRefreshHorizontalScrollView mPullRefreshScrollView;
        // Kept to mirror the original sample; only assigned, never read here.
        HorizontalScrollView mScrollView;

        /// <summary>
        /// Called when the activity is first created: inflates the layout,
        /// registers this activity as the refresh listener and captures the
        /// underlying scroll view.
        /// </summary>
        protected override void OnCreate(Bundle savedInstanceState)
        {
            base.OnCreate(savedInstanceState);

            SetContentView(Resource.Layout.activity_ptr_horizontalscrollview);

            mPullRefreshScrollView = (PullToRefreshHorizontalScrollView)FindViewById(Resource.Id.pull_refresh_horizontalscrollview);
            mPullRefreshScrollView.setOnRefreshListener(this);

            mScrollView = mPullRefreshScrollView.getRefreshableView();
        }

        /// <summary>
        /// Background task that simulates a one-second refresh operation and
        /// then completes the pull-to-refresh gesture on the UI thread.
        /// </summary>
        private class GetDataTask : AsyncTask<Java.Lang.Void, Java.Lang.Void, string[]>
        {
            private PullToRefreshHorizontalScrollViewActivity inst;

            public GetDataTask(PullToRefreshHorizontalScrollViewActivity instance)
            {
                inst = instance;
            }

            protected override string[] RunInBackground(params Java.Lang.Void[] @params)
            {
                // Simulates a background job.
                try
                {
                    Thread.Sleep(1000);
                }
                catch (InterruptedException)
                {
                    // Interrupted while simulating work — fall through and finish.
                }
                return null;
            }

            protected override void OnPostExecute(Java.Lang.Object result)
            {
                // Call onRefreshComplete when the list has been refreshed.
                inst.mPullRefreshScrollView.onRefreshComplete();
                base.OnPostExecute(result);
            }
        }

        /// <summary>
        /// OnRefreshListener callback: starts the simulated background refresh.
        /// </summary>
        public void onRefresh(PullToRefreshBase<HorizontalScrollView> refreshView)
        {
            new GetDataTask(this).Execute();
        }
    }
}
{ "content_hash": "232d84cddb73db60d2e94f1b8060d7a4", "timestamp": "", "source": "github", "line_count": 93, "max_line_length": 132, "avg_line_length": 33.67741935483871, "alnum_prop": 0.6730523627075351, "repo_name": "skywolf888/Android-PullToRefresh.Net", "id": "0acd407228b5d5dae6770c3ab8b5b013ae62d22a", "size": "3885", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "PullToRefresh.Net.Example/PullToRefreshHorizontalScrollViewActivity.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "256661" } ], "symlink_target": "" }
package mockit.internal.expectations.invocation; import java.util.*; import javax.annotation.*; import mockit.internal.expectations.argumentMatching.*; import mockit.internal.util.*; abstract class ArgumentValuesAndMatchers { @Nonnull final InvocationArguments signature; @Nonnull Object[] values; @Nullable List<ArgumentMatcher<?>> matchers; ArgumentValuesAndMatchers(@Nonnull InvocationArguments signature, @Nonnull Object[] values) { this.signature = signature; this.values = values; } final void setValuesWithNoMatchers(@Nonnull Object[] argsToVerify) { values = argsToVerify; matchers = null; } @Nonnull final Object[] prepareForVerification( @Nonnull Object[] argsToVerify, @Nullable List<ArgumentMatcher<?>> matchersToUse) { Object[] replayArgs = values; values = argsToVerify; matchers = matchersToUse; return replayArgs; } @Nullable final ArgumentMatcher<?> getArgumentMatcher(int parameterIndex) { if (matchers == null) { return null; } ArgumentMatcher<?> matcher = parameterIndex < matchers.size() ? 
matchers.get(parameterIndex) : null; if (matcher == null && parameterIndex < values.length && values[parameterIndex] == null) { matcher = AlwaysTrueMatcher.ANY_VALUE; } return matcher; } abstract boolean isMatch(@Nonnull Object[] replayArgs, @Nonnull Map<Object, Object> instanceMap); static boolean areEqual( @Nonnull Object[] expectedValues, @Nonnull Object[] actualValues, int count, @Nonnull Map<Object, Object> instanceMap) { for (int i = 0; i < count; i++) { if (isNotEqual(expectedValues[i], actualValues[i], instanceMap)) { return false; } } return true; } private static boolean isNotEqual( @Nullable Object expected, @Nullable Object actual, @Nonnull Map<Object, Object> instanceMap) { return actual == null && expected != null || actual != null && expected == null || actual != null && actual != expected && expected != instanceMap.get(actual) && !EqualityMatcher.areEqualWhenNonNull(actual, expected); } @Nullable abstract Error assertMatch( @Nonnull Object[] replayArgs, @Nonnull Map<Object, Object> instanceMap, @Nullable CharSequence errorMessagePrefix); @Nullable final Error assertEquals( @Nonnull Object[] expectedValues, @Nonnull Object[] actualValues, int count, @Nonnull Map<Object, Object> instanceMap, @Nullable CharSequence errorMessagePrefix) { for (int i = 0; i < count; i++) { Object expected = expectedValues[i]; Object actual = actualValues[i]; if (isNotEqual(expected, actual, instanceMap)) { return signature.argumentMismatchMessage(i, expected, actual, errorMessagePrefix); } } return null; } abstract boolean hasEquivalentMatchers(@Nonnull ArgumentValuesAndMatchers other); static boolean equivalentMatches( @Nonnull ArgumentMatcher<?> matcher1, @Nullable Object arg1, @Nonnull ArgumentMatcher<?> matcher2, @Nullable Object arg2) { boolean matcher1MatchesArg2 = matcher1.matches(arg2); boolean matcher2MatchesArg1 = matcher2.matches(arg1); if (arg1 != null && arg2 != null && matcher1MatchesArg2 && matcher2MatchesArg1) { return true; } if (arg1 == arg2 && 
matcher1MatchesArg2 == matcher2MatchesArg1) { // both matchers fail ArgumentMismatch desc1 = new ArgumentMismatch(); matcher1.writeMismatchPhrase(desc1); ArgumentMismatch desc2 = new ArgumentMismatch(); matcher2.writeMismatchPhrase(desc2); return desc1.toString().equals(desc2.toString()); } return false; } @SuppressWarnings("unchecked") final <M1 extends ArgumentMatcher<M1>, M2 extends ArgumentMatcher<M2>> int indexOfFirstValueAfterEquivalentMatchers( @Nonnull ArgumentValuesAndMatchers other) { List<ArgumentMatcher<?>> otherMatchers = other.matchers; if (otherMatchers == null || matchers == null || otherMatchers.size() != matchers.size()) { return -1; } int i = 0; int m = matchers.size(); while (i < m) { M1 matcher1 = (M1) matchers.get(i); M2 matcher2 = (M2) otherMatchers.get(i); if (matcher1 == null || matcher2 == null) { if (!EqualityMatcher.areEqual(values[i], other.values[i])) { return -1; } } else if (matcher1 != matcher2) { Class<?> matcherClass = matcher1.getClass(); if (matcherClass != matcher2.getClass()) { return -1; } if (!matcher1.same((M1) matcher2)) { if ( matcherClass == ReflectiveMatcher.class || matcherClass == HamcrestAdapter.class || !equivalentMatches(matcher1, values[i], matcher2, other.values[i]) ){ return -1; } } } i++; } return i; } @Nonnull final String toString(@Nonnull MethodFormatter methodFormatter) { ArgumentMismatch desc = new ArgumentMismatch(); desc.append(":\n").append(methodFormatter.toString()); int parameterCount = values.length; if (parameterCount > 0) { desc.append("\n with arguments: "); if (matchers == null) { desc.appendFormatted(values); } else { List<String> parameterTypes = methodFormatter.getParameterTypes(); String sep = ""; for (int i = 0; i < parameterCount; i++) { ArgumentMatcher<?> matcher = getArgumentMatcher(i); String parameterType = parameterTypes.get(i); desc.append(sep).appendFormatted(parameterType, values[i], matcher); sep = ", "; } } } return desc.toString(); } }
{ "content_hash": "aecdd299656779ede11aed0042ee6bc8", "timestamp": "", "source": "github", "line_count": 199, "max_line_length": 119, "avg_line_length": 30.542713567839197, "alnum_prop": 0.6154985192497532, "repo_name": "russelyang/jmockit1", "id": "5586207257d3fbe0a28b0067f7d1be67cfaa4059", "size": "6206", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "main/src/mockit/internal/expectations/invocation/ArgumentValuesAndMatchers.java", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "3524" }, { "name": "Java", "bytes": "2057320" }, { "name": "JavaScript", "bytes": "3493" } ], "symlink_target": "" }
Aplicação que simula um elevador em JavaScript utilizando:

HTML
CSS
JavaScript

- O elevador possui 5 andares.
- Com botões de chamada para subir e descer em cada andar.
- Abertura e fechamento das portas quando chega ao andar.
- Você pode escolher o andar no painel do elevador.
- O elevador tem o tempo de viagem proporcional ao ponto de origem em que ele estava.
- Caso alguém chame num andar mais abaixo do que outra chamada acima, ele atende a chamada do andar mais inferior.
- Layout responsivo até iPhone 5s.
{ "content_hash": "ddd40b44ccd34df4d4e3fd8ee50c6f21", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 114, "avg_line_length": 31.764705882352942, "alnum_prop": 0.7777777777777778, "repo_name": "vagnerasilva/elevador", "id": "34642fabb1e42477899cac8c549a98271db97ab9", "size": "555", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1952" }, { "name": "HTML", "bytes": "10010" }, { "name": "JavaScript", "bytes": "3796" } ], "symlink_target": "" }
Ecog GUI
========

Ecog GUI is a Python module to localize intracranial electrodes from MRI. It
interactively fits a priori defined grids, strips or rigid arrays of electrodes
to the MRI with a Graphical User Interface.

Its main advantages as compared to current alternatives are:

* it provides the best fits in real time, and can thus help you adjust your
locations interactively.

* it can fit curved surfaces, including non-convex ones, but remains realistic
thanks to a rigidity constraint.

This is currently an alpha version.

![demo](docs/ecoggui_animation.gif)

Online Tutorials
================

Click on the following examples to run the tutorial and results preview in your browser.

* [`examples/model_displacement.ipynb`](http://mybinder.org/repo/kingjr/ecoggui/examples/model_displacement.ipynb) how the 2D grid/strips are fitted with a rotation and translation to match the 3D locations.
{ "content_hash": "c82da5a6b3f971f60e4f889bf98e3864", "timestamp": "", "source": "github", "line_count": 51, "max_line_length": 185, "avg_line_length": 33.07843137254902, "alnum_prop": 0.7486662714878483, "repo_name": "kingjr/ecoggui", "id": "00aa8007a73ea42141ce54d96441d3d1453eace9", "size": "1687", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33261", "license": "bsd-2-clause", "language": [ { "name": "Python", "bytes": "41035" } ], "symlink_target": "" }
using namespace apl;

// Unit tests for CommandQueue: FIFO queue of control commands
// (BinaryOutput / Setpoint) with per-command index and sequence metadata.
BOOST_AUTO_TEST_SUITE(CommandQueueSuite)

// A single command written into the queue can be read back with its
// payload, index and sequence number intact, leaving the queue empty.
BOOST_AUTO_TEST_CASE(SimpleWriteAndRead)
{
    CommandQueue cq;
    BinaryOutput bo;
    bo.mRawCode = CC_LATCH_ON;
    cq.AcceptCommand(bo, 3, 4, NULL);
    BOOST_REQUIRE_EQUAL(cq.Next(), CT_BINARY_OUTPUT);
    // Read into a differently-initialized object to prove the stored
    // value (CC_LATCH_ON) overwrites it.
    BinaryOutput bo2;
    bo2.mRawCode = CC_LATCH_OFF;
    CommandData info;
    cq.Read(bo2, info);
    BOOST_REQUIRE_EQUAL(cq.Next(), CT_NONE);
    BOOST_REQUIRE_EQUAL(info.mIndex, 3);
    BOOST_REQUIRE_EQUAL(info.mSequence, 4);
    BOOST_REQUIRE_EQUAL(bo2.mRawCode, CC_LATCH_ON);
}

// ExecuteCommand pops one queued command, dispatches it to the handler
// (counted via num_bo/num_sp) and delivers a success response to the
// registered response acceptor.
BOOST_AUTO_TEST_CASE(ExecuteCommand)
{
    CommandQueue cq;
    MockCommandHandler mh;
    MockResponseAcceptor mr;

    cq.AcceptCommand(BinaryOutput(CC_PULSE), 0, 0, &mr);
    BOOST_REQUIRE_EQUAL(cq.Size(), 1);
    cq.ExecuteCommand(&mh);
    BOOST_REQUIRE_EQUAL(cq.Size(), 0);
    BOOST_REQUIRE_EQUAL(mh.num_bo, 1);
    BOOST_REQUIRE_EQUAL(mh.num_sp, 0);
    BOOST_REQUIRE_EQUAL(mr.NumResponses(), 1);

    cq.AcceptCommand(Setpoint(0), 0, 0, &mr);
    BOOST_REQUIRE_EQUAL(cq.Size(), 1);
    cq.ExecuteCommand(&mh);
    BOOST_REQUIRE_EQUAL(cq.Size(), 0);
    BOOST_REQUIRE_EQUAL(mh.num_bo, 1);
    BOOST_REQUIRE_EQUAL(mh.num_sp, 1);
    BOOST_REQUIRE_EQUAL(mr.NumResponses(), 2);
    BOOST_REQUIRE_EQUAL(mr.PopResponse().Response.mResult, CS_SUCCESS);
}

// Interleaves randomly-chosen BinaryOutput and Setpoint commands (each
// tagged with its insertion index i) and checks they are read back in
// strict FIFO order with matching payload, index and sequence.
BOOST_AUTO_TEST_CASE(OrderMaintained)
{
    CommandQueue cq;
    size_t testLength = 100;

    for(size_t i = 0; i < testLength; i++) {
        // type == 0 -> BinaryOutput (~1/3 of the time), else Setpoint.
        int type = rand() % 3;
        int seq = static_cast<int>(i);
        if(type == 0) {
            BinaryOutput bo;
            bo.mRawCode = (i % 2 == 0) ? CC_LATCH_ON : CC_LATCH_OFF;
            // i < 255 here, so the truncation to uint8_t preserves i.
            bo.mCount = (boost::uint8_t)i % 255;
            cq.AcceptCommand(bo, i, seq, NULL);
        }
        else {
            Setpoint st;
            st.SetValue(static_cast<boost::int32_t>(i));
            cq.AcceptCommand(st, i, seq, NULL);
        }
    }

    for(size_t i = 0; i < testLength; i++) {
        CommandTypes type = cq.Next();
        CommandData info;
        switch(type) {
        case CT_BINARY_OUTPUT: {
            BinaryOutput bo;
            cq.Read(bo, info);
            // The count encodes the insertion index.
            BOOST_REQUIRE_EQUAL(bo.mCount, i);
        }
        break;
        case CT_SETPOINT: {
            Setpoint st;
            cq.Read(st, info);
            // The setpoint value encodes the insertion index.
            BOOST_REQUIRE_EQUAL(st.GetIntValue(), (int)i);
        }
        break;
        case CT_NONE:
            // Queue exhausted early -> order/count violated.
            BOOST_REQUIRE(false);
        }
        BOOST_REQUIRE_EQUAL(info.mIndex, i);
        BOOST_REQUIRE_EQUAL(info.mSequence, (int)i);
    }

    // All commands consumed; the queue must now be empty.
    BOOST_REQUIRE_EQUAL(cq.Next(), CT_NONE);
}

BOOST_AUTO_TEST_SUITE_END()
{ "content_hash": "3e658e0ed42c9ceffa147d6db845c6ab", "timestamp": "", "source": "github", "line_count": 100, "max_line_length": 68, "avg_line_length": 22.85, "alnum_prop": 0.6713347921225383, "repo_name": "cverges/dnp3", "id": "914a210378690452a3d8641acf3b8a63b6494f14", "size": "3312", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "TestAPL/TestCommandQueue.cpp", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "1159" }, { "name": "C", "bytes": "104" }, { "name": "C#", "bytes": "94066" }, { "name": "C++", "bytes": "2080621" }, { "name": "HTML", "bytes": "180298" }, { "name": "Java", "bytes": "1356" }, { "name": "Makefile", "bytes": "4106" }, { "name": "Objective-C", "bytes": "173" }, { "name": "Python", "bytes": "4185" }, { "name": "Ruby", "bytes": "16013" }, { "name": "Scala", "bytes": "13144" }, { "name": "XSLT", "bytes": "610805" } ], "symlink_target": "" }
<!-- Root layout: a full-screen ViewPager inside a FrameLayout, allowing the
     user to swipe horizontally between pages (article details, per the
     layout's usage). -->
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent">

    <!-- Pager fills the screen; its adapter/pages are supplied in code. -->
    <android.support.v4.view.ViewPager
        android:id="@+id/pager"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />

</FrameLayout>
{ "content_hash": "d6bd4d930f03e55415a1a1f0fa0128d9", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 71, "avg_line_length": 33.3, "alnum_prop": 0.6936936936936937, "repo_name": "leomindez/Materialize-XYZ-Reader", "id": "77b2f04a3d444e8267d81c44755a6366cb1ac200", "size": "333", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "XYZReader/src/main/res/layout/activity_article_detail.xml", "mode": "33261", "license": "mit", "language": [ { "name": "Java", "bytes": "59494" }, { "name": "Python", "bytes": "14963" } ], "symlink_target": "" }
import { moduleForModel, test } from 'ember-qunit';

moduleForModel('echonest-song', 'EchonestSong', {
  needs: ['model:echonest-artist']
});

test('it exists', function() {
  const model = this.subject();
  const bucket = model.get('bucket');

  // Every bucket the model is expected to request from the Echonest API.
  const expectedBuckets = [
    'audio_summary',
    'artist_discovery',
    'artist_familiarity',
    'artist_hotttnesss',
    'artist_location',
    'song_currency',
    'song_discovery',
    'song_hotttnesss',
    'tracks'
  ];

  // Assert each bucket is present; messages match the original one-per-line
  // assertions ("should have <name> bucket").
  expectedBuckets.forEach(function(name) {
    ok(bucket.indexOf(name) >= 0, 'should have ' + name + ' bucket');
  });
});
{ "content_hash": "56c5d5c61b8a1ae8c16b23e6aeebe11d", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 89, "avg_line_length": 44.90909090909091, "alnum_prop": 0.6963562753036437, "repo_name": "argylemachine/music-player", "id": "2a5cc1c141c7844311bac3186753a41a4fa3f199", "size": "988", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "tests/unit/models/echonest-song-test.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "31" }, { "name": "JavaScript", "bytes": "31417" } ], "symlink_target": "" }
class Vector2D(object):
    """Flattening iterator over a 2D vector (list of lists).

    Elements are yielded row by row, left to right, via the classic
    iterator pair next()/hasNext().

    Usage:
        i, v = Vector2D(vec2d), []
        while i.hasNext(): v.append(i.next())
    """

    def __init__(self, vec2d):
        """
        Initialize your data structure here.
        :type vec2d: List[List[int]]
        """
        # Index of the element most recently returned by next();
        # -1 means iteration has not started yet.
        self.cursor = -1
        # Flatten row by row in O(total elements).  The original
        # reduce(lambda a, b: a + b, vec2d) was O(n^2) (repeated list
        # concatenation) and relied on the Python 2-only builtin
        # `reduce`, so it raised NameError on Python 3.  A comprehension
        # also handles an empty vec2d and empty rows without a special case.
        self.data = [item for row in vec2d for item in row]
        self.data_len = len(self.data)

    def next(self):
        """Advance the cursor and return the next element.

        Callers must check hasNext() first; calling past the end raises
        IndexError.
        """
        self.cursor += 1
        return self.data[self.cursor]

    def hasNext(self):
        """Return True if another call to next() will yield an element."""
        return self.cursor < self.data_len - 1

# Your Vector2D object will be instantiated and called as such:
# i, v = Vector2D(vec2d), []
# while i.hasNext(): v.append(i.next())
{ "content_hash": "68f9bebeec559cecc7dc61c3d20938fe", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 63, "avg_line_length": 24.035714285714285, "alnum_prop": 0.5200594353640416, "repo_name": "luosch/leetcode", "id": "b2b6f5f0018b7b41629b2de4ceb29abc93fb941b", "size": "673", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "python/Flatten 2D Vector.py", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "37027" }, { "name": "Python", "bytes": "175260" }, { "name": "Shell", "bytes": "801" }, { "name": "Swift", "bytes": "121" } ], "symlink_target": "" }
[![npm version](https://badge.fury.io/js/atgrid.css.svg)](https://badge.fury.io/js/atgrid.css) > atGrid is a Flexbox-based CSS Grid System that uses attributes instead of classes. ## Examples ### Columns with the same width ```HTML <div grid> <section column> <p>Auto</p> </section> <section column> <p>Auto</p> </section> </div> ``` ### Columns with a determined width ```HTML <div grid> <section column="8"> <p>8</p> </section> <section column="4"> <p>4</p> </section> </div> ``` ### Columns without gutters ```HTML <div grid="ng"> <section column="8"> <p>8</p> </section> <section column="4"> <p>4</p> </section> </div> ``` ### Columns with offsets ```HTML <div grid> <section column="3"> <p>3</p> </section> <section column="3 +6"> <p>3 +6</p> </section> </div> ``` ### Columns with offsets and without gutters ```HTML <div grid="ng"> <section column="5"> <p>5</p> </section> <section column="5 +2"> <p>5 +2</p> </section> </div> ``` ### Nested grids ```HTML <div grid> <section column="4"> <p>4</p> </section> <section column="8"> <div grid> <section column="6"> <p>6 > 8</p> </section> <section column="6"> <p>6 > 8</p> </section> </div> </section> </div> ``` ### Nested grids without gutters ```HTML <div grid="ng"> <section column="4"> <p>4</p> </section> <section column="8"> <div grid="ng"> <section column="6"> <p>6 > 8</p> </section> <section column="6"> <p>6 > 8</p> </section> </div> </section> </div> ``` ## Installation ``` $ npm install atgrid.css ``` ## Browser Compatibility Since version 4.0.0, atGrid.css is based on Flexbox, you can see the table of compatibility in the [Can I Use](http://caniuse.com/#feat=flexbox) site. ## HTML Attributes `container` Sets the element as a container, the container element will have a fixed max-width and will be centered on the page. `grid` Sets the element as a grid element. This is a required element, and should be direct parent of the column elements. `column` Sets the element as a column, should always be used inside a grid element. 
A column can also contain a grid element to nest grids. ## Attribute values ### Container The container element doesn't accept any values. ### Grid `ng` Will create a grid where the columns doesn't have gutters. `top` Aligns all the columns inside vertically to the top of the grid. `center` Aligns all the columns inside vertically to the center of the grid. `bottom` Aligns all the columns inside vertically to the bottom of the grid. ### Column `<width>` The width of the column, by default from 1 to 12. `+<offset>` The offset of the column, by default from 1 to 11. `top` Aligns the column vertically to the top of the grid. `center` Aligns the column vertically to the center of the grid. `bottom` Aligns the column vertically to the bottom of the grid. ## Sass configuration `container-width` <br> **Default:** 1200px <br> **Description**: Maximum width of the container element. `gutter` <br> **Default:** 2em <br> **Description:** Space between columns. `num-columns` <br> **Default:** 12 <br> **Description:** Number of columns. `prefix` <br> **Default:** '' <br> **Description:** Prefix for the attributes, use `data-` if you need valid HTML. `breakpoint` <br> **Default:** 420px <br> **Description:** Below this point the columns are expanded to 100%. ## License The MIT License - James Kolce
{ "content_hash": "3721cb5be17505375d30fc4d6a54df42", "timestamp": "", "source": "github", "line_count": 209, "max_line_length": 150, "avg_line_length": 17.071770334928228, "alnum_prop": 0.632847533632287, "repo_name": "KolceThompsonCo/atgrid.css", "id": "b6bb1c1e756daa158d2edf57f4064783fec34ef8", "size": "3627", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "4726" }, { "name": "HTML", "bytes": "15233" } ], "symlink_target": "" }
#ifndef __LIB_UTILS_COMPAT_H
#define __LIB_UTILS_COMPAT_H

#include <unistd.h>

/* Compatibility definitions for non-Linux (i.e., BSD-based) hosts. */

/* On platforms that lack the Linux-specific 64-bit file APIs, provide
 * shims in terms of the plain off_t APIs.  This is only safe when off_t
 * itself is 64 bits wide, hence the _FILE_OFFSET_BITS check below. */
#ifndef HAVE_OFF64_T
#if _FILE_OFFSET_BITS < 64
#error "_FILE_OFFSET_BITS < 64; large files are not supported on this platform"
#endif /* _FILE_OFFSET_BITS < 64 */

/* off_t is already 64-bit here, so off64_t can simply alias it. */
typedef off_t off64_t;

/* lseek64() shim: forwards directly to lseek(), which takes a 64-bit
 * offset on this platform. */
static inline off64_t lseek64(int fd, off64_t offset, int whence) {
    return lseek(fd, offset, whence);
}

/* pread64() shim: only provided when the host has pread() at all. */
#ifdef HAVE_PREAD
static inline ssize_t pread64(int fd, void* buf, size_t nbytes, off64_t offset) {
    return pread(fd, buf, nbytes, offset);
}
#endif

#endif /* !HAVE_OFF64_T */

#endif /* __LIB_UTILS_COMPAT_H */
{ "content_hash": "c0f003a5b3defd1a36138234821b0a7b", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 81, "avg_line_length": 23.785714285714285, "alnum_prop": 0.6846846846846847, "repo_name": "hhool/ffplayer", "id": "18192661cb78d9fa376283e3c95254a083c800ae", "size": "1285", "binary": false, "copies": "72", "ref": "refs/heads/master", "path": "jni/include/aosp-14/frameworks/base/include/utils/Compat.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "1244539" }, { "name": "C", "bytes": "28869230" }, { "name": "C++", "bytes": "9890363" }, { "name": "CMake", "bytes": "1190" }, { "name": "Java", "bytes": "33657" }, { "name": "Makefile", "bytes": "300680" }, { "name": "Objective-C", "bytes": "16714" }, { "name": "Perl", "bytes": "12109" }, { "name": "Python", "bytes": "4767" }, { "name": "Shell", "bytes": "50124" }, { "name": "Verilog", "bytes": "730" } ], "symlink_target": "" }
/***************************************************************************
 * Renders a GL_TEXTURE_EXTERNAL_OES texture.
 ***************************************************************************/

#ifndef OES_SHADER_H_
#define OES_SHADER_H_

#include <memory>

#define __gl2_h_
#include "GLES3/gl3.h"
#include <GLES2/gl2ext.h>

#include "glm/glm.hpp"
#include "glm/gtc/type_ptr.hpp"

#include "objects/recyclable_object.h"

namespace gvr {
class GLProgram;
class RenderData;

/* Shader wrapper that draws render data textured with an external OES
 * texture (e.g. camera or video surfaces).  Recyclable so its GL program
 * can be released and the object reused. */
class OESShader: public RecyclableObject {
public:
    OESShader();
    ~OESShader();
    /* Releases the underlying GL program (RecyclableObject contract). */
    void recycle();
    /* Draws render_data transformed by mvp_matrix using this shader. */
    void render(const glm::mat4& mvp_matrix, RenderData* render_data);

private:
    /* Copy/move constructors and assignment declared private to make the
     * class non-copyable and non-movable. */
    OESShader(const OESShader& oes_shader);
    OESShader(OESShader&& oes_shader);
    OESShader& operator=(const OESShader& oes_shader);
    OESShader& operator=(OESShader&& oes_shader);

private:
    GLProgram* program_;   // compiled/linked GL program handle
    /* Vertex attribute locations. */
    GLuint a_position_;
    GLuint a_tex_coord_;
    /* Uniform locations: MVP matrix, sampler, color tint, opacity. */
    GLuint u_mvp_;
    GLuint u_texture_;
    GLuint u_color_;
    GLuint u_opacity_;
};

}

#endif
{ "content_hash": "573b123eeb1eaa255a1c205a51908abd", "timestamp": "", "source": "github", "line_count": 50, "max_line_length": 77, "avg_line_length": 20.84, "alnum_prop": 0.5825335892514395, "repo_name": "JonShemitz/GearVRf", "id": "c44b1ad6cedd2fdad1e72b11f383a2e9eb33ac35", "size": "1650", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "GVRf/Framework/jni/shaders/material/oes_shader.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "56027" }, { "name": "C", "bytes": "1029156" }, { "name": "C++", "bytes": "2784208" }, { "name": "CMake", "bytes": "1409" }, { "name": "CSS", "bytes": "869" }, { "name": "Java", "bytes": "12714266" }, { "name": "Makefile", "bytes": "5379" }, { "name": "Python", "bytes": "3057" }, { "name": "R", "bytes": "29550" }, { "name": "Shell", "bytes": "1165" }, { "name": "XSLT", "bytes": "2509" } ], "symlink_target": "" }
using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyTitle("xSLx-Jungle")] [assembly: AssemblyDescription("")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("xSLx-Jungle")] [assembly: AssemblyCopyright("Copyright © 2014")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] // Setting ComVisible to false makes the types in this assembly not visible // to COM components. If you need to access a type in this assembly from // COM, set the ComVisible attribute to true on that type. [assembly: ComVisible(false)] // The following GUID is for the ID of the typelib if this project is exposed to COM [assembly: Guid("980fc1b9-ba5f-4fc3-b50a-5efe6ce2cc31")] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyVersion("1.0.0.0")] [assembly: AssemblyFileVersion("1.0.0.0")]
{ "content_hash": "36374b5173f33b762751111f66292e5c", "timestamp": "", "source": "github", "line_count": 36, "max_line_length": 84, "avg_line_length": 38.75, "alnum_prop": 0.7433691756272401, "repo_name": "imsosharp/LSharp-Beta", "id": "b126a3fcc6e4e4bee4490ceef5b845ebf8ecd5d4", "size": "1398", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "Team-xSLx/xSLx-Jungle/xSLx-Jungle/Properties/AssemblyInfo.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "1791979" }, { "name": "C++", "bytes": "4496" } ], "symlink_target": "" }
# Wrapper that sets up PYTHONPATH and launches the Python vtdb client.
# Exits immediately if any command fails.
set -e

# Fully-qualified local hostname.
# NOTE(review): currently unused in this script — confirm before removing.
hostname=`hostname -f`

# We expect to find zk-client-conf.json in the same folder as this script.
script_root=`dirname "${BASH_SOURCE}"`

# Set up environment: put every bundled site-packages directory from the
# dist tree onto PYTHONPATH.  Quoting $VTROOT protects against paths that
# contain spaces.
for pkg in `find "$VTROOT/dist" -name site-packages`; do
  export PYTHONPATH=$pkg:$PYTHONPATH
done
export PYTHONPATH=$VTROOT/py-vtdb:$PYTHONPATH

# Replace this shell with the Python client, forwarding all arguments.
# "$@" (rather than the original unquoted $*) preserves argument
# boundaries when arguments contain spaces.
exec env python "$script_root/client.py" "$@"
{ "content_hash": "434896981cf709653698ae2eaa95af5d", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 74, "avg_line_length": 23.733333333333334, "alnum_prop": 0.7387640449438202, "repo_name": "mattharden/vitess", "id": "fd7a6eeb0db0f670d40f39dffd75dad5619f83e9", "size": "1020", "binary": false, "copies": "13", "ref": "refs/heads/master", "path": "examples/local/client.sh", "mode": "33261", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "10253" }, { "name": "CSS", "bytes": "213374" }, { "name": "Go", "bytes": "5366073" }, { "name": "HTML", "bytes": "54179" }, { "name": "Java", "bytes": "721903" }, { "name": "JavaScript", "bytes": "41385" }, { "name": "Liquid", "bytes": "6896" }, { "name": "Makefile", "bytes": "7952" }, { "name": "PHP", "bytes": "1054319" }, { "name": "Protocol Buffer", "bytes": "104288" }, { "name": "Python", "bytes": "816941" }, { "name": "Ruby", "bytes": "466" }, { "name": "Shell", "bytes": "41203" }, { "name": "TypeScript", "bytes": "134676" }, { "name": "Yacc", "bytes": "21577" } ], "symlink_target": "" }
.. derive-for-training.rst ######################### Derive a trainable model ######################### Documentation in this section describes one of the possible ways to turn a :abbr:`DL (Deep Learning)` model for inference into one that can be used for training. Additionally, and to provide a more complete walk-through that *also* trains the model, our example includes the use of a simple data loader for uncompressed MNIST data. * :ref:`model_overview` * :ref:`code_structure` - :ref:`inference` - :ref:`loss` - :ref:`backprop` - :ref:`update` .. _automating_graph_construction: Automating graph construction ============================== In a :abbr:`Machine Learning (ML)` ecosystem, it makes sense to use automation and abstraction where possible. nGraph was designed to automatically use the "ops" of tensors provided by a framework when constructing graphs. However, nGraph's graph-construction API operates at a fundamentally lower level than a typical framework's API, and writing a model directly in nGraph would be somewhat akin to programming in assembly language: not impossible, but not the easiest thing for humans to do. To make the task easier for developers who need to customize the "automatic", construction of graphs, we've provided some demonstration code for how this could be done. We know, for example, that a trainable model can be derived from any graph that has been constructed with weight-based updates. The following example named ``mnist_mlp.cpp`` represents a hand-designed inference model being converted to a model that can be trained with nGraph. .. _model_overview: Model overview =============== Due to the lower-level nature of the graph-construction API, the example we've selected to document here is a relatively simple model: a fully-connected topology with one hidden layer followed by ``Softmax``. Remember that in nGraph, the graph is stateless; values for the weights must be provided as parameters along with the normal inputs. 
Starting with the graph for inference, we will use it to create a graph for
training. The training function will return tensors for the updated weights.

.. note:: This example illustrates how to convert an inference model into one
   that can be trained. Depending on the framework, bridge code may do something
   similar, or the framework might do this operation itself. Here we do the
   conversion with nGraph because the computation for training a model is
   significantly larger than for inference, and doing the conversion manually
   is tedious and error-prone.


.. _code_structure:

Code structure
==============


.. _inference:

Inference
---------

We begin by building the graph, starting with the input parameter
``X``. We also define a fully-connected layer, including parameters for
weights and bias:

.. literalinclude:: ../../../../examples/mnist_mlp/mnist_mlp.cpp
   :language: cpp
   :lines: 123-134

Repeat the process for the next layer,

.. literalinclude:: ../../../../examples/mnist_mlp/mnist_mlp.cpp
   :language: cpp
   :lines: 137-144

and normalize everything with a ``softmax``.

.. literalinclude:: ../../../../examples/mnist_mlp/mnist_mlp.cpp
   :language: cpp
   :lines: 146-147


.. _loss:

Loss
----

We use cross-entropy to compute the loss. nGraph does not currently have a core
op for cross-entropy, so we implement it directly, adding clipping to prevent
underflow.

.. literalinclude:: ../../../../examples/mnist_mlp/mnist_mlp.cpp
   :language: cpp
   :lines: 150-164


.. _backprop:

Backprop
--------

We want to reduce the loss by adjusting the weights. We compute the adjustments
using the reverse-mode autodiff algorithm, commonly referred to as "backprop"
because of the way it is implemented in interpreted frameworks. In nGraph, we
augment the loss computation with computations for the weight adjustments. This
allows the calculations for the adjustments to be further optimized.

.. 
literalinclude:: ../../../../examples/mnist_mlp/mnist_mlp.cpp :language: cpp :lines: 167-170 For any node ``N``, if the update for ``loss`` is ``delta``, the update computation for ``N`` will be given by the node .. code-block:: cpp auto update = loss->backprop_node(N, delta); .. literalinclude:: ../../../../examples/mnist_mlp/mnist_mlp.cpp :language: cpp :lines: 177-179 The different update nodes will share intermediate computations. So to get the updated values for the weights as computed with the specified :doc:`backend <../../backends/index>`: .. literalinclude:: ../../../../examples/mnist_mlp/mnist_mlp.cpp :language: cpp :lines: 184-212 .. _update: Update ------ Since nGraph is stateless, we train by making a function that has the original weights among its inputs and the updated weights among the results. For training, we'll also need the labeled training data as inputs, and we'll return the loss as an additional result. We'll also want to track how well we are doing; this is a function that returns the loss and has the labeled testing data as input. Although we can use the same nodes in different functions, nGraph currently does not allow the same nodes to be compiled in different functions, so we compile clones of the nodes. .. literalinclude:: ../../../../examples/mnist_mlp/mnist_mlp.cpp :language: cpp :lines: 217-224
{ "content_hash": "e6a74e42d5230fc22d13bdf9dcf30e9d", "timestamp": "", "source": "github", "line_count": 171, "max_line_length": 82, "avg_line_length": 31.385964912280702, "alnum_prop": 0.7259176448667785, "repo_name": "NervanaSystems/ngraph", "id": "60d53353d9e27fd169d64ab525ae4e22cbd3438b", "size": "5367", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "doc/sphinx/source/core/constructing-graphs/derive-for-training.rst", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "3147" }, { "name": "C++", "bytes": "19522833" }, { "name": "CMake", "bytes": "223605" }, { "name": "Dockerfile", "bytes": "2036" }, { "name": "Groovy", "bytes": "13002" }, { "name": "MLIR", "bytes": "55258" }, { "name": "Makefile", "bytes": "13532" }, { "name": "Python", "bytes": "331191" }, { "name": "Shell", "bytes": "43252" } ], "symlink_target": "" }
/* TEMPLATE GENERATED TESTCASE FILE Filename: CWE416_Use_After_Free__return_freed_ptr_12.c Label Definition File: CWE416_Use_After_Free__return_freed_ptr.label.xml Template File: point-flaw-12.tmpl.c */ /* * @description * CWE: 416 Use After Free * Sinks: * GoodSink: Use a block of memory returned from a function * BadSink : Use a block of memory returned from a function after it has been freed * Flow Variant: 12 Control flow: if(globalReturnsTrueOrFalse()) * * */ #include "std_testcase.h" static char * helperBad(char * aString) { size_t i = 0; size_t j; char * reversedString = NULL; if (aString != NULL) { i = strlen(aString); reversedString = (char *) malloc(i+1); for (j = 0; j < i; j++) { reversedString[j] = aString[i-j-1]; } reversedString[i] = '\0'; /* FLAW: Freeing a memory block and then returning a pointer to the freed memory */ free(reversedString); return reversedString; } else { return NULL; } } static char * helperGood(char * aString) { size_t i = 0; size_t j; char * reversedString = NULL; if (aString != NULL) { i = strlen(aString); reversedString = (char *) malloc(i+1); for (j = 0; j < i; j++) { reversedString[j] = aString[i-j-1]; } reversedString[i] = '\0'; /* FIX: Do not free the memory before returning */ return reversedString; } else { return NULL; } } #ifndef OMITBAD void CWE416_Use_After_Free__return_freed_ptr_12_bad() { if(globalReturnsTrueOrFalse()) { { /* Call the bad helper function */ char * reversedString = helperBad("BadSink"); printLine(reversedString); /* free(reversedString); * This call to free() was removed because we want the tool to detect the use after free, * but we don't want that function to be free(). Essentially we want to avoid a double free */ } } else { { /* Call the good helper function */ char * reversedString = helperGood("GoodSink"); printLine(reversedString); /* free(reversedString); * This call to free() was removed because we want the tool to detect the use after free, * but we don't want that function to be free(). 
Essentially we want to avoid a double free */ } } } #endif /* OMITBAD */ #ifndef OMITGOOD /* good1() uses the GoodSink on both sides of the "if" statement */ static void good1() { if(globalReturnsTrueOrFalse()) { { /* Call the good helper function */ char * reversedString = helperGood("GoodSink"); printLine(reversedString); /* free(reversedString); * This call to free() was removed because we want the tool to detect the use after free, * but we don't want that function to be free(). Essentially we want to avoid a double free */ } } else { { /* Call the good helper function */ char * reversedString = helperGood("GoodSink"); printLine(reversedString); /* free(reversedString); * This call to free() was removed because we want the tool to detect the use after free, * but we don't want that function to be free(). Essentially we want to avoid a double free */ } } } void CWE416_Use_After_Free__return_freed_ptr_12_good() { good1(); } #endif /* OMITGOOD */ /* Below is the main(). It is only used when building this testcase on its own for testing or for building a binary to use in testing binary analysis tools. It is not used when compiling all the testcases as one application, which is how source code analysis tools are tested. */ #ifdef INCLUDEMAIN int main(int argc, char * argv[]) { /* seed randomness */ srand( (unsigned)time(NULL) ); #ifndef OMITGOOD printLine("Calling good()..."); CWE416_Use_After_Free__return_freed_ptr_12_good(); printLine("Finished good()"); #endif /* OMITGOOD */ #ifndef OMITBAD printLine("Calling bad()..."); CWE416_Use_After_Free__return_freed_ptr_12_bad(); printLine("Finished bad()"); #endif /* OMITBAD */ return 0; } #endif
{ "content_hash": "13f04ba3260e4f7cd3ef7b6ca86f2b86", "timestamp": "", "source": "github", "line_count": 159, "max_line_length": 103, "avg_line_length": 28.9937106918239, "alnum_prop": 0.5659436008676789, "repo_name": "maurer/tiamat", "id": "f47721b570163b46316e22a0c2f845964c130164", "size": "4610", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "samples/Juliet/testcases/CWE416_Use_After_Free/CWE416_Use_After_Free__return_freed_ptr_12.c", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
@interface AppDelegate : UIResponder <UIApplicationDelegate> @property (strong, nonatomic) UIWindow *window; @end
{ "content_hash": "b3ca5dca9467aa302931b673020908e5", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 60, "avg_line_length": 16.857142857142858, "alnum_prop": 0.7796610169491526, "repo_name": "inmovation/IMVInjection", "id": "015c48c648445326e57cd4d719f39d9e8a2076eb", "size": "283", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "IMVInjection/AppDelegate.h", "mode": "33188", "license": "mit", "language": [ { "name": "Objective-C", "bytes": "20636" }, { "name": "Ruby", "bytes": "2154" } ], "symlink_target": "" }
[![CI Status](http://img.shields.io/travis/eugenenguyen/XBLanguage.svg?style=flat)](https://travis-ci.org/eugenenguyen/XBLanguage) [![Version](https://img.shields.io/cocoapods/v/XBLanguage.svg?style=flat)](http://cocoadocs.org/docsets/XBLanguage) [![License](https://img.shields.io/cocoapods/l/XBLanguage.svg?style=flat)](http://cocoadocs.org/docsets/XBLanguage) [![Platform](https://img.shields.io/cocoapods/p/XBLanguage.svg?style=flat)](http://cocoadocs.org/docsets/XBLanguage) ## Usage To run the example project, clone the repo, and run `pod install` from the Example directory first. ## Requirements ## Installation XBLanguage is available through [CocoaPods](http://cocoapods.org). To install it, simply add the following line to your Podfile: pod "XBLanguage" ## Author eugenenguyen, [email protected] ## License XBLanguage is available under the MIT license. See the LICENSE file for more info.
{ "content_hash": "3dd73b6720c0e0efa97639a3ef7819f3", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 130, "avg_line_length": 35.42307692307692, "alnum_prop": 0.762214983713355, "repo_name": "EugeneNguyen/XBAuthentication", "id": "a157c1a3427677d900eee7af2dda6cd1cde3953a", "size": "935", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Example/Pods/XBLanguage/README.md", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "15536" }, { "name": "C++", "bytes": "758" }, { "name": "Objective-C", "bytes": "3768940" }, { "name": "Ruby", "bytes": "3353" }, { "name": "Shell", "bytes": "7580" } ], "symlink_target": "" }
import asyncio from functools import partial from unittest import mock import pytest from yarl import URL import aiohttp import aiohttp.helpers import aiohttp.web @pytest.fixture def proxy_test_server(raw_test_server, loop, monkeypatch): """Handle all proxy requests and imitate remote server response.""" _patch_ssl_transport(monkeypatch) default_response = dict( status=200, headers=None, body=None) @asyncio.coroutine def proxy_handler(request, proxy_mock): proxy_mock.request = request proxy_mock.requests_list.append(request) response = default_response.copy() if isinstance(proxy_mock.return_value, dict): response.update(proxy_mock.return_value) headers = response['headers'] if not headers: headers = {} if request.method == 'CONNECT': response['body'] = None response['headers'] = headers resp = aiohttp.web.Response(**response) yield from resp.prepare(request) yield from resp.drain() return resp @asyncio.coroutine def proxy_server(): proxy_mock = mock.Mock() proxy_mock.request = None proxy_mock.requests_list = [] handler = partial(proxy_handler, proxy_mock=proxy_mock) server = yield from raw_test_server(handler) proxy_mock.server = server proxy_mock.url = server.make_url('/') return proxy_mock return proxy_server @asyncio.coroutine def _request(method, url, loop=None, **kwargs): client = aiohttp.ClientSession(loop=loop) try: resp = yield from client.request(method, url, **kwargs) yield from resp.release() return resp finally: yield from client.close() @pytest.fixture() def get_request(loop): return partial(_request, method='GET', loop=loop) @asyncio.coroutine def test_proxy_http_absolute_path(proxy_test_server, get_request): url = 'http://aiohttp.io/path?query=yes' proxy = yield from proxy_test_server() yield from get_request(url=url, proxy=proxy.url) assert len(proxy.requests_list) == 1 assert proxy.request.method == 'GET' assert proxy.request.host == 'aiohttp.io' assert proxy.request.path_qs == 'http://aiohttp.io/path?query=yes' @asyncio.coroutine def 
test_proxy_http_raw_path(proxy_test_server, get_request): url = 'http://aiohttp.io:2561/space sheep?q=can:fly' raw_url = 'http://aiohttp.io:2561/space%20sheep?q=can:fly' proxy = yield from proxy_test_server() yield from get_request(url=url, proxy=proxy.url) assert proxy.request.host == 'aiohttp.io:2561' assert proxy.request.path_qs == raw_url @asyncio.coroutine def test_proxy_http_idna_support(proxy_test_server, get_request): url = 'http://éé.com/' raw_url = 'http://xn--9caa.com/' proxy = yield from proxy_test_server() yield from get_request(url=url, proxy=proxy.url) assert proxy.request.host == 'xn--9caa.com' assert proxy.request.path_qs == raw_url @asyncio.coroutine def test_proxy_http_connection_error(get_request): url = 'http://aiohttp.io/path' proxy_url = 'http://localhost:2242/' with pytest.raises(aiohttp.ClientConnectorError): yield from get_request(url=url, proxy=proxy_url) @asyncio.coroutine def test_proxy_http_bad_response(proxy_test_server, get_request): url = 'http://aiohttp.io/path' proxy = yield from proxy_test_server() proxy.return_value = dict( status=502, headers={'Proxy-Agent': 'TestProxy'}) resp = yield from get_request(url=url, proxy=proxy.url) assert resp.status == 502 assert resp.headers['Proxy-Agent'] == 'TestProxy' @asyncio.coroutine def test_proxy_http_auth(proxy_test_server, get_request): url = 'http://aiohttp.io/path' proxy = yield from proxy_test_server() yield from get_request(url=url, proxy=proxy.url) assert 'Authorization' not in proxy.request.headers assert 'Proxy-Authorization' not in proxy.request.headers auth = aiohttp.helpers.BasicAuth('user', 'pass') yield from get_request(url=url, auth=auth, proxy=proxy.url) assert 'Authorization' in proxy.request.headers assert 'Proxy-Authorization' not in proxy.request.headers yield from get_request(url=url, proxy_auth=auth, proxy=proxy.url) assert 'Authorization' not in proxy.request.headers assert 'Proxy-Authorization' in proxy.request.headers yield from get_request(url=url, auth=auth, 
proxy_auth=auth, proxy=proxy.url) assert 'Authorization' in proxy.request.headers assert 'Proxy-Authorization' in proxy.request.headers @asyncio.coroutine def test_proxy_http_auth_utf8(proxy_test_server, get_request): url = 'http://aiohttp.io/path' auth = aiohttp.helpers.BasicAuth('юзер', 'пасс', 'utf-8') proxy = yield from proxy_test_server() yield from get_request(url=url, auth=auth, proxy=proxy.url) assert 'Authorization' in proxy.request.headers assert 'Proxy-Authorization' not in proxy.request.headers @asyncio.coroutine def test_proxy_http_auth_from_url(proxy_test_server, get_request): url = 'http://aiohttp.io/path' proxy = yield from proxy_test_server() auth_url = URL(url).with_user('user').with_password('pass') yield from get_request(url=auth_url, proxy=proxy.url) assert 'Authorization' in proxy.request.headers assert 'Proxy-Authorization' not in proxy.request.headers proxy_url = URL(proxy.url).with_user('user').with_password('pass') yield from get_request(url=url, proxy=proxy_url) assert 'Authorization' not in proxy.request.headers assert 'Proxy-Authorization' in proxy.request.headers @asyncio.coroutine def test_proxy_http_acquired_cleanup(proxy_test_server, loop): url = 'http://aiohttp.io/path' conn = aiohttp.TCPConnector(loop=loop) sess = aiohttp.ClientSession(connector=conn, loop=loop) proxy = yield from proxy_test_server() assert 0 == len(conn._acquired) resp = yield from sess.get(url, proxy=proxy.url) assert resp.closed assert 0 == len(conn._acquired) sess.close() @pytest.mark.skip('we need to reconsider how we test this') @asyncio.coroutine def test_proxy_http_acquired_cleanup_force(proxy_test_server, loop): url = 'http://aiohttp.io/path' conn = aiohttp.TCPConnector(force_close=True, loop=loop) sess = aiohttp.ClientSession(connector=conn, loop=loop) proxy = yield from proxy_test_server() assert 0 == len(conn._acquired) @asyncio.coroutine def request(): resp = yield from sess.get(url, proxy=proxy.url) assert 1 == len(conn._acquired) yield from 
resp.release() yield from request() assert 0 == len(conn._acquired) yield from sess.close() @pytest.mark.skip('we need to reconsider how we test this') @asyncio.coroutine def test_proxy_http_multi_conn_limit(proxy_test_server, loop): url = 'http://aiohttp.io/path' limit, multi_conn_num = 1, 5 conn = aiohttp.TCPConnector(limit=limit, loop=loop) sess = aiohttp.ClientSession(connector=conn, loop=loop) proxy = yield from proxy_test_server() current_pid = None @asyncio.coroutine def request(pid): # process requests only one by one nonlocal current_pid resp = yield from sess.get(url, proxy=proxy.url) current_pid = pid yield from asyncio.sleep(0.2, loop=loop) assert current_pid == pid yield from resp.release() return resp requests = [request(pid) for pid in range(multi_conn_num)] responses = yield from asyncio.gather(*requests, loop=loop) assert len(responses) == multi_conn_num assert set(resp.status for resp in responses) == {200} yield from sess.close() # @pytest.mark.xfail @asyncio.coroutine def _test_proxy_https_connect(proxy_test_server, get_request): proxy = yield from proxy_test_server() url = 'https://www.google.com.ua/search?q=aiohttp proxy' yield from get_request(url=url, proxy=proxy.url) connect = proxy.requests_list[0] assert connect.method == 'CONNECT' assert connect.path == 'www.google.com.ua:443' assert connect.host == 'www.google.com.ua' assert proxy.request.host == 'www.google.com.ua' assert proxy.request.path_qs == '/search?q=aiohttp+proxy' # @pytest.mark.xfail @asyncio.coroutine def _test_proxy_https_connect_with_port(proxy_test_server, get_request): proxy = yield from proxy_test_server() url = 'https://secure.aiohttp.io:2242/path' yield from get_request(url=url, proxy=proxy.url) connect = proxy.requests_list[0] assert connect.method == 'CONNECT' assert connect.path == 'secure.aiohttp.io:2242' assert connect.host == 'secure.aiohttp.io:2242' assert proxy.request.host == 'secure.aiohttp.io:2242' assert proxy.request.path_qs == '/path' # @pytest.mark.xfail 
@asyncio.coroutine def _test_proxy_https_send_body(proxy_test_server, loop): sess = aiohttp.ClientSession(loop=loop) proxy = yield from proxy_test_server() proxy.return_value = {'status': 200, 'body': b'1'*(2**20)} url = 'https://www.google.com.ua/search?q=aiohttp proxy' resp = yield from sess.get(url, proxy=proxy.url) body = yield from resp.read() yield from resp.release() yield from sess.close() assert body == b'1'*(2**20) # @pytest.mark.xfail @asyncio.coroutine def _test_proxy_https_idna_support(proxy_test_server, get_request): url = 'https://éé.com/' proxy = yield from proxy_test_server() yield from get_request(url=url, proxy=proxy.url) connect = proxy.requests_list[0] assert connect.method == 'CONNECT' assert connect.path == 'xn--9caa.com:443' assert connect.host == 'xn--9caa.com' @asyncio.coroutine def test_proxy_https_connection_error(get_request): url = 'https://secure.aiohttp.io/path' proxy_url = 'http://localhost:2242/' with pytest.raises(aiohttp.ClientConnectorError): yield from get_request(url=url, proxy=proxy_url) @asyncio.coroutine def test_proxy_https_bad_response(proxy_test_server, get_request): url = 'https://secure.aiohttp.io/path' proxy = yield from proxy_test_server() proxy.return_value = dict( status=502, headers={'Proxy-Agent': 'TestProxy'}) with pytest.raises(aiohttp.ClientHttpProxyError): yield from get_request(url=url, proxy=proxy.url) assert len(proxy.requests_list) == 1 assert proxy.request.method == 'CONNECT' assert proxy.request.path == 'secure.aiohttp.io:443' # @pytest.mark.xfail @asyncio.coroutine def _test_proxy_https_auth(proxy_test_server, get_request): url = 'https://secure.aiohttp.io/path' auth = aiohttp.helpers.BasicAuth('user', 'pass') proxy = yield from proxy_test_server() yield from get_request(url=url, proxy=proxy.url) connect = proxy.requests_list[0] assert 'Authorization' not in connect.headers assert 'Proxy-Authorization' not in connect.headers assert 'Authorization' not in proxy.request.headers assert 
'Proxy-Authorization' not in proxy.request.headers proxy = yield from proxy_test_server() yield from get_request(url=url, auth=auth, proxy=proxy.url) connect = proxy.requests_list[0] assert 'Authorization' not in connect.headers assert 'Proxy-Authorization' not in connect.headers assert 'Authorization' in proxy.request.headers assert 'Proxy-Authorization' not in proxy.request.headers proxy = yield from proxy_test_server() yield from get_request(url=url, proxy_auth=auth, proxy=proxy.url) connect = proxy.requests_list[0] assert 'Authorization' not in connect.headers assert 'Proxy-Authorization' in connect.headers assert 'Authorization' not in proxy.request.headers assert 'Proxy-Authorization' not in proxy.request.headers proxy = yield from proxy_test_server() yield from get_request(url=url, auth=auth, proxy_auth=auth, proxy=proxy.url) connect = proxy.requests_list[0] assert 'Authorization' not in connect.headers assert 'Proxy-Authorization' in connect.headers assert 'Authorization' in proxy.request.headers assert 'Proxy-Authorization' not in proxy.request.headers # @pytest.mark.xfail @asyncio.coroutine def _test_proxy_https_acquired_cleanup(proxy_test_server, loop): url = 'https://secure.aiohttp.io/path' conn = aiohttp.TCPConnector(loop=loop) sess = aiohttp.ClientSession(connector=conn, loop=loop) proxy = yield from proxy_test_server() assert 0 == len(conn._acquired) @asyncio.coroutine def request(): resp = yield from sess.get(url, proxy=proxy.url) assert 1 == len(conn._acquired) yield from resp.release() yield from request() assert 0 == len(conn._acquired) yield from sess.close() # @pytest.mark.xfail @asyncio.coroutine def _test_proxy_https_acquired_cleanup_force(proxy_test_server, loop): url = 'https://secure.aiohttp.io/path' conn = aiohttp.TCPConnector(force_close=True, loop=loop) sess = aiohttp.ClientSession(connector=conn, loop=loop) proxy = yield from proxy_test_server() assert 0 == len(conn._acquired) @asyncio.coroutine def request(): resp = yield from 
sess.get(url, proxy=proxy.url) assert 1 == len(conn._acquired) yield from resp.release() yield from request() assert 0 == len(conn._acquired) yield from sess.close() # @pytest.mark.xfail @asyncio.coroutine def _test_proxy_https_multi_conn_limit(proxy_test_server, loop): url = 'https://secure.aiohttp.io/path' limit, multi_conn_num = 1, 5 conn = aiohttp.TCPConnector(limit=limit, loop=loop) sess = aiohttp.ClientSession(connector=conn, loop=loop) proxy = yield from proxy_test_server() current_pid = None @asyncio.coroutine def request(pid): # process requests only one by one nonlocal current_pid resp = yield from sess.get(url, proxy=proxy.url) current_pid = pid yield from asyncio.sleep(0.2, loop=loop) assert current_pid == pid yield from resp.release() return resp requests = [request(pid) for pid in range(multi_conn_num)] responses = yield from asyncio.gather(*requests, loop=loop) assert len(responses) == multi_conn_num assert set(resp.status for resp in responses) == {200} yield from sess.close() def _patch_ssl_transport(monkeypatch): """Make ssl transport substitution to prevent ssl handshake.""" def _make_ssl_transport_dummy(self, rawsock, protocol, sslcontext, waiter=None, **kwargs): return self._make_socket_transport(rawsock, protocol, waiter, extra=kwargs.get('extra'), server=kwargs.get('server')) monkeypatch.setattr( "asyncio.selector_events.BaseSelectorEventLoop._make_ssl_transport", _make_ssl_transport_dummy)
{ "content_hash": "8ad9e74e43243bb24032e396e98db3e4", "timestamp": "", "source": "github", "line_count": 507, "max_line_length": 76, "avg_line_length": 29.775147928994084, "alnum_prop": 0.6758081611022787, "repo_name": "Eyepea/aiohttp", "id": "5cc57164f810e3fced39bbd79f4643a336ac612e", "size": "15108", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/test_proxy_functional.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Makefile", "bytes": "1738" }, { "name": "PowerShell", "bytes": "3361" }, { "name": "Python", "bytes": "935198" } ], "symlink_target": "" }
A SettingFilter is a filter wich is used by a SettingBrowser in order to select which nodes of the setting trees are to be shown. A SettingFilter subclass must redefine the #keepHandler: method which return true if the argument handler is to be kept. Instance Variables
{ "content_hash": "6226eee8e0e0d42fa9a6bc0b375be31b", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 250, "avg_line_length": 90.33333333333333, "alnum_prop": 0.8118081180811808, "repo_name": "vineetreddyrajula/pharo", "id": "0301420bcb5730d81cd528ac1df77c765aa6e47b", "size": "271", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/System-Settings.package/SettingFilter.class/README.md", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
namespace ui_devtools { ui::KeyEvent ConvertToUIKeyEvent(protocol::DOM::KeyEvent* event) { ui::EventType event_type = event->getType() == protocol::DOM::KeyEvent::TypeEnum::KeyPressed ? ui::ET_KEY_PRESSED : ui::ET_KEY_RELEASED; return ui::KeyEvent( event_type, static_cast<ui::KeyboardCode>(event->getKeyCode()), static_cast<ui::DomCode>(event->getCode()), event->getFlags(), event->getIsChar() ? ui::DomKey::FromCharacter(event->getKey()) : ui::DomKey(event->getKey()), ui::EventTimeForNow(), event->getIsChar()); } } // namespace ui_devtools
{ "content_hash": "52e1650e72db887b9b1c600d62643f19", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 71, "avg_line_length": 39.0625, "alnum_prop": 0.6288, "repo_name": "scheib/chromium", "id": "779bbd4f6c99cdbb24947a66c5884349a0e8958e", "size": "894", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "components/ui_devtools/views/devtools_event_util.cc", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
<?php namespace Checkdomain\TeleCash\IPG\API\Model; /** * Class Payment */ class Payment implements ElementInterface { const CURRENCY_EUR = "978"; /** @var string $hostedDataId */ private $hostedDataId; /** @var float $amount */ private $amount; /** * @param string|null $hostedDataId * @param float|null $amount */ public function __construct($hostedDataId = null, $amount = null) { $this->hostedDataId = $hostedDataId; $this->amount = $amount; } /** * @param \DOMDocument $document * * @return \DOMElement */ public function getXML(\DOMDocument $document) { $xml = $document->createElement('ns1:Payment'); if (!empty($this->hostedDataId)) { $hostedDataId = $document->createElement('ns1:HostedDataID'); $hostedDataId->textContent = $this->hostedDataId; $xml->appendChild($hostedDataId); } if (!empty($this->amount)) { $amount = $document->createElement('ns1:ChargeTotal'); $amount->textContent = $this->amount; $currency = $document->createElement('ns1:Currency'); $currency->textContent = self::CURRENCY_EUR; $xml->appendChild($amount); $xml->appendChild($currency); } return $xml; } }
{ "content_hash": "b34bc39ef9591aedb5feb31085d0028d", "timestamp": "", "source": "github", "line_count": 56, "max_line_length": 81, "avg_line_length": 24.964285714285715, "alnum_prop": 0.555793991416309, "repo_name": "checkdomain/TeleCash", "id": "2e5bf05f0eb9a8ecaa2280d9a327c0d03e335ab6", "size": "1398", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/IPG/API/Model/Payment.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "93786" } ], "symlink_target": "" }
from typing import Any, TYPE_CHECKING from azure.core.configuration import Configuration from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy from .._version import VERSION if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential class DataProtectionClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes """Configuration for DataProtectionClient. Note that all parameters used to create this instance are saved as instance attributes. :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The subscription Id. Required. :type subscription_id: str :keyword api_version: Api Version. Default value is "2022-09-01-preview". Note that overriding this default value may result in unsupported behavior. 
:paramtype api_version: str """ def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None: super(DataProtectionClientConfiguration, self).__init__(**kwargs) api_version = kwargs.pop("api_version", "2022-09-01-preview") # type: str if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: raise ValueError("Parameter 'subscription_id' must not be None.") self.credential = credential self.subscription_id = subscription_id self.api_version = api_version self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) kwargs.setdefault("sdk_moniker", "mgmt-dataprotection/{}".format(VERSION)) self._configure(**kwargs) def _configure(self, **kwargs: Any) -> None: self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) self.authentication_policy = kwargs.get("authentication_policy") if self.credential and not self.authentication_policy: self.authentication_policy = AsyncARMChallengeAuthenticationPolicy( self.credential, *self.credential_scopes, **kwargs )
{ "content_hash": "9f929f2bb9d827bb763d5c55ee919995", "timestamp": "", "source": "github", "line_count": 58, "max_line_length": 107, "avg_line_length": 53.05172413793103, "alnum_prop": 0.7198570035749107, "repo_name": "Azure/azure-sdk-for-python", "id": "d04a341320d8020cc4a134f776fd463d362771f4", "size": "3545", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "sdk/dataprotection/azure-mgmt-dataprotection/azure/mgmt/dataprotection/aio/_configuration.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "1224" }, { "name": "Bicep", "bytes": "24196" }, { "name": "CSS", "bytes": "6089" }, { "name": "Dockerfile", "bytes": "4892" }, { "name": "HTML", "bytes": "12058" }, { "name": "JavaScript", "bytes": "8137" }, { "name": "Jinja", "bytes": "10377" }, { "name": "Jupyter Notebook", "bytes": "272022" }, { "name": "PowerShell", "bytes": "518535" }, { "name": "Python", "bytes": "715484989" }, { "name": "Shell", "bytes": "3631" } ], "symlink_target": "" }
using System.Threading.Tasks; using Microsoft.Extensions.FileProviders; using OrchardCore.Recipes.Models; namespace OrchardCore.Recipes.Services { public interface IRecipeReader { Task<RecipeDescriptor> GetRecipeDescriptor(string recipeBasePath, IFileInfo recipeFileInfo, IFileProvider fileProvider); } }
{ "content_hash": "0b65bbe3cc2de76b279b0be37a2e6282", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 128, "avg_line_length": 29.636363636363637, "alnum_prop": 0.8006134969325154, "repo_name": "stevetayloruk/Orchard2", "id": "8461dbb40d4c77f138b5532c9cb950eeeec4870f", "size": "326", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "src/OrchardCore/OrchardCore.Recipes.Abstractions/Services/IRecipeReader.cs", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C#", "bytes": "8040998" }, { "name": "CSS", "bytes": "3214206" }, { "name": "Dockerfile", "bytes": "424" }, { "name": "HTML", "bytes": "1470286" }, { "name": "JavaScript", "bytes": "2441859" }, { "name": "Liquid", "bytes": "43273" }, { "name": "PHP", "bytes": "2484" }, { "name": "Pug", "bytes": "56076" }, { "name": "SCSS", "bytes": "220077" }, { "name": "TypeScript", "bytes": "41736" } ], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (version 1.6.0_34) on Mon Apr 06 09:33:59 MDT 2015 --> <title>Uses of Class com.google.android.gms.R.raw</title> <meta name="date" content="2015-04-06"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> </head> <body> <script type="text/javascript"><!-- if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class com.google.android.gms.R.raw"; } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar_top"> <!-- --> </a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../com/google/android/gms/R.raw.html" title="class in com.google.android.gms">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-files/index-1.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>PREV</li> <li>NEXT</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?com/google/android/gms//class-useR.raw.html" target="_top">FRAMES</a></li> <li><a href="R.raw.html" target="_top">NO FRAMES</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { 
allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Class com.google.android.gms.R.raw" class="title">Uses of Class<br>com.google.android.gms.R.raw</h2> </div> <div class="classUseContainer">No usage of com.google.android.gms.R.raw</div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar_bottom"> <!-- --> </a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../com/google/android/gms/R.raw.html" title="class in com.google.android.gms">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-files/index-1.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>PREV</li> <li>NEXT</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?com/google/android/gms//class-useR.raw.html" target="_top">FRAMES</a></li> <li><a href="R.raw.html" target="_top">NO FRAMES</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> </body> </html>
{ "content_hash": "011a74ec2701ef2054036a8bbaccd220", "timestamp": "", "source": "github", "line_count": 115, "max_line_length": 119, "avg_line_length": 35.243478260869566, "alnum_prop": 0.6005428077966938, "repo_name": "CMPUT301W15T02/TeamTo", "id": "247632f54c412f4a32e718be3da99a3ac53b90bb", "size": "4053", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "doc/JavaDoc/com/google/android/gms/class-use/R.raw.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "270353" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE PolicySet [ <!ENTITY xml "http://www.w3.org/2001/XMLSchema#"> <!ENTITY rule-combine "urn:oasis:names:tc:xacml:1.0:rule-combining-algorithm:"> <!ENTITY policy-combine "urn:oasis:names:tc:xacml:1.0:policy-combining-algorithm:"> <!ENTITY function "urn:oasis:names:tc:xacml:1.0:function:"> <!ENTITY subject-category "urn:oasis:names:tc:xacml:1.0:subject-category:"> <!ENTITY subject "urn:oasis:names:tc:xacml:1.0:subject:"> <!ENTITY role "urn:oasis:names:tc:xacml:2.0:subject:role"> <!ENTITY roles "urn:oasis:names:tc:xacml:2.0:subject:role-values:"> <!ENTITY tenant "urn:oasis:names:tc:xacml:2.0:subject:tenant"> <!ENTITY tenants "urn:oasis:names:tc:xacml:2.0:subject:tenant-values:"> <!ENTITY resource "urn:oasis:names:tc:xacml:1.0:resource:"> <!ENTITY resource-tenant "urn:oasis:names:tc:xacml:2.0:resource:tenant"> <!ENTITY resource-tenants "urn:oasis:names:tc:xacml:2.0:resource:tenant-values:"> <!ENTITY action "urn:oasis:names:tc:xacml:1.0:action:"> <!ENTITY actions "urn:oasis:names:tc:xacml:2.0:actions:"> <!ENTITY environment "urn:oasis:names:tc:xacml:1.0:environment:"> <!ENTITY policyset-id "urn:oasis:names:tc:xacml:2.0:rbac:"> ]> <Request xmlns="urn:oasis:names:tc:xacml:2.0:context:schema:os" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> <Subject> <Attribute AttributeId="&subject;subject-id" DataType="&xml;string" IncludeInResult="false"> <AttributeValue>Anne</AttributeValue> </Attribute> <Attribute AttributeId="&role;" DataType="&xml;anyURI" IncludeInResult="false"> <AttributeValue>&roles;i0914:manager</AttributeValue> </Attribute> <Attribute AttributeId="&tenant;" DataType="&xml;anyURI" IncludeInResult="false"> <AttributeValue>&tenants;i0913</AttributeValue> </Attribute> </Subject> <Resource> <Attribute AttributeId="&resource;resource-id" DataType="&xml;string" IncludeInResult="false"> <AttributeValue>purchase order</AttributeValue> </Attribute> <Attribute AttributeId="&resource-tenant;" DataType="&xml;anyURI" 
IncludeInResult="false"> <AttributeValue>&resource-tenants;i0914</AttributeValue> </Attribute> </Resource> <Action> <Attribute AttributeId="&action;action-id" DataType="&xml;string" IncludeInResult="false"> <AttributeValue>create</AttributeValue> </Attribute> </Action> </Request>
{ "content_hash": "39726d1351a258ecb7b1b6427787d17e", "timestamp": "", "source": "github", "line_count": 55, "max_line_length": 83, "avg_line_length": 43.654545454545456, "alnum_prop": 0.7176176593086214, "repo_name": "townbull/mtaaas", "id": "01bacdc28453349badb9de34e78629084d0b99eb", "size": "2401", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "PEPClient/testcases/MTAS0914CRequest.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "1440895" }, { "name": "Shell", "bytes": "8283" } ], "symlink_target": "" }
package org.kie.workbench.common.screens.projecteditor.client.build.exec.impl.executors.build; import com.google.gwtmockito.GwtMockitoTestRunner; import org.junit.Before; import org.junit.runner.RunWith; import org.kie.workbench.common.screens.projecteditor.client.build.exec.impl.executors.validators.SnapshotContextValidator; import static org.mockito.Mockito.spy; @RunWith(GwtMockitoTestRunner.class) public class SnapshotBuildExecutorTest extends AbstractBuildExecutorTest { @Before public void setup() { super.setup(); context = getSnapshotContext(); runner = spy(new BuildExecutor(buildService, buildResultsEvent, notificationEvent, buildDialog, new SnapshotContextValidator())); } }
{ "content_hash": "4f01d6d8d0db97195a12f2b68ca11af4", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 137, "avg_line_length": 31.91304347826087, "alnum_prop": 0.7888283378746594, "repo_name": "jhrcek/kie-wb-common", "id": "6b992fa79ede8a7122adff25fccfea3ea79b4b7f", "size": "1353", "binary": false, "copies": "10", "ref": "refs/heads/master", "path": "kie-wb-common-screens/kie-wb-common-project-editor/kie-wb-common-project-editor-client/src/test/java/org/kie/workbench/common/screens/projecteditor/client/build/exec/impl/executors/build/SnapshotBuildExecutorTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "2591" }, { "name": "CSS", "bytes": "115195" }, { "name": "Dockerfile", "bytes": "210" }, { "name": "FreeMarker", "bytes": "36496" }, { "name": "GAP", "bytes": "86275" }, { "name": "HTML", "bytes": "331778" }, { "name": "Java", "bytes": "38263821" }, { "name": "JavaScript", "bytes": "20277" }, { "name": "Shell", "bytes": "905" }, { "name": "Visual Basic", "bytes": "84832" } ], "symlink_target": "" }
set -e if [ -d tinytest ]; then (cd tinytest; git pull origin master) else git clone https://github.com/eagletmt/tinytest.git fi if [ -d vimproc ]; then cd vimproc git pull origin master else git clone https://github.com/Shougo/vimproc.git cd vimproc fi make -f make_unix.mak cabal update cabal install ghc-mod
{ "content_hash": "a720f49c4e0b984af9c4419b9d83b026", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 52, "avg_line_length": 17.157894736842106, "alnum_prop": 0.7208588957055214, "repo_name": "carlohamalainen/imagetrove", "id": "a7fe0e63831b0d75b10891a0c57a798951edb717", "size": "336", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "3.5/vim/bundle/ghcmod-vim/install-deps.sh", "mode": "33261", "license": "bsd-2-clause", "language": [ { "name": "Batchfile", "bytes": "1476" }, { "name": "C", "bytes": "79976" }, { "name": "Erlang", "bytes": "1972" }, { "name": "JavaScript", "bytes": "1064" }, { "name": "Makefile", "bytes": "6115" }, { "name": "Python", "bytes": "29356" }, { "name": "Shell", "bytes": "11504" }, { "name": "VimL", "bytes": "897048" } ], "symlink_target": "" }
using System; using System.Reflection; using System.Text; using NMocha.Internal; using NMock2; using NMock2.Monitoring; namespace NMocha.Monitoring { /// <summary> /// Represents the invocation of a method on an object (receiver). /// </summary> public class Invocation : ISelfDescribing { /// <summary> /// Holds the method that is being invoked. /// </summary> public readonly MethodInfo Method; /// <summary> /// Holds the parameterlist of the invocation. /// </summary> public readonly ParameterList Parameters; /// <summary> /// Holds the receiver providing the method. /// </summary> public readonly object Receiver; /// <summary> /// Holds the exception to be thrown. When this field has been set, <see cref="isThrowing"/> will become true. /// </summary> private Exception exception; /// <summary> /// Holds a boolean value whether the method is throwing an exception or not. /// </summary> private bool isThrowing; /// <summary> /// Holds the result of the invocation. /// </summary> private object result = Missing.Value; /// <summary> /// Initializes a new instance of the <see cref="Invocation"/> class. /// </summary> /// <param name="receiver">The receiver providing the method.</param> /// <param name="method">The method.</param> /// <param name="parameters">The parameters passed to the method..</param> public Invocation(object receiver, MethodInfo method, object[] parameters) { Receiver = receiver; Method = method; Parameters = new ParameterList(method, parameters); } /// <summary> /// Gets or sets the result of the invocation. /// </summary> /// <value>The result.</value> public object Result { get { return result; } set { CheckReturnType(value); result = value; exception = null; isThrowing = false; } } /// <summary> /// Gets or sets the exception that is thrown on the invocation. 
/// </summary> /// <value>The exception.</value> public Exception Exception { get { return exception; } set { if (value == null) { throw new ArgumentNullException("value"); } exception = value; result = null; isThrowing = true; } } /// <summary> /// Gets a value indicating whether an exception is thrown an this invocation. /// </summary> /// <value> /// <c>true</c> if this invocation is throwing an exception; otherwise, <c>false</c>. /// </value> public bool IsThrowing { get { return isThrowing; } } #region ISelfDescribing Members public void DescribeOn(IDescription description) { // This should really be a mock object in most cases, but a few testcases // seem to supply strings etc as a Receiver. var mock = Receiver as IMockObject; if (mock != null) { description.AppendText(mock.MockName); } else { description.AppendText(Receiver.ToString()); } if (MethodIsIndexerGetter()) { DescribeAsIndexerGetter(description); } else if (MethodIsIndexerSetter()) { DescribeAsIndexerSetter(description); } else if (MethodIsProperty()) { DescribeAsProperty(description); } else { DescribeNormalMethod(description); } } #endregion /// <summary> /// Invokes this invocation on the specified receiver and stores the result and exception /// returns/thrown by the invocation. /// </summary> /// <param name="otherReceiver">The other receiver.</param> public void InvokeOn(object otherReceiver) { try { Result = Method.Invoke(otherReceiver, Parameters.AsArray); Parameters.MarkAllValuesAsSet(); } catch (TargetInvocationException e) { Exception = e.InnerException; } } /// <summary> /// Checks the returnType of the initialized method if it is valid to be mocked. 
/// </summary> /// <param name="value">The return value to be checked.</param> private void CheckReturnType(object value) { if (Method.ReturnType == typeof (void) && value != null) { throw new ArgumentException("cannot return a value from a void method", "value"); } if (Method.ReturnType != typeof (void) && Method.ReturnType.IsValueType && value == null) { if ( !(Method.ReturnType.IsGenericType && Method.ReturnType.GetGenericTypeDefinition() == typeof (Nullable<>))) { throw new ArgumentException("cannot return a null value type", "value"); } } if (value != null && !Method.ReturnType.IsInstanceOfType(value)) { throw new ArgumentException( "cannot return a value of type " + DescribeType(value) + " from a method returning " + Method.ReturnType, "value"); } } /// <summary> /// Determines whether the initialized method is a property. /// </summary> /// <returns> /// Returns true if initialized method is a property; false otherwise. /// </returns> private bool MethodIsProperty() { return Method.IsSpecialName && ((Method.Name.StartsWith("get_") && Parameters.Count == 0) || (Method.Name.StartsWith("set_") && Parameters.Count == 1)); } /// <summary> /// Determines whether the initialized method is an index getter. /// </summary> /// <returns> /// Returns true if initialized method is an index getter; false otherwise. /// </returns> private bool MethodIsIndexerGetter() { return Method.IsSpecialName && Method.Name == "get_Item" && Parameters.Count >= 1; } /// <summary> /// Determines whether the initialized method is an index setter. /// </summary> /// <returns> /// Returns true if initialized method is an index setter; false otherwise. /// </returns> private bool MethodIsIndexerSetter() { return Method.IsSpecialName && Method.Name == "set_Item" && Parameters.Count >= 2; } /// <summary> /// Determines whether the initialized method is an event adder. /// </summary> /// <returns> /// Returns true if initialized method is an event adder; false otherwise. 
/// </returns> private bool MethodIsEventAdder() { return Method.IsSpecialName && Method.Name.StartsWith("add_") && Parameters.Count == 1 && typeof (Delegate).IsAssignableFrom(Method.GetParameters()[0].ParameterType); } /// <summary> /// Determines whether the initialized method is an event remover. /// </summary> /// <returns> /// Returns true if initialized method is an event remover; false otherwise. /// </returns> private bool MethodIsEventRemover() { return Method.IsSpecialName && Method.Name.StartsWith("remove_") && Parameters.Count == 1 && typeof (Delegate).IsAssignableFrom(Method.GetParameters()[0].ParameterType); } /// <summary> /// Describes the property with parameters to the specified <paramref name="writer"/>. /// </summary> /// <param name="writer">The writer where the description is written to.</param> private void DescribeAsProperty(IDescription writer) { writer.AppendText(".") .AppendText(Method.Name.Substring(4)); if (Parameters.Count > 0) { writer.AppendText(" = ") .AppendValue(Parameters[0]); } } /// <summary> /// Describes the index setter with parameters to the specified <paramref name="writer"/>. /// </summary> /// <param name="writer">The writer where the description is written to.</param> private void DescribeAsIndexerGetter(IDescription writer) { writer.AppendText("["); WriteParameterList(writer, Parameters.Count); writer.AppendText("]"); } /// <summary> /// Describes the index setter with parameters to the specified <paramref name="writer"/>. /// </summary> /// <param name="writer">The writer where the description is written to.</param> private void DescribeAsIndexerSetter(IDescription writer) { writer.AppendText("["); WriteParameterList(writer, Parameters.Count - 1); writer.AppendText("] = ") .AppendValue(Parameters[Parameters.Count - 1]); } /// <summary> /// Describes the method with parameters to the specified <paramref name="writer"/>. 
/// </summary> /// <param name="writer">The writer where the description is written to.</param> private void DescribeNormalMethod(IDescription writer) { writer.AppendText(".") .AppendText(Method.Name); WriteTypeParams(writer); writer.AppendText("("); WriteParameterList(writer, Parameters.Count); writer.AppendText(")"); } /// <summary> /// Writes the generic parameters of the method to the specified <paramref name="writer"/>. /// </summary> /// <param name="writer">The writer where the description is written to.</param> private void WriteTypeParams(IDescription writer) { Type[] types = Method.GetGenericArguments(); if (types.Length > 0) { writer.AppendText("<"); for (int i = 0; i < types.Length; i++) { if (i > 0) { writer.AppendText(", "); } writer.AppendText(types[i].FullName); } writer.AppendText(">"); } } /// <summary> /// Writes the parameter list to the specified <paramref name="writer"/>. /// </summary> /// <param name="writer">The writer where the description is written to.</param> /// <param name="count">The count of parameters to describe.</param> private void WriteParameterList(IDescription writer, int count) { for (int i = 0; i < count; i++) { if (i > 0) { writer.AppendText(", "); } if (Method.GetParameters()[i].IsOut) { writer.AppendText("out"); } else { writer.AppendValue(Parameters[i]); } } } /// <summary> /// Describes the interfaces used for <see cref="DescribeOn"/>. /// </summary> /// <param name="obj">The object which interfaces to describe.</param> /// <returns> /// Returns a string containing the description of the given object's interfaces. /// </returns> private string DescribeType(object obj) { Type type = obj.GetType(); var sb = new StringBuilder(); sb.Append(type); Type[] interfaceTypes = type.GetInterfaces(); if (interfaceTypes.Length > 0) { sb.Append(": "); foreach (Type interfaceType in interfaceTypes) { sb.Append(interfaceType); sb.Append(", "); } sb.Length -= 2; // cut away last ", " } return sb.ToString(); } } }
{ "content_hash": "427d9df9dae4e8caf492e743d1082510", "timestamp": "", "source": "github", "line_count": 374, "max_line_length": 118, "avg_line_length": 34.69786096256684, "alnum_prop": 0.5149109963782076, "repo_name": "isaiah-perumalla/NMocha", "id": "70783196dd749db63da31991f53be82d56eecd14", "size": "13821", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/NMocha/Monitoring/Invocation.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "587764" }, { "name": "Ruby", "bytes": "1866" }, { "name": "Shell", "bytes": "55" } ], "symlink_target": "" }
package de.cronn.jira.sync.config; public class CacheConfig { private static final String DEFAULT_DIRECTORY = "cache"; private boolean persistent; private String directory = DEFAULT_DIRECTORY; public boolean isPersistent() { return persistent; } public void setPersistent(boolean persistent) { this.persistent = persistent; } public String getDirectory() { return directory; } public void setDirectory(String directory) { this.directory = directory; } }
{ "content_hash": "4ce22c3c3d24b512d6a83b7b145612ff", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 57, "avg_line_length": 19.2, "alnum_prop": 0.75, "repo_name": "cronn-de/jira-sync", "id": "1aee2ad419063a5f1216fa751358505c8dd46ce3", "size": "480", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/de/cronn/jira/sync/config/CacheConfig.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "337363" } ], "symlink_target": "" }
import os import datetime import time import platform import mimetypes from tempfile import NamedTemporaryFile import warnings # DJANGO IMPORTS from django.core.files import File from django.utils.six import string_types # FILEBROWSER IMPORTS from filebrowser.settings import EXTENSIONS, VERSIONS, ADMIN_VERSIONS, VERSIONS_BASEDIR, VERSION_QUALITY, PLACEHOLDER, FORCE_PLACEHOLDER, SHOW_PLACEHOLDER, STRICT_PIL, IMAGE_MAXBLOCK, DEFAULT_PERMISSIONS from filebrowser.utils import path_strip, scale_and_crop from django.utils.encoding import python_2_unicode_compatible, smart_str # PIL import if STRICT_PIL: from PIL import Image from PIL import ImageFile else: try: from PIL import Image from PIL import ImageFile except ImportError: import Image import ImageFile ImageFile.MAXBLOCK = IMAGE_MAXBLOCK # default is 64k class FileListing(): """ The FileListing represents a group of FileObjects/FileDirObjects. An example:: from filebrowser.base import FileListing filelisting = FileListing(path, sorting_by='date', sorting_order='desc') print filelisting.files_listing_total() print filelisting.results_listing_total() for fileobject in filelisting.files_listing_total(): print fileobject.filetype where path is a relative path to a storage location """ # Four variables to store the length of a listing obtained by various listing methods # (updated whenever a particular listing method is called). 
_results_listing_total = None _results_walk_total = None _results_listing_filtered = None _results_walk_total = None def __init__(self, path, filter_func=None, sorting_by=None, sorting_order=None, site=None): self.path = path self.filter_func = filter_func self.sorting_by = sorting_by self.sorting_order = sorting_order if not site: from filebrowser.sites import site as default_site site = default_site self.site = site # HELPER METHODS # sort_by_attr def sort_by_attr(self, seq, attr): """ Sort the sequence of objects by object's attribute Arguments: seq - the list or any sequence (including immutable one) of objects to sort. attr - the name of attribute to sort by Returns: the sorted list of objects. """ from operator import attrgetter if isinstance(attr, string_types): # Backward compatibility hack attr = (attr, ) return sorted(seq, key=attrgetter(*attr)) _is_folder_stored = None @property def is_folder(self): if self._is_folder_stored is None: self._is_folder_stored = self.site.storage.isdir(self.path) return self._is_folder_stored def listing(self): "List all files for path" if self.is_folder: dirs, files = self.site.storage.listdir(self.path) return (f for f in dirs + files) return [] def _walk(self, path, filelisting): """ Recursively walks the path and collects all files and directories. Danger: Symbolic links can create cycles and this function ends up in a regression. 
""" dirs, files = self.site.storage.listdir(path) if dirs: for d in dirs: self._walk(os.path.join(path, d), filelisting) filelisting.extend([path_strip(os.path.join(path, d), self.site.directory)]) if files: for f in files: filelisting.extend([path_strip(os.path.join(path, f), self.site.directory)]) def walk(self): "Walk all files for path" filelisting = [] if self.is_folder: self._walk(self.path, filelisting) return filelisting # Cached results of files_listing_total (without any filters and sorting applied) _fileobjects_total = None def files_listing_total(self): "Returns FileObjects for all files in listing" if self._fileobjects_total is None: self._fileobjects_total = [] for item in self.listing(): fileobject = FileObject(os.path.join(self.path, item), site=self.site) self._fileobjects_total.append(fileobject) files = self._fileobjects_total if self.sorting_by: files = self.sort_by_attr(files, self.sorting_by) if self.sorting_order == "desc": files.reverse() self._results_listing_total = len(files) return files def files_walk_total(self): "Returns FileObjects for all files in walk" files = [] for item in self.walk(): fileobject = FileObject(os.path.join(self.site.directory, item), site=self.site) files.append(fileobject) if self.sorting_by: files = self.sort_by_attr(files, self.sorting_by) if self.sorting_order == "desc": files.reverse() self._results_walk_total = len(files) return files def files_listing_filtered(self): "Returns FileObjects for filtered files in listing" if self.filter_func: listing = list(filter(self.filter_func, self.files_listing_total())) else: listing = self.files_listing_total() self._results_listing_filtered = len(listing) return listing def files_walk_filtered(self): "Returns FileObjects for filtered files in walk" if self.filter_func: listing = list(filter(self.filter_func, self.files_walk_total())) else: listing = self.files_walk_total() self._results_walk_filtered = len(listing) return listing def results_listing_total(self): 
"Counter: all files" if self._results_listing_total is not None: return self._results_listing_total return len(self.files_listing_total()) def results_walk_total(self): "Counter: all files" if self._results_walk_total is not None: return self._results_walk_total return len(self.files_walk_total()) def results_listing_filtered(self): "Counter: filtered files" if self._results_listing_filtered is not None: return self._results_listing_filtered return len(self.files_listing_filtered()) def results_walk_filtered(self): "Counter: filtered files" if self._results_walk_filtered is not None: return self._results_walk_filtered return len(self.files_walk_filtered()) @python_2_unicode_compatible class FileObject(): """ The FileObject represents a file (or directory) on the server. An example:: from filebrowser.base import FileObject fileobject = FileObject(path) where path is a relative path to a storage location """ def __init__(self, path, site=None): if not site: from filebrowser.sites import site as default_site site = default_site self.site = site if platform.system() == 'Windows': self.path = path.replace('\\', '/') else: self.path = path self.head = os.path.dirname(path) self.filename = os.path.basename(path) self.filename_lower = self.filename.lower() self.filename_root, self.extension = os.path.splitext(self.filename) self.mimetype = mimetypes.guess_type(self.filename) def __str__(self): return smart_str(self.path) @property def name(self): return self.path def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, self or "None") def __len__(self): return len(self.path) # HELPER METHODS # _get_file_type def _get_file_type(self): "Get file type as defined in EXTENSIONS." 
file_type = '' for k, v in EXTENSIONS.items(): for extension in v: if self.extension.lower() == extension.lower(): file_type = k return file_type # GENERAL ATTRIBUTES/PROPERTIES # filetype # filesize # date # datetime # exists _filetype_stored = None @property def filetype(self): "Filetype as defined with EXTENSIONS" if self._filetype_stored is not None: return self._filetype_stored if self.is_folder: self._filetype_stored = 'Folder' else: self._filetype_stored = self._get_file_type() return self._filetype_stored _filesize_stored = None @property def filesize(self): "Filesize in bytes" if self._filesize_stored is not None: return self._filesize_stored if self.exists: self._filesize_stored = self.site.storage.size(self.path) return self._filesize_stored return None _date_stored = None @property def date(self): "Modified time (from site.storage) as float (mktime)" if self._date_stored is not None: return self._date_stored if self.exists: self._date_stored = time.mktime(self.site.storage.modified_time(self.path).timetuple()) return self._date_stored return None @property def datetime(self): "Modified time (from site.storage) as datetime" if self.date: return datetime.datetime.fromtimestamp(self.date) return None _exists_stored = None @property def exists(self): "True, if the path exists, False otherwise" if self._exists_stored is None: self._exists_stored = self.site.storage.exists(self.path) return self._exists_stored # PATH/URL ATTRIBUTES/PROPERTIES # path (see init) # path_relative_directory # path_full # dirname # url @property def path_relative_directory(self): "Path relative to site.directory" return path_strip(self.path, self.site.directory) @property def path_full(self): "Absolute path as defined with site.storage" return self.site.storage.path(self.path) @property def dirname(self): "The directory (not including site.directory)" return os.path.dirname(self.path_relative_directory) @property def url(self): "URL for the file/folder as defined with site.storage" 
return self.site.storage.url(self.path) # IMAGE ATTRIBUTES/PROPERTIES # dimensions # width # height # aspectratio # orientation _dimensions_stored = None @property def dimensions(self): "Image dimensions as a tuple" if self.filetype != 'Image': return None if self._dimensions_stored is not None: return self._dimensions_stored try: im = Image.open(self.site.storage.open(self.path)) self._dimensions_stored = im.size except: pass return self._dimensions_stored @property def width(self): "Image width in px" if self.dimensions: return self.dimensions[0] return None @property def height(self): "Image height in px" if self.dimensions: return self.dimensions[1] return None @property def aspectratio(self): "Aspect ratio (float format)" if self.dimensions: return float(self.width) / float(self.height) return None @property def orientation(self): "Image orientation, either 'Landscape' or 'Portrait'" if self.dimensions: if self.dimensions[0] >= self.dimensions[1]: return "Landscape" else: return "Portrait" return None # FOLDER ATTRIBUTES/PROPERTIES # directory (deprecated) # folder (deprecated) # is_folder # is_empty @property def directory(self): "Folder(s) relative from site.directory" warnings.warn("directory will be removed with 3.6, use path_relative_directory instead.", DeprecationWarning) return path_strip(self.path, self.site.directory) @property def folder(self): "Parent folder(s)" warnings.warn("directory will be removed with 3.6, use dirname instead.", DeprecationWarning) return os.path.dirname(path_strip(os.path.join(self.head, ''), self.site.directory)) _is_folder_stored = None @property def is_folder(self): "True, if path is a folder" if self._is_folder_stored is None: self._is_folder_stored = self.site.storage.isdir(self.path) return self._is_folder_stored @property def is_empty(self): "True, if folder is empty. False otherwise, or if the object is not a folder." 
        if self.is_folder:
            dirs, files = self.site.storage.listdir(self.path)
            # Empty means no subdirectories and no files.
            if not dirs and not files:
                return True
        return False

    # VERSION ATTRIBUTES/PROPERTIES
    # is_version
    # versions_basedir
    # original
    # original_filename

    @property
    def is_version(self):
        "True if the file is a generated version, False if it is an original."
        # Naming convention: versions are "<root>_<suffix>.<ext>" where <suffix>
        # is a key of the VERSIONS setting. Only the part after the LAST "_" is
        # checked, so an original whose name happens to end in "_<suffix>" is
        # misdetected -- known limitation of the naming scheme.
        tmp = self.filename_root.split("_")
        if tmp[len(tmp) - 1] in VERSIONS:
            return True
        return False

    @property
    def versions_basedir(self):
        "Main directory for storing versions (either VERSIONS_BASEDIR or site.directory)"
        # Precedence: explicit VERSIONS_BASEDIR setting wins, then the site's
        # directory, then the storage root ("").
        if VERSIONS_BASEDIR:
            return VERSIONS_BASEDIR
        elif self.site.directory:
            return self.site.directory
        else:
            return ""

    @property
    def original(self):
        "Returns the original FileObject for a version (self if not a version)."
        if self.is_version:
            # Rebase this version's parent folder from versions_basedir back
            # onto site.directory, then swap in the original filename.
            # NOTE(review): plain str.replace() assumes versions_basedir occurs
            # only once, at the start of self.head -- confirm for nested setups.
            relative_path = self.head.replace(self.versions_basedir, "").lstrip("/")
            return FileObject(os.path.join(self.site.directory, relative_path, self.original_filename), site=self.site)
        return self

    @property
    def original_filename(self):
        "Get the filename of an original image from a version"
        # Strip the trailing "_<suffix>" (see is_version) and re-append the
        # extension; if no version suffix is present, return the name unchanged.
        tmp = self.filename_root.split("_")
        if tmp[len(tmp) - 1] in VERSIONS:
            return u"%s%s" % (self.filename_root.replace("_%s" % tmp[len(tmp) - 1], ""), self.extension)
        return self.filename

    # VERSION METHODS
    # versions()
    # admin_versions()
    # version_name(suffix)
    # version_path(suffix)
    # version_generate(suffix)

    def versions(self):
        "List of version paths (existence is NOT checked); images only, never for a version itself."
        version_list = []
        if self.filetype == "Image" and not self.is_version:
            # sorted() gives a deterministic order; VERSIONS keys are the suffixes.
            for version in sorted(VERSIONS):
                version_list.append(os.path.join(self.versions_basedir, self.dirname, self.version_name(version)))
        return version_list

    def admin_versions(self):
        "List of admin version paths (existence is NOT checked); images only, never for a version itself."
        version_list = []
        if self.filetype == "Image" and not self.is_version:
            for version in ADMIN_VERSIONS:
                version_list.append(os.path.join(self.versions_basedir, self.dirname, self.version_name(version)))
        return version_list

    def version_name(self, version_suffix):
        "Name of a version"
        # FIXME: version_name for
version? return self.filename_root + "_" + version_suffix + self.extension def version_path(self, version_suffix): "Path to a version (relative to storage location)" # FIXME: version_path for version? return os.path.join(self.versions_basedir, self.dirname, self.version_name(version_suffix)) def version_generate(self, version_suffix): "Generate a version" # FIXME: version_generate for version? path = self.path version_path = self.version_path(version_suffix) if not self.site.storage.isfile(version_path): version_path = self._generate_version(version_suffix) elif self.site.storage.modified_time(path) > self.site.storage.modified_time(version_path): version_path = self._generate_version(version_suffix) return FileObject(version_path, site=self.site) def _generate_version(self, version_suffix): """ Generate Version for an Image. value has to be a path relative to the storage location. """ tmpfile = File(NamedTemporaryFile()) try: f = self.site.storage.open(self.path) except IOError: return "" im = Image.open(f) version_path = self.version_path(version_suffix) version_dir, version_basename = os.path.split(version_path) root, ext = os.path.splitext(version_basename) version = scale_and_crop(im, VERSIONS[version_suffix]['width'], VERSIONS[version_suffix]['height'], VERSIONS[version_suffix]['opts']) if not version: version = im # version methods as defined with VERSIONS if 'methods' in VERSIONS[version_suffix].keys(): for m in VERSIONS[version_suffix]['methods']: if callable(m): version = m(version) # save version try: version.save(tmpfile, format=Image.EXTENSION[ext.lower()], quality=VERSION_QUALITY, optimize=(os.path.splitext(version_path)[1] != '.gif')) except IOError: version.save(tmpfile, format=Image.EXTENSION[ext.lower()], quality=VERSION_QUALITY) # remove old version, if any if version_path != self.site.storage.get_available_name(version_path): self.site.storage.delete(version_path) self.site.storage.save(version_path, tmpfile) # set permissions if 
DEFAULT_PERMISSIONS is not None:
            os.chmod(self.site.storage.path(version_path), DEFAULT_PERMISSIONS)
        return version_path

    # DELETE METHODS
    # delete()
    # delete_versions()
    # delete_admin_versions()

    def delete(self):
        "Delete FileObject (deletes a folder recursively)"
        if self.is_folder:
            # rmtree removes the folder together with all of its contents.
            self.site.storage.rmtree(self.path)
        else:
            self.site.storage.delete(self.path)

    def delete_versions(self):
        "Best-effort delete of all generated versions of this file."
        for version in self.versions():
            try:
                self.site.storage.delete(version)
            except:
                # Best effort: versions() lists candidates without checking
                # existence, so deleting a missing file is expected and ignored.
                # NOTE(review): the bare except also hides real storage errors
                # (and swallows KeyboardInterrupt) -- consider "except Exception".
                pass

    def delete_admin_versions(self):
        "Best-effort delete of all admin versions of this file."
        for version in self.admin_versions():
            try:
                self.site.storage.delete(version)
            except:
                # Same best-effort rationale (and caveat) as delete_versions().
                pass
{ "content_hash": "29d115976615c86f769cc3a61ffc1e3e", "timestamp": "", "source": "github", "line_count": 570, "max_line_length": 203, "avg_line_length": 32.80701754385965, "alnum_prop": 0.6154545454545455, "repo_name": "nemesisdesign/django-filebrowser", "id": "6881a43faf382d89406cb5f94a7b1600a218edbd", "size": "18734", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "filebrowser/base.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "11293" }, { "name": "HTML", "bytes": "49025" }, { "name": "JavaScript", "bytes": "50111" }, { "name": "Python", "bytes": "166625" } ], "symlink_target": "" }
<?php

/**
 * This file is automatically generated. Use 'arc liberate' to rebuild it.
 * @generated
 * @phutil-library-version 2
 */
phutil_register_library_map(array(
  '__library_version__' => 2,
  // Class name => source file, relative to this library's root.
  'class' => array(
    'TravicatorArcanistConfiguration' => 'src/TravicatorArcanistConfiguration.php',
  ),
  // Function name => source file; this library defines no free functions.
  'function' => array(
  ),
  // Extension map: records that TravicatorArcanistConfiguration extends
  // ArcanistConfiguration (libphutil convention -- confirm against arc docs).
  'xmap' => array(
    'TravicatorArcanistConfiguration' => 'ArcanistConfiguration',
  ),
));
{ "content_hash": "33c290efbeb73cca1e6d2e85e079bd3c", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 83, "avg_line_length": 19.818181818181817, "alnum_prop": 0.6559633027522935, "repo_name": "afterburner/travicator", "id": "f50f733f344d4136546e9ae92a511232e576d6e0", "size": "436", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "assets/arc/__phutil_library_map__.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "1224" }, { "name": "Ruby", "bytes": "16749" } ], "symlink_target": "" }
from typing import ( # pylint: disable=unused-import Union, Optional, Any, TYPE_CHECKING ) from azure.storage.blob import generate_account_sas as generate_blob_account_sas from azure.storage.blob import generate_container_sas, generate_blob_sas if TYPE_CHECKING: from datetime import datetime from ._models import AccountSasPermissions, FileSystemSasPermissions, FileSasPermissions, ResourceTypes, \ UserDelegationKey def generate_account_sas( account_name, # type: str account_key, # type: str resource_types, # type: Union[ResourceTypes, str] permission, # type: Union[AccountSasPermissions, str] expiry, # type: Optional[Union[datetime, str]] **kwargs # type: Any ): # type: (...) -> str """Generates a shared access signature for the DataLake service. Use the returned signature as the credential parameter of any DataLakeServiceClient, FileSystemClient, DataLakeDirectoryClient or DataLakeFileClient. :param str account_name: The storage account name used to generate the shared access signature. :param str account_key: The access key to generate the shared access signature. :param resource_types: Specifies the resource types that are accessible with the account SAS. :type resource_types: str or ~azure.storage.filedatalake.ResourceTypes :param permission: The permissions associated with the shared access signature. The user is restricted to operations allowed by the permissions. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. :type permission: str or ~azure.storage.filedatalake.AccountSasPermissions :param expiry: The time at which the shared access signature becomes invalid. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. Azure will always convert values to UTC. 
If a date is passed in without timezone info, it is assumed to be UTC. :type expiry: ~datetime.datetime or str :keyword start: The time at which the shared access signature becomes valid. If omitted, start time for this call is assumed to be the time when the storage service receives the request. Azure will always convert values to UTC. If a date is passed in without timezone info, it is assumed to be UTC. :paramtype start: ~datetime.datetime or str :keyword str ip: Specifies an IP address or a range of IP addresses from which to accept requests. If the IP address from which the request originates does not match the IP address or address range specified on the SAS token, the request is not authenticated. For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS restricts the request to those IP addresses. :keyword str protocol: Specifies the protocol permitted for a request made. The default value is https. :keyword str encryption_scope: Specifies the encryption scope for a request made so that all write operations will be service encrypted. :return: A Shared Access Signature (sas) token. :rtype: str """ return generate_blob_account_sas( account_name=account_name, account_key=account_key, resource_types=resource_types, permission=permission, expiry=expiry, **kwargs ) def generate_file_system_sas( account_name, # type: str file_system_name, # type: str credential, # type: Union[str, UserDelegationKey] permission=None, # type: Optional[Union[FileSystemSasPermissions, str]] expiry=None, # type: Optional[Union[datetime, str]] **kwargs # type: Any ): # type: (...) -> str """Generates a shared access signature for a file system. Use the returned signature with the credential parameter of any DataLakeServiceClient, FileSystemClient, DataLakeDirectoryClient or DataLakeFileClient. :param str account_name: The storage account name used to generate the shared access signature. :param str file_system_name: The name of the file system. 
:param str credential: Credential could be either account key or user delegation key. If use account key is used as credential, then the credential type should be a str. Instead of an account key, the user could also pass in a user delegation key. A user delegation key can be obtained from the service by authenticating with an AAD identity; this can be accomplished by calling :func:`~azure.storage.filedatalake.DataLakeServiceClient.get_user_delegation_key`. When present, the SAS is signed with the user delegation key instead. :type credential: str or ~azure.storage.filedatalake.UserDelegationKey :param permission: The permissions associated with the shared access signature. The user is restricted to operations allowed by the permissions. Permissions must be ordered racwdlmeop. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. :type permission: str or ~azure.storage.filedatalake.FileSystemSasPermissions :param expiry: The time at which the shared access signature becomes invalid. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. Azure will always convert values to UTC. If a date is passed in without timezone info, it is assumed to be UTC. :type expiry: datetime or str :keyword start: The time at which the shared access signature becomes valid. If omitted, start time for this call is assumed to be the time when the storage service receives the request. Azure will always convert values to UTC. If a date is passed in without timezone info, it is assumed to be UTC. :paramtype start: datetime or str :keyword str ip: Specifies an IP address or a range of IP addresses from which to accept requests. 
If the IP address from which the request originates does not match the IP address or address range specified on the SAS token, the request is not authenticated. For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS restricts the request to those IP addresses. :keyword str protocol: Specifies the protocol permitted for a request made. The default value is https. :keyword str cache_control: Response header value for Cache-Control when resource is accessed using this shared access signature. :keyword str content_disposition: Response header value for Content-Disposition when resource is accessed using this shared access signature. :keyword str content_encoding: Response header value for Content-Encoding when resource is accessed using this shared access signature. :keyword str content_language: Response header value for Content-Language when resource is accessed using this shared access signature. :keyword str content_type: Response header value for Content-Type when resource is accessed using this shared access signature. :keyword str preauthorized_agent_object_id: The AAD object ID of a user assumed to be authorized by the owner of the user delegation key to perform the action granted by the SAS token. The service will validate the SAS token and ensure that the owner of the user delegation key has the required permissions before granting access but no additional permission check for the agent object id will be performed. :keyword str agent_object_id: The AAD object ID of a user assumed to be unauthorized by the owner of the user delegation key to perform the action granted by the SAS token. The service will validate the SAS token and ensure that the owner of the user delegation key has the required permissions before granting access and the service will perform an additional POSIX ACL check to determine if this user is authorized to perform the requested operation. 
:keyword str correlation_id: The correlation id to correlate the storage audit logs with the audit logs used by the principal generating and distributing the SAS. :keyword str encryption_scope: Specifies the encryption scope for a request made so that all write operations will be service encrypted. :return: A Shared Access Signature (sas) token. :rtype: str """ return generate_container_sas( account_name=account_name, container_name=file_system_name, account_key=credential if isinstance(credential, str) else None, user_delegation_key=credential if not isinstance(credential, str) else None, permission=permission, expiry=expiry, **kwargs) def generate_directory_sas( account_name, # type: str file_system_name, # type: str directory_name, # type: str credential, # type: Union[str, UserDelegationKey] permission=None, # type: Optional[Union[FileSasPermissions, str]] expiry=None, # type: Optional[Union[datetime, str]] **kwargs # type: Any ): # type: (...) -> str """Generates a shared access signature for a directory. Use the returned signature with the credential parameter of any DataLakeServiceClient, FileSystemClient, DataLakeDirectoryClient or DataLakeFileClient. :param str account_name: The storage account name used to generate the shared access signature. :param str file_system_name: The name of the file system. :param str directory_name: The name of the directory. :param str credential: Credential could be either account key or user delegation key. If use account key is used as credential, then the credential type should be a str. Instead of an account key, the user could also pass in a user delegation key. A user delegation key can be obtained from the service by authenticating with an AAD identity; this can be accomplished by calling :func:`~azure.storage.filedatalake.DataLakeServiceClient.get_user_delegation_key`. When present, the SAS is signed with the user delegation key instead. 
:type credential: str or ~azure.storage.filedatalake.UserDelegationKey :param permission: The permissions associated with the shared access signature. The user is restricted to operations allowed by the permissions. Permissions must be ordered racwdlmeop. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. :type permission: str or ~azure.storage.filedatalake.FileSasPermissions :param expiry: The time at which the shared access signature becomes invalid. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. Azure will always convert values to UTC. If a date is passed in without timezone info, it is assumed to be UTC. :type expiry: ~datetime.datetime or str :keyword start: The time at which the shared access signature becomes valid. If omitted, start time for this call is assumed to be the time when the storage service receives the request. Azure will always convert values to UTC. If a date is passed in without timezone info, it is assumed to be UTC. :paramtype start: ~datetime.datetime or str :keyword str ip: Specifies an IP address or a range of IP addresses from which to accept requests. If the IP address from which the request originates does not match the IP address or address range specified on the SAS token, the request is not authenticated. For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS restricts the request to those IP addresses. :keyword str protocol: Specifies the protocol permitted for a request made. The default value is https. :keyword str cache_control: Response header value for Cache-Control when resource is accessed using this shared access signature. 
:keyword str content_disposition: Response header value for Content-Disposition when resource is accessed using this shared access signature. :keyword str content_encoding: Response header value for Content-Encoding when resource is accessed using this shared access signature. :keyword str content_language: Response header value for Content-Language when resource is accessed using this shared access signature. :keyword str content_type: Response header value for Content-Type when resource is accessed using this shared access signature. :keyword str preauthorized_agent_object_id: The AAD object ID of a user assumed to be authorized by the owner of the user delegation key to perform the action granted by the SAS token. The service will validate the SAS token and ensure that the owner of the user delegation key has the required permissions before granting access but no additional permission check for the agent object id will be performed. :keyword str agent_object_id: The AAD object ID of a user assumed to be unauthorized by the owner of the user delegation key to perform the action granted by the SAS token. The service will validate the SAS token and ensure that the owner of the user delegation key has the required permissions before granting access and the service will perform an additional POSIX ACL check to determine if this user is authorized to perform the requested operation. :keyword str correlation_id: The correlation id to correlate the storage audit logs with the audit logs used by the principal generating and distributing the SAS. :keyword str encryption_scope: Specifies the encryption scope for a request made so that all write operations will be service encrypted. :return: A Shared Access Signature (sas) token. 
    :rtype: str
    """
    # A directory-scoped SAS must carry the signed directory depth ("sdd"):
    # the number of path segments under the file system root. Leading/trailing
    # slashes are stripped first, so "/a/b/" and "a/b" both give depth 2.
    # NOTE(review): an empty directory_name still yields depth 1 -- confirm
    # callers never pass "" here.
    depth = len(directory_name.strip("/").split("/"))
    return generate_blob_sas(
        account_name=account_name,
        container_name=file_system_name,
        blob_name=directory_name,
        # Exactly one of account_key / user_delegation_key is set, depending on
        # whether the caller passed a str account key or a UserDelegationKey.
        account_key=credential if isinstance(credential, str) else None,
        user_delegation_key=credential if not isinstance(credential, str) else None,
        permission=permission,
        expiry=expiry,
        sdd=depth,
        is_directory=True,
        **kwargs)


def generate_file_sas(
        account_name,  # type: str
        file_system_name,  # type: str
        directory_name,  # type: str
        file_name,  # type: str
        credential,  # type: Union[str, UserDelegationKey]
        permission=None,  # type: Optional[Union[FileSasPermissions, str]]
        expiry=None,  # type: Optional[Union[datetime, str]]
        **kwargs  # type: Any
    ):
    # type: (...) -> str
    """Generates a shared access signature for a file.

    Use the returned signature with the credential parameter of any DataLakeServiceClient,
    FileSystemClient, DataLakeDirectoryClient or DataLakeFileClient.

    :param str account_name:
        The storage account name used to generate the shared access signature.
    :param str file_system_name:
        The name of the file system.
    :param str directory_name:
        The name of the directory.
    :param str file_name:
        The name of the file.
    :param str credential:
        Credential could be either account key or user delegation key.
        If an account key is used as credential, then the credential type should be a str.
        Instead of an account key, the user could also pass in a user delegation key.
        A user delegation key can be obtained from the service by authenticating with an AAD identity;
        this can be accomplished
        by calling :func:`~azure.storage.filedatalake.DataLakeServiceClient.get_user_delegation_key`.
        When present, the SAS is signed with the user delegation key instead.
    :type credential: str or ~azure.storage.filedatalake.UserDelegationKey
    :param permission:
        The permissions associated with the shared access signature. The
        user is restricted to operations allowed by the permissions.
        Permissions must be ordered racwdlmeop.
Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. :type permission: str or ~azure.storage.filedatalake.FileSasPermissions :param expiry: The time at which the shared access signature becomes invalid. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. Azure will always convert values to UTC. If a date is passed in without timezone info, it is assumed to be UTC. :type expiry: ~datetime.datetime or str :keyword start: The time at which the shared access signature becomes valid. If omitted, start time for this call is assumed to be the time when the storage service receives the request. Azure will always convert values to UTC. If a date is passed in without timezone info, it is assumed to be UTC. :paramtype start: ~datetime.datetime or str :keyword str ip: Specifies an IP address or a range of IP addresses from which to accept requests. If the IP address from which the request originates does not match the IP address or address range specified on the SAS token, the request is not authenticated. For example, specifying ip=168.1.5.65 or ip=168.1.5.60-168.1.5.70 on the SAS restricts the request to those IP addresses. :keyword str protocol: Specifies the protocol permitted for a request made. The default value is https. :keyword str cache_control: Response header value for Cache-Control when resource is accessed using this shared access signature. :keyword str content_disposition: Response header value for Content-Disposition when resource is accessed using this shared access signature. :keyword str content_encoding: Response header value for Content-Encoding when resource is accessed using this shared access signature. 
:keyword str content_language: Response header value for Content-Language when resource is accessed using this shared access signature. :keyword str content_type: Response header value for Content-Type when resource is accessed using this shared access signature. :keyword str preauthorized_agent_object_id: The AAD object ID of a user assumed to be authorized by the owner of the user delegation key to perform the action granted by the SAS token. The service will validate the SAS token and ensure that the owner of the user delegation key has the required permissions before granting access but no additional permission check for the agent object id will be performed. :keyword str agent_object_id: The AAD object ID of a user assumed to be unauthorized by the owner of the user delegation key to perform the action granted by the SAS token. The service will validate the SAS token and ensure that the owner of the user delegation key has the required permissions before granting access and the service will perform an additional POSIX ACL check to determine if this user is authorized to perform the requested operation. :keyword str correlation_id: The correlation id to correlate the storage audit logs with the audit logs used by the principal generating and distributing the SAS. This can only be used when to generate sas with delegation key. :keyword str encryption_scope: Specifies the encryption scope for a request made so that all write operations will be service encrypted. :return: A Shared Access Signature (sas) token. :rtype: str """ if directory_name: path = directory_name.rstrip('/') + "/" + file_name else: path = file_name return generate_blob_sas( account_name=account_name, container_name=file_system_name, blob_name=path, account_key=credential if isinstance(credential, str) else None, user_delegation_key=credential if not isinstance(credential, str) else None, permission=permission, expiry=expiry, **kwargs)
{ "content_hash": "d9949409657237071d1db7491bdcb64c", "timestamp": "", "source": "github", "line_count": 396, "max_line_length": 118, "avg_line_length": 54.64646464646464, "alnum_prop": 0.711275415896488, "repo_name": "Azure/azure-sdk-for-python", "id": "6555dce5d2ec590f0db8ef5068e2c1f616bd56dd", "size": "21950", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared_access_signature.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "1224" }, { "name": "Bicep", "bytes": "24196" }, { "name": "CSS", "bytes": "6089" }, { "name": "Dockerfile", "bytes": "4892" }, { "name": "HTML", "bytes": "12058" }, { "name": "JavaScript", "bytes": "8137" }, { "name": "Jinja", "bytes": "10377" }, { "name": "Jupyter Notebook", "bytes": "272022" }, { "name": "PowerShell", "bytes": "518535" }, { "name": "Python", "bytes": "715484989" }, { "name": "Shell", "bytes": "3631" } ], "symlink_target": "" }
package org.res4j.util;

import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.jar.Attributes;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import java.util.regex.Pattern;

/**
 * Utility class that finds all matching <i>local</i> classes and resources that can be reached from the given
 * {@link ClassLoader}. <b>Limitation:</b> currently only URLClassLoader and only "file://" urls are supported.
 * <b>Workaround:</b> list all class names, separated with vertical bars ('|'). In this case, the {@link ClassLoader}
 * will not even be used.
 *
 * This implementation uses a refactored, res4j-specific version of private, nested <a
 * href="https://code.google.com/p/guava-libraries/wiki/Release15">Google Guava 15.0</a> class <a href=
 * "http://docs.guava-libraries.googlecode.com/git-history/v15.0/javadoc/com/google/common/reflect/ClassPath.html"
 * >com.google.common.reflect.ClassPath.Scanner</a>.
 */
public final class ClassPath {

    /**
     * Separator for the Class-Path manifest attribute value in jar files. Per the JAR File Specification the
     * entries are separated by one or more whitespace characters, hence "\\s+".
     * FIX: the previous "\\s*" matched the empty string between any two characters, so {@code split} broke
     * every entry into single-character fragments and no manifest Class-Path entry could ever be resolved.
     */
    private static final Pattern CLASS_PATH_ATTRIBUTE_SEPARATOR = Pattern.compile("\\s+");

    /** Characters that mark an expression as a wildcard expression (as opposed to a literal name list). */
    private static final Pattern WILDCARD_CHARACTERS = Pattern.compile("[\\*\\?]");

    /** Separator for the literal (non-wildcard) list of names: vertical bar with optional surrounding whitespace. */
    private static final Pattern SPLITTER = Pattern.compile("\\s*\\|\\s*");

    /** Length of the ".class" suffix stripped when converting a class resource name to a class name. */
    private static final int CLASS_SUFFIX_LENGTH = ".class".length();

    private ClassPath() {
        // Utility class never instantiated.
        throw new AssertionError();
    }

    /**
     * Returns resource names matching the given wildcard expression(s).
     * If the expression contains no wildcard characters it is treated as a literal, '|'-separated list and the
     * class loader is not consulted at all.
     *
     * @param wildResourceNames wildcard expression(s), '|'-separated.
     * @param loader class loader to scan; when null, the current thread's context class loader is used.
     * @return matching resource names.
     */
    public static Collection<String> getResources(String wildResourceNames, ClassLoader loader) throws Exception {
        wildResourceNames = wildResourceNames.trim();
        Collection<String> result = getWithoutWildcard(wildResourceNames);
        if (result == null) {
            Pattern regex = Wildcard.FILE.toPattern(wildResourceNames);
            result = getResources(regex, loader);
        }
        return result;
    }

    /**
     * Scans every reachable "file://" class path entry and returns resource names matching the regex.
     *
     * @param regex compiled pattern matched against full resource names (e.g. "com/acme/Foo.class").
     * @param loader class loader to scan; when null, the current thread's context class loader is used.
     */
    public static Collection<String> getResources(Pattern regex, ClassLoader loader) throws IOException {
        if (loader == null) {
            loader = Thread.currentThread().getContextClassLoader();
        }
        Scanner scanner = new Scanner(regex);
        for (Map.Entry<URI, ClassLoader> entry : getClassPathEntries(loader).entrySet()) {
            scanner.scan(entry.getKey(), entry.getValue());
        }
        return scanner.getResources();
    }

    /**
     * Returns the list of class names matching the given list of wildcard expressions. Expressions are separated with
     * vertical bar ('|') and any number of whitespace characters: [ \t\n\x0B\f\r]. Use wildcard ** to match 0 or more
     * packages. Use wildcard * to match all classes in the package.
     *
     * @param wildClassNames
     *            '|'-separated wildcard expressions.
     * @return The list of all fully-qualified class names.
     * @throws IOException
     */
    public static Collection<String> getClasses(String wildClassNames, ClassLoader loader) throws IOException {
        wildClassNames = wildClassNames.trim();
        Collection<String> result = getWithoutWildcard(wildClassNames);
        if (result == null) {
            Pattern regex = Wildcard.CLASS.toPattern(wildClassNames);
            result = getClasses(regex, loader);
        }
        return result;
    }

    /**
     * Scans the class path and converts every matching ".class" resource name to a fully-qualified class name.
     */
    public static Collection<String> getClasses(Pattern regex, ClassLoader loader) throws IOException {
        Collection<String> classResources = getResources(regex, loader);
        List<String> result = new ArrayList<String>(classResources.size());
        for (String classResource : classResources) {
            // Removes ".class" and then replaces any "/" with a ".".
            int classNameEnd = classResource.length() - CLASS_SUFFIX_LENGTH;
            String className = classResource.substring(0, classNameEnd).replace('/', '.');
            result.add(className);
        }
        return result;
    }

    /**
     * Optimize for the simple case where the expression is a simple list of class names. When the class loader uses
     * protocol other that "file", this will be the workaround.
     *
     * @return the literal name list, or null when the expression contains wildcard characters.
     */
    private static Collection<String> getWithoutWildcard(String names) {
        Collection<String> result = null;
        if (!WILDCARD_CHARACTERS.matcher(names).find()) {
            String[] nameArray = SPLITTER.split(names);
            result = Arrays.asList(nameArray);
        }
        return result;
    }

    /**
     * Collects all URL class path entries reachable from the given class loader and its parents,
     * keyed by URI. Parent entries come first, mirroring ClassLoader#loadClass() delegation order.
     */
    private static Map<URI, ClassLoader> getClassPathEntries(ClassLoader classloader) {
        LinkedHashMap<URI, ClassLoader> entries = new LinkedHashMap<URI, ClassLoader>();
        // Search parent first, since it's the order ClassLoader#loadClass() uses.
        ClassLoader parent = classloader.getParent();
        if (parent != null) {
            entries.putAll(getClassPathEntries(parent));
        }
        if (classloader instanceof URLClassLoader) {
            URLClassLoader urlClassLoader = (URLClassLoader) classloader;
            for (URL entry : urlClassLoader.getURLs()) {
                URI uri;
                try {
                    uri = entry.toURI();
                } catch (URISyntaxException e) {
                    throw new IllegalArgumentException(e);
                }
                if (!entries.containsKey(uri)) {
                    entries.put(uri, classloader);
                }
            }
        }
        return entries;
    }

    /**
     * Stateful scanner that walks directories and jar files, collecting resource names matching a regex.
     * Tracks already-scanned URIs so Class-Path manifest cycles do not cause repeated work.
     */
    private static final class Scanner {
        private final Pattern regex;
        private final SortedSet<String> resources = new TreeSet<String>();
        private final Set<URI> scannedUris = new HashSet<URI>();

        private Scanner(Pattern regex) {
            this.regex = regex;
        }

        private SortedSet<String> getResources() {
            return resources;
        }

        /** Records the resource name when it matches the regex (a null regex matches everything). */
        private void add(String resourceName) {
            if (regex == null || regex.matcher(resourceName).matches()) {
                resources.add(resourceName);
            }
        }

        /** Scans the URI once; only "file" URIs are supported (see class javadoc limitation). */
        private void scan(URI uri, ClassLoader classloader) throws IOException {
            if (uri.getScheme().equals("file") && scannedUris.add(uri)) {
                scanFrom(new File(uri), classloader);
            }
        }

        private void scanFrom(File file, ClassLoader classloader) throws IOException {
            if (!file.exists()) {
                return;
            }
            if (file.isDirectory()) {
                scanDirectory(file, classloader);
            } else {
                scanJar(file, classloader);
            }
        }

        private void scanDirectory(File directory, ClassLoader classloader) throws IOException {
            scanDirectory(directory, classloader, "", new HashSet<File>());
        }

        /**
         * Recursive directory walk; {@code ancestors} holds canonical files already on the current path
         * so symlink cycles terminate.
         */
        private void scanDirectory(File directory, ClassLoader classloader, String packagePrefix, Set<File> ancestors)
                throws IOException {
            File canonical = directory.getCanonicalFile();
            if (ancestors.contains(canonical)) {
                // A cycle in the filesystem, for example due to a symbolic link.
                return;
            }
            File[] files = directory.listFiles();
            if (files == null) {
                LogUtil.warning("Cannot read directory " + directory);
                // IO error, just skip the directory
                return;
            }
            Set<File> newAncestors = new HashSet<File>();
            newAncestors.addAll(ancestors);
            newAncestors.add(canonical);
            for (File f : files) {
                String name = f.getName();
                if (f.isDirectory()) {
                    scanDirectory(f, classloader, packagePrefix + name + "/", newAncestors);
                } else {
                    String resourceName = packagePrefix + name;
                    if (!resourceName.equals(JarFile.MANIFEST_NAME)) {
                        add(resourceName);
                    }
                }
            }
        }

        /**
         * Scans a jar file: first follows its manifest Class-Path entries (recursively), then records
         * every non-directory, non-manifest entry name.
         */
        private void scanJar(File file, ClassLoader classloader) throws IOException {
            JarFile jarFile;
            try {
                jarFile = new JarFile(file);
            } catch (IOException e) {
                // Not a jar file
                return;
            }
            try {
                for (URI uri : getClassPathFromManifest(file, jarFile.getManifest())) {
                    scan(uri, classloader);
                }
                Enumeration<JarEntry> entries = jarFile.entries();
                while (entries.hasMoreElements()) {
                    JarEntry entry = entries.nextElement();
                    if (entry.isDirectory() || entry.getName().equals(JarFile.MANIFEST_NAME)) {
                        continue;
                    }
                    add(entry.getName());
                }
            } finally {
                try {
                    jarFile.close();
                } catch (IOException ignored) {
                }
            }
        }

        /**
         * Returns the class path URIs specified by the {@code Class-Path} manifest attribute, according to <a
         * href="http://docs.oracle.com/javase/6/docs/technotes/guides/jar/jar.html#Main%20Attributes"> JAR File
         * Specification</a>. If {@code manifest} is null, it means the jar file has no manifest, and an empty set will
         * be returned.
         */
        private static Set<URI> getClassPathFromManifest(File jarFile, Manifest manifest) {
            if (manifest == null) {
                return Collections.emptySet();
            }
            Set<URI> builder = new HashSet<URI>();
            String classpathAttribute = manifest.getMainAttributes().getValue(Attributes.Name.CLASS_PATH.toString());
            if (classpathAttribute != null) {
                for (String path : CLASS_PATH_ATTRIBUTE_SEPARATOR.split(classpathAttribute)) {
                    if (path.isEmpty()) {
                        // Leading whitespace in the attribute yields one empty token; ignore it.
                        continue;
                    }
                    URI uri;
                    try {
                        uri = getClassPathEntry(jarFile, path);
                    } catch (URISyntaxException e) {
                        // Ignore bad entry
                        LogUtil.warning("Invalid Class-Path entry: " + path);
                        continue;
                    }
                    builder.add(uri);
                }
            }
            return builder;
        }

        /**
         * Returns the absolute uri of the Class-Path entry value as specified in <a
         * href="http://docs.oracle.com/javase/6/docs/technotes/guides/jar/jar.html#Main%20Attributes"> JAR File
         * Specification</a>. Even though the specification only talks about relative urls, absolute urls are actually
         * supported too (for example, in Maven surefire plugin).
         */
        private static URI getClassPathEntry(File jarFile, String path) throws URISyntaxException {
            URI uri = new URI(path);
            if (uri.isAbsolute()) {
                return uri;
            } else {
                return new File(jarFile.getParentFile(), path.replace('/', File.separatorChar)).toURI();
            }
        }
    }
}
{ "content_hash": "c334d44c951bb185eececae4cde8e3e6", "timestamp": "", "source": "github", "line_count": 282, "max_line_length": 117, "avg_line_length": 34.36524822695036, "alnum_prop": 0.703229800846146, "repo_name": "res4j/res4j", "id": "b14b7660c999b0eee59b6496086513f2222875e6", "size": "9691", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "res4j-jar/src/main/java/org/res4j/util/ClassPath.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "272926" } ], "symlink_target": "" }
# Test/lint runner for gitlint. Sub-commands are selected by CLI flags; the
# default action (no flags) runs the unit tests with coverage.

help(){
    echo "Usage: $0 [OPTION]..."
    echo "Run gitlint's test suite(s) or some convience commands"
    echo "  -h, --help               Show this help output"
    echo "  -p, --pep8               Run pep8 checks"
    echo "  -l, --lint               Run pylint checks"
    echo "  -s, --stats              Show some project stats"
    echo "  --no-coverage            Don't make a unit test coverage report"
    echo ""
    exit 0;
}

run_pep8_check(){
    # FLAKE 8
    # H307: like imports should be grouped together
    # H405: multi line docstring summary not separated with an empty line
    # H803: git title must end with a period
    # H904: Wrap long lines in parentheses instead of a backslash
    # H802: git commit title should be under 50 chars
    # H701: empty localization string
    FLAKE8_IGNORE="H307,H405,H803,H904,H802,H701"
    # exclude settings files and virtualenvs
    FLAKE8_EXCLUDE="*settings.py,*.venv/*.py"
    echo "Running flake8..."
    flake8 --ignore=$FLAKE8_IGNORE --max-line-length=120 --exclude=$FLAKE8_EXCLUDE gitlint
}

run_lint_check(){
    # FIX: the -l/--lint flag was parsed but never acted upon (it silently fell
    # through to the unit tests); wire it up to pylint as the help text promises.
    echo "Running pylint..."
    pylint gitlint
}

run_unit_tests(){
    # Directories excluded from the coverage report.
    OMIT="*dist-packages*,*site-packages*,gitlint/tests/*,.venv/*,virtualenv/*"

    if [ -n "$testargs" ]; then
        # if the test is specified, do some string manipulation to replace paths with qualified paths
        # this way, you can pass a test file path to the CLI which is convenient
        testargs="${testargs//\//.}"  # replace slashes with dots
        testargs="${testargs/.py/}"   # remove trailing .py
        coverage run --omit=$OMIT -m unittest -v "$testargs"
    else
        coverage run --omit=$OMIT -m unittest discover -v
    fi
    # Capture the test exit code before the coverage report overwrites $?.
    TEST_RESULT=$?

    if [ $include_coverage -eq 1 ]; then
        COVERAGE_REPORT=$(coverage report -m)
        echo "$COVERAGE_REPORT"
    fi

    if [ $TEST_RESULT -gt 0 ]; then
        exit $TEST_RESULT;
    fi
}

run_stats(){
    echo "*** Code ***"
    # Raw line counts via radon; keep only the summary.
    radon raw -s gitlint | tail -n 6
}

# default behavior
just_pep8=0
just_lint=0
just_stats=0
include_coverage=1
testargs=""

# Parse CLI flags; any non-flag argument is treated as a test target.
while [ "$#" -gt 0 ]; do
    case "$1" in
        -h|--help) shift; help;;
        -p|--pep8) shift; just_pep8=1;;
        -l|--lint) shift; just_lint=1;;
        -s|--stats) shift; just_stats=1;;
        --no-coverage)shift; include_coverage=0;;
        *) testargs="$1"; shift;
    esac
done

if [ $just_pep8 -eq 1 ]; then
    run_pep8_check
    exit $?
fi

if [ $just_lint -eq 1 ]; then
    run_lint_check
    exit $?
fi

if [ $just_stats -eq 1 ]; then
    run_stats
    exit $?
fi

run_unit_tests || exit
{ "content_hash": "a6d0021a1dea31db96a3c73242e7126f", "timestamp": "", "source": "github", "line_count": 83, "max_line_length": 101, "avg_line_length": 29.36144578313253, "alnum_prop": 0.6060730406237177, "repo_name": "tobyoxborrow/gitlint", "id": "b17e4348735fac5f2ae811557a4d0d0f574b8890", "size": "2454", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "run_tests.sh", "mode": "33261", "license": "mit", "language": [ { "name": "Python", "bytes": "69254" }, { "name": "Shell", "bytes": "3411" } ], "symlink_target": "" }
package com.buransky.javaStaticHell

import java.io.{BufferedWriter, PrintWriter, File}
import java.util.jar.JarFile
import com.buransky.javaStaticHell.api.impl.ClassVisitor
import org.apache.bcel.classfile.{ClassFormatException, ClassParser}
import scala.collection.JavaConversions._
import scala.collection.mutable.ListBuffer

/**
 * Application command line arguments.
 * fdp -Tsvg -o graph.svg ./graph.dot
 *
 * @param jarFilePath Path to jar file to be analysed.
 */
case class JavaStaticHellAppArgs(jarFilePath: File) {
  if (!jarFilePath.exists())
    throw new IllegalArgumentException(s"JAR file does not exist! [$jarFilePath]")
}

/** Static dependency edge: class `from` depends on classes `to`. */
case class Dep(from: String, to: Iterable[String])

/** Dependency depth metrics for one class: direct and (path-counted) indirect dependents. */
case class Depth(className: String, direct: Int, indirect: Int)

/**
 * Application entry point.
 */
object JavaStaticHellApp {
  def main(args: Array[String]) = {
    // Get application arguments
    val appArgs = getAppAgrs(args)

    // Buffer with all static dependencies
    val buffer = new ListBuffer[Dep]

    // Go through all entries. FIX: the original created the JarFile inline and
    // never closed it, leaking the underlying file handle.
    val jarFile = new JarFile(appArgs.jarFilePath)
    try {
      val jarEntries = jarFile.entries()
      while (jarEntries.hasMoreElements) {
        // Get next entry
        val jarEntry = jarEntries.nextElement()
        if (!jarEntry.isDirectory && jarEntry.getName.endsWith(".class")) {
          val classParser = new ClassParser(appArgs.jarFilePath.getAbsolutePath, jarEntry.getName)
          try {
            val javaClass = classParser.parse()
            if (filterClass(javaClass.getClassName)) {
              val classVisitor = new ClassVisitor(javaClass)
              // Keep only in-scope dependencies and drop self-references.
              val deps = classVisitor.staticDependencies().filter(d => filterClass(d) && d != javaClass.getClassName)
              if (deps.nonEmpty)
                buffer += Dep(javaClass.getClassName, deps)
            }
          } catch {
            case ex: ClassFormatException =>
              Console.err.println(s"Parser error! [${jarEntry.getName} $ex]")
          }
        }
      }
    } finally {
      jarFile.close()
    }

    // Recursively generate DOT files of individual levels
    generateAndRake(buffer, 1)

    // Print dependency depth to CSV
    printDepths(distinctClassNames(buffer).map(classNameToDepth(_, buffer)))
  }

  // NOTE(review): scope filter is hard-coded to these two package prefixes;
  // parameterize if the tool should analyse other code bases.
  private def filterClass(className: String): Boolean =
    className.startsWith("com.avitech") || className.startsWith("com.pixelpark")

  /** Generates a DOT file for the current level, then recurses on the raked (leaf-stripped) graph. */
  private def generateAndRake(deps: Iterable[Dep], level: Int): Unit = {
    // Generate DOT file
    generateDotFile(new File(s"./build/graph$level.dot"), deps)

    // Recurse only while raking still shrinks the graph (guarantees termination).
    val raked = rake(deps)
    if (raked.size < deps.size)
      generateAndRake(raked, level + 1)
  }

  /** Removes "root" edges: keeps only dependencies whose source is itself depended upon. */
  private def rake(deps: Iterable[Dep]): Iterable[Dep] = {
    val allTo = deps.flatMap(_.to)
    deps.filter(d => allTo.contains(d.from))
  }

  /** All class names appearing in the graph, as sources or targets, without duplicates. */
  private def distinctClassNames(deps: Iterable[Dep]): Seq[String] =
    deps.toSeq.flatMap(dep => dep.to.seq ++ Seq(dep.from)).distinct

  private def classNameToDepth(className: String, deps: Iterable[Dep]): Depth =
    Depth(className, directDeps(className, deps), indirectDeps(className, deps))

  /** Writes the dependency graph in Graphviz DOT syntax (render e.g. with fdp). */
  private def generateDotFile(dotFile: File, deps: Iterable[Dep]): Unit = {
    val output = new BufferedWriter(new PrintWriter(dotFile))
    try {
      output.write("digraph javaStaticHell {\n")
      try {
        // First print all classes and label them
        val all = distinctClassNames(deps)

        // Get all depths
        val depths = all.map(classNameToDepth(_, deps))

        all.foreach { dep =>
          val dotName = classNameForDot(dep)
          val dotLabel = labelForDot(depths.find(_.className == dep).head)
          output.write(s"  $dotName [label=$dotLabel];\n")
        }

        // And now print all dependencies
        deps.foreach { dep =>
          val fromName = classNameForDot(dep.from)
          dep.to.foreach { depTo =>
            val toName = classNameForDot(depTo)
            output.write(s"  $fromName -> $toName;\n")
          }
        }
      } finally {
        output.write("}\n")
      }
    } finally {
      output.close()
    }

    Console.out.println(s"DOT file generated. [${dotFile.getAbsolutePath}]")
  }

  /** Prints the per-class depth table, most-depended-upon classes first. */
  private def printDepths(depths: Iterable[Depth]): Unit = {
    Console.out.println("")
    Console.out.println("CSV dependency depths (class name, indirect count, direct count):")
    depths.toSeq.sortBy(d => (-1 * d.indirect, -1 * d.direct)).foreach { depth =>
      Console.out.println(s"${depth.className};${depth.indirect};${depth.direct};")
    }
  }

  /** Number of classes that depend directly on `className`. */
  private def directDeps(className: String, all: Iterable[Dep]): Int =
    all.count(_.to.exists(_ == className))

  /**
   * Counts dependency paths leading into `rootClassName`. The accumulator guards
   * against cycles; distinct paths through distinct intermediates are all counted.
   */
  private def indirectDeps(rootClassName: String, all: Iterable[Dep]): Int = {
    def rec(className: String, acc: Set[String]): Int = {
      if (!acc.contains(className)) {
        val directDeps = all.filter(_.to.exists(_ == className))
        val accWithClassName = acc + className
        directDeps.size + directDeps.map(directDep => rec(directDep.from, accWithClassName)).sum
      }
      else
        0
    }
    rec(rootClassName, Set.empty)
  }

  /** DOT identifiers must not contain dots or dollar signs; strip them. */
  private def classNameForDot(className: String): String = className.replaceAll("[\\.$]", "")

  /** Node label: simple class name suffixed with direct and indirect counts. */
  private def labelForDot(depth: Depth): String = {
    val label = classNameForDot(depth.className.substring(depth.className.lastIndexOf('.') + 1))
    label + s"_${depth.direct}_${depth.indirect}"
  }

  private def getAppAgrs(args: Array[String]) = JavaStaticHellAppArgs(new File(args(0)))
}
{ "content_hash": "7397a79e3b814b0a6aeb92c3cc19f6f4", "timestamp": "", "source": "github", "line_count": 159, "max_line_length": 115, "avg_line_length": 34.094339622641506, "alnum_prop": 0.6557830658550083, "repo_name": "RadoBuransky/java-static-hell", "id": "52c718d7b952bc4a5d64d33286db412bf62c5cc4", "size": "5421", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/scala/com/buransky/javaStaticHell/JavaStaticHellApp.scala", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "301" }, { "name": "Scala", "bytes": "8868" } ], "symlink_target": "" }
import { enableProdMode } from '@angular/core'; import { platformBrowserDynamic } from '@angular/platform-browser-dynamic'; import { AppModule } from './app.module'; enableProdMode(); platformBrowserDynamic().bootstrapModule(AppModule);
{ "content_hash": "48c1dc2143c15bfd12b194c96a2ff7db", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 75, "avg_line_length": 39.666666666666664, "alnum_prop": 0.773109243697479, "repo_name": "Izak88/abstruse", "id": "8fc2f5afcca5640594ed865807225ae115abe841", "size": "238", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/app/main.prod.ts", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "1373" }, { "name": "CSS", "bytes": "176797" }, { "name": "HTML", "bytes": "104388" }, { "name": "JavaScript", "bytes": "18305" }, { "name": "Shell", "bytes": "974" }, { "name": "TypeScript", "bytes": "1430485" } ], "symlink_target": "" }
package com.imageloader.adapter; /** * Created by wangzhiguo on 15/8/31. */ public class ItemType { public static final int FIRSTITEM = 0; public static final int GENERAL = 1; }
{ "content_hash": "0d645c227fabd2b810d86d747a2e442d", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 42, "avg_line_length": 21, "alnum_prop": 0.6931216931216931, "repo_name": "MissBears/CustomImageLoader", "id": "9720dc230ecaf05a1785a0cfdf11f1f17d5cb473", "size": "189", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/java/com/imageloader/adapter/ItemType.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "19028" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <!-- ~ Licensed to the Apache Software Foundation (ASF) under one or more ~ contributor license agreements. See the NOTICE file distributed with ~ this work for additional information regarding copyright ownership. ~ The ASF licenses this file to You under the Apache License, Version 2.0 ~ (the "License"); you may not use this file except in compliance with ~ the License. You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0 ~ ~ Unless required by applicable law or agreed to in writing, software ~ distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the License for the specific language governing permissions and ~ limitations under the License. --> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <parent> <groupId>org.apache.spark</groupId> <artifactId>spark-parent_2.12</artifactId> <version>3.1.0-SNAPSHOT</version> <relativePath>../../pom.xml</relativePath> </parent> <groupId>org.apache.spark</groupId> <artifactId>spark-token-provider-kafka-0-10_2.12</artifactId> <properties> <sbt.project.name>token-provider-kafka-0-10</sbt.project.name> </properties> <packaging>jar</packaging> <name>Kafka 0.10+ Token Provider for Streaming</name> <url>http://spark.apache.org/</url> <dependencies> <dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-core_${scala.binary.version}</artifactId> <version>${project.version}</version> <scope>provided</scope> </dependency> <dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-core_${scala.binary.version}</artifactId> <version>${project.version}</version> <type>test-jar</type> <scope>test</scope> </dependency> <dependency> <groupId>org.apache.kafka</groupId> 
<artifactId>kafka-clients</artifactId> <version>${kafka.version}</version> </dependency> <dependency> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> <scope>test</scope> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>${hadoop-client-runtime.artifact}</artifactId> <scope>${hadoop.deps.scope}</scope> </dependency> <dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-tags_${scala.binary.version}</artifactId> </dependency> <!-- This spark-tags test-dep is needed even though it isn't used in this module, otherwise testing-cmds that exclude them will yield errors. --> <dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-tags_${scala.binary.version}</artifactId> <type>test-jar</type> <scope>test</scope> </dependency> </dependencies> <build> <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory> <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory> </build> </project>
{ "content_hash": "43e58b48028292987d98d9d9a11578c0", "timestamp": "", "source": "github", "line_count": 89, "max_line_length": 204, "avg_line_length": 37.39325842696629, "alnum_prop": 0.6887019230769231, "repo_name": "shuangshuangwang/spark", "id": "1b0d6d322917f6da0454d6ae70ac4624f3b666cf", "size": "3328", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "external/kafka-0-10-token-provider/pom.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "38971" }, { "name": "Batchfile", "bytes": "30468" }, { "name": "C", "bytes": "1493" }, { "name": "CSS", "bytes": "26884" }, { "name": "Dockerfile", "bytes": "8672" }, { "name": "HTML", "bytes": "70197" }, { "name": "HiveQL", "bytes": "1823426" }, { "name": "Java", "bytes": "3443566" }, { "name": "JavaScript", "bytes": "196704" }, { "name": "Makefile", "bytes": "9397" }, { "name": "PLpgSQL", "bytes": "191716" }, { "name": "PowerShell", "bytes": "3856" }, { "name": "Python", "bytes": "2868359" }, { "name": "R", "bytes": "1177706" }, { "name": "Roff", "bytes": "16021" }, { "name": "SQLPL", "bytes": "3603" }, { "name": "Scala", "bytes": "28311528" }, { "name": "Shell", "bytes": "202769" }, { "name": "Thrift", "bytes": "33605" }, { "name": "q", "bytes": "146878" } ], "symlink_target": "" }
// Background message handler for the selection-translate extension:
// reacts to content-script messages (selected text, widget requests) and to
// clicks on the browser-action icon, delegating translation to `translator`
// and UI updates to `messageSender`.
define([
    'kernel/runtime',
    'kernel/MessageHandler',
    'kernel/browserAction',
    'kernel/persistent',
    'common/storedSettings',
    'common/iframeSize',
    'background/messageSender',
    'background/translator'
], function (
    runtime,
    MessageHandler,
    browserAction,
    persistent,
    storedSettings,
    iframeSize,
    messageSender,
    translator
) {

    //translator.translate({text: 'since'});
    //translator.translate({text: 'Since content scripts run in the context of a web page and not the extension, they often need some way of communicating with the rest of the extension.'});

    // Most recently selected text, shared between message handlers.
    var selectedText;

    var handlers = {
        // A content script reported a text selection: cache it and show the widget.
        checkSelectedText: function (data, sender) {
            if (!data || !data.selectedText || !storedSettings.enableSelection) {
                return;
            }

            // TODO: check whether the selected text can actually be translated
            var canTranslate = true;

            if (!canTranslate) {
                return;
            }

            // Cache the selected text
            selectedText = data.selectedText;

            var tabId = sender.tab.id;
            var autoTranslate = storedSettings.autoTranslate;

            messageSender.showWidget(tabId, {
                autoTranslate: autoTranslate,
                size: autoTranslate ? iframeSize.loading : iframeSize.button
            });

            // When the translate button is not shown, start translating right away
            autoTranslate && translate(selectedText, tabId);
        },
        // A content script asked whether selection-translation is enabled.
        checkEnabledSelection: function (data, sender) {
            var tabId = sender.tab.id;
            messageSender.feedbackSelectionEnabled(tabId, {
                    enabled: storedSettings.enableSelection
                }
            );
        },

        // After the translate button is clicked, show the loading state and start translating
        showLoading: function (data, sender) {
            var tabId = sender.tab.id;

            messageSender.showLoading(tabId, {
                size: iframeSize.loading
            });

            translate(selectedText, tabId);
        },

        // Translate with engines other than the default one
        translateWithOtherEngines: function (data, sender) {
            var tabId = sender.tab.id;
            data.text = data.text || selectedText;

            translator.translateWithOtherEngines(data)
                .forEach(function (deferred) {
                    deferred && deferred.then(function (result) {
                        messageSender.showOtherResult(tabId, {
                            result: result
                        })
                    });
                });
        }
    };

    // Toggles selection-translation on icon click, swapping title/icon to
    // reflect the NEW state. (The displayed strings are user-facing Chinese
    // UI text and are left untouched.)
    function onIconClicked() {
        var enableSelection = storedSettings.enableSelection;
        var title = enableSelection ? '划词翻译已禁用' : '划词翻译已启用';
        var icon = enableSelection ?
        {
            "19": "images/translate-gray-19x19.png",
            "38": "images/[email protected]"
        } :
        {
            "19": "images/translate-19x19.png",
            "38": "images/[email protected]"
        };

        browserAction.setTitle(title);
        browserAction.setIcon(icon);

        persistent.set('enableSelection', !enableSelection);

        //messageSender.feedbackSelectionEnabled({
        //    enabled: !enableSelection
        //});
    }

    // Helpers
    // -------

    // Runs the default-engine translation and pushes the result (or an
    // engine-misconfiguration error) to the given tab.
    function translate(text, targetTabId) {
        // Returns false when the configured translation engine is invalid
        var deferred = translator.translate({text: text});
        var data = {
            size: iframeSize.result
            //offset: {
            //    left: -20,
            //    top: -20
            //}
        };

        if (deferred) {
            deferred.then(function (result) {
                data.result = result;
                messageSender.showResult(targetTabId, data);
            });
        } else {
            data.result = {error: '未知的翻译引擎'};
            messageSender.showResult(targetTabId, data);
        }
    }

    // Message handling
    // --------

    runtime.addTabMessageListener(new MessageHandler(handlers, handlers));
    chrome.browserAction.onClicked.addListener(onIconClicked);
});
{ "content_hash": "75514db13fb5cd956ed287dd3747035b", "timestamp": "", "source": "github", "line_count": 136, "max_line_length": 190, "avg_line_length": 29.602941176470587, "alnum_prop": 0.5412319920516642, "repo_name": "bubkoo/crx-selection-translator", "id": "918241eb0cd0ca85ea36127a646a471943bcc155", "size": "4230", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/js/background/messageHandler.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "18690" }, { "name": "HTML", "bytes": "227" }, { "name": "Handlebars", "bytes": "4372" }, { "name": "JavaScript", "bytes": "65593" } ], "symlink_target": "" }
from PyDAQmx import * import numpy def take_data_2chan(num_points, sampling_rate): """ Take `num_points` from the NIDAQ """ analog_input = Task() read = int32() data_size = 2*num_points data = numpy.zeros((data_size), dtype=numpy.float64) # DAQmx Configure Code analog_input.CreateAIVoltageChan("Dev1/ai0:1", "", DAQmx_Val_RSE, -10.0, 10.0, DAQmx_Val_Volts, None) analog_input.CfgSampClkTiming("", sampling_rate, DAQmx_Val_Rising, DAQmx_Val_FiniteSamps,num_points) # DAQmx Start Code analog_input.StartTask() # DAQmx Read Code analog_input.ReadAnalogF64(num_points,10.0, DAQmx_Val_GroupByChannel, data, data_size,byref(read),None) return data
{ "content_hash": "0e78617e3d82ee163cc680bfb04017f9", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 104, "avg_line_length": 26.56, "alnum_prop": 0.7259036144578314, "repo_name": "BBN-Q/Qlab", "id": "ce57411c51123263ef0f55f256cc9096c6fff39f", "size": "664", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "common/+deviceDrivers/@NIDAQ/take_data_2chan.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "25742" }, { "name": "C++", "bytes": "15513" }, { "name": "M", "bytes": "30640" }, { "name": "MATLAB", "bytes": "1630623" }, { "name": "Objective-C", "bytes": "397" }, { "name": "Processing", "bytes": "49601" }, { "name": "Python", "bytes": "63960" } ], "symlink_target": "" }
__test__ = {"doctest": """ >>> from testsuite.tsearch2_gis.models import Location >>> from django.contrib.gis.geos import Point >>> Location.objects.create(name=u"Mario's Pizza", latlon=Point(12.4604, 43.9420)) <Location: Mario's Pizza> >>> Location.objects.update_index() >>> Location.objects.search("mario") [<Location: Mario's Pizza>] >>> Location.objects.search("luigi") [] """}
{ "content_hash": "260d5a25f0e480df3df0eb5434e42576", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 82, "avg_line_length": 24.25, "alnum_prop": 0.6855670103092784, "repo_name": "hcarvalhoalves/django-tsearch2", "id": "eb612a09f41b12dfd3397161796c36087fc175ee", "size": "405", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "testsuite/tsearch2_gis/tests.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "17640" } ], "symlink_target": "" }
<?php namespace yiiunit\framework\caching; use yii\caching\MemCache; /** * Class for testing memcache cache backend * @group memcache * @group caching */ class MemCacheTest extends CacheTestCase { private $_cacheInstance = null; /** * @return MemCache */ protected function getCacheInstance() { if (!extension_loaded("memcache")) { $this->markTestSkipped("memcache not installed. Skipping."); } if ($this->_cacheInstance === null) { $this->_cacheInstance = new MemCache(); } return $this->_cacheInstance; } public function testExpire() { if (getenv('TRAVIS') == 'true') { $this->markTestSkipped('Can not reliably test memcache expiry on travis-ci.'); } parent::testExpire(); } public function testExpireAdd() { if (getenv('TRAVIS') == 'true') { $this->markTestSkipped('Can not reliably test memcache expiry on travis-ci.'); } parent::testExpireAdd(); } }
{ "content_hash": "915c0fe866ad75d46b85ecde6e8966cd", "timestamp": "", "source": "github", "line_count": 45, "max_line_length": 81, "avg_line_length": 20.31111111111111, "alnum_prop": 0.6717724288840262, "repo_name": "yourcodehere/yii2", "id": "63f8be1640207141a431c30b259962dbdbeb16a3", "size": "914", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "tests/unit/framework/caching/MemCacheTest.php", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "51903" }, { "name": "JavaScript", "bytes": "127842" }, { "name": "PHP", "bytes": "3937465" }, { "name": "Perl", "bytes": "4280" }, { "name": "Ruby", "bytes": "207" }, { "name": "Shell", "bytes": "11326" } ], "symlink_target": "" }
module Imports ( module X ) where import Application.Types as X import Application.Api as X import Network.Wai.Trans as X import Network.HTTP.Types as X import Web.Routes.Nested as X
{ "content_hash": "ca54353599d86640f7dae4f603dd2320", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 30, "avg_line_length": 17.818181818181817, "alnum_prop": 0.7448979591836735, "repo_name": "athanclark/contact-logger", "id": "37dd65172b9160b23c6510b0f635960b4fc2300b", "size": "196", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Imports.hs", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Haskell", "bytes": "46518" } ], "symlink_target": "" }
using System; using System.Collections.Generic; using System.ComponentModel; using System.Data; using System.Drawing; using System.Linq; using System.Text; using System.Windows.Forms; using System.IO; using System.Globalization; namespace SelfControlApplication { public partial class MainForm : Form { private string path = Path.GetPathRoot(Environment.SystemDirectory); private string temp = Path.GetTempPath(); private string appdata = Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData); private string ShellCmdLocation = null; private string system32location = null; private string hostslocation = null; private string baklocation = null; Timer timer = new Timer(); Timer timer1 = new Timer(); private int timeLeft = 0; private int timerValue = 0; private DirectoryInfo dir = Directory.CreateDirectory(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData) + @"\SelfControlApp"); // создаем директорию приложения в AppData private string blockListPath = Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData) + @"\SelfControlApp\blocklist.rtf"; // файл с сохраненным списком блокируемых сайтов private RichTextBox rtbListP = new RichTextBox(); List<string> phostdomains = new List<string>(); public MainForm() { if (File.Exists(path + @"Windows\Sysnative\cmd.exe")) { ShellCmdLocation = path + @"Windows\Sysnative\cmd.exe"; system32location = path + @"Windows\System32\"; hostslocation = system32location + @"drivers\etc\hosts"; baklocation = temp + @"hosts.bak"; } else { ShellCmdLocation = path + @"Windows\System32\cmd.exe"; system32location = path + @"Windows\System32\"; hostslocation = system32location + @"drivers\etc\hosts"; baklocation = temp + @"hosts.bak"; } System.Threading.Thread.CurrentThread.CurrentUICulture = CultureInfo.GetCultureInfo(Properties.Settings.Default.Language); System.Threading.Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo(Properties.Settings.Default.Language); InitializeComponent(); } private 
void button1_Click(object sender, EventArgs e) { BlockList blocklist = new BlockList(); blocklist.ShowDialog(); this.Show(); } private void выходToolStripMenuItem_Click(object sender, EventArgs e) { Application.Exit(); } private void помощьToolStripMenuItem1_Click(object sender, EventArgs e) { MessageBox.Show(LangStrings.HelpMessage, LangStrings.HelpTitle); } private void оПрограммеToolStripMenuItem_Click(object sender, EventArgs e) { About about = new About(); about.ShowDialog(); this.Show(); } private string ProcStartargs(string name, string args) // процесс для перезаписи hosts через консоль { try { var proc = new System.Diagnostics.Process { StartInfo = new System.Diagnostics.ProcessStartInfo { FileName = name, Arguments = args, UseShellExecute = false, RedirectStandardOutput = true, RedirectStandardInput = true, CreateNoWindow = true, StandardOutputEncoding = Encoding.UTF8, } }; proc.Start(); string line = null; while (!proc.StandardOutput.EndOfStream) { line += "\n" + proc.StandardOutput.ReadLine(); } return line; } catch (Exception ex) { return ex.ToString(); } } private void DeleteFile(string filepath) { ProcStartargs(ShellCmdLocation, "/c del /F /Q " + filepath); } private void btnStart_Click(object sender, EventArgs e) { timeLeft = tbarTime.Value * 15 * 60; timerValue = tbarTime.Value * 15 * 60; rtbListP.LoadFile(blockListPath); var hostsdomains = rtbListP.Lines; IdnMapping idn = new IdnMapping(); for (int i = 0; i < hostsdomains.Length; i++) //все не ascii домены в punycode { if (!string.IsNullOrEmpty(hostsdomains[i])) { string punycode = idn.GetAscii(hostsdomains[i]); phostdomains.Add(punycode); } } File.Copy(hostslocation, baklocation, true); timer = new Timer(); timer1 = new Timer(); timer.Interval = timerValue * 1000; // 15 * 60 * 1000; // 15 минут. timer.Tick += timer_Tick; timer.Enabled = true; timer1.Interval = 1000; // Ежесекундный таймер. 
timer1.Tick += timer_Tick1; timer1.Enabled = true; lblTime.ForeColor = Color.Red; btnStart.Enabled = false; btnBlock.Enabled = false; tbarTime.Enabled = false; var hostsarray = phostdomains.ToArray(); MessageBox.Show(LangStrings.WarningMessage, LangStrings.WarningTitle); if (true) { try { string hosts = null; if (File.Exists(hostslocation)) { hosts = File.ReadAllText(hostslocation, Encoding.UTF8); File.SetAttributes(hostslocation, FileAttributes.Normal); DeleteFile(hostslocation); } File.Create(hostslocation).Close(); File.WriteAllText(hostslocation, hosts + "\r\n#!!!DON'T MODIFY TEXT BELOW!!!\r\n#!!!SELF CONTROL APP BLOCKLIST!!!\r\n", Encoding.UTF8); for (int i = 0; i < hostsarray.Length; i++) { if (hosts.IndexOf(hostsarray[i]) == -1) { ProcStartargs(ShellCmdLocation, "/c echo " + "0.0.0.0 " + hostsarray[i] + " >> \"" + hostslocation + "\""); ProcStartargs(ShellCmdLocation, "/c echo " + "0.0.0.0 www." + hostsarray[i] + " >> \"" + hostslocation + "\""); } } } catch (Exception) { MessageBox.Show("Ошибка добавления в hosts.\nЗапустите программу от имени Администратора!"); } } } void timer_Tick(object sender, EventArgs e) { timer.Enabled = false; // Останавливаем таймер. btnStart.Enabled = true; btnBlock.Enabled = true; tbarTime.Enabled = true; // Восстанавливаемся из резервной копии. 
File.Copy(baklocation, hostslocation, true); File.Delete(baklocation); } void timer_Tick1(object sender, EventArgs e) { if (timeLeft != 0) { timeLeft--; var s = TimeSpan.FromSeconds(timeLeft); lblTime.Text = s.ToString(); } else { timer1.Enabled = false; lblTime.ForeColor = Color.Black; } } private void tbarTime_Scroll(object sender, EventArgs e) { int lValue = tbarTime.Value * 15; string hour = Convert.ToString(lValue / 60); string mins = Convert.ToString(lValue % 60); if (mins.Length < 2) mins = "0" + mins; string lbl = String.Format("0{0}:{1}:00", hour, mins); lblTime.Text = lbl; } private void niTray_Click(object sender, EventArgs e) { Show(); WindowState = FormWindowState.Normal; } private void cmExit_Click(object sender, EventArgs e) { Application.Exit(); } private void MainForm_Resize(object sender, EventArgs e) { if (FormWindowState.Minimized == WindowState) Hide(); } private void MainForm_FormClosing(object sender, FormClosingEventArgs e) { if (e.CloseReason == CloseReason.UserClosing) { e.Cancel = true; Hide(); WindowState = FormWindowState.Minimized; } } private void englishToolStripMenuItem_Click(object sender, EventArgs e) { if (System.Threading.Thread.CurrentThread.CurrentUICulture.Name == "ru-RU") { System.Threading.Thread.CurrentThread.CurrentUICulture = CultureInfo.GetCultureInfo("en-US"); System.Threading.Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US"); Properties.Settings.Default.Language = "en-US"; Properties.Settings.Default.Save(); Application.Restart(); } else { System.Threading.Thread.CurrentThread.CurrentUICulture = CultureInfo.GetCultureInfo("ru-RU"); System.Threading.Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("ru-RU"); Properties.Settings.Default.Language = "ru-RU"; Properties.Settings.Default.Save(); Application.Restart(); } } } }
{ "content_hash": "f8f45a27131d72cf7142df5144dba865", "timestamp": "", "source": "github", "line_count": 263, "max_line_length": 196, "avg_line_length": 38.38783269961977, "alnum_prop": 0.5297147385103012, "repo_name": "sw0rl0k/selfcontrolapp", "id": "37ee984aaf20a78c82250a10f290c4383716622a", "size": "10362", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "SelfControlApplication/MainForm.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "17648" } ], "symlink_target": "" }
ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
{ "content_hash": "6b38b9d0a5609c089bb30722d13ef2de", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 31, "avg_line_length": 9.692307692307692, "alnum_prop": 0.7063492063492064, "repo_name": "mdoering/backbone", "id": "c7411be969e6e8bcebe70a2fa95a532137c932e7", "size": "174", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Caryophyllales/Caryophyllaceae/Silene/Silene elymaitica/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
<div class="left-sidebar" id="left-sidebar"> <div class="bottom_sidebar"> <ul id="global_filters" class="filters"> {# Special-case this link so we don't actually go to page top. #} <li data-name="home" class="global-filter active-filter"><span class="filter-icon"><i class="icon-vector-home"></i></span><a href="#">{{ _('Home') }} <span class="count"><span class="value"></span></span></a></li> <li data-name="private" class="global-filter"><span class="filter-icon"><i class="icon-vector-user"></i></span><a href="#narrow/is/private">{{ _('Private messages') }} <span class="count"><span class="value"></span></span></a></li> <li data-name="starred" class="global-filter"><span class="filter-icon"><i class="icon-vector-star"></i></span><a href="#narrow/is/starred">{{ _('Starred messages') }}</a></li> <li data-name="mentioned" class="global-filter"><span class="filter-icon"><i class="icon-vector-tag"></i></span><a href="#narrow/is/mentioned">{{ _('@-mentions') }}<span class="count"><span class="value"></span></span></a></li> </ul> <div id="streams_list" class="zoom-out"> <div id="streams_header" class="zoom-in-hide"><h4 class="sidebar-title" data-toggle="tooltip" title="Subscribed streams"><a href="">{{ _('STREAMS') }}</a></h4> <a href=""><i id="streams_inline_cog" class='icon-vector-cog' data-toggle="tooltip" title="Subscribe, add, or configure streams"></i></a> <a href=""><i id='streams_filter_icon' class='icon-vector-search' data-toggle="tooltip" title="Filter streams list"></i></a> </div> <div id="topics_header"> <div class="all-streams-padding"> <ul class="filters"> <li data-name="all-streams"> <i class="icon-vector-chevron-left"></i> <a href="" class="show-all-streams">{{ _('All streams') }}</a> </li> </ul> </div> </div> <div id="stream-filters-container" class="scrolling_list"> <input class="stream-list-filter notdisplayed" type="text" placeholder="{{ _('Search streams') }}" /> <ul id="stream_filters" class="filters"></ul> </div> </div> <div id="share-the-love"> <div 
id="share-the-love-expand-collapse"> <i class="toggle icon-vector-caret-right"></i><div id="sharethelove-header"><h4 class="share-the-love-title">{{ _('SHARE THE LOVE') }}<span class="still-have-invites"> (<span class="invite-count">0</span>)</span></h4></div> </div> <div id="share-the-love-contents"> <div id="tell-a-friend-success" class="alert alert-success"> {% trans %}<strong>Thanks!</strong> A hand-crafted, artisanal invite is on the way.{% endtrans %} </div> <div class="still-have-invites" id="encouraging-invite-message"> <p> {{ _("Know someone who would love Zulip for their company or group? Invite 'em!") }} </p> </div> <div class="no-more-invites"> <p> {% trans %}We'll have more invites for you soon, but for now, enjoy this <a target="_blank" href="http://www.youtube.com/watch?v=PW71En5Pa5s#t=2m01s">song that expresses how we feel when you're logged out</a>.{% endtrans %} </p> </div> <div class="still-have-invites"> {# Many of these values are set by the initialization code in referral.js #} <form id="referral-form"> <input class="input-block-level required" type="email" name="email" /> <label for="email" generated="true" class="text-error"></label> </form> </div> <div class="invite-count-area"> <span id="invite-hearts"></span> <small class="pull-right"><span class="invite-count">0 {{ _('invites remaining') }}</span></small> </div> </div> </div> </div> </div>
{ "content_hash": "5ea11723b69d49e3537e75c5c375f8dd", "timestamp": "", "source": "github", "line_count": 66, "max_line_length": 257, "avg_line_length": 77.39393939393939, "alnum_prop": 0.44498825371965545, "repo_name": "mohsenSy/zulip", "id": "11d0c26bca1347151b2a32e0dc6bb67f4be45490", "size": "5108", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "templates/zerver/left-sidebar.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "231946" }, { "name": "Groovy", "bytes": "5516" }, { "name": "HTML", "bytes": "441403" }, { "name": "JavaScript", "bytes": "1416046" }, { "name": "Nginx", "bytes": "1229" }, { "name": "Pascal", "bytes": "1113" }, { "name": "Perl", "bytes": "401825" }, { "name": "Puppet", "bytes": "82780" }, { "name": "Python", "bytes": "2718958" }, { "name": "Ruby", "bytes": "249738" }, { "name": "Shell", "bytes": "34483" } ], "symlink_target": "" }
ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
{ "content_hash": "bffd836a6bca65e401d9ceb1eb89ff77", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 31, "avg_line_length": 9.692307692307692, "alnum_prop": 0.7063492063492064, "repo_name": "mdoering/backbone", "id": "2dc6f542c3c223013f8cc5985d7b90cee90dbf27", "size": "199", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Ericales/Ericaceae/Leucothoë/Leucothoë ambigua/Leucothoë ambigua hispidula/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
module IncomeTax module Countries class Sudan < Models::FixedRate register "Sudan", "SD", "SDN" currency "SDG" rate "15%" end end end
{ "content_hash": "17899b1b1b7307aa3bba3ce984a3066f", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 35, "avg_line_length": 18.666666666666668, "alnum_prop": 0.5892857142857143, "repo_name": "rkh/income-tax", "id": "90ba16a92131abf9635c975ce920ae16589a548e", "size": "168", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/income_tax/countries/sudan.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "1931005" } ], "symlink_target": "" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <title>Code Coverage for /home/gerard/sites/modules.w.doctrine/modules.zendframework.com/module/ZfModule/config</title> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <link href="css/bootstrap.min.css" rel="stylesheet"> <link href="css/bootstrap-responsive.min.css" rel="stylesheet"> <link href="css/style.css" rel="stylesheet"> <!--[if lt IE 9]> <script src="js/html5shiv.js"></script> <![endif]--> </head> <body> <header> <div class="container"> <div class="row"> <div class="span12"> <ul class="breadcrumb"> <li><a href="index.html">/home/gerard/sites/modules.w.doctrine/modules.zendframework.com</a> <span class="divider">/</span></li> <li><a href="module.html">module</a> <span class="divider">/</span></li> <li><a href="module_ZfModule.html">ZfModule</a> <span class="divider">/</span></li> <li class="active">config</li> <li>(<a href="module_ZfModule_config.dashboard.html">Dashboard</a>)</li> </ul> </div> </div> </div> </header> <div class="container"> <table class="table table-bordered"> <thead> <tr> <td>&nbsp;</td> <td colspan="9"><div align="center"><strong>Code Coverage</strong></div></td> </tr> <tr> <td>&nbsp;</td> <td colspan="3"><div align="center"><strong>Lines</strong></div></td> <td colspan="3"><div align="center"><strong>Functions and Methods</strong></div></td> <td colspan="3"><div align="center"><strong>Classes and Traits</strong></div></td> </tr> </thead> <tbody> <tr> <td class="success">Total</td> <td class="success big"> <div class="progress progress-success" style="width: 100px;"> <div class="bar" style="width: 100.00%;"></div> </div> </td> <td class="success small"><div align="right">100.00%</div></td> <td class="success small"><div align="right">175&nbsp;/&nbsp;175</div></td> <td class="None big">&nbsp;</td> <td class="None small"><div align="right"></div></td> <td class="None small"><div align="right">&nbsp;</div></td> <td class="None big">&nbsp;</td> <td 
class="None small"><div align="right"></div></td> <td class="None small"><div align="right">&nbsp;</div></td> </tr> <tr> <td class="success"><i class="icon-file"></i> <a href="module_ZfModule_config_module.config.php.html">module.config.php</a></td> <td class="success big"> <div class="progress progress-success" style="width: 100px;"> <div class="bar" style="width: 100.00%;"></div> </div> </td> <td class="success small"><div align="right">100.00%</div></td> <td class="success small"><div align="right">175&nbsp;/&nbsp;175</div></td> <td class="None big">&nbsp;</td> <td class="None small"><div align="right"></div></td> <td class="None small"><div align="right">&nbsp;</div></td> <td class="None big">&nbsp;</td> <td class="None small"><div align="right"></div></td> <td class="None small"><div align="right">&nbsp;</div></td> </tr> </tbody> </table> <footer> <h4>Legend</h4> <p> <span class="danger"><strong>Low</strong>: 0% to 35%</span> <span class="warning"><strong>Medium</strong>: 35% to 70%</span> <span class="success"><strong>High</strong>: 70% to 100%</span> </p> <p> <small>Generated by <a href="http://github.com/sebastianbergmann/php-code-coverage" target="_top">PHP_CodeCoverage 1.2.13</a> using <a href="http://www.php.net/" target="_top">PHP 5.3.10-1ubuntu3.8</a> and <a href="http://phpunit.de/">PHPUnit 3.7.27</a> at Sat Oct 12 3:31:18 CEST 2013.</small> </p> </footer> </div> <script src="js/jquery.min.js" type="text/javascript"></script> <script src="js/bootstrap.min.js" type="text/javascript"></script> </body> </html>
{ "content_hash": "f743b6d2e2da2b8d091fa82d627969d0", "timestamp": "", "source": "github", "line_count": 96, "max_line_length": 299, "avg_line_length": 41.21875, "alnum_prop": 0.5941369724538792, "repo_name": "gerardZf2Comments/modules", "id": "9cab052545d1bfa3266a049a68fbb38a30cb119f", "size": "3957", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "module/ZfModule/test/clover-html/module_ZfModule_config.html", "mode": "33261", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "19031" }, { "name": "JavaScript", "bytes": "20678" }, { "name": "PHP", "bytes": "474044" } ], "symlink_target": "" }
/* @flow */ import React, { PropTypes } from 'react'; import { connect } from 'react-redux'; import { bindActionCreators } from 'redux'; import { actions as gameActions } from '../../redux/modules/games'; import TicTacToeBoard from 'components/TicTacToeBoard'; import MoveList from 'components/MoveList'; import GameDetailHeader from 'components/GameDetailHeader'; import styles from './GameDetailView.scss'; // import GameList from 'components/GameList'; // We can use Flow (http://flowtype.org/) to type our component's props // and state. For convenience we've included both regular propTypes and // Flow types, but if you want to try just using Flow you'll want to // disable the eslint rule `react/prop-types`. // NOTE: You can run `npm run flow:check` to check for any errors in your // code, or `npm i -g flow-bin` to have access to the binary globally. // Sorry Windows users :(. // type Props = {}; // We avoid using the `@connect` decorator on the class definition so // that we can export the undecorated component for testing. 
// See: http://rackt.github.io/redux/docs/recipes/WritingTests.html export class GameDetailView extends React.Component { constructor () { super(); this.state = { currentlyDisplayedMove: 0, }; } // props: Props; static propTypes = { gameActions: PropTypes.object.isRequired, currentGame: PropTypes.object.isRequired, params: PropTypes.object.isRequired, }; componentDidMount = () => { this.props.gameActions.getGameDetail(this.props.params.id); } componentWillUnmount = () => { this.props.gameActions.clearGameDetail(); } componentWillReceiveProps = (nextProps) => { this.setState({ ...this.state, currentlyDisplayedMove: nextProps.currentGame.moves.length - 1, }); } handleSliderOnChange = (e) => { this.setState({ ...this.state, currentlyDisplayedMove: e.target.value, }); } handleMoveOnClick = (moveIdx) => { this.setState({ ...this.state, currentlyDisplayedMove: moveIdx, }); } render () { if (!this.props.currentGame) { return (<span>Loading...</span>); } const lastMove = this.props.currentGame.moves[this.props.currentGame.moves.length - 1]; const currentGameState = this.props.currentGame.moves[this.state.currentlyDisplayedMove].gameState; const previousMove = this.props.currentGame.moves[this.state.currentlyDisplayedMove - 1]; let previousGameState; if (previousMove === undefined) { previousGameState = undefined; } else { previousGameState = previousMove.gameState; } return ( <div> <GameDetailHeader game={this.props.currentGame} lastMove={lastMove} /> <div className={styles.boardContainer}> <TicTacToeBoard currentGameState={currentGameState} previousGameState={previousGameState} /> </div> <div className={styles.movesContainer}> <h3>Moves</h3> <p className={styles.helpText}> Adjust the slider or click on a move to view that move on the board. 
</p> <input type='range' className={styles.movesSlider} min='0' max={this.props.currentGame.moves.length - 1} step='1' onChange={this.handleSliderOnChange} value={this.state.currentlyDisplayedMove} /> <MoveList moves={this.props.currentGame.moves} moveOnClick={this.handleMoveOnClick} highlightMoveIndex={this.state.currentlyDisplayedMove} /> </div> </div> ); } } const mapStateToProps = (state) => ({ currentGame: state.games.currentGame, }); const mapDisptachToProps = (dispatch) => { return { gameActions: bindActionCreators(gameActions, dispatch), }; }; export default connect(mapStateToProps, mapDisptachToProps)(GameDetailView);
{ "content_hash": "c8b4ff99dcb50463eb2d9ab680ede605", "timestamp": "", "source": "github", "line_count": 130, "max_line_length": 103, "avg_line_length": 30.692307692307693, "alnum_prop": 0.6508771929824562, "repo_name": "mleonard87/merknera-ui", "id": "bbff5ef9e5ffa0981e36ba10e624bbe502a9aa3f", "size": "3990", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/views/GameDetailView/GameDetailView.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "7512" }, { "name": "HTML", "bytes": "426" }, { "name": "JavaScript", "bytes": "111388" } ], "symlink_target": "" }
Testing:
{ "content_hash": "719a4144c04c07442a8513e254130fc7", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 8, "avg_line_length": 9, "alnum_prop": 0.7777777777777778, "repo_name": "webkom/lego-webapp", "id": "67ed1a994157d539a96cd98398c14fc054a42727", "size": "9", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/components/Header/Readme.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "160617" }, { "name": "Dockerfile", "bytes": "1251" }, { "name": "JavaScript", "bytes": "119697" }, { "name": "Shell", "bytes": "5553" }, { "name": "TypeScript", "bytes": "1635763" } ], "symlink_target": "" }
Clazz.declarePackage ("JU"); Clazz.load (["java.io.OutputStream", "javajs.api.GenericOutputChannel"], "JU.OC", ["java.io.BufferedWriter", "$.ByteArrayOutputStream", "$.OutputStreamWriter", "JU.Base64", "$.SB"], function () { c$ = Clazz.decorateAsClass (function () { this.bytePoster = null; this.fileName = null; this.bw = null; this.isLocalFile = false; this.byteCount = 0; this.isCanceled = false; this.closed = false; this.os = null; this.sb = null; this.type = null; this.$isBase64 = false; this.os0 = null; this.bytes = null; this.bigEndian = true; Clazz.instantialize (this, arguments); }, JU, "OC", java.io.OutputStream, javajs.api.GenericOutputChannel); Clazz.overrideMethod (c$, "isBigEndian", function () { return this.bigEndian; }); Clazz.defineMethod (c$, "setBigEndian", function (TF) { this.bigEndian = TF; }, "~B"); Clazz.defineMethod (c$, "setParams", function (bytePoster, fileName, asWriter, os) { this.bytePoster = bytePoster; this.fileName = fileName; this.$isBase64 = ";base64,".equals (fileName); if (this.$isBase64) { fileName = null; this.os0 = os; os = null; }this.os = os; this.isLocalFile = (fileName != null && !JU.OC.isRemote (fileName)); if (asWriter && !this.$isBase64 && os != null) this.bw = new java.io.BufferedWriter ( new java.io.OutputStreamWriter (os)); return this; }, "javajs.api.BytePoster,~S,~B,java.io.OutputStream"); Clazz.defineMethod (c$, "setBytes", function (b) { this.bytes = b; return this; }, "~A"); Clazz.defineMethod (c$, "getFileName", function () { return this.fileName; }); Clazz.defineMethod (c$, "getName", function () { return (this.fileName == null ? 
null : this.fileName.substring (this.fileName.lastIndexOf ("/") + 1)); }); Clazz.defineMethod (c$, "getByteCount", function () { return this.byteCount; }); Clazz.defineMethod (c$, "setType", function (type) { this.type = type; }, "~S"); Clazz.defineMethod (c$, "getType", function () { return this.type; }); Clazz.defineMethod (c$, "append", function (s) { try { if (this.bw != null) { this.bw.write (s); } else if (this.os == null) { if (this.sb == null) this.sb = new JU.SB (); this.sb.append (s); } else { var b = s.getBytes (); this.os.write (b, 0, b.length); this.byteCount += b.length; return this; }} catch (e) { if (Clazz.exceptionOf (e, java.io.IOException)) { } else { throw e; } } this.byteCount += s.length; return this; }, "~S"); Clazz.overrideMethod (c$, "reset", function () { this.sb = null; this.initOS (); }); Clazz.defineMethod (c$, "initOS", function () { if (this.sb != null) { var s = this.sb.toString (); this.reset (); this.append (s); return; }try { { this.os = null; }if (this.os == null) this.os = new java.io.ByteArrayOutputStream (); if (this.bw != null) { this.bw.close (); this.bw = new java.io.BufferedWriter ( new java.io.OutputStreamWriter (this.os)); }} catch (e) { if (Clazz.exceptionOf (e, Exception)) { System.out.println (e.toString ()); } else { throw e; } } this.byteCount = 0; }); Clazz.overrideMethod (c$, "write", function (buf, i, len) { if (this.os == null) this.initOS (); try { this.os.write (buf, i, len); } catch (e) { if (Clazz.exceptionOf (e, java.io.IOException)) { } else { throw e; } } this.byteCount += len; }, "~A,~N,~N"); Clazz.overrideMethod (c$, "writeByteAsInt", function (b) { if (this.os == null) this.initOS (); { this.os.writeByteAsInt(b); }this.byteCount++; }, "~N"); Clazz.defineMethod (c$, "cancel", function () { this.isCanceled = true; this.closeChannel (); }); Clazz.overrideMethod (c$, "closeChannel", function () { if (this.closed) return null; try { if (this.bw != null) { this.bw.flush (); this.bw.close (); } else if 
(this.os != null) { this.os.flush (); this.os.close (); }if (this.os0 != null && this.isCanceled) { this.os0.flush (); this.os0.close (); }} catch (e) { if (Clazz.exceptionOf (e, Exception)) { } else { throw e; } } if (this.isCanceled) { this.closed = true; return null; }if (this.fileName == null) { if (this.$isBase64) { var s = this.getBase64 (); if (this.os0 != null) { this.os = this.os0; this.append (s); }this.sb = new JU.SB (); this.sb.append (s); this.$isBase64 = false; return this.closeChannel (); }return (this.sb == null ? null : this.sb.toString ()); }this.closed = true; var jmol = null; var _function = null; { jmol = self.J2S || Jmol; _function = (typeof this.fileName == "function" ? this.fileName : null); }if (jmol != null) { var data = (this.sb == null ? this.toByteArray () : this.sb.toString ()); if (_function == null) jmol._doAjax (this.fileName, null, data); else jmol._apply (this.fileName, data); }return null; }); Clazz.defineMethod (c$, "isBase64", function () { return this.$isBase64; }); Clazz.defineMethod (c$, "getBase64", function () { return JU.Base64.getBase64 (this.toByteArray ()).toString (); }); Clazz.defineMethod (c$, "toByteArray", function () { return (this.bytes != null ? this.bytes : Clazz.instanceOf (this.os, java.io.ByteArrayOutputStream) ? (this.os).toByteArray () : null); }); Clazz.defineMethod (c$, "close", function () { this.closeChannel (); }); Clazz.overrideMethod (c$, "toString", function () { if (this.bw != null) try { this.bw.flush (); } catch (e) { if (Clazz.exceptionOf (e, java.io.IOException)) { } else { throw e; } } if (this.sb != null) return this.closeChannel (); return this.byteCount + " bytes"; }); Clazz.defineMethod (c$, "postByteArray", function () { var bytes = (this.sb == null ? 
this.toByteArray () : this.sb.toString ().getBytes ()); return this.bytePoster.postByteArray (this.fileName, bytes); }); c$.isRemote = Clazz.defineMethod (c$, "isRemote", function (fileName) { if (fileName == null) return false; var itype = JU.OC.urlTypeIndex (fileName); return (itype >= 0 && itype != 4); }, "~S"); c$.isLocal = Clazz.defineMethod (c$, "isLocal", function (fileName) { if (fileName == null) return false; var itype = JU.OC.urlTypeIndex (fileName); return (itype < 0 || itype == 4); }, "~S"); c$.urlTypeIndex = Clazz.defineMethod (c$, "urlTypeIndex", function (name) { if (name == null) return -2; for (var i = 0; i < JU.OC.urlPrefixes.length; ++i) { if (name.startsWith (JU.OC.urlPrefixes[i])) { return i; }} return -1; }, "~S"); Clazz.defineStatics (c$, "urlPrefixes", Clazz.newArray (-1, ["http:", "https:", "sftp:", "ftp:", "file:"]), "URL_LOCAL", 4); });
{ "content_hash": "0b64f64c01c567cfbf134345fea32f7c", "timestamp": "", "source": "github", "line_count": 245, "max_line_length": 195, "avg_line_length": 26.612244897959183, "alnum_prop": 0.6263803680981596, "repo_name": "ezekutor/jsdelivr", "id": "1f46d9f71be75c16ef1225612a660703191b147c", "size": "6520", "binary": false, "copies": "10", "ref": "refs/heads/master", "path": "files/jsmol/14.6.2/j2s/JU/OC.js", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
require 'active_support/core_ext/array/conversions' require 'active_support/core_ext/string/inflections' require 'active_support/core_ext/object/deep_dup' require 'active_support/core_ext/string/filters' module ActiveModel # == Active \Model \Errors # # Provides a modified +Hash+ that you can include in your object # for handling error messages and interacting with Action View helpers. # # A minimal implementation could be: # # class Person # # Required dependency for ActiveModel::Errors # extend ActiveModel::Naming # # def initialize # @errors = ActiveModel::Errors.new(self) # end # # attr_accessor :name # attr_reader :errors # # def validate! # errors.add(:name, :blank, message: "cannot be nil") if name.nil? # end # # # The following methods are needed to be minimally implemented # # def read_attribute_for_validation(attr) # send(attr) # end # # def self.human_attribute_name(attr, options = {}) # attr # end # # def self.lookup_ancestors # [self] # end # end # # The last three methods are required in your object for +Errors+ to be # able to generate error messages correctly and also handle multiple # languages. Of course, if you extend your object with <tt>ActiveModel::Translation</tt> # you will not need to implement the last two. Likewise, using # <tt>ActiveModel::Validations</tt> will handle the validation related methods # for you. # # The above allows you to do: # # person = Person.new # person.validate! # => ["cannot be nil"] # person.errors.full_messages # => ["name cannot be nil"] # # etc.. class Errors include Enumerable CALLBACKS_OPTIONS = [:if, :unless, :on, :allow_nil, :allow_blank, :strict] MESSAGE_OPTIONS = [:message] attr_reader :messages, :details # Pass in the instance of the object that is using the errors object. 
# # class Person # def initialize # @errors = ActiveModel::Errors.new(self) # end # end def initialize(base) @base = base @messages = apply_default_array({}) @details = apply_default_array({}) end def initialize_dup(other) # :nodoc: @messages = other.messages.dup @details = other.details.deep_dup super end # Copies the errors from <tt>other</tt>. # # other - The ActiveModel::Errors instance. # # Examples # # person.errors.copy!(other) def copy!(other) # :nodoc: @messages = other.messages.dup @details = other.details.dup end # Clear the error messages. # # person.errors.full_messages # => ["name cannot be nil"] # person.errors.clear # person.errors.full_messages # => [] def clear messages.clear details.clear end # Returns +true+ if the error messages include an error for the given key # +attribute+, +false+ otherwise. # # person.errors.messages # => {:name=>["cannot be nil"]} # person.errors.include?(:name) # => true # person.errors.include?(:age) # => false def include?(attribute) messages.key?(attribute) && messages[attribute].present? end alias :has_key? :include? alias :key? :include? # Get messages for +key+. # # person.errors.messages # => {:name=>["cannot be nil"]} # person.errors.get(:name) # => ["cannot be nil"] # person.errors.get(:age) # => [] def get(key) ActiveSupport::Deprecation.warn(<<-MESSAGE.squish) ActiveModel::Errors#get is deprecated and will be removed in Rails 5.1. To achieve the same use model.errors[:#{key}]. MESSAGE messages[key] end # Set messages for +key+ to +value+. # # person.errors[:name] # => ["cannot be nil"] # person.errors.set(:name, ["can't be nil"]) # person.errors[:name] # => ["can't be nil"] def set(key, value) ActiveSupport::Deprecation.warn(<<-MESSAGE.squish) ActiveModel::Errors#set is deprecated and will be removed in Rails 5.1. Use model.errors.add(:#{key}, #{value.inspect}) instead. MESSAGE messages[key] = value end # Delete messages for +key+. Returns the deleted messages. 
# # person.errors[:name] # => ["cannot be nil"] # person.errors.delete(:name) # => ["cannot be nil"] # person.errors[:name] # => [] def delete(key) details.delete(key) messages.delete(key) end # When passed a symbol or a name of a method, returns an array of errors # for the method. # # person.errors[:name] # => ["cannot be nil"] # person.errors['name'] # => ["cannot be nil"] # # Note that, if you try to get errors of an attribute which has # no errors associated with it, this method will instantiate # an empty error list for it and +keys+ will return an array # of error keys which includes this attribute. # # person.errors.keys # => [] # person.errors[:name] # => [] # person.errors.keys # => [:name] def [](attribute) messages[attribute.to_sym] end # Adds to the supplied attribute the supplied error message. # # person.errors[:name] = "must be set" # person.errors[:name] # => ['must be set'] def []=(attribute, error) ActiveSupport::Deprecation.warn(<<-MESSAGE.squish) ActiveModel::Errors#[]= is deprecated and will be removed in Rails 5.1. Use model.errors.add(:#{attribute}, #{error.inspect}) instead. MESSAGE messages[attribute.to_sym] << error end # Iterates through each error key, value pair in the error messages hash. # Yields the attribute and the error for that attribute. If the attribute # has more than one error message, yields once for each error message. # # person.errors.add(:name, :blank, message: "can't be blank") # person.errors.each do |attribute, error| # # Will yield :name and "can't be blank" # end # # person.errors.add(:name, :not_specified, message: "must be specified") # person.errors.each do |attribute, error| # # Will yield :name and "can't be blank" # # then yield :name and "must be specified" # end def each messages.each_key do |attribute| messages[attribute].each { |error| yield attribute, error } end end # Returns the number of error messages. 
# # person.errors.add(:name, :blank, message: "can't be blank") # person.errors.size # => 1 # person.errors.add(:name, :not_specified, message: "must be specified") # person.errors.size # => 2 def size values.flatten.size end alias :count :size # Returns all message values. # # person.errors.messages # => {:name=>["cannot be nil", "must be specified"]} # person.errors.values # => [["cannot be nil", "must be specified"]] def values messages.values end # Returns all message keys. # # person.errors.messages # => {:name=>["cannot be nil", "must be specified"]} # person.errors.keys # => [:name] def keys messages.keys end # Returns +true+ if no errors are found, +false+ otherwise. # If the error message is a string it can be empty. # # person.errors.full_messages # => ["name cannot be nil"] # person.errors.empty? # => false def empty? size.zero? end alias :blank? :empty? # Returns an xml formatted representation of the Errors hash. # # person.errors.add(:name, :blank, message: "can't be blank") # person.errors.add(:name, :not_specified, message: "must be specified") # person.errors.to_xml # # => # # <?xml version=\"1.0\" encoding=\"UTF-8\"?> # # <errors> # # <error>name can't be blank</error> # # <error>name must be specified</error> # # </errors> def to_xml(options={}) to_a.to_xml({ root: "errors", skip_types: true }.merge!(options)) end # Returns a Hash that can be used as the JSON representation for this # object. You can pass the <tt>:full_messages</tt> option. This determines # if the json object should contain full messages or not (false by default). # # person.errors.as_json # => {:name=>["cannot be nil"]} # person.errors.as_json(full_messages: true) # => {:name=>["name cannot be nil"]} def as_json(options=nil) to_hash(options && options[:full_messages]) end # Returns a Hash of attributes with their error messages. If +full_messages+ # is +true+, it will contain full messages (see +full_message+). 
# # person.errors.to_hash # => {:name=>["cannot be nil"]} # person.errors.to_hash(true) # => {:name=>["name cannot be nil"]} def to_hash(full_messages = false) if full_messages self.messages.each_with_object({}) do |(attribute, array), messages| messages[attribute] = array.map { |message| full_message(attribute, message) } end else without_default_proc(self.messages) end end # Adds +message+ to the error messages and used validator type to +details+ on +attribute+. # More than one error can be added to the same +attribute+. # If no +message+ is supplied, <tt>:invalid</tt> is assumed. # # person.errors.add(:name) # # => ["is invalid"] # person.errors.add(:name, :not_implemented, message: "must be implemented") # # => ["is invalid", "must be implemented"] # # person.errors.messages # # => {:name=>["is invalid", "must be implemented"]} # # person.errors.details # # => {:name=>[{error: :not_implemented}, {error: :invalid}]} # # If +message+ is a symbol, it will be translated using the appropriate # scope (see +generate_message+). # # If +message+ is a proc, it will be called, allowing for things like # <tt>Time.now</tt> to be used within an error. # # If the <tt>:strict</tt> option is set to +true+, it will raise # ActiveModel::StrictValidationFailed instead of adding the error. # <tt>:strict</tt> option can also be set to any other exception. # # person.errors.add(:name, :invalid, strict: true) # # => ActiveModel::StrictValidationFailed: name is invalid # person.errors.add(:name, :invalid, strict: NameIsInvalid) # # => NameIsInvalid: name is invalid # # person.errors.messages # => {} # # +attribute+ should be set to <tt>:base</tt> if the error is not # directly associated with a single attribute. 
# # person.errors.add(:base, :name_or_email_blank, # message: "either name or email must be present") # person.errors.messages # # => {:base=>["either name or email must be present"]} # person.errors.details # # => {:base=>[{error: :name_or_email_blank}]} def add(attribute, message = :invalid, options = {}) message = message.call if message.respond_to?(:call) detail = normalize_detail(message, options) message = normalize_message(attribute, message, options) if exception = options[:strict] exception = ActiveModel::StrictValidationFailed if exception == true raise exception, full_message(attribute, message) end details[attribute.to_sym] << detail messages[attribute.to_sym] << message end # Will add an error message to each of the attributes in +attributes+ # that is empty. # # person.errors.add_on_empty(:name) # person.errors.messages # # => {:name=>["can't be empty"]} def add_on_empty(attributes, options = {}) ActiveSupport::Deprecation.warn(<<-MESSAGE.squish) ActiveModel::Errors#add_on_empty is deprecated and will be removed in Rails 5.1. To achieve the same use: errors.add(attribute, :empty, options) if value.nil? || value.empty? MESSAGE Array(attributes).each do |attribute| value = @base.send(:read_attribute_for_validation, attribute) is_empty = value.respond_to?(:empty?) ? value.empty? : false add(attribute, :empty, options) if value.nil? || is_empty end end # Will add an error message to each of the attributes in +attributes+ that # is blank (using Object#blank?). # # person.errors.add_on_blank(:name) # person.errors.messages # # => {:name=>["can't be blank"]} def add_on_blank(attributes, options = {}) ActiveSupport::Deprecation.warn(<<-MESSAGE.squish) ActiveModel::Errors#add_on_blank is deprecated and will be removed in Rails 5.1. To achieve the same use: errors.add(attribute, :blank, options) if value.blank? 
MESSAGE Array(attributes).each do |attribute| value = @base.send(:read_attribute_for_validation, attribute) add(attribute, :blank, options) if value.blank? end end # Returns +true+ if an error on the attribute with the given message is # present, or +false+ otherwise. +message+ is treated the same as for +add+. # # person.errors.add :name, :blank # person.errors.added? :name, :blank # => true # person.errors.added? :name, "can't be blank" # => true # # If the error message requires an option, then it returns +true+ with # the correct option, or +false+ with an incorrect or missing option. # # person.errors.add :name, :too_long, { count: 25 } # person.errors.added? :name, :too_long, count: 25 # => true # person.errors.added? :name, "is too long (maximum is 25 characters)" # => true # person.errors.added? :name, :too_long, count: 24 # => false # person.errors.added? :name, :too_long # => false # person.errors.added? :name, "is too long" # => false def added?(attribute, message = :invalid, options = {}) message = message.call if message.respond_to?(:call) message = normalize_message(attribute, message, options) self[attribute].include? message end # Returns all the full error messages in an array. # # class Person # validates_presence_of :name, :address, :email # validates_length_of :name, in: 5..30 # end # # person = Person.create(address: '123 First St.') # person.errors.full_messages # # => ["Name is too short (minimum is 5 characters)", "Name can't be blank", "Email can't be blank"] def full_messages map { |attribute, message| full_message(attribute, message) } end alias :to_a :full_messages # Returns all the full error messages for a given attribute in an array. 
# # class Person # validates_presence_of :name, :email # validates_length_of :name, in: 5..30 # end # # person = Person.create() # person.errors.full_messages_for(:name) # # => ["Name is too short (minimum is 5 characters)", "Name can't be blank"] def full_messages_for(attribute) messages[attribute].map { |message| full_message(attribute, message) } end # Returns a full message for a given attribute. # # person.errors.full_message(:name, 'is invalid') # => "Name is invalid" def full_message(attribute, message) return message if attribute == :base attr_name = attribute.to_s.tr('.', '_').humanize attr_name = @base.class.human_attribute_name(attribute, default: attr_name) I18n.t(:"errors.format", { default: "%{attribute} %{message}", attribute: attr_name, message: message }) end # Translates an error message in its default scope # (<tt>activemodel.errors.messages</tt>). # # Error messages are first looked up in <tt>activemodel.errors.models.MODEL.attributes.ATTRIBUTE.MESSAGE</tt>, # if it's not there, it's looked up in <tt>activemodel.errors.models.MODEL.MESSAGE</tt> and if # that is not there also, it returns the translation of the default message # (e.g. <tt>activemodel.errors.messages.MESSAGE</tt>). The translated model # name, translated attribute name and the value are available for # interpolation. # # When using inheritance in your models, it will check all the inherited # models too, but only if the model itself hasn't been found. 
Say you have # <tt>class Admin < User; end</tt> and you wanted the translation for # the <tt>:blank</tt> error message for the <tt>title</tt> attribute, # it looks for these translations: # # * <tt>activemodel.errors.models.admin.attributes.title.blank</tt> # * <tt>activemodel.errors.models.admin.blank</tt> # * <tt>activemodel.errors.models.user.attributes.title.blank</tt> # * <tt>activemodel.errors.models.user.blank</tt> # * any default you provided through the +options+ hash (in the <tt>activemodel.errors</tt> scope) # * <tt>activemodel.errors.messages.blank</tt> # * <tt>errors.attributes.title.blank</tt> # * <tt>errors.messages.blank</tt> def generate_message(attribute, type = :invalid, options = {}) type = options.delete(:message) if options[:message].is_a?(Symbol) if @base.class.respond_to?(:i18n_scope) defaults = @base.class.lookup_ancestors.map do |klass| [ :"#{@base.class.i18n_scope}.errors.models.#{klass.model_name.i18n_key}.attributes.#{attribute}.#{type}", :"#{@base.class.i18n_scope}.errors.models.#{klass.model_name.i18n_key}.#{type}" ] end else defaults = [] end defaults << :"#{@base.class.i18n_scope}.errors.messages.#{type}" if @base.class.respond_to?(:i18n_scope) defaults << :"errors.attributes.#{attribute}.#{type}" defaults << :"errors.messages.#{type}" defaults.compact! defaults.flatten! key = defaults.shift defaults = options.delete(:message) if options[:message] value = (attribute != :base ? 
@base.send(:read_attribute_for_validation, attribute) : nil) options = { default: defaults, model: @base.model_name.human, attribute: @base.class.human_attribute_name(attribute), value: value, object: @base }.merge!(options) I18n.translate(key, options) end def marshal_dump # :nodoc: [@base, without_default_proc(@messages), without_default_proc(@details)] end def marshal_load(array) # :nodoc: @base, @messages, @details = array apply_default_array(@messages) apply_default_array(@details) end def init_with(coder) # :nodoc: coder.map.each { |k, v| instance_variable_set(:"@#{k}", v) } @details ||= {} apply_default_array(@messages) apply_default_array(@details) end private def normalize_message(attribute, message, options) case message when Symbol generate_message(attribute, message, options.except(*CALLBACKS_OPTIONS)) else message end end def normalize_detail(message, options) { error: message }.merge(options.except(*CALLBACKS_OPTIONS + MESSAGE_OPTIONS)) end def without_default_proc(hash) hash.dup.tap do |new_h| new_h.default_proc = nil end end def apply_default_array(hash) hash.default_proc = proc { |h, key| h[key] = [] } hash end end # Raised when a validation cannot be corrected by end users and are considered # exceptional. # # class Person # include ActiveModel::Validations # # attr_accessor :name # # validates_presence_of :name, strict: true # end # # person = Person.new # person.name = nil # person.valid? # # => ActiveModel::StrictValidationFailed: Name can't be blank class StrictValidationFailed < StandardError end # Raised when attribute values are out of range. class RangeError < ::RangeError end # Raised when unknown attributes are supplied via mass assignment. class UnknownAttributeError < NoMethodError attr_reader :record, :attribute def initialize(record, attribute) @record = record @attribute = attribute super("unknown attribute '#{attribute}' for #{@record.class}.") end end end
{ "content_hash": "a9f13b17fe86173b5d82689b345dfe8d", "timestamp": "", "source": "github", "line_count": 582, "max_line_length": 116, "avg_line_length": 35.2852233676976, "alnum_prop": 0.6152610050642774, "repo_name": "best-summer/project-dystopia", "id": "671dabb3a8b157e79455d6b4a411ab724a82a14d", "size": "20536", "binary": false, "copies": "8", "ref": "refs/heads/master", "path": "api-server/vendor/bundle/gems/activemodel-5.0.4/lib/active_model/errors.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "996" }, { "name": "HTML", "bytes": "4495" }, { "name": "JavaScript", "bytes": "18925" }, { "name": "Python", "bytes": "2598" }, { "name": "Ruby", "bytes": "38046" }, { "name": "Shell", "bytes": "140" } ], "symlink_target": "" }
package com.intel.analytics.bigdl.nn import org.scalatest.{FlatSpec, Matchers} import com.intel.analytics.bigdl.numeric.NumericFloat import com.intel.analytics.bigdl.tensor.{SparseTensor, Tensor} import com.intel.analytics.bigdl.utils.T import scala.util.Random class SparseLinearSpec extends FlatSpec with Matchers { "Sparse Linear" should "return the same result with Linear" in { val weight = Tensor.range(1, 8, 1).resize(2, 4) val bias = Tensor(2) val gradOutput = Tensor.range(1, 4, 1).resize(2, 2) val sl = SparseLinear(4, 2) val l = Linear(4, 2) l.weight.copy(weight) l.bias.copy(bias) sl.weight.copy(weight) sl.bias.copy(bias) val input = Tensor(2, 4) input.setValue(1, 1, 1f) input.setValue(2, 3, 3f) val sparseInput = Tensor.sparse(input) val out1 = sl.forward(sparseInput) sl.backward(sparseInput, gradOutput) val out2 = l.forward(input) l.backward(input, gradOutput) out1 should be (out2) sl.getParameters()._2 should be (l.getParameters()._2) } "Sparse Linear" should "return the same result with Linear 2" in { val gradOutput = Tensor(2, 2).rand() val input = Tensor(2, 4).rand() val sl = SparseLinear(4, 2) val l = Linear(4, 2) l.weight.copy(sl.weight) l.bias.copy(sl.bias) val sparseInput = Tensor.sparse(input) val out1 = sl.forward(sparseInput) sl.backward(sparseInput, gradOutput) val out2 = l.forward(input) l.backward(input, gradOutput) out1 should be (out2) sl.getParameters()._2 should be (l.getParameters()._2) } "Sparse Linear" should "return the same result with Linear 3" in { val gradOutput = Tensor(2, 2).rand() val input = Tensor(2, 4).rand() val sl = SparseLinear(4, 2, backwardStart = 1, backwardLength = 4) val l = Linear(4, 2) l.weight.copy(sl.weight) l.bias.copy(sl.bias) val sparseInput = Tensor.sparse(input) val out1 = sl.forward(sparseInput) val gradInput1 = sl.backward(sparseInput, gradOutput) val out2 = l.forward(input) val gradInput2 = l.backward(input, gradOutput) out1 should be (out2) gradInput1 should be (gradInput2) sl.getParameters()._2 should be 
(l.getParameters()._2) } "Sparse Linear" should "return the same result with Linear 4" in { val gradOutput = Tensor(3, 2).rand() val input = Tensor(3, 4).rand() val sl = SparseLinear(4, 2, backwardStart = 1, backwardLength = 4) val l = Linear(4, 2) l.weight.copy(sl.weight) l.bias.copy(sl.bias) val sparseInput = Tensor.sparse(input) val out1 = sl.forward(sparseInput) val gradInput1 = sl.backward(sparseInput, gradOutput) val out2 = l.forward(input) val gradInput2 = l.backward(input, gradOutput) out1 should be (out2) gradInput1 should be (gradInput2) sl.getParameters()._2 should be (l.getParameters()._2) } "Sparse Linear" should "return the same result with Linear 5" in { val gradOutput = Tensor(4, 2).rand() val input = Tensor(4, 10).apply1(_ => Random.nextInt(10) / 9 * Random.nextFloat()) val sl = SparseLinear(10, 2, backwardStart = 5, backwardLength = 5) val l = Linear(10, 2) l.weight.copy(sl.weight) l.bias.copy(sl.bias) val sparseInput = Tensor.sparse(input) val out1 = sl.forward(sparseInput) val gradInput1 = sl.backward(sparseInput, gradOutput) val out2 = l.forward(input) val gradInput2 = l.backward(input, gradOutput) out1 should be (out2) gradInput1 should be (gradInput2.narrow(2, 5, 5)) sl.getParameters()._2 should be (l.getParameters()._2) } "Sparse Linear" should "return the same result with Linear 6" in { val gradOutput = Tensor(4, 2).rand() val input = Tensor(4, 3).apply1(_ => Random.nextInt(5) / 4 * Random.nextFloat()) val input2 = Tensor(4, 2).apply1(_ => Random.nextInt(2) * Random.nextFloat()) val sl = SparseLinear(5, 2, backwardStart = 1, backwardLength = 5) val sparseModel = Sequential().add(ParallelTable().add(Identity()).add(Identity())) .add(SparseJoinTable(2)) .add(sl) val l = Linear(5, 2) l.weight.copy(sl.weight) l.bias.copy(sl.bias) val denseInput = Tensor(4, 5) denseInput.narrow(2, 1, 3).copy(input) denseInput.narrow(2, 4, 2).copy(input2) val sparseInput = T(Tensor.sparse(input), Tensor.sparse(input2)) Tensor.sparse(denseInput) val out1 = 
sparseModel.forward(sparseInput).toTensor[Float] val gradInput1 = sparseModel.backward(sparseInput, gradOutput) val out2 = l.forward(denseInput) val gradInput2 = l.backward(denseInput, gradOutput) out1 shouldEqual out2 sl.gradInput should be (gradInput2) sl.getParameters()._2 should be (l.getParameters()._2) } "Sparse Linear" should "return the same result with Linear 7" in { val gradOutput = Tensor(4, 2).rand() val input = Tensor(4, 1023213).apply1(_ => Random.nextInt(100000) / 99999 * Random.nextFloat()) val input2 = Tensor(4, 50).apply1(_ => Random.nextInt(2) * Random.nextFloat()) val sl = SparseLinear(1023263, 2, backwardStart = 1, backwardLength = 1023263) val sj = SparseJoinTable(2) val sparseModel = Sequential().add(ParallelTable().add(Identity()).add(Identity())) .add(sj) .add(sl) val l = Linear(1023263, 2) l.weight.copy(sl.weight) l.bias.copy(sl.bias) val denseInput = Tensor(4, 1023263) denseInput.narrow(2, 1, 1023213).copy(input) denseInput.narrow(2, 1023214, 50).copy(input2) val sparseInput = T(Tensor.sparse(input), Tensor.sparse(input2)) val si = Tensor.sparse(denseInput) val aaa = sl.forward(si).toTensor[Float].clone() val out1 = sparseModel.forward(sparseInput).toTensor[Float] val gradInput1 = sparseModel.backward(sparseInput, gradOutput) // val out2 = l.forward(denseInput) val gradInput2 = l.backward(denseInput, gradOutput) aaa shouldEqual out2 sj.output shouldEqual si out1 shouldEqual out2 sl.gradInput should be (gradInput2) sl.getParameters()._2.equals(l.getParameters()._2) shouldEqual true } }
{ "content_hash": "b0407a3f28fb33d8f1170b47f7fdbcc3", "timestamp": "", "source": "github", "line_count": 160, "max_line_length": 99, "avg_line_length": 37.9125, "alnum_prop": 0.6750741839762612, "repo_name": "jenniew/BigDL", "id": "392d48bd7ba60f6f2f4247114e50fd1eda1c9ef2", "size": "6667", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "spark/dl/src/test/scala/com/intel/analytics/bigdl/nn/SparseLinearSpec.scala", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "6829" }, { "name": "Lua", "bytes": "1904" }, { "name": "Python", "bytes": "654411" }, { "name": "RobotFramework", "bytes": "10720" }, { "name": "Scala", "bytes": "5697448" }, { "name": "Shell", "bytes": "50738" } ], "symlink_target": "" }
var GameObject = function (body, opts) { THREE.Object3D.call(this); opts = opts || {}; this.velocity = opts.velocity || new THREE.Vector3(); /** @var {THREE.Object3D} */ this.body = body; this.body.userData.entity = this; this.children.push(this.body); this.mouseOverBody = opts.mouseOverBody || body; if (this.mouseOverBody != this.body) { this.mouseOverBody.userData.entity = this; this.body.add(this.mouseOverBody); } // TODO: handle collision cases this.shouldCollide = false; this.body.position.copy(opts.position); this.body.rotation.order = "ZXY"; this.body.rotation.y = Math.PI / 2; this.body.rotation.z = opts.hasOwnProperty('angle') ? opts.angle : 0; // Whether this object has collided since the previous update this.isColliding = false; this.intersections = []; this.isMouseOver = false; }; GameObject.prototype = Object.create(THREE.Object3D.prototype); GameObject.prototype.update = function (dt) { this.isColliding = false; this.intersections.splice(0); this.body.position.add(this.velocity.clone().multiplyScalar(dt)); }; GameObject.prototype.handleCollision = function (intersection) { return Collider.CollisionBehavior.PASS; }; GameObject.createBox = function (width, height, position) { var body = new THREE.Mesh( new THREE.BoxGeometry(10, height, width), new THREE.MeshBasicMaterial( { visible: false } ) ); return new GameObject(body, { position: position }); };
{ "content_hash": "51f6263685769f08257364a752ad150c", "timestamp": "", "source": "github", "line_count": 50, "max_line_length": 123, "avg_line_length": 29.22, "alnum_prop": 0.7063655030800822, "repo_name": "zacharyliu/light-rays", "id": "ff6aecacd55c8fd986e2aadccca26729cc6d2af5", "size": "1461", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "js/gameObject.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "2199" }, { "name": "HTML", "bytes": "3826" }, { "name": "JavaScript", "bytes": "90553" } ], "symlink_target": "" }
package com.pearson.statsagg.webui.api; import com.pearson.statsagg.database_objects.notifications.NotificationGroupsDao; import com.pearson.statsagg.utilities.StackTrace; import java.io.PrintWriter; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.json.simple.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * @author Prashant Kumar (prashant4nov) */ @WebServlet(name="API_NotificationGroupsList", urlPatterns={"/api/notification-groups-list"}) public class NotificationGroupsList extends HttpServlet { private static final Logger logger = LoggerFactory.getLogger(NotificationGroupsList.class.getName()); public static final String PAGE_NAME = "API_NotificationGroupsList"; /** * Returns a short description of the servlet. * * @return a String containing servlet description */ @Override public String getServletInfo() { return PAGE_NAME; } /** * Handles the HTTP <code>GET</code> method. * * @param request servlet request * @param response servlet response */ @Override protected void doGet(HttpServletRequest request, HttpServletResponse response) { logger.debug("doGet"); try { JSONObject json = getNotificationGroupsList(request, new NotificationGroupsDao()); PrintWriter out = null; response.setContentType("application/json"); out = response.getWriter(); out.println(json); } catch (Exception e) { logger.error(e.toString() + System.lineSeparator() + StackTrace.getStringFromStackTrace(e)); } } /** * Returns a json object containing a list of notification groups. 
* * @param request servlet request * @param NotificationGroupsDao notificationGroupsDao object * @return list of notification groups */ protected JSONObject getNotificationGroupsList(HttpServletRequest request, NotificationGroupsDao notificationGroupsDao) { logger.debug("getNotificationGroupsList"); JSONObject errorMsg = null; JSONObject notificationGroupsList = null; int pageNumber = 0, pageSize = 0; try { if (request.getParameter(Helper.pageNumber) != null) { pageNumber = Integer.parseInt(request.getParameter(Helper.pageNumber)); } if (request.getParameter(Helper.pageSize) != null) { pageSize = Integer.parseInt(request.getParameter(Helper.pageSize)); } notificationGroupsList = notificationGroupsDao.getNotificationGroups(pageNumber * pageSize, pageSize); } catch (Exception e) { logger.error(e.toString() + System.lineSeparator() + StackTrace.getStringFromStackTrace(e)); errorMsg = new JSONObject(); errorMsg.put(Helper.error, Helper.errorMsg); } if (notificationGroupsList != null) return notificationGroupsList; else if (errorMsg != null) return errorMsg; else return null; } }
{ "content_hash": "5f05669a020a1b0c6e6d7b90e796b05f", "timestamp": "", "source": "github", "line_count": 91, "max_line_length": 125, "avg_line_length": 35.75824175824176, "alnum_prop": 0.6696373693915181, "repo_name": "karimgarza/StatsAgg", "id": "483f1b8135ec4de22b5faaf4691ace35ff4f93f7", "size": "3254", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/pearson/statsagg/webui/api/NotificationGroupsList.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "152985" }, { "name": "HTML", "bytes": "806480" }, { "name": "Java", "bytes": "1990496" }, { "name": "JavaScript", "bytes": "806120" } ], "symlink_target": "" }
#import <objc/runtime.h> #import "TiProxy.h" #import "TiHost.h" #import "KrollCallback.h" #import "KrollContext.h" #import "KrollBridge.h" #import "TiModule.h" #import "ListenerEntry.h" #import "TiComplexValue.h" #import "TiViewProxy.h" #import "TiBindingEvent.h" #include <libkern/OSAtomic.h> //Common exceptions to throw when the function call was improper NSString * const TiExceptionInvalidType = @"Invalid type passed to function"; NSString * const TiExceptionNotEnoughArguments = @"Invalid number of arguments to function"; NSString * const TiExceptionRangeError = @"Value passed to function exceeds allowed range"; NSString * const TiExceptionOSError = @"The iOS reported an error"; //Should be rare, but also useful if arguments are used improperly. NSString * const TiExceptionInternalInconsistency = @"Value was not the value expected"; //Rare exceptions to indicate a bug in the _siwriterpro code (Eg, method that a subclass should have implemented) NSString * const TiExceptionUnimplementedFunction = @"Subclass did not implement required method"; NSString * const TiExceptionMemoryFailure = @"Memory allocation failed"; SEL SetterForKrollProperty(NSString * key) { NSString *method = [NSString stringWithFormat:@"set%@%@_:", [[key substringToIndex:1] uppercaseString], [key substringFromIndex:1]]; return NSSelectorFromString(method); } SEL SetterWithObjectForKrollProperty(NSString * key) { NSString *method = [NSString stringWithFormat:@"set%@%@_:withObject:", [[key substringToIndex:1] uppercaseString], [key substringFromIndex:1]]; return NSSelectorFromString(method); } void DoProxyDelegateChangedValuesWithProxy(UIView<TiProxyDelegate> * target, NSString * key, id oldValue, id newValue, TiProxy * proxy) { // default implementation will simply invoke the setter property for this object // on the main UI thread // first check to see if the property is defined by a <key>:withObject: signature SEL sel = SetterWithObjectForKrollProperty(key); if ([target respondsToSelector:sel]) 
{ id firstarg = newValue; id secondarg = [NSDictionary dictionary]; if ([firstarg isKindOfClass:[TiComplexValue class]]) { firstarg = [(TiComplexValue*)newValue value]; secondarg = [(TiComplexValue*)newValue properties]; } if ([NSThread isMainThread]) { [target performSelector:sel withObject:firstarg withObject:secondarg]; } else { if (![key hasPrefix:@"set"]) { key = [NSString stringWithFormat:@"set%@%@_", [[key substringToIndex:1] uppercaseString], [key substringFromIndex:1]]; } NSArray *arg = [NSArray arrayWithObjects:key,firstarg,secondarg,target,nil]; TiThreadPerformOnMainThread(^{[proxy _dispatchWithObjectOnUIThread:arg];}, YES); } return; } sel = SetterForKrollProperty(key); if ([target respondsToSelector:sel]) { TiThreadPerformOnMainThread(^{[target performSelector:sel withObject:newValue];}, YES); } } void DoProxyDispatchToSecondaryArg(UIView<TiProxyDelegate> * target, SEL sel, NSString *key, id newValue, TiProxy * proxy) { id firstarg = newValue; id secondarg = [NSDictionary dictionary]; if ([firstarg isKindOfClass:[TiComplexValue class]]) { firstarg = [(TiComplexValue*)newValue value]; secondarg = [(TiComplexValue*)newValue properties]; } if ([NSThread isMainThread]) { [target performSelector:sel withObject:firstarg withObject:secondarg]; } else { if (![key hasPrefix:@"set"]) { key = [NSString stringWithFormat:@"set%@%@_", [[key substringToIndex:1] uppercaseString], [key substringFromIndex:1]]; } NSArray *arg = [NSArray arrayWithObjects:key,firstarg,secondarg,target,nil]; TiThreadPerformOnMainThread(^{[proxy _dispatchWithObjectOnUIThread:arg];}, YES); } } void DoProxyDelegateReadKeyFromProxy(UIView<TiProxyDelegate> * target, NSString *key, TiProxy * proxy, NSNull * nullValue, BOOL useThisThread) { // use valueForUndefined since this should really come from dynprops // not against the real implementation id value = [proxy valueForUndefinedKey:key]; if (value == nil) { return; } if (value == nullValue) { value = nil; } SEL sel = 
SetterWithObjectForKrollProperty(key); if ([target respondsToSelector:sel]) { DoProxyDispatchToSecondaryArg(target,sel,key,value,proxy); return; } sel = SetterForKrollProperty(key); if (![target respondsToSelector:sel]) { return; } if (useThisThread) { [target performSelector:sel withObject:value]; } else { TiThreadPerformOnMainThread(^{[target performSelector:sel withObject:value];}, NO); } } void DoProxyDelegateReadValuesWithKeysFromProxy(UIView<TiProxyDelegate> * target, id<NSFastEnumeration> keys, TiProxy * proxy) { BOOL isMainThread = [NSThread isMainThread]; NSNull * nullObject = [NSNull null]; BOOL viewAttached = YES; NSArray * keySequence = [proxy keySequence]; // assume if we don't have a view that we can send on the // main thread to the proxy if ([target isKindOfClass:[TiViewProxy class]]) { viewAttached = [(TiViewProxy*)target viewAttached]; } BOOL useThisThread = isMainThread==YES || viewAttached==NO; for (NSString * thisKey in keySequence) { DoProxyDelegateReadKeyFromProxy(target, thisKey, proxy, nullObject, useThisThread); } for (NSString * thisKey in keys) { if ([keySequence containsObject:thisKey]) { continue; } DoProxyDelegateReadKeyFromProxy(target, thisKey, proxy, nullObject, useThisThread); } } typedef struct { Class class; SEL selector; } TiClassSelectorPair; void TiClassSelectorFunction(TiBindingRunLoop runloop, void * payload) { TiClassSelectorPair * pair = payload; [(Class)(pair->class) performSelector:(SEL)(pair->selector) withObject:runloop]; } @implementation TiProxy +(void)performSelectorDuringRunLoopStart:(SEL)selector { TiClassSelectorPair * pair = malloc(sizeof(TiClassSelectorPair)); pair->class = [self class]; pair->selector = selector; TiBindingRunLoopCallOnStart(TiClassSelectorFunction,pair); } @synthesize pageContext, executionContext; @synthesize modelDelegate; #pragma mark Private -(id)init { if (self = [super init]) { _bubbleParent = YES; #if PROXY_MEMORY_TRACK == 1 NSLog(@"[DEBUG] INIT: %@ (%d)",self,[self hash]); #endif 
pthread_rwlock_init(&listenerLock, NULL); pthread_rwlock_init(&dynpropsLock, NULL); } return self; } -(void)initializeProperty:(NSString*)name defaultValue:(id)value { pthread_rwlock_wrlock(&dynpropsLock); if (dynprops == nil) { dynprops = [[NSMutableDictionary alloc] init]; } if ([dynprops valueForKey:name] == nil) { [dynprops setValue:((value == nil) ? [NSNull null] : value) forKey:name]; } pthread_rwlock_unlock(&dynpropsLock); } +(BOOL)shouldRegisterOnInit { return YES; } -(id)_initWithPageContext:(id<TiEvaluator>)context { if (self = [self init]) { pageContext = (id)context; // do not retain executionContext = context; //To ensure there is an execution context during _configure. if([[self class] shouldRegisterOnInit]) // && ![NSThread isMainThread]) { [pageContext registerProxy:self]; // allow subclasses to configure themselves } [self _configure]; executionContext = nil; } return self; } -(void)setModelDelegate:(id <TiProxyDelegate>)md { // TODO; revisit modelDelegate/TiProxy typing issue if ((void*)modelDelegate != self) { RELEASE_TO_NIL(modelDelegate); } if ((void*)md != self) { modelDelegate = [md retain]; } else { modelDelegate = md; } } /* * Currently, Binding/unbinding bridges does nearly nothing but atomically * increment or decrement. In the future, error checking could be done, or * when unbinding from the pageContext, to clear it. This might also be a * replacement for contextShutdown and friends, as contextShutdown is * called ONLY when a proxy is still registered with a context as it * is shutting down. 
*/ -(void)boundBridge:(id<TiEvaluator>)newBridge withKrollObject:(KrollObject *)newKrollObject { OSAtomicIncrement32(&bridgeCount); if (newBridge == pageContext) { pageKrollObject = newKrollObject; } } -(void)unboundBridge:(id<TiEvaluator>)oldBridge { if(OSAtomicDecrement32(&bridgeCount)<0) { DeveloperLog(@"[WARN] BridgeCount for %@ is now at %d",self,bridgeCount); } if(oldBridge == pageContext) { pageKrollObject = nil; } } -(void)contextWasShutdown:(id<TiEvaluator>)context { } -(void)contextShutdown:(id)sender { id<TiEvaluator> context = (id<TiEvaluator>)sender; [self contextWasShutdown:context]; if(pageContext == context){ //TODO: Should we really stay bug compatible with the old behavior? //I think we should instead have it that the proxy stays around until //it's no longer referenced by any contexts at all. [self _destroy]; pageContext = nil; pageKrollObject = nil; } } -(void)setExecutionContext:(id<TiEvaluator>)context { // the execution context is different than the page context // // the page context is the owning context that created (and thus owns) the proxy // // the execution context is the context which is executing against the context when // this proxy is being touched. since objects can be referenced from one context // in another, the execution context should be used to resolve certain things like // paths, etc. so that the proper context can be contextualized which is different // than the owning context (page context). // /* * In theory, if two contexts are both using the proxy at the same time, * bad things could happen since this value will be overwritten. * TODO: Investigate thread safety of this, or to moot it. 
*/ executionContext = context; //don't retain } -(void)_initWithProperties:(NSDictionary*)properties { [self setValuesForKeysWithDictionary:properties]; } -(void)_initWithCallback:(KrollCallback*)callback { } -(void)_configure { // for subclasses } -(id)_initWithPageContext:(id<TiEvaluator>)context_ args:(NSArray*)args { if (self = [self _initWithPageContext:context_]) { // If we are being created with a page context, assume that this is also // the execution context during creation so that recursively-made // proxies have the same page context. executionContext = context_; id a = nil; int count = [args count]; if (count > 0 && [[args objectAtIndex:0] isKindOfClass:[NSDictionary class]]) { a = [args objectAtIndex:0]; } if (count > 1 && [[args objectAtIndex:1] isKindOfClass:[KrollCallback class]]) { [self _initWithCallback:[args objectAtIndex:1]]; } [self _initWithProperties:a]; executionContext = nil; } return self; } -(void)_destroy { if (destroyed) { return; } destroyed = YES; #if PROXY_MEMORY_TRACK == 1 NSLog(@"[DEBUG] DESTROY: %@ (%d)",self,[self hash]); #endif if ((bridgeCount == 1) && (pageKrollObject != nil) && (pageContext != nil)) { [pageContext unregisterProxy:self]; } else if (bridgeCount > 1) { NSArray * pageContexts = [KrollBridge krollBridgesUsingProxy:self]; for (id thisPageContext in pageContexts) { [thisPageContext unregisterProxy:self]; } } if (executionContext!=nil) { executionContext = nil; } // remove all listeners JS side proxy pthread_rwlock_wrlock(&listenerLock); RELEASE_TO_NIL(listeners); pthread_rwlock_unlock(&listenerLock); pthread_rwlock_wrlock(&dynpropsLock); RELEASE_TO_NIL(dynprops); pthread_rwlock_unlock(&dynpropsLock); RELEASE_TO_NIL(baseURL); RELEASE_TO_NIL(krollDescription); if ((void*)modelDelegate != self) { TiThreadReleaseOnMainThread(modelDelegate, YES); modelDelegate = nil; } pageContext=nil; pageKrollObject = nil; } -(BOOL)destroyed { return destroyed; } -(void)dealloc { #if PROXY_MEMORY_TRACK == 1 NSLog(@"[DEBUG] DEALLOC: %@ 
(%d)",self,[self hash]); #endif [self _destroy]; pthread_rwlock_destroy(&listenerLock); pthread_rwlock_destroy(&dynpropsLock); [super dealloc]; } -(TiHost*)_host { if (pageContext==nil && executionContext==nil) { return nil; } if (pageContext!=nil) { TiHost *h = [pageContext host]; if (h!=nil) { return h; } } if (executionContext!=nil) { return [executionContext host]; } return nil; } -(TiProxy*)currentWindow { return [[self pageContext] preloadForKey:@"currentWindow" name:@"UI"]; } -(NSURL*)_baseURL { if (baseURL==nil) { TiProxy *currentWindow = [self currentWindow]; if (currentWindow!=nil) { // cache it [self _setBaseURL:[currentWindow _baseURL]]; return baseURL; } return [[self _host] baseURL]; } return baseURL; } -(void)_setBaseURL:(NSURL*)url { if (url!=baseURL) { RELEASE_TO_NIL(baseURL); baseURL = [[url absoluteURL] retain]; } } -(void)setReproxying:(BOOL)yn { reproxying = yn; } -(BOOL)inReproxy { return reproxying; } -(BOOL)_hasListeners:(NSString*)type { pthread_rwlock_rdlock(&listenerLock); //If listeners is nil at this point, result is still false. BOOL result = [[listeners objectForKey:type] intValue]>0; pthread_rwlock_unlock(&listenerLock); return result; } -(void)_fireEventToListener:(NSString*)type withObject:(id)obj listener:(KrollCallback*)listener thisObject:(TiProxy*)thisObject_ { TiHost *host = [self _host]; NSMutableDictionary* eventObject = nil; if ([obj isKindOfClass:[NSDictionary class]]) { eventObject = [NSMutableDictionary dictionaryWithDictionary:obj]; } else { eventObject = [NSMutableDictionary dictionary]; } // common event properties for all events we fire.. IF they're undefined. 
if ([eventObject objectForKey:@"type"] == nil) { [eventObject setObject:type forKey:@"type"]; } if ([eventObject objectForKey:@"source"] == nil) { [eventObject setObject:self forKey:@"source"]; } KrollContext* context = [listener context]; if (context!=nil) { id<TiEvaluator> evaluator = (id<TiEvaluator>)context.delegate; [host fireEvent:listener withObject:eventObject remove:NO context:evaluator thisObject:thisObject_]; } } -(void)_listenerAdded:(NSString*)type count:(int)count { // for subclasses } -(void)_listenerRemoved:(NSString*)type count:(int)count { // for subclasses } -(TiProxy *)parentForBubbling { return nil; } // this method will allow a proxy to return a different object back // for itself when the proxy serialization occurs from native back // to the bridge layer - the default is to just return ourselves, however, // in some concrete implementations you really want to return a different // representation which this will allow. the resulting value should not be // retained -(id)_proxy:(TiProxyBridgeType)type { return self; } #pragma mark Public -(id<NSFastEnumeration>)allKeys { pthread_rwlock_rdlock(&dynpropsLock); id<NSFastEnumeration> keys = [dynprops allKeys]; pthread_rwlock_unlock(&dynpropsLock); return keys; } -(NSNumber*)bubbleParent { return NUMBOOL(_bubbleParent); } -(void)setBubbleParent:(id)arg { _bubbleParent = [TiUtils boolValue:arg def:YES]; } /* * In views where the order in which keys are applied matter (I'm looking at you, TableView), this should be * an array of which keys go first, and in what order. Otherwise, this is nil. 
*/ -(NSArray *)keySequence { return nil; } -(KrollObject *)krollObjectForBridge:(KrollBridge *)bridge { if ((pageContext == bridge) && (pageKrollObject != NULL)) { return pageKrollObject; } if (bridgeCount == 0) { return nil; } if(![bridge usesProxy:self]) { DeveloperLog(@"[DEBUG] Proxy %@ may be missing its javascript representation.", self); } return [bridge krollObjectForProxy:self]; } -(KrollObject *)krollObjectForContext:(KrollContext *)context { if ([pageKrollObject context] == context) { return pageKrollObject; } if (bridgeCount == 0) { return nil; } KrollBridge * ourBridge = (KrollBridge *)[context delegate]; if(![ourBridge usesProxy:self]) { DeveloperLog(@"[DEBUG] Proxy %@ may be missing its javascript representation.", self); } return [ourBridge krollObjectForProxy:self]; } - (int) bindingRunLoopCount { return bridgeCount; } - (TiBindingRunLoop) primaryBindingRunLoop { if (pageKrollObject != nil) { return [pageContext krollContext]; } return nil; } - (NSArray *) bindingRunLoopArray { return [[KrollBridge krollBridgesUsingProxy:self] valueForKeyPath:@"krollContext"]; } -(BOOL)retainsJsObjectForKey:(NSString *)key { return YES; } -(void)rememberProxy:(TiProxy *)rememberedProxy { if (rememberedProxy == nil) { return; } if ((bridgeCount == 1) && (pageKrollObject != nil)) { if (rememberedProxy == self) { [pageKrollObject protectJsobject]; return; } [pageKrollObject noteKeylessKrollObject:[rememberedProxy krollObjectForBridge:(KrollBridge*)pageContext]]; return; } if (bridgeCount < 1) { DeveloperLog(@"[DEBUG] Proxy %@ is missing its javascript representation needed to remember %@.",self,rememberedProxy); return; } for (KrollBridge * thisBridge in [KrollBridge krollBridgesUsingProxy:self]) { if(rememberedProxy == self) { KrollObject * thisObject = [thisBridge krollObjectForProxy:self]; [thisObject protectJsobject]; continue; } if(![thisBridge usesProxy:rememberedProxy]) { continue; } [[thisBridge krollObjectForProxy:self] noteKeylessKrollObject:[thisBridge 
krollObjectForProxy:rememberedProxy]]; } } -(void)forgetProxy:(TiProxy *)forgottenProxy { if (forgottenProxy == nil) { return; } if ((bridgeCount == 1) && (pageKrollObject != nil)) { if (forgottenProxy == self) { [pageKrollObject unprotectJsobject]; return; } [pageKrollObject forgetKeylessKrollObject:[forgottenProxy krollObjectForBridge:(KrollBridge*)pageContext]]; return; } if (bridgeCount < 1) { //While this may be of concern and there used to be a //warning here, too many false alarms were raised during //multi-context cleanups. return; } for (KrollBridge * thisBridge in [KrollBridge krollBridgesUsingProxy:self]) { if(forgottenProxy == self) { KrollObject * thisObject = [thisBridge krollObjectForProxy:self]; [thisObject unprotectJsobject]; continue; } if(![thisBridge usesProxy:forgottenProxy]) { continue; } [[thisBridge krollObjectForProxy:self] forgetKeylessKrollObject:[thisBridge krollObjectForProxy:forgottenProxy]]; } } -(void)rememberSelf { [self rememberProxy:self]; } -(void)forgetSelf { [self forgetProxy:self]; } -(void)setCallback:(KrollCallback *)eventCallback forKey:(NSString *)key { BOOL isCallback = [eventCallback isKindOfClass:[KrollCallback class]]; //Also check against nil. 
if ((bridgeCount == 1) && (pageKrollObject != nil)) { if (!isCallback || ([eventCallback context] != [pageKrollObject context])) { [pageKrollObject forgetCallbackForKey:key]; } else { [pageKrollObject noteCallback:eventCallback forKey:key]; } return; } KrollBridge * blessedBridge = (KrollBridge*)[[eventCallback context] delegate]; NSArray * bridges = [KrollBridge krollBridgesUsingProxy:self]; for (KrollBridge * currentBridge in bridges) { KrollObject * currentKrollObject = [currentBridge krollObjectForProxy:self]; if(!isCallback || (blessedBridge != currentBridge)) { [currentKrollObject forgetCallbackForKey:key]; } else { [currentKrollObject noteCallback:eventCallback forKey:key]; } } } -(void)fireCallback:(NSString*)type withArg:(NSDictionary *)argDict withSource:(id)source { NSMutableDictionary* eventObject = [NSMutableDictionary dictionaryWithObjectsAndKeys:type,@"type",self,@"source",nil]; if ([argDict isKindOfClass:[NSDictionary class]]) { [eventObject addEntriesFromDictionary:argDict]; } if ((bridgeCount == 1) && (pageKrollObject != nil)) { [pageKrollObject invokeCallbackForKey:type withObject:eventObject thisObject:source]; return; } NSArray * bridges = [KrollBridge krollBridgesUsingProxy:self]; for (KrollBridge * currentBridge in bridges) { KrollObject * currentKrollObject = [currentBridge krollObjectForProxy:self]; [currentKrollObject invokeCallbackForKey:type withObject:eventObject thisObject:source]; } } -(void)addEventListener:(NSArray*)args { NSString *type = [args objectAtIndex:0]; id listener = [args objectAtIndex:1]; if (![listener isKindOfClass:[KrollWrapper class]] && ![listener isKindOfClass:[KrollCallback class]]) { ENSURE_TYPE(listener,KrollCallback); } KrollObject * ourObject = [self krollObjectForContext:([listener isKindOfClass:[KrollCallback class]] ? 
[(KrollCallback *)listener context] : [(KrollWrapper *)listener bridge].krollContext)]; [ourObject storeListener:listener forEvent:type]; //TODO: You know, we can probably nip this in the bud and do this at a lower level, //Or make this less onerous. int ourCallbackCount = 0; pthread_rwlock_wrlock(&listenerLock); ourCallbackCount = [[listeners objectForKey:type] intValue] + 1; if(listeners==nil){ listeners = [[NSMutableDictionary alloc] initWithCapacity:3]; } [listeners setObject:NUMINT(ourCallbackCount) forKey:type]; pthread_rwlock_unlock(&listenerLock); [self _listenerAdded:type count:ourCallbackCount]; } -(void)removeEventListener:(NSArray*)args { NSString *type = [args objectAtIndex:0]; KrollCallback* listener = [args objectAtIndex:1]; ENSURE_TYPE(listener,KrollCallback); KrollObject * ourObject = [self krollObjectForContext:[listener context]]; [ourObject removeListener:listener forEvent:type]; //TODO: You know, we can probably nip this in the bud and do this at a lower level, //Or make this less onerous. int ourCallbackCount = 0; pthread_rwlock_wrlock(&listenerLock); ourCallbackCount = [[listeners objectForKey:type] intValue] - 1; [listeners setObject:NUMINT(ourCallbackCount) forKey:type]; pthread_rwlock_unlock(&listenerLock); [self _listenerRemoved:type count:ourCallbackCount]; } -(BOOL)doesntOverrideFireEventWithSource { IMP proxySourceImp = [[TiProxy class] instanceMethodForSelector:@selector(fireEvent:withObject:withSource:propagate:)]; IMP subclassSourceImp = [self methodForSelector:@selector(fireEvent:withObject:withSource:propagate:)]; return proxySourceImp == subclassSourceImp; } -(void)fireEvent:(id)args { NSString *type = nil; NSDictionary * params = nil; if ([args isKindOfClass:[NSArray class]]) { type = [args objectAtIndex:0]; if ([args count] > 1) { params = [args objectAtIndex:1]; } if ([params isKindOfClass:[NSNull class]]) { DebugLog(@"[WARN]fireEvent of type %@ called with two parameters but second parameter is null. Ignoring. 
Check your code",type); params = nil; } } else if ([args isKindOfClass:[NSString class]]) { type = (NSString*)args; } id bubbleObject = [params objectForKey:@"bubbles"]; //TODO: Yes is the historical default. Is this the right thing to do, given the expense? BOOL bubble = [TiUtils boolValue:bubbleObject def:YES]; if((bubbleObject != nil) && ([params count]==1)){ params = nil; //No need to propagate when we already have this information } if ([self doesntOverrideFireEventWithSource]){ //TODO: Once the deprecated methods are removed, we can use the following line without checking to see if we'd shortcut. // For now, we're shortcutting to suppress false warnings. [self fireEvent:type withObject:params propagate:bubble reportSuccess:NO errorCode:0 message:nil]; return; } DebugLog(@"[WARN] The Objective-C class %@ has overridden -[fireEvent:withObject:withSource:propagate:].",[self class]); [self fireEvent:type withObject:params withSource:self propagate:bubble]; //In case of not debugging, we don't change behavior, just in case. } -(void)fireEvent:(NSString*)type withObject:(id)obj { if ([self doesntOverrideFireEventWithSource]){ //TODO: Once the deprecated methods are removed, we can use the following line without checking to see if we'd shortcut. // For now, we're shortcutting to suppress false warnings. [self fireEvent:type withObject:obj propagate:YES reportSuccess:NO errorCode:0 message:nil]; return; } DebugLog(@"[WARN] The Objective-C class %@ has overridden -[fireEvent:withObject:withSource:propagate:].",[self class]); [self fireEvent:type withObject:obj withSource:self propagate:YES]; //In case of not debugging, we don't change behavior, just in case. } -(void)fireEvent:(NSString*)type withObject:(id)obj withSource:(id)source { //The warning for this is in the propagate version. 
[self fireEvent:type withObject:obj withSource:source propagate:YES]; } -(void)fireEvent:(NSString*)type withObject:(id)obj propagate:(BOOL)yn { if ([self doesntOverrideFireEventWithSource]){ //TODO: Once the deprecated methods are removed, we can use the following line without checking to see if we'd shortcut. // For now, we're shortcutting to suppress false warnings. [self fireEvent:type withObject:obj propagate:yn reportSuccess:NO errorCode:0 message:nil]; return; } DebugLog(@"[WARN] The Objective-C class %@ has overridden -[fireEvent:withObject:withSource:propagate:].",[self class]); [self fireEvent:type withObject:obj withSource:self propagate:yn]; } -(void)fireEvent:(NSString*)type withObject:(id)obj withSource:(id)source propagate:(BOOL)propagate { DebugLog(@"[WARN] The methods -[fireEvent:withObject:withSource:] and [fireEvent:withObject:withSource:propagate:] are deprecated. Please use -[fireEvent:withObject:propagate:reportSuccess:errorCode:message:] instead."); if (self != source) { NSLog(@"[WARN] Source is not the same as self. (Perhaps this edge case is still valid?)"); } [self fireEvent:type withObject:obj withSource:source propagate:propagate reportSuccess:NO errorCode:0 message:nil]; } -(void)fireEvent:(NSString*)type withObject:(id)obj errorCode:(int)code message:(NSString*)message; { [self fireEvent:type withObject:obj propagate:YES reportSuccess:YES errorCode:code message:message]; } //What classes should actually use. 
-(void)fireEvent:(NSString*)type withObject:(id)obj propagate:(BOOL)propagate reportSuccess:(BOOL)report errorCode:(int)code message:(NSString*)message; { if (![self _hasListeners:type]) { return; } TiBindingEvent ourEvent; ourEvent = TiBindingEventCreateWithNSObjects(self, self, type, obj); if (report || (code != 0)) { TiBindingEventSetErrorCode(ourEvent, code); } if (message != nil) { TiBindingEventSetErrorMessageWithNSString(ourEvent, message); } TiBindingEventSetBubbles(ourEvent, propagate); TiBindingEventFire(ourEvent); } //Temporary method until source is removed, for our subclasses. -(void)fireEvent:(NSString*)type withObject:(id)obj withSource:(id)source propagate:(BOOL)propagate reportSuccess:(BOOL)report errorCode:(int)code message:(NSString*)message; { if (![self _hasListeners:type]) { return; } TiBindingEvent ourEvent; ourEvent = TiBindingEventCreateWithNSObjects(self, source, type, obj); if (report || (code != 0)) { TiBindingEventSetErrorCode(ourEvent, code); } if (message != nil) { TiBindingEventSetErrorMessageWithNSString(ourEvent, message); } TiBindingEventSetBubbles(ourEvent, propagate); TiBindingEventFire(ourEvent); } - (void)setValuesForKeysWithDictionary:(NSDictionary *)keyedValues { //It's possible that the 'setvalueforkey' has its own plans of what should be in the JS object, //so we should do this first as to not overwrite the subclass's setter. 
if ((bridgeCount == 1) && (pageKrollObject != nil)) { for (NSString * currentKey in keyedValues) { id currentValue = [keyedValues objectForKey:currentKey]; if([currentValue isKindOfClass:[TiProxy class]] && [pageContext usesProxy:currentValue]) { [pageKrollObject noteKrollObject:[currentValue krollObjectForBridge:(KrollBridge*)pageContext] forKey:currentKey]; } } } else { for (KrollBridge * currentBridge in [KrollBridge krollBridgesUsingProxy:self]) { KrollObject * currentKrollObject = [currentBridge krollObjectForProxy:self]; for (NSString * currentKey in keyedValues) { id currentValue = [keyedValues objectForKey:currentKey]; if([currentValue isKindOfClass:[TiProxy class]] && [currentBridge usesProxy:currentValue]) { [currentKrollObject noteKrollObject:[currentBridge krollObjectForProxy:currentValue] forKey:currentKey]; } } } } NSArray * keySequence = [self keySequence]; for (NSString * thisKey in keySequence) { id thisValue = [keyedValues objectForKey:thisKey]; if (thisValue == nil) //Dictionary doesn't have this key. Skip. { continue; } if (thisValue == [NSNull null]) { //When a null, we want to write a nil. thisValue = nil; } [self setValue:thisValue forKey:thisKey]; } for (NSString * thisKey in keyedValues) { // don't set if already set above if ([keySequence containsObject:thisKey]) continue; id thisValue = [keyedValues objectForKey:thisKey]; if (thisValue == nil) //Dictionary doesn't have this key. Skip. { continue; } if (thisValue == [NSNull null]) { //When a null, we want to write a nil. thisValue = nil; } [self setValue:thisValue forKey:thisKey]; } } DEFINE_EXCEPTIONS - (id) valueForUndefinedKey: (NSString *) key { if ([key isEqualToString:@"toString"] || [key isEqualToString:@"valueOf"]) { return [self description]; } if (dynprops != nil) { pthread_rwlock_rdlock(&dynpropsLock); // In some circumstances this result can be replaced at an inconvenient time, // releasing the returned value - so we retain/autorelease. 
id result = [[[dynprops objectForKey:key] retain] autorelease]; pthread_rwlock_unlock(&dynpropsLock); // if we have a stored value as complex, just unwrap // it and return the internal value if ([result isKindOfClass:[TiComplexValue class]]) { TiComplexValue *value = (TiComplexValue*)result; return [value value]; } return result; } //NOTE: we need to return nil here since in JS you can ask for properties //that don't exist and it should return undefined, not an exception return nil; } - (void)replaceValue:(id)value forKey:(NSString*)key notification:(BOOL)notify { if (destroyed) { return; } if([value isKindOfClass:[KrollCallback class]]){ [self setCallback:value forKey:key]; //As a wrapper, we hold onto a KrollWrapper tuple so that other contexts //may access the function. KrollWrapper * newValue = [[[KrollWrapper alloc] init] autorelease]; [newValue setBridge:(KrollBridge*)[[(KrollCallback*)value context] delegate]]; [newValue setJsobject:[(KrollCallback*)value function]]; [newValue protectJsobject]; value = newValue; } id current = nil; pthread_rwlock_wrlock(&dynpropsLock); if (dynprops!=nil) { // hold it for this invocation since set may cause it to be deleted current = [[[dynprops objectForKey:key] retain] autorelease]; if (current==[NSNull null]) { current = nil; } } else { dynprops = [[NSMutableDictionary alloc] init]; } // TODO: Clarify internal difference between nil/NSNull // (which represent different JS values, but possibly consistent internal behavior) id propvalue = (value == nil) ? [NSNull null] : value; BOOL newValue = (current != propvalue && ![current isEqual:propvalue]); // We need to stage this out; the problem at hand is that some values // we might store as properties (such as NSArray) use isEqual: as a // strict address/hash comparison. So the notification must always // occur, and it's up to the delegate to make sense of it (for now). 
if (newValue) { // Remember any proxies set on us so they don't get GC'd if ([propvalue isKindOfClass:[TiProxy class]]) { [self rememberProxy:propvalue]; } [dynprops setValue:propvalue forKey:key]; } pthread_rwlock_unlock(&dynpropsLock); if (self.modelDelegate!=nil && notify) { [[(NSObject*)self.modelDelegate retain] autorelease]; [self.modelDelegate propertyChanged:key oldValue:current newValue:propvalue proxy:self]; } // Forget any old proxies so that they get cleaned up if (newValue && [current isKindOfClass:[TiProxy class]]) { [self forgetProxy:current]; } } // TODO: Shouldn't we be forgetting proxies and unprotecting callbacks and such here? - (void) deleteKey:(NSString*)key { pthread_rwlock_wrlock(&dynpropsLock); if (dynprops!=nil) { [dynprops removeObjectForKey:key]; } pthread_rwlock_unlock(&dynpropsLock); } - (void) setValue:(id)value forUndefinedKey: (NSString *) key { [self replaceValue:value forKey:key notification:YES]; } -(void)applyProperties:(id)args { ENSURE_SINGLE_ARG(args, NSDictionary) [self setValuesForKeysWithDictionary:args]; } -(NSDictionary*)allProperties { pthread_rwlock_rdlock(&dynpropsLock); NSDictionary* props = [[dynprops copy] autorelease]; pthread_rwlock_unlock(&dynpropsLock); return props; } -(id)sanitizeURL:(id)value { if (value == [NSNull null]) { return nil; } if([value isKindOfClass:[NSString class]]) { NSURL * result = [TiUtils toURL:value proxy:self]; if (result != nil) { return result; } } return value; } #pragma mark Memory Management -(void)didReceiveMemoryWarning:(NSNotification*)notification { //FOR NOW, we're not dropping anything but we'll want to do before release //subclasses need to call super if overriden } #pragma mark Dispatching Helper //TODO: Now that we have TiThreadPerform, we should optimize this out. 
-(void)_dispatchWithObjectOnUIThread:(NSArray*)args { //NOTE: this is called by ENSURE_UI_THREAD_WITH_OBJ and will always be on UI thread when we get here id method = [args objectAtIndex:0]; id firstobj = [args count] > 1 ? [args objectAtIndex:1] : nil; id secondobj = [args count] > 2 ? [args objectAtIndex:2] : nil; id target = [args count] > 3 ? [args objectAtIndex:3] : self; if (firstobj == [NSNull null]) { firstobj = nil; } if (secondobj == [NSNull null]) { secondobj = nil; } SEL selector = NSSelectorFromString([NSString stringWithFormat:@"%@:withObject:",method]); [target performSelector:selector withObject:firstobj withObject:secondobj]; } #pragma mark Description for nice toString in JS -(id)toString:(id)args { if (krollDescription==nil) { // if we have a cached id, use it for our identifier id temp = [self valueForUndefinedKey:@"id"]; NSString *cn =nil; if (temp==nil||![temp isKindOfClass:[NSString class]]){ cn = NSStringFromClass([self class]); } else { cn = temp; } krollDescription = [[NSString stringWithFormat:@"[object %@]",[cn stringByReplacingOccurrencesOfString:@"Proxy" withString:@""]] retain]; } return krollDescription; } -(id)description { return [self toString:nil]; } -(id)toJSON { // this is called in the case you try and use JSON.stringify and an object is a proxy // since you can't serialize a proxy as JSON, just return null return [NSNull null]; } //For subclasses to override -(NSString*)apiName { DebugLog(@"[ERROR] Subclasses must override the apiName API endpoint."); return @"Ti.Proxy"; } + (id)createProxy:(NSString*)qualifiedName withProperties:(NSDictionary*)properties inContext:(id<TiEvaluator>)context { static dispatch_once_t onceToken; static CFMutableDictionaryRef classNameLookup; dispatch_once(&onceToken, ^{ classNameLookup = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, NULL); }); Class proxyClass = (Class)CFDictionaryGetValue(classNameLookup, qualifiedName); if (proxyClass == nil) { NSString 
*_siwriterpro = [NSString stringWithFormat:@"%@%s",@"Ti","tanium."]; if ([qualifiedName hasPrefix:_siwriterpro]) { qualifiedName = [qualifiedName stringByReplacingCharactersInRange:NSMakeRange(2, 6) withString:@""]; } NSString *className = [[qualifiedName stringByReplacingOccurrencesOfString:@"." withString:@""] stringByAppendingString:@"Proxy"]; proxyClass = NSClassFromString(className); if (proxyClass==nil) { DebugLog(@"[WARN] Attempted to load %@: Could not find class definition.", className); @throw [NSException exceptionWithName:@"org.siwriterpro.module" reason:[NSString stringWithFormat:@"Class not found: %@", qualifiedName] userInfo:nil]; } CFDictionarySetValue(classNameLookup, qualifiedName, proxyClass); } NSArray *args = properties != nil ? [NSArray arrayWithObject:properties] : nil; return [[[proxyClass alloc] _initWithPageContext:context args:args ] autorelease]; } @end
{ "content_hash": "2be03334b1bc83f73b9807b4bdc24179", "timestamp": "", "source": "github", "line_count": 1308, "max_line_length": 221, "avg_line_length": 27.93730886850153, "alnum_prop": 0.7189808986919162, "repo_name": "simonron/SiWriterPro1.5.6", "id": "f4a432f0dd069805ef735b2c3f896506fba1a21e", "size": "36864", "binary": false, "copies": "1", "ref": "refs/heads/newspeech", "path": "build/iphone/Classes/TiProxy.m", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "152494" }, { "name": "C++", "bytes": "51991" }, { "name": "CSS", "bytes": "7072" }, { "name": "D", "bytes": "913927" }, { "name": "JavaScript", "bytes": "184432" }, { "name": "Objective-C", "bytes": "3583340" }, { "name": "Objective-C++", "bytes": "18015" }, { "name": "Shell", "bytes": "1061" } ], "symlink_target": "" }
import os import sys try: import syslog except ImportError: syslog = None import platform import types from .log import DefaultFilter ENV_PREFIX = 'KAIRA_' _address_dict = { 'Windows': ('localhost', 514), 'Darwin': '/var/run/syslog', 'Linux': '/dev/log', 'FreeBSD': '/dev/log' } LOGGING = { 'version': 1, 'filters': { 'accessFilter': { '()': DefaultFilter, 'param': [0, 10, 20] }, 'errorFilter': { '()': DefaultFilter, 'param': [30, 40, 50] } }, 'formatters': { 'simple': { 'format': '%(asctime)s - (%(name)s)[%(levelname)s]: %(message)s', 'datefmt': '%Y-%m-%d %H:%M:%S' }, 'access': { 'format': '%(asctime)s - (%(name)s)[%(levelname)s][%(host)s]: ' + '%(request)s %(message)s %(status)d %(byte)d', 'datefmt': '%Y-%m-%d %H:%M:%S' } }, 'handlers': { 'internal': { 'class': 'logging.StreamHandler', 'filters': ['accessFilter'], 'formatter': 'simple', 'stream': sys.stderr }, 'accessStream': { 'class': 'logging.StreamHandler', 'filters': ['accessFilter'], 'formatter': 'access', 'stream': sys.stderr }, 'errorStream': { 'class': 'logging.StreamHandler', 'filters': ['errorFilter'], 'formatter': 'simple', 'stream': sys.stderr }, }, 'loggers': { 'kaira': { 'level': 'DEBUG', 'handlers': ['internal', 'errorStream'] }, 'network': { 'level': 'DEBUG', 'handlers': ['accessStream', 'errorStream'] } } } if syslog: LOGGING['handlers']['accessSysLog'] = { 'class': 'logging.handlers.SysLogHandler', 'address': _address_dict.get(platform.system(), ('localhost', 514)), 'facility': syslog.LOG_DAEMON, 'filters': ['accessFilter'], 'formatter': 'access' } LOGGING['handlers']['errorSysLog'] = { 'class': 'logging.handlers.SysLogHandler', 'address': _address_dict.get(platform.system(), ('localhost', 514)), 'facility': syslog.LOG_DAEMON, 'filters': ['errorFilter'], 'formatter': 'simple' } # this happens when using container or systems without syslog # keep things in config would cause file not exists error _addr = LOGGING['handlers']['accessSysLog']['address'] if type(_addr) is str and not 
os.path.exists(_addr): LOGGING['handlers'].pop('accessSysLog') LOGGING['handlers'].pop('errorSysLog') class Config(dict): def __init__(self, defaults=None, load_env=True, keep_alive=True): super().__init__(defaults or {}) self.REQUEST_MAX_SIZE = 100000000 # 100 megabytes self.REQUEST_TIMEOUT = 60 # 60 seconds self.KEEP_ALIVE = keep_alive if load_env: self.load_environment_vars() def __getattr__(self, attr): try: return self[attr] except KeyError as ke: raise AttributeError("Config has no '{}'".format(ke.args[0])) def __setattr__(self, attr, value): self[attr] = value def from_envvar(self, variable_name): """Load a configuration from an environment variable pointing to a configuration file. :param variable_name: name of the environment variable :return: bool. ``True`` if able to load config, ``False`` otherwise. """ config_file = os.environ.get(variable_name) if not config_file: raise RuntimeError('The environment variable %r is not set and ' 'thus configuration could not be loaded.' % variable_name) return self.from_pyfile(config_file) def from_pyfile(self, filename): """Update the values in the config from a Python file. Only the uppercase variables in that module are stored in the config. :param filename: an absolute path to the config file """ module = types.ModuleType('config') module.__file__ = filename try: with open(filename) as config_file: exec(compile(config_file.read(), filename, 'exec'), module.__dict__) except IOError as e: e.strerror = 'Unable to load configuration file (%s)' % e.strerror raise self.from_object(module) return True def from_object(self, obj): """Update the values from the given object. Objects are usually either modules or classes. Just the uppercase variables in that object are stored in the config. Example usage:: from yourapplication import default_config app.config.from_object(default_config) You should not use this function to load the actual configuration but rather configuration defaults. 
The actual config should be loaded with :meth:`from_pyfile` and ideally from a location not within the package because the package might be installed system wide. :param obj: an object holding the configuration """ for key in dir(obj): if key.isupper(): self[key] = getattr(obj, key) def load_environment_vars(self): """ Looks for any SANIC_ prefixed environment variables and applies them to the configuration if present. """ for k, v in os.environ.items(): if k.startswith(ENV_PREFIX): _, config_key = k.split(ENV_PREFIX, 1) self[config_key] = v
{ "content_hash": "8769d19e15fbd2fda2543e37d592f8f7", "timestamp": "", "source": "github", "line_count": 178, "max_line_length": 78, "avg_line_length": 33.2752808988764, "alnum_prop": 0.5353705892284315, "repo_name": "mulonemartin/kaira", "id": "88757fad31017f85299d481559b8b3365584a5e9", "size": "5923", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "kaira/config.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "118899" } ], "symlink_target": "" }
package esapi import ( "context" "net/http" "strconv" "strings" ) func newMLDeleteJobFunc(t Transport) MLDeleteJob { return func(job_id string, o ...func(*MLDeleteJobRequest)) (*Response, error) { var r = MLDeleteJobRequest{JobID: job_id} for _, f := range o { f(&r) } return r.Do(r.ctx, t) } } // ----- API Definition ------------------------------------------------------- // MLDeleteJob - Deletes an existing anomaly detection job. // // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-job.html. // type MLDeleteJob func(job_id string, o ...func(*MLDeleteJobRequest)) (*Response, error) // MLDeleteJobRequest configures the ML Delete Job API request. // type MLDeleteJobRequest struct { JobID string Force *bool WaitForCompletion *bool Pretty bool Human bool ErrorTrace bool FilterPath []string Header http.Header ctx context.Context } // Do executes the request and returns response or error. // func (r MLDeleteJobRequest) Do(ctx context.Context, transport Transport) (*Response, error) { var ( method string path strings.Builder params map[string]string ) method = "DELETE" path.Grow(1 + len("_ml") + 1 + len("anomaly_detectors") + 1 + len(r.JobID)) path.WriteString("/") path.WriteString("_ml") path.WriteString("/") path.WriteString("anomaly_detectors") path.WriteString("/") path.WriteString(r.JobID) params = make(map[string]string) if r.Force != nil { params["force"] = strconv.FormatBool(*r.Force) } if r.WaitForCompletion != nil { params["wait_for_completion"] = strconv.FormatBool(*r.WaitForCompletion) } if r.Pretty { params["pretty"] = "true" } if r.Human { params["human"] = "true" } if r.ErrorTrace { params["error_trace"] = "true" } if len(r.FilterPath) > 0 { params["filter_path"] = strings.Join(r.FilterPath, ",") } req, err := newRequest(method, path.String(), nil) if err != nil { return nil, err } if len(params) > 0 { q := req.URL.Query() for k, v := range params { q.Set(k, v) } req.URL.RawQuery = q.Encode() } if 
len(r.Header) > 0 { if len(req.Header) == 0 { req.Header = r.Header } else { for k, vv := range r.Header { for _, v := range vv { req.Header.Add(k, v) } } } } if ctx != nil { req = req.WithContext(ctx) } res, err := transport.Perform(req) if err != nil { return nil, err } response := Response{ StatusCode: res.StatusCode, Body: res.Body, Header: res.Header, } return &response, nil } // WithContext sets the request context. // func (f MLDeleteJob) WithContext(v context.Context) func(*MLDeleteJobRequest) { return func(r *MLDeleteJobRequest) { r.ctx = v } } // WithForce - true if the job should be forcefully deleted. // func (f MLDeleteJob) WithForce(v bool) func(*MLDeleteJobRequest) { return func(r *MLDeleteJobRequest) { r.Force = &v } } // WithWaitForCompletion - should this request wait until the operation has completed before returning. // func (f MLDeleteJob) WithWaitForCompletion(v bool) func(*MLDeleteJobRequest) { return func(r *MLDeleteJobRequest) { r.WaitForCompletion = &v } } // WithPretty makes the response body pretty-printed. // func (f MLDeleteJob) WithPretty() func(*MLDeleteJobRequest) { return func(r *MLDeleteJobRequest) { r.Pretty = true } } // WithHuman makes statistical values human-readable. // func (f MLDeleteJob) WithHuman() func(*MLDeleteJobRequest) { return func(r *MLDeleteJobRequest) { r.Human = true } } // WithErrorTrace includes the stack trace for errors in the response body. // func (f MLDeleteJob) WithErrorTrace() func(*MLDeleteJobRequest) { return func(r *MLDeleteJobRequest) { r.ErrorTrace = true } } // WithFilterPath filters the properties of the response body. // func (f MLDeleteJob) WithFilterPath(v ...string) func(*MLDeleteJobRequest) { return func(r *MLDeleteJobRequest) { r.FilterPath = v } } // WithHeader adds the headers to the HTTP request. 
// func (f MLDeleteJob) WithHeader(h map[string]string) func(*MLDeleteJobRequest) { return func(r *MLDeleteJobRequest) { if r.Header == nil { r.Header = make(http.Header) } for k, v := range h { r.Header.Add(k, v) } } } // WithOpaqueID adds the X-Opaque-Id header to the HTTP request. // func (f MLDeleteJob) WithOpaqueID(s string) func(*MLDeleteJobRequest) { return func(r *MLDeleteJobRequest) { if r.Header == nil { r.Header = make(http.Header) } r.Header.Set("X-Opaque-Id", s) } }
{ "content_hash": "95706050d0bbf947310315663287716a", "timestamp": "", "source": "github", "line_count": 212, "max_line_length": 112, "avg_line_length": 21.40566037735849, "alnum_prop": 0.6674746584398413, "repo_name": "quan-xie/tuba", "id": "9edcb757b9feab3492fe3e47d1ede61d9e40a023", "size": "5398", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "vendor/github.com/elastic/go-elasticsearch/v8/esapi/api.xpack.ml.delete_job.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Go", "bytes": "35894" } ], "symlink_target": "" }
#include <linux/kernel.h> #include <linux/init.h> #include <linux/platform_device.h> #include <linux/leds.h> #include <linux/err.h> #include <linux/io.h> #include <linux/scx200_gpio.h> #include <linux/module.h> #define DRVNAME "wrap-led" #define WRAP_POWER_LED_GPIO 2 #define WRAP_ERROR_LED_GPIO 3 #define WRAP_EXTRA_LED_GPIO 18 static struct platform_device *pdev; static void wrap_power_led_set(struct led_classdev *led_cdev, enum led_brightness value) { if (value) scx200_gpio_set_low(WRAP_POWER_LED_GPIO); else scx200_gpio_set_high(WRAP_POWER_LED_GPIO); } static void wrap_error_led_set(struct led_classdev *led_cdev, enum led_brightness value) { if (value) scx200_gpio_set_low(WRAP_ERROR_LED_GPIO); else scx200_gpio_set_high(WRAP_ERROR_LED_GPIO); } static void wrap_extra_led_set(struct led_classdev *led_cdev, enum led_brightness value) { if (value) scx200_gpio_set_low(WRAP_EXTRA_LED_GPIO); else scx200_gpio_set_high(WRAP_EXTRA_LED_GPIO); } static struct led_classdev wrap_power_led = { .name = "wrap::power", .brightness_set = wrap_power_led_set, .default_trigger = "default-on", .flags = LED_CORE_SUSPENDRESUME, }; static struct led_classdev wrap_error_led = { .name = "wrap::error", .brightness_set = wrap_error_led_set, .flags = LED_CORE_SUSPENDRESUME, }; static struct led_classdev wrap_extra_led = { .name = "wrap::extra", .brightness_set = wrap_extra_led_set, .flags = LED_CORE_SUSPENDRESUME, }; static int wrap_led_probe(struct platform_device *pdev) { int ret; ret = devm_led_classdev_register(&pdev->dev, &wrap_power_led); if (ret < 0) return ret; ret = devm_led_classdev_register(&pdev->dev, &wrap_error_led); if (ret < 0) return ret; return devm_led_classdev_register(&pdev->dev, &wrap_extra_led); } static struct platform_driver wrap_led_driver = { .probe = wrap_led_probe, .driver = { .name = DRVNAME, }, }; static int __init wrap_led_init(void) { int ret; if (!scx200_gpio_present()) { ret = -ENODEV; goto out; } ret = platform_driver_register(&wrap_led_driver); if (ret < 0) 
goto out; pdev = platform_device_register_simple(DRVNAME, -1, NULL, 0); if (IS_ERR(pdev)) { ret = PTR_ERR(pdev); platform_driver_unregister(&wrap_led_driver); goto out; } out: return ret; } static void __exit wrap_led_exit(void) { platform_device_unregister(pdev); platform_driver_unregister(&wrap_led_driver); } module_init(wrap_led_init); module_exit(wrap_led_exit); MODULE_AUTHOR("Kristian Kielhofner <[email protected]>"); MODULE_DESCRIPTION("PCEngines WRAP LED driver"); MODULE_LICENSE("GPL");
{ "content_hash": "6ed173a81c379c1a223805136da6ecff", "timestamp": "", "source": "github", "line_count": 123, "max_line_length": 65, "avg_line_length": 21.252032520325205, "alnum_prop": 0.6924254016832441, "repo_name": "mikedlowis-prototypes/albase", "id": "473fb6b97ed4f5f5ca86710f9b0afbfbaefa6ff3", "size": "2936", "binary": false, "copies": "726", "ref": "refs/heads/master", "path": "source/kernel/drivers/leds/leds-wrap.c", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Assembly", "bytes": "10263145" }, { "name": "Awk", "bytes": "55187" }, { "name": "Batchfile", "bytes": "31438" }, { "name": "C", "bytes": "551654518" }, { "name": "C++", "bytes": "11818066" }, { "name": "CMake", "bytes": "122998" }, { "name": "Clojure", "bytes": "945" }, { "name": "DIGITAL Command Language", "bytes": "232099" }, { "name": "GDB", "bytes": "18113" }, { "name": "Gherkin", "bytes": "5110" }, { "name": "HTML", "bytes": "18291" }, { "name": "Lex", "bytes": "58937" }, { "name": "M4", "bytes": "561745" }, { "name": "Makefile", "bytes": "7082768" }, { "name": "Objective-C", "bytes": "634652" }, { "name": "POV-Ray SDL", "bytes": "546" }, { "name": "Perl", "bytes": "1229221" }, { "name": "Perl6", "bytes": "11648" }, { "name": "Python", "bytes": "316536" }, { "name": "Roff", "bytes": "4201130" }, { "name": "Shell", "bytes": "2436879" }, { "name": "SourcePawn", "bytes": "2711" }, { "name": "TeX", "bytes": "182745" }, { "name": "UnrealScript", "bytes": "12824" }, { "name": "Visual Basic", "bytes": "11568" }, { "name": "XS", "bytes": "1239" }, { "name": "Yacc", "bytes": "146537" } ], "symlink_target": "" }
package org.axonframework.integrationtests.eventhandling; import org.axonframework.common.transaction.NoTransactionManager; import org.axonframework.common.transaction.Transaction; import org.axonframework.common.transaction.TransactionManager; import org.axonframework.eventhandling.EventHandlerInvoker; import org.axonframework.eventhandling.EventMessage; import org.axonframework.eventhandling.EventMessageHandler; import org.axonframework.eventhandling.EventTrackerStatus; import org.axonframework.eventhandling.EventTrackerStatusChangeListener; import org.axonframework.eventhandling.GapAwareTrackingToken; import org.axonframework.eventhandling.GenericTrackedEventMessage; import org.axonframework.eventhandling.GlobalSequenceTrackingToken; import org.axonframework.eventhandling.MultiEventHandlerInvoker; import org.axonframework.eventhandling.PropagatingErrorHandler; import org.axonframework.eventhandling.ReplayToken; import org.axonframework.eventhandling.SimpleEventHandlerInvoker; import org.axonframework.eventhandling.TrackedEventMessage; import org.axonframework.eventhandling.TrackingEventProcessor; import org.axonframework.eventhandling.TrackingEventProcessorConfiguration; import org.axonframework.eventhandling.TrackingEventStream; import org.axonframework.eventhandling.TrackingToken; import org.axonframework.eventhandling.tokenstore.TokenStore; import org.axonframework.eventhandling.tokenstore.UnableToClaimTokenException; import org.axonframework.eventhandling.tokenstore.inmemory.InMemoryTokenStore; import org.axonframework.eventsourcing.eventstore.EmbeddedEventStore; import org.axonframework.eventsourcing.eventstore.inmemory.InMemoryEventStorageEngine; import org.axonframework.integrationtests.utils.MockException; import org.axonframework.messaging.StreamableMessageSource; import org.axonframework.messaging.unitofwork.CurrentUnitOfWork; import org.axonframework.serialization.SerializationException; import org.hamcrest.CoreMatchers; import 
org.junit.jupiter.api.*; import org.mockito.*; import org.springframework.test.annotation.DirtiesContext; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Queue; import java.util.Set; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.function.Predicate; import java.util.function.Supplier; import java.util.function.UnaryOperator; import static java.util.Arrays.asList; import static java.util.Collections.emptySortedSet; import static java.util.Collections.singleton; import static java.util.stream.Collectors.toList; import static org.axonframework.eventhandling.EventUtils.asTrackedEventMessage; import static org.axonframework.integrationtests.utils.AssertUtils.assertWithin; import static org.axonframework.integrationtests.utils.EventTestUtils.createEvent; import static org.axonframework.integrationtests.utils.EventTestUtils.createEvents; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.*; /** * Test class validating the {@link TrackingEventProcessor}. This test class is part of the {@code integrationtests} * module as it relies on both the {@code messaging} (where the {@code TrackingEventProcessor} resides) and {@code * eventsourcing} modules. 
* * @author Rene de Waele */ class TrackingEventProcessorTest { private static final Object NO_RESET_PAYLOAD = null; private TrackingEventProcessor testSubject; private EmbeddedEventStore eventBus; private TokenStore tokenStore; private EventHandlerInvoker eventHandlerInvoker; private EventMessageHandler mockHandler; private List<Long> sleepInstructions; private TransactionManager mockTransactionManager; private Transaction mockTransaction; private static TrackingEventStream trackingEventStreamOf(Iterator<TrackedEventMessage<?>> iterator) { return trackingEventStreamOf(iterator, c -> { }); } private static TrackingEventStream trackingEventStreamOf(Iterator<TrackedEventMessage<?>> iterator, Consumer<Class<?>> blacklistListener) { return new TrackingEventStream() { private boolean hasPeeked; private TrackedEventMessage<?> peekEvent; @Override public Optional<TrackedEventMessage<?>> peek() { if (!hasPeeked) { if (!hasNextAvailable()) { return Optional.empty(); } peekEvent = iterator.next(); hasPeeked = true; } return Optional.of(peekEvent); } @Override public boolean hasNextAvailable(int timeout, TimeUnit unit) { if (timeout > 0) { // To keep tests speedy, we don't wait, but we do give other threads a chance Thread.yield(); } return hasPeeked || iterator.hasNext(); } @Override public TrackedEventMessage<?> nextAvailable() { if (!hasPeeked) { return iterator.next(); } TrackedEventMessage<?> result = peekEvent; peekEvent = null; hasPeeked = false; return result; } @Override public void close() { } @Override public void blacklist(TrackedEventMessage<?> ignoredMessage) { blacklistListener.accept(ignoredMessage.getPayloadType()); } }; } @BeforeEach void setUp() { tokenStore = spy(new InMemoryTokenStore()); mockHandler = mock(EventMessageHandler.class); when(mockHandler.canHandle(any())).thenReturn(true); when(mockHandler.supportsReset()).thenReturn(true); eventHandlerInvoker = spy( SimpleEventHandlerInvoker.builder() .eventHandlers(mockHandler) 
.listenerInvocationErrorHandler(PropagatingErrorHandler.instance()) .build() ); mockTransaction = mock(Transaction.class); mockTransactionManager = mock(TransactionManager.class); when(mockTransactionManager.startTransaction()).thenReturn(mockTransaction); //noinspection unchecked when(mockTransactionManager.fetchInTransaction(any(Supplier.class))).thenAnswer(i -> { Supplier<?> s = i.getArgument(0); return s.get(); }); doAnswer(i -> { Runnable r = i.getArgument(0); r.run(); return null; }).when(mockTransactionManager).executeInTransaction(any(Runnable.class)); eventBus = EmbeddedEventStore.builder().storageEngine(new InMemoryEventStorageEngine()).build(); sleepInstructions = new CopyOnWriteArrayList<>(); initProcessor(TrackingEventProcessorConfiguration.forSingleThreadedProcessing() .andEventAvailabilityTimeout(100, TimeUnit.MILLISECONDS)); } private void initProcessor(TrackingEventProcessorConfiguration config) { initProcessor(config, UnaryOperator.identity()); } private void initProcessor(TrackingEventProcessorConfiguration config, UnaryOperator<TrackingEventProcessor.Builder> customization) { TrackingEventProcessor.Builder eventProcessorBuilder = TrackingEventProcessor.builder() .name("test") .eventHandlerInvoker(eventHandlerInvoker) .messageSource(eventBus) .trackingEventProcessorConfiguration(config) .tokenStore(tokenStore) .transactionManager(mockTransactionManager); testSubject = new TrackingEventProcessor(customization.apply(eventProcessorBuilder)) { @Override protected void doSleepFor(long millisToSleep) { if (isRunning()) { sleepInstructions.add(millisToSleep); Thread.yield(); } } }; } @AfterEach void tearDown() { testSubject.shutDown(); eventBus.shutDown(); } @Test void testPublishedEventsGetPassedToHandler() throws Exception { CountDownLatch countDownLatch = new CountDownLatch(2); doAnswer(invocation -> { countDownLatch.countDown(); return null; }).when(mockHandler).handle(any()); testSubject.start(); // Give it a bit of time to start Thread.sleep(200); 
eventBus.publish(createEvents(2)); assertTrue(countDownLatch.await(5, TimeUnit.SECONDS), "Expected Handler to have received 2 published events"); } @Test void testBlacklist() throws Exception { when(mockHandler.canHandle(any())).thenReturn(false); when(mockHandler.canHandleType(String.class)).thenReturn(false); Set<Class<?>> blacklisted = new HashSet<>(); EmbeddedEventStore mockEventBus = mock(EmbeddedEventStore.class); TrackingToken trackingToken = new GlobalSequenceTrackingToken(0); List<TrackedEventMessage<?>> events = createEvents(2).stream().map(event -> asTrackedEventMessage(event, trackingToken)).collect(toList()); when(mockEventBus.openStream(null)).thenReturn(trackingEventStreamOf(events.iterator(), blacklisted::add)); testSubject = TrackingEventProcessor.builder() .name("test") .eventHandlerInvoker(eventHandlerInvoker) .messageSource(mockEventBus) .tokenStore(tokenStore) .transactionManager(NoTransactionManager.INSTANCE) .build(); testSubject.start(); Thread.sleep(200); assertEquals(1, blacklisted.size()); assertTrue(blacklisted.contains(String.class)); } @Test void testProcessorExposesErrorStateOnHandlerException() throws Exception { doReturn(Object.class).when(mockHandler).getTargetType(); AtomicBoolean errorFlag = new AtomicBoolean(true); doAnswer(invocation -> { if (errorFlag.get()) { throw new MockException("Simulating issues"); } return null; }).when(mockHandler).handle(any()); int segmentId = 0; testSubject.start(); eventBus.publish(createEvents(2)); assertWithin(2, TimeUnit.SECONDS, () -> { EventTrackerStatus status = testSubject.processingStatus().get(segmentId); assertNotNull(status); assertTrue(status.isErrorState()); assertEquals(MockException.class, status.getError().getClass()); }); errorFlag.set(false); assertWithin(5, TimeUnit.SECONDS, () -> { EventTrackerStatus status = testSubject.processingStatus().get(segmentId); assertNotNull(status); assertFalse(status.isErrorState()); assertNull(status.getError()); }); } @Test void 
testHandlerIsInvokedInTransactionScope() throws Exception { CountDownLatch countDownLatch = new CountDownLatch(1); AtomicInteger counter = new AtomicInteger(); AtomicInteger counterAtHandle = new AtomicInteger(); when(mockTransactionManager.startTransaction()).thenAnswer(i -> { counter.incrementAndGet(); return mockTransaction; }); doAnswer(i -> counter.decrementAndGet()).when(mockTransaction).rollback(); doAnswer(i -> counter.decrementAndGet()).when(mockTransaction).commit(); doAnswer(invocation -> { counterAtHandle.set(counter.get()); countDownLatch.countDown(); return null; }).when(mockHandler).handle(any()); testSubject.start(); // Give it a bit of time to start Thread.sleep(200); eventBus.publish(createEvents(2)); assertTrue(countDownLatch.await(5, TimeUnit.SECONDS), "Expected Handler to have received 2 published events"); assertEquals(1, counterAtHandle.get()); } @Test void testProcessorStopsOnNonTransientExceptionWhenLoadingToken() { doThrow(new SerializationException("Faking a serialization issue")).when(tokenStore).fetchToken("test", 0); testSubject.start(); assertWithin( 1, TimeUnit.SECONDS, () -> assertFalse(testSubject.isRunning(), "Expected processor to have stopped") ); assertWithin( 1, TimeUnit.SECONDS, () -> assertTrue(testSubject.isError(), "Expected processor to set the error flag") ); assertEquals(Collections.emptyList(), sleepInstructions); } @Test void testProcessorRetriesOnTransientExceptionWhenLoadingToken() throws Exception { CountDownLatch countDownLatch = new CountDownLatch(1); doAnswer(invocation -> { countDownLatch.countDown(); return null; }).when(mockHandler).handle(any()); doThrow(new RuntimeException("Faking a recoverable issue")) .doCallRealMethod() .when(tokenStore).fetchToken("test", 0); testSubject.start(); eventBus.publish(createEvent()); assertTrue(countDownLatch.await(5, TimeUnit.SECONDS), "Expected Handler to have received published event"); assertTrue(testSubject.isRunning()); assertFalse(testSubject.isError()); 
assertEquals(Collections.singletonList(5000L), sleepInstructions); } @Test void testTokenIsStoredWhenEventIsRead() throws Exception { CountDownLatch countDownLatch = new CountDownLatch(1); testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> { unitOfWork.onCleanup(uow -> countDownLatch.countDown()); return interceptorChain.proceed(); })); eventBus.publish(createEvent()); testSubject.start(); assertTrue(countDownLatch.await(5, TimeUnit.SECONDS), "Expected Unit of Work to have reached clean up phase"); verify(tokenStore).storeToken(any(), eq(testSubject.getName()), eq(0)); assertNotNull(tokenStore.fetchToken(testSubject.getName(), 0)); } @Test void testTokenIsExtendedAtStartAndStoredAtEndOfEventBatch_WithStoringTokensAfterProcessingSetting() throws Exception { initProcessor( TrackingEventProcessorConfiguration.forSingleThreadedProcessing().andBatchSize(100), TrackingEventProcessor.Builder::storingTokensAfterProcessing ); CountDownLatch countDownLatch = new CountDownLatch(2); AtomicInteger invocationsInUnitOfWork = new AtomicInteger(); doAnswer(i -> { if (CurrentUnitOfWork.isStarted()) { invocationsInUnitOfWork.incrementAndGet(); } return i.callRealMethod(); }).when(tokenStore).extendClaim(anyString(), anyInt()); testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> { unitOfWork.onCleanup(uow -> countDownLatch.countDown()); return interceptorChain.proceed(); })); testSubject.start(); eventBus.publish(createEvents(2)); assertTrue( countDownLatch.await(5, TimeUnit.SECONDS), "Expected Unit of Work to have reached clean up phase for 2 messages" ); InOrder inOrder = inOrder(tokenStore); inOrder.verify(tokenStore, times(1)).extendClaim(eq(testSubject.getName()), anyInt()); inOrder.verify(tokenStore, atLeastOnce()).storeToken(any(), any(), anyInt()); assertNotNull(tokenStore.fetchToken(testSubject.getName(), 0)); assertEquals( 1, invocationsInUnitOfWork.get(), "Unexpected number of invocations of token extension in unit of work" ); } 
/**
 * With {@code NoTransactionManager} and batch size 100, the token is stored once at the end of
 * the event batch; the one observed claim extension happens inside a Unit of Work.
 */
@Test
void testTokenStoredAtEndOfEventBatchAndNotExtendedWhenUsingANoTransactionManager() throws Exception {
    TrackingEventProcessorConfiguration tepConfig =
            TrackingEventProcessorConfiguration.forSingleThreadedProcessing().andBatchSize(100);
    testSubject = TrackingEventProcessor.builder()
                                        .name("test")
                                        .eventHandlerInvoker(eventHandlerInvoker)
                                        .trackingEventProcessorConfiguration(tepConfig)
                                        .messageSource(eventBus)
                                        .tokenStore(tokenStore)
                                        .transactionManager(NoTransactionManager.INSTANCE)
                                        .build();
    CountDownLatch countDownLatch = new CountDownLatch(2);
    AtomicInteger invocationsInUnitOfWork = new AtomicInteger();
    // Count claim extensions performed while a Unit of Work is active.
    doAnswer(i -> {
        if (CurrentUnitOfWork.isStarted()) {
            invocationsInUnitOfWork.incrementAndGet();
        }
        return i.callRealMethod();
    }).when(tokenStore).extendClaim(anyString(), anyInt());
    testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
        unitOfWork.onCleanup(uow -> countDownLatch.countDown());
        return interceptorChain.proceed();
    }));
    testSubject.start();
    eventBus.publish(createEvents(2));
    assertTrue(
            countDownLatch.await(5, TimeUnit.SECONDS),
            "Expected Unit of Work to have reached clean up phase for 2 messages"
    );
    // One token store for the whole batch, not one per event.
    verify(tokenStore, times(1)).storeToken(any(), any(), anyInt());
    assertNotNull(tokenStore.fetchToken(testSubject.getName(), 0));
    assertEquals(
            1, invocationsInUnitOfWork.get(),
            "Unexpected number of invocations of token extension in unit of work"
    );
}

/**
 * When a real TransactionManager is configured, the token is stored once at the end of the
 * batch and the claim is never extended from inside a Unit of Work (the stubbed answer fails
 * the test if that happens).
 */
@Test
void testTokenStoredAtEndOfEventBatchAndNotExtendedWhenTransactionManagerIsConfigured() throws Exception {
    TrackingEventProcessorConfiguration tepConfig =
            TrackingEventProcessorConfiguration.forSingleThreadedProcessing().andBatchSize(100);
    testSubject = TrackingEventProcessor.builder()
                                        .name("test")
                                        .eventHandlerInvoker(eventHandlerInvoker)
                                        .trackingEventProcessorConfiguration(tepConfig)
                                        .messageSource(eventBus)
                                        .tokenStore(tokenStore)
                                        .transactionManager(() -> mock(Transaction.class))
                                        .build();
    CountDownLatch countDownLatch = new CountDownLatch(2);
    AtomicInteger invocationsInUnitOfWork = new AtomicInteger();
    doAnswer(i -> {
        if (CurrentUnitOfWork.isStarted()) {
            invocationsInUnitOfWork.incrementAndGet();
            fail("Did not expect an invocation in a Unit of Work");
        }
        return i.callRealMethod();
    }).when(tokenStore).extendClaim(anyString(), anyInt());
    testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
        unitOfWork.onCleanup(uow -> countDownLatch.countDown());
        return interceptorChain.proceed();
    }));
    testSubject.start();
    eventBus.publish(createEvents(2));
    assertTrue(
            countDownLatch.await(5, TimeUnit.SECONDS),
            "Expected Unit of Work to have reached clean up phase for 2 messages"
    );
    verify(tokenStore, times(1)).storeToken(any(), any(), anyInt());
    assertNotNull(tokenStore.fetchToken(testSubject.getName(), 0));
    assertEquals(
            0, invocationsInUnitOfWork.get(),
            "Unexpected number of invocations of token extension in unit of work"
    );
}

/**
 * When handling takes longer than the token claim interval (handler sleeps 50 ms against a
 * 10 ms event availability timeout), the claim is extended after the token store operation.
 */
@Test
void testTokenStoredAtEndOfEventBatchAndExtendedWhenTokenClaimIntervalExceeded() throws Exception {
    TrackingEventProcessorConfiguration tepConfig =
            TrackingEventProcessorConfiguration.forSingleThreadedProcessing()
                                               .andEventAvailabilityTimeout(10, TimeUnit.MILLISECONDS);
    testSubject = TrackingEventProcessor.builder()
                                        .name("test")
                                        .trackingEventProcessorConfiguration(tepConfig)
                                        .eventHandlerInvoker(eventHandlerInvoker)
                                        .messageSource(eventBus)
                                        .tokenStore(tokenStore)
                                        .transactionManager(NoTransactionManager.INSTANCE)
                                        .build();
    CountDownLatch countDownLatch = new CountDownLatch(2);
    testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
        unitOfWork.onCleanup(uow -> countDownLatch.countDown());
        // Deliberately slow handling so the claim interval is exceeded.
        Thread.sleep(50);
        return interceptorChain.proceed();
    }));
    testSubject.start();
    eventBus.publish(createEvents(2));
    assertTrue(
            countDownLatch.await(5, TimeUnit.SECONDS),
            "Expected Unit of Work to have reached clean up phase for 2 messages"
    );
    // Here the store happens first, followed by a claim extension.
    InOrder inOrder = inOrder(tokenStore);
    inOrder.verify(tokenStore, times(1)).storeToken(any(), any(), anyInt());
    inOrder.verify(tokenStore, times(1)).extendClaim(eq(testSubject.getName()), anyInt());
    assertNotNull(tokenStore.fetchToken(testSubject.getName(), 0));
}

/**
 * A Unit of Work that fails on commit must not advance the stored token: after the rollback the
 * token is either absent or still at the stream's tail.
 */
@Test
void testTokenIsNotStoredWhenUnitOfWorkIsRolledBack() throws Exception {
    CountDownLatch countDownLatch = new CountDownLatch(1);
    // Force a rollback by throwing during the commit phase.
    testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
        unitOfWork.onCommit(uow -> {
            throw new MockException();
        });
        return interceptorChain.proceed();
    }));
    testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
        unitOfWork.onCleanup(uow -> countDownLatch.countDown());
        return interceptorChain.proceed();
    }));
    testSubject.start();
    eventBus.publish(createEvent());
    assertTrue(countDownLatch.await(5, TimeUnit.SECONDS),
               "Expected Unit of Work to have reached clean up phase");
    assertThat(
            tokenStore.fetchToken(testSubject.getName(), 0),
            CoreMatchers.anyOf(CoreMatchers.nullValue(), CoreMatchers.equalTo(eventBus.createTailToken()))
    );
}

/**
 * A processor started with a pre-stored token (pointing past the first of 10 events) must only
 * handle the remaining 9 events.
 */
@Test
void testContinueFromPreviousToken() throws Exception {
    tokenStore = new InMemoryTokenStore();
    eventBus.publish(createEvents(10));
    // Store a token for the first event, so processing should resume after it.
    TrackedEventMessage<?> firstEvent = eventBus.openStream(null).nextAvailable();
    tokenStore.storeToken(firstEvent.trackingToken(), testSubject.getName(), 0);
    assertEquals(firstEvent.trackingToken(), tokenStore.fetchToken(testSubject.getName(), 0));

    List<EventMessage<?>> acknowledgedEvents = new CopyOnWriteArrayList<>();
    CountDownLatch countDownLatch = new CountDownLatch(9);
    doAnswer(invocation -> {
        acknowledgedEvents.add((EventMessage<?>) invocation.getArguments()[0]);
        countDownLatch.countDown();
        return null;
    }).when(mockHandler).handle(any());

    testSubject = TrackingEventProcessor.builder()
                                        .name("test")
                                        .eventHandlerInvoker(eventHandlerInvoker)
                                        .messageSource(eventBus)
                                        .tokenStore(tokenStore)
                                        .transactionManager(NoTransactionManager.INSTANCE)
                                        .build();
    testSubject.start();
    assertTrue(countDownLatch.await(5, TimeUnit.SECONDS),
               "Expected 9 invocations on Event Handler by now");
    // Statement continues in the next chunk segment: exactly 9 events were acknowledged.
    assertEquals(9,
                 acknowledgedEvents.size());
}

/**
 * Events published while the processor is shut down are picked up and handled once the
 * processor is restarted.
 */
@Test
@Timeout(value = 10)
@DirtiesContext
void testContinueAfterPause() throws Exception {
    List<EventMessage<?>> acknowledgedEvents = new CopyOnWriteArrayList<>();
    CountDownLatch countDownLatch = new CountDownLatch(2);
    doAnswer(invocation -> {
        acknowledgedEvents.add((EventMessage<?>) invocation.getArguments()[0]);
        countDownLatch.countDown();
        return null;
    }).when(mockHandler).handle(any());
    testSubject.start();
    eventBus.publish(createEvents(2));
    assertTrue(countDownLatch.await(5, TimeUnit.SECONDS),
               "Expected 2 invocations on Event Handler by now");
    assertEquals(2, acknowledgedEvents.size());

    testSubject.shutDown();
    // The thread may block for 1 second waiting for a next event to pop up
    while (testSubject.activeProcessorThreads() > 0) {
        Thread.sleep(1);
        // Wait...
    }

    CountDownLatch countDownLatch2 = new CountDownLatch(2);
    doAnswer(invocation -> {
        acknowledgedEvents.add((EventMessage<?>) invocation.getArguments()[0]);
        countDownLatch2.countDown();
        return null;
    }).when(mockHandler).handle(any());
    eventBus.publish(createEvents(2));
    // The processor is stopped, so nothing may have been handled yet.
    assertEquals(2, countDownLatch2.getCount());
    testSubject.start();
    assertTrue(countDownLatch2.await(5, TimeUnit.SECONDS),
               "Expected 4 invocations on Event Handler by now");
    assertEquals(4, acknowledgedEvents.size());
}

/**
 * When opening the event stream fails, the processor enters retry mode: it retries the open
 * (verified by two openStream calls) and still handles all published events.
 */
@Test
@DirtiesContext
void testProcessorGoesToRetryModeWhenOpenStreamFails() throws Exception {
    eventBus = spy(eventBus);

    tokenStore = new InMemoryTokenStore();
    eventBus.publish(createEvents(5));
    // First openStream attempt fails, the second proceeds normally.
    when(eventBus.openStream(any())).thenThrow(new MockException()).thenCallRealMethod();

    List<EventMessage<?>> acknowledgedEvents = new ArrayList<>();
    CountDownLatch countDownLatch = new CountDownLatch(5);
    doAnswer(invocation -> {
        acknowledgedEvents.add((EventMessage<?>) invocation.getArguments()[0]);
        countDownLatch.countDown();
        return null;
    }).when(mockHandler).handle(any());

    testSubject = TrackingEventProcessor.builder()
                                        .name("test")
                                        .eventHandlerInvoker(eventHandlerInvoker)
                                        .messageSource(eventBus)
                                        .tokenStore(tokenStore)
                                        .transactionManager(NoTransactionManager.INSTANCE)
                                        .build();
    testSubject.start();
    // Give it a bit of time to start
    Thread.sleep(200);
    assertTrue(countDownLatch.await(10, TimeUnit.SECONDS),
               "Expected 5 invocations on Event Handler by now");
    assertEquals(5, acknowledgedEvents.size());
    verify(eventBus, times(2)).openStream(any());
}

/**
 * When the second event's Unit of Work rolls back, the token for the first, successfully
 * handled event must still have been stored.
 */
@Test
void testFirstTokenIsStoredWhenUnitOfWorkIsRolledBackOnSecondEvent() throws Exception {
    List<? extends EventMessage<?>> events = createEvents(2);
    CountDownLatch countDownLatch = new CountDownLatch(2);
    // Fail the commit only for the second event.
    testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
        unitOfWork.onCommit(uow -> {
            if (uow.getMessage().equals(events.get(1))) {
                throw new MockException();
            }
        });
        return interceptorChain.proceed();
    }));
    testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
        unitOfWork.onCleanup(uow -> countDownLatch.countDown());
        return interceptorChain.proceed();
    }));
    testSubject.start();
    // Give it a bit of time to start
    Thread.sleep(200);
    eventBus.publish(events);
    assertTrue(countDownLatch.await(5, TimeUnit.SECONDS),
               "Expected Unit of Work to have reached clean up phase");
    verify(tokenStore, atLeastOnce()).storeToken(any(), any(), anyInt());
    assertNotNull(tokenStore.fetchToken(testSubject.getName(), 0));
}

/**
 * Two events carrying the identical tracking token are processed in the same batch, so a
 * rollback on the second event also prevents the first event's token from being committed
 * (fetchToken yields null at the end).
 */
@Test
@DirtiesContext
void testEventsWithTheSameTokenAreProcessedInTheSameBatch() throws Exception {
    eventBus.shutDown();

    eventBus = mock(EmbeddedEventStore.class);
    TrackingToken trackingToken = new GlobalSequenceTrackingToken(0);
    // Both events share the exact same tracking token.
    List<TrackedEventMessage<?>> events =
            createEvents(2).stream().map(event -> asTrackedEventMessage(event, trackingToken)).collect(toList());
    when(eventBus.openStream(null)).thenReturn(trackingEventStreamOf(events.iterator()));
    testSubject = TrackingEventProcessor.builder()
                                        .name("test")
                                        .eventHandlerInvoker(eventHandlerInvoker)
                                        .messageSource(eventBus)
                                        .tokenStore(tokenStore)
                                        .transactionManager(NoTransactionManager.INSTANCE)
                                        .build();

    //noinspection Duplicates
    testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
        unitOfWork.onCommit(uow -> {
            if (uow.getMessage().equals(events.get(1))) {
                throw new MockException();
            }
        });
        return interceptorChain.proceed();
    }));
    CountDownLatch countDownLatch = new CountDownLatch(2);
    testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
        unitOfWork.onCleanup(uow -> countDownLatch.countDown());
        return interceptorChain.proceed();
    }));
    testSubject.start();
    // Give it a bit of time to start
    Thread.sleep(200);
    assertTrue(countDownLatch.await(5, TimeUnit.SECONDS),
               "Expected Unit of Work to have reached clean up phase");
    verify(tokenStore, atLeastOnce()).storeToken(any(), any(), anyInt());
    // The batch rolled back as a whole, so no token was committed.
    assertNull(tokenStore.fetchToken(testSubject.getName(), 0));
}

/**
 * Resetting tokens causes all previously handled events to be redelivered as replays, in the
 * original order; a double reset (issue #559) must be harmless. Continues in the next segment.
 */
@Test
void testResetCausesEventsToBeReplayed() throws Exception {
    when(mockHandler.supportsReset()).thenReturn(true);
    final List<String> handled = new CopyOnWriteArrayList<>();
    final List<String> handledInRedelivery = new CopyOnWriteArrayList<>();
    int segmentId = 0;

    //noinspection Duplicates
    doAnswer(i -> {
        EventMessage<?> message = i.getArgument(0);
        if (ReplayToken.isReplay(message)) {
            handledInRedelivery.add(message.getIdentifier());
        }
        handled.add(message.getIdentifier());
        return null;
    }).when(mockHandler).handle(any());

    eventBus.publish(createEvents(4));
    testSubject.start();
    assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(4, handled.size()));
    testSubject.shutDown();
    testSubject.resetTokens();
    // Resetting twice caused problems (see issue #559)
    testSubject.resetTokens();
    testSubject.start();
    assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(8, handled.size()));
    assertEquals(handled.subList(0, 4), handled.subList(4, 8));
    assertEquals(handled.subList(4, 8), handledInRedelivery);
    assertTrue(testSubject.processingStatus().get(segmentId).isReplaying());
    assertTrue(testSubject.processingStatus().get(segmentId).getCurrentPosition().isPresent());
    assertTrue(testSubject.processingStatus().get(segmentId).getResetPosition().isPresent());
    long resetPositionAtReplay = testSubject.processingStatus().get(segmentId).getCurrentPosition().getAsLong();

    // Publishing a new event ends the replay: the replaying flag and reset position clear, and
    // the current position moves past the position recorded during the replay.
    eventBus.publish(createEvents(1));
    assertWithin(1, TimeUnit.SECONDS, () -> assertFalse(
            testSubject.processingStatus().get(segmentId).isReplaying()
    ));
    assertWithin(1, TimeUnit.SECONDS, () -> assertFalse(
            testSubject.processingStatus().get(segmentId).getResetPosition().isPresent()
    ));
    assertWithin(1, TimeUnit.SECONDS, () -> assertTrue(
            testSubject.processingStatus().get(segmentId).getCurrentPosition().isPresent()
    ));
    //noinspection OptionalGetWithoutIsPresent
    assertWithin(1, TimeUnit.SECONDS, () -> assertTrue(
            testSubject.processingStatus().get(segmentId).getCurrentPosition().getAsLong() > resetPositionAtReplay
    ));
    // Two resets were issued above, hence two performReset invocations.
    verify(eventHandlerInvoker, times(2)).performReset(NO_RESET_PAYLOAD);
}

/**
 * Resetting to an explicit position (GlobalSequenceTrackingToken(1)) replays only the events
 * after that position: of 4 events, the first 2 are not redelivered, the last 2 are.
 */
@Test
void testResetToPositionCausesCertainEventsToBeReplayed() throws Exception {
    when(mockHandler.supportsReset()).thenReturn(true);
    final List<String> handled = new CopyOnWriteArrayList<>();
    final List<String> handledInRedelivery = new CopyOnWriteArrayList<>();
    int segmentId = 0;

    //noinspection Duplicates
    doAnswer(i -> {
        EventMessage<?> message = i.getArgument(0);
        if (ReplayToken.isReplay(message)) {
            handledInRedelivery.add(message.getIdentifier());
        }
        handled.add(message.getIdentifier());
        return null;
    }).when(mockHandler).handle(any());

    eventBus.publish(createEvents(4));
    testSubject.start();
    assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(4, handled.size()));
    testSubject.shutDown();
    testSubject.resetTokens(source -> new GlobalSequenceTrackingToken(1L));
    testSubject.start();
    assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(6, handled.size()));
    // The first two events (at or before position 1) are not replayed.
    assertFalse(handledInRedelivery.contains(handled.get(0)));
    assertFalse(handledInRedelivery.contains(handled.get(1)));
    assertEquals(handled.subList(2, 4), handled.subList(4, 6));
    assertEquals(handled.subList(4, 6), handledInRedelivery);
    assertTrue(testSubject.processingStatus().get(segmentId).isReplaying());
    assertTrue(testSubject.processingStatus().get(segmentId).getCurrentPosition().isPresent());
    assertTrue(testSubject.processingStatus().get(segmentId).getResetPosition().isPresent());
    long resetPositionAtReplay = testSubject.processingStatus().get(segmentId).getResetPosition().getAsLong();

    // Publishing a new event ends the replay, as in the test above.
    eventBus.publish(createEvents(1));
    assertWithin(1, TimeUnit.SECONDS, () -> assertFalse(
            testSubject.processingStatus().get(segmentId).isReplaying()
    ));
    assertWithin(1, TimeUnit.SECONDS, () -> assertFalse(
            testSubject.processingStatus().get(segmentId).getResetPosition().isPresent()
    ));
    assertWithin(1, TimeUnit.SECONDS, () -> assertTrue(
            testSubject.processingStatus().get(segmentId).getCurrentPosition().isPresent()
    ));
    //noinspection OptionalGetWithoutIsPresent
    assertWithin(1, TimeUnit.SECONDS, () -> assertTrue(
            testSubject.processingStatus().get(segmentId).getCurrentPosition().getAsLong() > resetPositionAtReplay
    ));
    verify(eventHandlerInvoker).performReset(NO_RESET_PAYLOAD);
}

/**
 * A processor configured with an initial tracking token (position 1) handles only events after
 * that position; a subsequent reset replays from that same configured token.
 */
@Test
void testResetOnInitializeWithTokenResetToThatToken() throws Exception {
    TrackingEventProcessorConfiguration config =
            TrackingEventProcessorConfiguration.forSingleThreadedProcessing()
                                               .andInitialTrackingToken(ms -> new GlobalSequenceTrackingToken(1L));
    TrackingEventProcessor.Builder eventProcessorBuilder =
            TrackingEventProcessor.builder()
                                  .name("test")
                                  .eventHandlerInvoker(eventHandlerInvoker)
                                  .messageSource(eventBus)
                                  .tokenStore(tokenStore)
                                  .transactionManager(NoTransactionManager.INSTANCE)
                                  .trackingEventProcessorConfiguration(config);
    // Record sleep instructions instead of actually sleeping, but only while running.
    testSubject = new TrackingEventProcessor(eventProcessorBuilder) {
        @Override
        protected void doSleepFor(long millisToSleep) {
            if (isRunning()) {
                sleepInstructions.add(millisToSleep);
            }
        }
    };
    when(mockHandler.supportsReset()).thenReturn(true);
    final List<String> handled = new CopyOnWriteArrayList<>();
    final List<String> handledInRedelivery = new CopyOnWriteArrayList<>();
    int segmentId = 0;

    //noinspection Duplicates
    doAnswer(i -> {
        EventMessage<?> message = i.getArgument(0);
        if (ReplayToken.isReplay(message)) {
            handledInRedelivery.add(message.getIdentifier());
        }
        handled.add(message.getIdentifier());
        return null;
    }).when(mockHandler).handle(any());

    eventBus.publish(createEvents(4));
    testSubject.start();
    // Only 2 of the 4 events lie beyond the initial token's position.
    assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(2, handled.size()));
    testSubject.shutDown();
    testSubject.resetTokens();
    testSubject.start();
    assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(4, handled.size()));
    assertEquals(handled.subList(0, 2), handled.subList(2, 4));
    assertEquals(handled.subList(2, 4), handledInRedelivery);
    assertTrue(testSubject.processingStatus().get(segmentId).isReplaying());
    assertTrue(testSubject.processingStatus().get(segmentId).getCurrentPosition().isPresent());
    assertTrue(testSubject.processingStatus().get(segmentId).getResetPosition().isPresent());
    long resetPositionAtReplay = testSubject.processingStatus().get(segmentId).getResetPosition().getAsLong();

    eventBus.publish(createEvents(1));
    assertWithin(1, TimeUnit.SECONDS, () -> assertFalse(
            testSubject.processingStatus().get(segmentId).isReplaying()
    ));
    assertWithin(1, TimeUnit.SECONDS, () -> assertFalse(
            testSubject.processingStatus().get(segmentId).getResetPosition().isPresent()));
    assertWithin(1, TimeUnit.SECONDS, () -> assertTrue(
            testSubject.processingStatus().get(segmentId).getCurrentPosition().isPresent()
    ));
    //noinspection OptionalGetWithoutIsPresent
    assertWithin(1, TimeUnit.SECONDS, () -> assertTrue(
            testSubject.processingStatus().get(segmentId).getCurrentPosition().getAsLong() > resetPositionAtReplay
    ));
    verify(eventHandlerInvoker).performReset(NO_RESET_PAYLOAD);
}

/**
 * Resetting tokens before the first start performs a normal (non-replay) run. Continues in the
 * next segment.
 */
@Test
void testResetBeforeStartingPerformsANormalRun() throws Exception {
    when(mockHandler.supportsReset()).thenReturn(true);
    final List<String> handled = new CopyOnWriteArrayList<>();
    final List<String> handledInRedelivery = new CopyOnWriteArrayList<>();
    int segmentId = 0;
    //noinspection Duplicates
    doAnswer(i
                     -> {
        EventMessage<?> message = i.getArgument(0);
        if (ReplayToken.isReplay(message)) {
            handledInRedelivery.add(message.getIdentifier());
        }
        handled.add(message.getIdentifier());
        return null;
    }).when(mockHandler).handle(any());
    testSubject.resetTokens();
    testSubject.start();
    eventBus.publish(createEvents(4));
    assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(4, handled.size()));
    // Nothing was handled before the reset, so nothing is treated as a redelivery.
    assertEquals(0, handledInRedelivery.size());
    assertFalse(testSubject.processingStatus().get(segmentId).isReplaying());
    assertFalse(testSubject.processingStatus().get(segmentId).getResetPosition().isPresent());
    assertTrue(testSubject.processingStatus().get(segmentId).getCurrentPosition().isPresent());
    assertTrue(testSubject.processingStatus().get(segmentId).getCurrentPosition().getAsLong() > 0);
    verify(eventHandlerInvoker).performReset(NO_RESET_PAYLOAD);
}

/**
 * Using a stubbed message source whose first stream has gaps (0, 1, 2, 5) and whose replay
 * stream is complete (0..7): tokens seen during replay carry the ReplayToken type until the
 * stream passes the point previously reached, after which plain tokens reappear.
 */
@SuppressWarnings("unchecked")
@Test
void testReplayFlagAvailableWhenReplayInDifferentOrder() throws Exception {
    StreamableMessageSource<TrackedEventMessage<?>> stubSource = mock(StreamableMessageSource.class);
    testSubject = TrackingEventProcessor.builder()
                                        .name("test")
                                        .eventHandlerInvoker(eventHandlerInvoker)
                                        .messageSource(stubSource)
                                        .tokenStore(tokenStore)
                                        .transactionManager(NoTransactionManager.INSTANCE)
                                        .build();

    when(stubSource.openStream(any())).thenReturn(new StubTrackingEventStream(0, 1, 2, 5))
                                      .thenReturn(new StubTrackingEventStream(0, 1, 2, 3, 4, 5, 6, 7));
    when(eventHandlerInvoker.supportsReset()).thenReturn(true);
    doReturn(true).when(eventHandlerInvoker).canHandle(any(), any());
    List<TrackingToken> firstRun = new CopyOnWriteArrayList<>();
    List<TrackingToken> replayRun = new CopyOnWriteArrayList<>();
    doAnswer(i -> {
        firstRun.add(i.<TrackedEventMessage<?>>getArgument(0).trackingToken());
        return null;
    }).when(eventHandlerInvoker).handle(any(), any());

    testSubject.start();
    assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(4, firstRun.size()));
    testSubject.shutDown();

    doAnswer(i -> {
        replayRun.add(i.<TrackedEventMessage<?>>getArgument(0).trackingToken());
        return null;
    }).when(eventHandlerInvoker).handle(any(), any());

    testSubject.resetTokens();
    testSubject.start();
    assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(8, replayRun.size()));

    // The first run ended at position 5 with gaps at 3 and 4.
    assertEquals(GapAwareTrackingToken.newInstance(5, asList(3L, 4L)), firstRun.get(3));
    assertTrue(replayRun.get(0) instanceof ReplayToken);
    assertTrue(replayRun.get(5) instanceof ReplayToken);
    // Position 6 lies beyond the original run, so it is no longer wrapped in a ReplayToken.
    assertEquals(GapAwareTrackingToken.newInstance(6, emptySortedSet()), replayRun.get(6));

    verify(eventHandlerInvoker).performReset(NO_RESET_PAYLOAD);
}

/**
 * A reset while the processor is running must be rejected.
 */
@Test
void testResetRejectedWhileRunning() {
    testSubject.start();

    assertThrows(IllegalStateException.class, testSubject::resetTokens);
}

/**
 * supportsReset() mirrors the invoker's capability.
 */
@Test
void testResetNotSupportedWhenInvokerDoesNotSupportReset() {
    when(mockHandler.supportsReset()).thenReturn(false);

    assertFalse(testSubject.supportsReset());
}

/**
 * resetTokens() must fail when the invoker does not support reset.
 */
@Test
void testResetRejectedWhenInvokerDoesNotSupportReset() {
    when(mockHandler.supportsReset()).thenReturn(false);

    assertThrows(IllegalStateException.class, testSubject::resetTokens);
}

/**
 * A reset must be rejected (and store no tokens) when not all segment claims can be acquired.
 */
@Test
void testResetRejectedIfNotAllTokensCanBeClaimed() {
    tokenStore.initializeTokenSegments("test", 4);
    when(tokenStore.fetchToken("test", 3)).thenThrow(new UnableToClaimTokenException("Mock"));

    assertThrows(UnableToClaimTokenException.class, testSubject::resetTokens);
    verify(tokenStore, never()).storeToken(isNull(), anyString(), anyInt());
}

/**
 * The reset context object handed to resetTokens(context) is passed through to the invoker's
 * performReset.
 */
@Test
void testResetTokensPassesOnResetContext() throws Exception {
    String resetContext = "reset-context";
    final List<String> handled = new CopyOnWriteArrayList<>();
    when(mockHandler.supportsReset()).thenReturn(true);
    //noinspection Duplicates
    doAnswer(i -> {
        EventMessage<?> message = i.getArgument(0);
        handled.add(message.getIdentifier());
        return null;
    }).when(mockHandler).handle(any());

    eventBus.publish(createEvents(4));
    testSubject.start();
    assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(4, handled.size()));
    testSubject.shutDown();
    testSubject.resetTokens(resetContext);
    testSubject.start();
    verify(eventHandlerInvoker).performReset(resetContext);
}

/**
 * Failures in fetchSegments and initializeTokenSegments during start-up must not kill the
 * processor: it retries until a worker thread becomes active.
 */
@Test
void testWhenFailureDuringInit() throws InterruptedException {
    doThrow(new RuntimeException("Faking issue during fetchSegments"))
            .doCallRealMethod()
            .when(tokenStore).fetchSegments(anyString());

    doThrow(new RuntimeException("Faking issue during initializeTokenSegments"))
            // And on further calls
            .doNothing()
            .when(tokenStore).initializeTokenSegments(anyString(), anyInt());

    testSubject.start();

    // Poll up to ~2.5s for the worker thread to come up despite the injected failures.
    for (int i = 0; i < 250 && testSubject.activeProcessorThreads() < 1; i++) {
        Thread.sleep(10);
    }
    assertEquals(1, testSubject.activeProcessorThreads());
}

/**
 * Even when the handler rejects all but the first event (an effectively empty batch), the
 * segment's tracking token still advances to cover the stream's head.
 */
@Test
void testUpdateActiveSegmentsWhenBatchIsEmpty() throws Exception {
    int segmentId = 0;
    //noinspection unchecked
    StreamableMessageSource<TrackedEventMessage<?>> stubSource = mock(StreamableMessageSource.class);
    testSubject = TrackingEventProcessor.builder()
                                        .name("test")
                                        .eventHandlerInvoker(eventHandlerInvoker)
                                        .messageSource(stubSource)
                                        .tokenStore(tokenStore)
                                        .transactionManager(NoTransactionManager.INSTANCE).build();

    when(stubSource.openStream(any())).thenReturn(new StubTrackingEventStream(0, 1, 2, 5));
    // First event is handleable, all subsequent ones are not.
    doReturn(true, false).when(eventHandlerInvoker).canHandle(any(), any());

    testSubject.start();
    // Give it a bit of time to start
    waitForStatus("processor thread started", 200, TimeUnit.MILLISECONDS, status -> status.containsKey(0));
    waitForStatus("Segment 0 caught up", 5, TimeUnit.SECONDS, status -> status.get(0).isCaughtUp());

    EventTrackerStatus eventTrackerStatus = testSubject.processingStatus().get(segmentId);
    GapAwareTrackingToken expectedToken = GapAwareTrackingToken.newInstance(5, asList(3L, 4L));
    TrackingToken lastToken = eventTrackerStatus.getTrackingToken();
    assertTrue(lastToken.covers(expectedToken));
}

/**
 * Releasing a segment temporarily drops the worker thread; the processor reclaims the segment
 * afterwards. Continues in the next segment.
 */
@Test
void testReleaseSegment() {
    testSubject.start();
    assertWithin(5, TimeUnit.SECONDS, () -> assertEquals(1, testSubject.activeProcessorThreads()));
    testSubject.releaseSegment(0, 2,
                               TimeUnit.SECONDS);
    // Released for 2 seconds: the worker goes away, then the segment is reclaimed.
    assertWithin(2, TimeUnit.SECONDS, () -> assertEquals(0, testSubject.activeProcessorThreads()));
    assertWithin(5, TimeUnit.SECONDS, () -> assertEquals(1, testSubject.activeProcessorThreads()));
}

/**
 * availableProcessorThreads reflects the thread pool state: one available before start, zero
 * while the single segment is claimed, one again after releasing the segment.
 */
@Test
void testHasAvailableSegments() {
    assertEquals(1, testSubject.availableProcessorThreads());
    testSubject.start();
    assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(0, testSubject.availableProcessorThreads()));
    testSubject.releaseSegment(0);
    assertWithin(2, TimeUnit.SECONDS, () -> assertEquals(1, testSubject.availableProcessorThreads()));
}

/**
 * Splitting segment 0 of a single-segment processor yields segments {0, 1} in the token store.
 */
@Test
@Timeout(value = 10)
void testSplitSegments() throws InterruptedException {
    tokenStore.initializeTokenSegments(testSubject.getName(), 1);
    testSubject.start();
    waitForSegmentStart(0);
    assertTrue(testSubject.splitSegment(0).join(), "Expected split to succeed");
    assertArrayEquals(new int[]{0, 1}, tokenStore.fetchSegments(testSubject.getName()));
    waitForSegmentStart(0);
}

/**
 * Merging the two segments of a running processor collapses the token store to segment {0};
 * the merging flag clears once a subsequent event is processed.
 */
@Test
@Timeout(value = 10)
void testMergeSegments() throws InterruptedException {
    tokenStore.initializeTokenSegments(testSubject.getName(), 2);
    testSubject.start();
    while (testSubject.processingStatus().isEmpty()) {
        Thread.sleep(10);
    }
    int segmentId = 0;

    assertTrue(testSubject.mergeSegment(segmentId).join(), "Expected merge to succeed");
    waitForProcessingStatus(segmentId, EventTrackerStatus::isMerging);
    waitForSegmentStart(segmentId);
    assertArrayEquals(new int[]{0}, tokenStore.fetchSegments(testSubject.getName()));
    publishEvents(1);
    waitForProcessingStatus(segmentId, s -> !s.isMerging());
}

/**
 * Merging two segments both claimed by this processor: while the merge is in progress the
 * status exposes a merge-completed position, which clears once processing passes it.
 */
@Test
@Timeout(value = 10)
void testMergeSegments_BothClaimedByProcessor() throws Exception {
    initProcessor(TrackingEventProcessorConfiguration.forParallelProcessing(2)
                                                     .andEventAvailabilityTimeout(10, TimeUnit.MILLISECONDS)
                                                     .andBatchSize(100));
    tokenStore.initializeTokenSegments(testSubject.getName(), 2);
    List<EventMessage<?>> handledEvents = new CopyOnWriteArrayList<>();
    int segmentId = 0;
    when(mockHandler.handle(any())).thenAnswer(i -> handledEvents.add(i.getArgument(0)));

    publishEvents(10);

    testSubject.start();
    waitForActiveThreads(2);

    assertWithin(
            5, TimeUnit.SECONDS,
            () -> assertEquals(
                    10,
                    handledEvents.stream().map(EventMessage::getIdentifier).distinct().count(),
                    "Expected message to be handled"
            )
    );
    assertFalse(testSubject.processingStatus().get(segmentId).isMerging());
    assertFalse(testSubject.processingStatus().get(segmentId).mergeCompletedPosition().isPresent());

    assertWithin(
            50, TimeUnit.MILLISECONDS,
            () -> assertTrue(testSubject.mergeSegment(segmentId).join(), "Expected merge to succeed")
    );
    EventTrackerStatus status = waitForProcessingStatus(segmentId, EventTrackerStatus::isMerging);
    assertTrue(status.mergeCompletedPosition().isPresent());
    long mergeCompletedPosition = status.mergeCompletedPosition().getAsLong();

    assertArrayEquals(new int[]{0}, tokenStore.fetchSegments(testSubject.getName()));
    publishEvents(1);
    status = waitForProcessingStatus(segmentId, s -> !s.isMerging());
    assertFalse(status.mergeCompletedPosition().isPresent());
    assertTrue(status.getCurrentPosition().isPresent());
    assertTrue(status.getCurrentPosition().getAsLong() > mergeCompletedPosition);
}

/**
 * Merging after explicitly releasing the other segment: the merged segment catches up and all
 * 10 events (published with the same aggregate identifier sequence 0) are handled.
 */
@Test
@Timeout(value = 10)
void testMergeSegments_WithExplicitReleaseOther() throws Exception {
    initProcessor(TrackingEventProcessorConfiguration.forParallelProcessing(2));
    tokenStore.initializeTokenSegments(testSubject.getName(), 2);
    List<EventMessage<?>> handledEvents = new CopyOnWriteArrayList<>();
    List<EventMessage<?>> events = new ArrayList<>();
    int segmentId = 0;
    for (int i = 0; i < 10; i++) {
        events.add(createEvent(UUID.randomUUID().toString(), 0));
    }
    when(mockHandler.handle(any())).thenAnswer(i -> handledEvents.add(i.getArgument(0)));
    eventBus.publish(events);
    testSubject.start();
    waitForActiveThreads(2);

    testSubject.releaseSegment(1);
    waitForSegmentRelease(1);

    assertWithin(
            50, TimeUnit.MILLISECONDS,
            () -> assertTrue(testSubject.mergeSegment(segmentId).join(), "Expected merge to succeed")
    );
    assertArrayEquals(new int[]{0}, tokenStore.fetchSegments(testSubject.getName()));
    waitForSegmentStart(segmentId);

    while (!Optional.ofNullable(testSubject.processingStatus().get(segmentId))
                    .map(EventTrackerStatus::isCaughtUp)
                    .orElse(false)) {
        Thread.sleep(10);
    }

    assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(10, handledEvents.size()));
}

/**
 * Splitting twice ({0} -> {0,1} -> {0,1,2}) and merging twice back down to {0}; all 40
 * published events are handled exactly once overall.
 */
@Test
@Timeout(value = 10)
void testDoubleSplitAndMerge() throws Exception {
    tokenStore.initializeTokenSegments(testSubject.getName(), 1);
    List<EventMessage<?>> handledEvents = new CopyOnWriteArrayList<>();
    int segmentId = 0;
    when(mockHandler.handle(any())).thenAnswer(i -> handledEvents.add(i.getArgument(0)));

    publishEvents(10);

    testSubject.start();
    waitForActiveThreads(1);

    assertWithin(
            50, TimeUnit.MILLISECONDS,
            () -> assertTrue(testSubject.splitSegment(segmentId).join(), "Expected split to succeed")
    );
    waitForActiveThreads(1);

    assertWithin(
            50, TimeUnit.MILLISECONDS,
            () -> assertTrue(testSubject.splitSegment(segmentId).join(), "Expected split to succeed")
    );
    waitForActiveThreads(1);

    assertArrayEquals(new int[]{0, 1, 2}, tokenStore.fetchSegments(testSubject.getName()));

    publishEvents(20);
    waitForProcessingStatus(segmentId, EventTrackerStatus::isCaughtUp);
    assertFalse(testSubject.processingStatus().get(segmentId).isMerging());

    assertTrue(testSubject.mergeSegment(segmentId).join(), "Expected merge to succeed");
    assertArrayEquals(new int[]{0, 1}, tokenStore.fetchSegments(testSubject.getName()));

    publishEvents(10);

    waitForSegmentStart(segmentId);
    // After the first merge, segment 0 has mask 1; wait until it caught up again.
    waitForProcessingStatus(segmentId, est -> est.getSegment().getMask() == 1 && est.isCaughtUp());

    assertTrue(testSubject.mergeSegment(segmentId).join(), "Expected merge to succeed");
    assertArrayEquals(new int[]{0}, tokenStore.fetchSegments(testSubject.getName()));

    assertWithin(3, TimeUnit.SECONDS, () -> assertEquals(40, handledEvents.size()));
}

/**
 * Merge across invokers with different sequencing policies. Continues in the next segment.
 */
@Test
@Timeout(value = 10)
void testMergeSegmentWithDifferentProcessingGroupsAndSequencingPolicies() throws Exception {
    EventMessageHandler otherHandler =
            mock(EventMessageHandler.class);
    when(otherHandler.canHandle(any())).thenReturn(true);
    when(otherHandler.supportsReset()).thenReturn(true);
    int segmentId = 0;
    // Second invoker routes every event to sequence 0, unlike the primary invoker.
    EventHandlerInvoker mockInvoker = SimpleEventHandlerInvoker.builder()
                                                               .eventHandlers(singleton(otherHandler))
                                                               .sequencingPolicy(m -> 0)
                                                               .build();
    initProcessor(
            TrackingEventProcessorConfiguration.forParallelProcessing(2).andBatchSize(5),
            builder -> builder.eventHandlerInvoker(new MultiEventHandlerInvoker(eventHandlerInvoker, mockInvoker))
    );

    List<EventMessage<?>> handledEvents = new CopyOnWriteArrayList<>();
    when(mockHandler.handle(any())).thenAnswer(i -> {
        TrackedEventMessage<?> message = i.getArgument(0);
        return handledEvents.add(message);
    });

    publishEvents(10);

    testSubject.start();
    while (testSubject.processingStatus().size() < 2
            || !testSubject.processingStatus().values().stream().allMatch(EventTrackerStatus::isCaughtUp)) {
        Thread.sleep(10);
    }

    testSubject.releaseSegment(1);
    while (testSubject.processingStatus().size() != 1
            || !testSubject.processingStatus().values().stream().allMatch(EventTrackerStatus::isCaughtUp)) {
        Thread.sleep(10);
    }

    publishEvents(10);

    testSubject.mergeSegment(segmentId);

    publishEvents(10);

    while (testSubject.processingStatus().size() != 1
            || !testSubject.processingStatus().values().stream().allMatch(EventTrackerStatus::isCaughtUp)) {
        Thread.sleep(10);
    }

    // All 30 events handled exactly once; the extra sleep guards against late duplicates.
    assertWithin(5, TimeUnit.SECONDS, () -> assertEquals(30, handledEvents.size()));
    Thread.sleep(100);
    assertEquals(30, handledEvents.size());
}

/**
 * Merging while a replay is in progress: the merge succeeds, all 30 events end up handled, and
 * each replayed event is delivered only once.
 */
@Test
@Timeout(value = 10)
void testMergeSegmentsDuringReplay() throws Exception {
    initProcessor(TrackingEventProcessorConfiguration.forParallelProcessing(2));
    tokenStore.initializeTokenSegments(testSubject.getName(), 2);
    List<EventMessage<?>> handledEvents = new CopyOnWriteArrayList<>();
    List<EventMessage<?>> replayedEvents = new CopyOnWriteArrayList<>();
    int segmentId = 0;
    when(mockHandler.handle(any())).thenAnswer(i -> {
        TrackedEventMessage<?> message = i.getArgument(0);
        if (ReplayToken.isReplay(message)) {
            replayedEvents.add(message);
        } else {
            handledEvents.add(message);
        }
        return null;
    });
    for (int i = 0; i < 10; i++) {
        eventBus.publish(createEvent(UUID.randomUUID().toString(), 0));
    }

    testSubject.start();
    while (testSubject.processingStatus().size() < 2
            || !testSubject.processingStatus().values().stream().allMatch(EventTrackerStatus::isCaughtUp)) {
        Thread.sleep(10);
    }

    testSubject.shutDown();
    testSubject.resetTokens();

    testSubject.releaseSegment(1);
    testSubject.start();
    waitForActiveThreads(1);
    Thread.yield();

    CompletableFuture<Boolean> mergeResult = testSubject.mergeSegment(segmentId);

    publishEvents(20);

    assertTrue(mergeResult.join(), "Expected merge to succeed");
    assertArrayEquals(new int[]{0}, tokenStore.fetchSegments(testSubject.getName()));
    waitForSegmentStart(segmentId);

    assertWithin(10, TimeUnit.SECONDS, () -> assertEquals(30, handledEvents.size()));
    Thread.sleep(100);
    assertEquals(30, handledEvents.size());

    // Make sure replay events are only delivered once.
    assertEquals(
            replayedEvents.stream().map(EventMessage::getIdentifier).distinct().count(),
            replayedEvents.size()
    );
}

/**
 * Resetting while a merge has not yet completed: the replay still works on the merged segment
 * and only non-replayed deliveries are counted (30 in total).
 */
@Test
@Timeout(value = 10)
void testReplayDuringIncompleteMerge() throws Exception {
    initProcessor(TrackingEventProcessorConfiguration.forParallelProcessing(2));
    tokenStore.initializeTokenSegments(testSubject.getName(), 2);
    List<EventMessage<?>> handledEvents = new CopyOnWriteArrayList<>();
    List<EventMessage<?>> events = new ArrayList<>();
    int segmentId = 0;
    for (int i = 0; i < 10; i++) {
        events.add(createEvent(UUID.randomUUID().toString(), 0));
    }
    when(mockHandler.handle(any())).thenAnswer(i -> {
        TrackedEventMessage<?> message = i.getArgument(0);
        if (ReplayToken.isReplay(message)) {
            // Ignore replays
            return null;
        }
        return handledEvents.add(message);
    });
    eventBus.publish(events);

    testSubject.start();
    while (testSubject.processingStatus().size() < 2
            || !testSubject.processingStatus().values().stream().allMatch(EventTrackerStatus::isCaughtUp)) {
        Thread.sleep(10);
    }

    testSubject.releaseSegment(1);
    while (testSubject.processingStatus().containsKey(1)) {
        Thread.yield();
    }

    publishEvents(10);

    CompletableFuture<Boolean> mergeResult = testSubject.mergeSegment(segmentId);
    assertTrue(mergeResult.join(), "Expected split to succeed");

    waitForActiveThreads(1);

    testSubject.shutDown();
    testSubject.resetTokens();

    publishEvents(10);

    testSubject.start();
    waitForActiveThreads(1);

    assertArrayEquals(new int[]{0}, tokenStore.fetchSegments(testSubject.getName()));
    waitForSegmentStart(segmentId);

    while (!testSubject.processingStatus().get(segmentId).isCaughtUp()) {
        Thread.sleep(10);
    }

    // Replayed messages aren't counted
    assertEquals(30, handledEvents.size());
}

/**
 * Merging a segment with an incompatible counterpart must be rejected: segment 1 cannot merge
 * with segment 0 after 0 itself was already split.
 */
@Test
@Timeout(value = 10)
void testMergeWithIncompatibleSegmentRejected() throws InterruptedException {
    initProcessor(TrackingEventProcessorConfiguration.forParallelProcessing(3));
    testSubject.start();
    waitForActiveThreads(3);
    assertTrue(testSubject.processingStatus().containsKey(0));
    assertTrue(testSubject.processingStatus().containsKey(1));
    assertTrue(testSubject.processingStatus().containsKey(2));

    // 1 is not "mergeable" with 0, because 0 itself was already split
    testSubject.releaseSegment(0);
    testSubject.releaseSegment(2);

    testSubject.processingStatus().values().forEach(status -> assertFalse(status::isMerging));

    while (testSubject.processingStatus().size() > 1) {
        Thread.sleep(10);
    }
    CompletableFuture<Boolean> actual = testSubject.mergeSegment(1);

    assertFalse(actual.join(), "Expected merge to be rejected");
}

/**
 * A processor with only one segment has nothing to merge with; the merge must be rejected.
 */
@Test
@Timeout(value = 10)
void testMergeWithSingleSegmentRejected() throws InterruptedException {
    int numberOfSegments = 1;
    initProcessor(TrackingEventProcessorConfiguration.forParallelProcessing(numberOfSegments));
    int segmentId = 0;
    testSubject.start();
    waitForActiveThreads(1);

    CompletableFuture<Boolean> actual = testSubject.mergeSegment(segmentId);

    assertFalse(actual.join(), "Expected merge to be rejected");
    assertFalse(testSubject.processingStatus().get(segmentId).isMerging());
}

/**
 * This test is a follow up from issue https://github.com/AxonIQ/axon-server-se/issues/135
 */
@Test
@Timeout(value = 10)
void testMergeInvertedSegmentOrder() throws InterruptedException {
    int numberOfSegments = 4;
    initProcessor(TrackingEventProcessorConfiguration.forParallelProcessing(numberOfSegments));
    testSubject.start();
    waitForActiveThreads(4);

    int segmentId = 3;
    CompletableFuture<Boolean> mergeResult = testSubject.mergeSegment(segmentId);
    assertTrue(mergeResult.join(), "Expected merge to succeed");
    // Segment 3 merges into its counterpart: token 3 is deleted, token 1 stored.
    verify(tokenStore).deleteToken("test", 3);
    verify(tokenStore).storeToken(any(), eq("test"), eq(1));
}

/**
 * This test is a follow up from issue https://github.com/AxonFramework/AxonFramework/issues/1212
 */
@Test
public void testThrownErrorBubblesUp() {
    AtomicReference<Throwable> thrownException = new AtomicReference<>();
    EventHandlerInvoker eventHandlerInvoker = mock(EventHandlerInvoker.class);
    // Method body continues beyond this chunk.
    when(eventHandlerInvoker.canHandle(any(),
any())).thenThrow(new TestError()); initProcessor( TrackingEventProcessorConfiguration.forSingleThreadedProcessing() .andThreadFactory(name -> runnableForThread -> new Thread(() -> { try { runnableForThread.run(); } catch (Throwable t) { thrownException.set(t); } })), builder -> builder.eventHandlerInvoker(eventHandlerInvoker) ); eventBus.publish(createEvents(1)); testSubject.start(); assertWithin(2, TimeUnit.SECONDS, () -> assertTrue(testSubject.isError())); assertWithin( 15, TimeUnit.SECONDS, () -> assertTrue(thrownException.get() instanceof TestError) ); } @Test void retrievingStorageIdentifierWillCacheResults() { String id = testSubject.getTokenStoreIdentifier(); InOrder inOrder = inOrder(mockTransactionManager, tokenStore); inOrder.verify(mockTransactionManager).fetchInTransaction(any()); inOrder.verify(tokenStore, times(1)).retrieveStorageIdentifier(); String id2 = testSubject.getTokenStoreIdentifier(); // expect no extra invocations verify(tokenStore, times(1)).retrieveStorageIdentifier(); assertEquals(id, id2); } /** * This test can spot three invocations of the {@link EventTrackerStatusChangeListener}, but asserts two: * <ol> * <li> First call is when the single active {@link org.axonframework.eventhandling.Segment} is added.</li> * <li> Second call is when the status transitions to {@link EventTrackerStatus#isCaughtUp()}.</li> * <li> * The last not asserted call is when the {@link TrackingEventProcessor} is shutting down, which removes the * status. This isn't taking into account as it is part of the {@link #tearDown()}. * </li> * </ol> * <p> * More changes occur on the {@link EventTrackerStatus}, but by default only complete additions, removals and {@code * boolean} field updates are included as changes. 
*/ @Test void testPublishedEventsUpdateStatusAndHitChangeListener() throws Exception { CountDownLatch eventHandlingLatch = new CountDownLatch(2); doAnswer(invocation -> { eventHandlingLatch.countDown(); return null; }).when(mockHandler).handle(any()); CountDownLatch statusChangeLatch = new CountDownLatch(2); AtomicInteger addedStatusCounter = new AtomicInteger(0); AtomicInteger updatedStatusCounter = new AtomicInteger(0); AtomicInteger removedStatusCounter = new AtomicInteger(0); EventTrackerStatusChangeListener statusChangeListener = updatedTrackerStatus -> { assertEquals(1, updatedTrackerStatus.size()); EventTrackerStatus eventTrackerStatus = updatedTrackerStatus.get(0); if (eventTrackerStatus.trackerAdded()) { addedStatusCounter.getAndIncrement(); } else if (eventTrackerStatus.trackerRemoved()) { removedStatusCounter.getAndIncrement(); } else { updatedStatusCounter.getAndIncrement(); } statusChangeLatch.countDown(); }; TrackingEventProcessorConfiguration tepConfiguration = TrackingEventProcessorConfiguration.forSingleThreadedProcessing() .andEventTrackerStatusChangeListener(statusChangeListener); initProcessor(tepConfiguration); testSubject.start(); // Give it a bit of time to start Thread.sleep(200); publishEvents(2); assertTrue(eventHandlingLatch.await(5, TimeUnit.SECONDS)); assertTrue(statusChangeLatch.await(5, TimeUnit.SECONDS)); assertEquals(1, addedStatusCounter.get()); assertEquals(1, updatedStatusCounter.get()); assertEquals(0, removedStatusCounter.get()); } /** * This test can spot five invocations of the {@link EventTrackerStatusChangeListener}, but asserts four: * <ol> * <li> First call is when the single active {@link org.axonframework.eventhandling.Segment} is added.</li> * <li> Second call is when the status transitions to {@link EventTrackerStatus#isCaughtUp()}.</li> * <li> Third call is when the {@link EventTrackerStatus#getCurrentPosition()} moves to 0.</li> * <li> Fourth call is when the {@link EventTrackerStatus#getCurrentPosition()} moves to 
1.</li> * <li> * The last not asserted call is when the {@link TrackingEventProcessor} is shutting down, which removes the * status. This isn't taking into account as it is part of the {@link #tearDown()}. * </li> * </ol> */ @Test void testPublishedEventsUpdateStatusAndHitChangeListenerIncludingPositions() throws Exception { CountDownLatch eventHandlingLatch = new CountDownLatch(2); doAnswer(invocation -> { eventHandlingLatch.countDown(); return null; }).when(mockHandler).handle(any()); CountDownLatch statusChangeLatch = new CountDownLatch(4); AtomicInteger addedStatusCounter = new AtomicInteger(0); AtomicInteger updatedStatusCounter = new AtomicInteger(0); AtomicInteger removedStatusCounter = new AtomicInteger(0); EventTrackerStatusChangeListener statusChangeListener = new EventTrackerStatusChangeListener() { @Override public void onEventTrackerStatusChange(Map<Integer, EventTrackerStatus> updatedTrackerStatus) { assertEquals(1, updatedTrackerStatus.size()); EventTrackerStatus eventTrackerStatus = updatedTrackerStatus.get(0); if (eventTrackerStatus.trackerAdded()) { addedStatusCounter.getAndIncrement(); } else if (eventTrackerStatus.trackerRemoved()) { removedStatusCounter.getAndIncrement(); } else { updatedStatusCounter.getAndIncrement(); } statusChangeLatch.countDown(); } @Override public boolean validatePositions() { return true; } }; TrackingEventProcessorConfiguration tepConfiguration = TrackingEventProcessorConfiguration.forSingleThreadedProcessing() .andEventTrackerStatusChangeListener(statusChangeListener); initProcessor(tepConfiguration); testSubject.start(); // Give it a bit of time to start Thread.sleep(200); publishEvents(2); assertTrue(eventHandlingLatch.await(5, TimeUnit.SECONDS)); assertTrue(statusChangeLatch.await(5, TimeUnit.SECONDS)); assertEquals(1, addedStatusCounter.get()); assertEquals(3, updatedStatusCounter.get()); assertEquals(0, removedStatusCounter.get()); } @Test @Timeout(value = 10) void 
testSplitAndMergeInfluenceOnChangeListenerInvocations() throws InterruptedException { int firstSegment = 0; int secondSegment = 1; CountDownLatch addedStatusLatch = new CountDownLatch(4); CountDownLatch updatedStatusLatch = new CountDownLatch(1); CountDownLatch removedStatusLatch = new CountDownLatch(3); EventTrackerStatusChangeListener statusChangeListener = updatedTrackerStatus -> { assertEquals(1, updatedTrackerStatus.size()); EventTrackerStatus eventTrackerStatus = updatedTrackerStatus.values().iterator().next(); if (eventTrackerStatus.trackerAdded()) { addedStatusLatch.countDown(); } else if (eventTrackerStatus.trackerRemoved()) { removedStatusLatch.countDown(); } else { updatedStatusLatch.countDown(); } }; TrackingEventProcessorConfiguration tepConfiguration = TrackingEventProcessorConfiguration.forParallelProcessing(2) .andEventTrackerStatusChangeListener(statusChangeListener); initProcessor(tepConfiguration); tokenStore.initializeTokenSegments(testSubject.getName(), 1); publishEvents(2); testSubject.start(); waitForSegmentStart(firstSegment); assertTrue(testSubject.splitSegment(firstSegment).join(), "Expected split to succeed"); assertArrayEquals(new int[]{firstSegment, secondSegment}, tokenStore.fetchSegments(testSubject.getName())); waitForSegmentStart(secondSegment); assertWithin( 50, TimeUnit.MILLISECONDS, () -> assertTrue(testSubject.mergeSegment(firstSegment).join(), "Expected merge to succeed") ); assertArrayEquals(new int[]{firstSegment}, tokenStore.fetchSegments(testSubject.getName())); waitForSegmentStart(firstSegment); assertTrue(addedStatusLatch.await(5, TimeUnit.SECONDS)); assertTrue(updatedStatusLatch.await(5, TimeUnit.SECONDS)); assertTrue(removedStatusLatch.await(5, TimeUnit.SECONDS)); } private void waitForStatus(String description, long time, TimeUnit unit, Predicate<Map<Integer, EventTrackerStatus>> status) throws InterruptedException { long deadline = System.currentTimeMillis() + unit.toMillis(time); while 
(!status.test(testSubject.processingStatus())) { if (deadline < System.currentTimeMillis()) { fail("Expected state '" + description + "'' within " + time + " " + unit.name()); } Thread.sleep(10); } } @SuppressWarnings("SameParameterValue") private void waitForSegmentStart(int segmentId) throws InterruptedException { while (!testSubject.processingStatus().containsKey(segmentId)) { Thread.sleep(10); } } @SuppressWarnings("SameParameterValue") private void waitForSegmentRelease(int segmentId) throws InterruptedException { while (testSubject.processingStatus().containsKey(segmentId)) { Thread.sleep(10); } } private void waitForActiveThreads(int minimalThreadCount) throws InterruptedException { while (testSubject.processingStatus().size() < minimalThreadCount) { Thread.sleep(10); } } @SuppressWarnings("SameParameterValue") private EventTrackerStatus waitForProcessingStatus(int segmentId, Predicate<EventTrackerStatus> expectedStatus) throws InterruptedException { EventTrackerStatus status = testSubject.processingStatus().get(segmentId); while (!Optional.ofNullable(status) .map(expectedStatus::test) .orElse(false)) { Thread.sleep(1); status = testSubject.processingStatus().get(segmentId); } return status; } private void publishEvents(int nrOfEvents) { for (int i = 0; i < nrOfEvents; i++) { eventBus.publish(createEvent(UUID.randomUUID().toString(), 0)); } } private static class StubTrackingEventStream implements TrackingEventStream { private final Queue<TrackedEventMessage<?>> eventMessages; public StubTrackingEventStream(long... 
tokens) { GapAwareTrackingToken lastToken = GapAwareTrackingToken.newInstance(-1, emptySortedSet()); eventMessages = new LinkedList<>(); for (Long seq : tokens) { lastToken = lastToken.advanceTo(seq, 1000); eventMessages.add(new GenericTrackedEventMessage<>(lastToken, createEvent(seq))); } } @Override public Optional<TrackedEventMessage<?>> peek() { if (eventMessages.isEmpty()) { return Optional.empty(); } return Optional.of(eventMessages.peek()); } @Override public boolean hasNextAvailable(int timeout, TimeUnit unit) { return !eventMessages.isEmpty(); } @Override public TrackedEventMessage<?> nextAvailable() { return eventMessages.poll(); } @Override public void close() { } } private static class TestError extends Error { private static final long serialVersionUID = -5579826202840099704L; } }
{ "content_hash": "9ed540854162fedeafc5ab82eb2e59f9", "timestamp": "", "source": "github", "line_count": 1766, "max_line_length": 120, "avg_line_length": 43.583805209513024, "alnum_prop": 0.6281619872935857, "repo_name": "krosenvold/AxonFramework", "id": "891d9bb70bd3958810cb38ff93ccf0f47d46a8de", "size": "77575", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "integrationtests/src/test/java/org/axonframework/integrationtests/eventhandling/TrackingEventProcessorTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "4501913" } ], "symlink_target": "" }
'use strict'; const path = require('path'); const lstat = require('fs').lstat; const basename = path.basename; const dirname = path.dirname; const resolve = path.resolve; const fs = require('graceful-fs'); const inspectWithKind = require('inspect-with-kind'); const isPlainObj = require('is-plain-obj'); const isStream = require('is-stream'); const mkdirp = require('mkdirp'); const Observable = require('zen-observable'); const pack = require('tar-fs').pack; const cancelablePump = require('cancelable-pump'); const streamLib = require('stream'); const PassThrough = streamLib.PassThrough; const Transform = streamLib.Transform; const FILE_PATH_ERROR = 'Expected a file path to be compressed as an archive'; const TAR_PATH_ERROR = 'Expected a file path where an archive file will be created'; const TAR_TRANSFORM_ERROR = '`tarTransform` option must be a transform stream ' + 'that modifies the tar archive before writing'; const MAP_STREAM_ERROR = 'The function passed to `mapStream` option must return a stream'; const unsupportedOptions = [ 'entries', 'filter', 'ignore', 'strip' ]; module.exports = function fileToTar(filePath, tarPath, options) { return new Observable(observer => { if (typeof filePath !== 'string') { throw new TypeError(`${FILE_PATH_ERROR}, but got a non-string value ${inspectWithKind(filePath)}.`); } if (filePath.length === 0) { throw new Error(`${FILE_PATH_ERROR}, but got '' (empty string).`); } if (typeof tarPath !== 'string') { throw new TypeError(`${TAR_PATH_ERROR}, but got a non-string value ${inspectWithKind(tarPath)}.`); } if (tarPath.length === 0) { throw new Error(`${TAR_PATH_ERROR}, but got '' (empty string).`); } const absoluteFilePath = resolve(filePath); const absoluteTarPath = resolve(tarPath); const dirPath = dirname(absoluteFilePath); if (absoluteFilePath === absoluteTarPath) { throw new Error(`Source file path must be different from the archive path. 
Both were specified to ${ absoluteFilePath }.`); } if (options !== undefined) { if (!isPlainObj(options)) { throw new TypeError(`Expected a plain object to set file-to-tar options, but got ${ inspectWithKind(options) }.`); } } else { options = {}; } for (const optionName of unsupportedOptions) { const val = options[optionName]; if (val !== undefined) { throw new Error(`file-to-tar doesn't support \`${optionName}\` option, but ${ inspectWithKind(val) } was provided.`); } } if (options.tarTransform !== undefined) { if (!isStream(options.tarTransform)) { throw new TypeError(`${TAR_TRANSFORM_ERROR}, but got a non-stream value ${ inspectWithKind(options.tarTransform) }.`); } if (!isStream.transform(options.tarTransform)) { throw new TypeError(`${TAR_TRANSFORM_ERROR}, but got a ${ ['duplex', 'writable', 'readable'].find(type => isStream[type](options.tarTransform)) } stream instead.`); } } let cancel; lstat(absoluteFilePath, (lstatErr, stat) => { if (lstatErr) { observer.error(lstatErr); return; } if (!stat.isFile()) { observer.error(new Error(`Expected ${absoluteFilePath} to be a file path, but it was a ${ stat.isDirectory() ? 'directory' : 'symbolic link' }.`)); return; } let firstWriteFailed = false; const firstWriteStream = fs.createWriteStream(tarPath, options).on('error', err => { if (err.code === 'EISDIR') { err.message = `Tried to write an archive file to ${absoluteTarPath}, but a directory already exists there.`; observer.error(err); return; } firstWriteFailed = true; }); mkdirp(dirname(tarPath), Object.assign({fs}, options), mkdirpErr => { if (mkdirpErr) { observer.error(mkdirpErr); return; } const packStream = pack(dirPath, Object.assign({fs}, options, { entries: [basename(filePath)], map(header) { if (options.map) { header = options.map(header); } return header; }, mapStream(fileStream, header) { const newStream = options.mapStream ? 
options.mapStream(fileStream, header) : fileStream; if (!isStream.readable(newStream)) { packStream.emit('error', new TypeError(`${MAP_STREAM_ERROR}${ isStream(newStream) ? ' that is readable, but returned a non-readable stream' : `, but returned a non-stream value ${inspectWithKind(newStream)}` }.`)); return new PassThrough(); } let bytes = 0; observer.next({bytes, header}); return newStream.pipe(new Transform({ transform(chunk, encoding, cb) { bytes += chunk.length; observer.next({bytes, header}); cb(null, chunk); } })); } })); function getDest() { return firstWriteFailed ? fs.createWriteStream(tarPath, options) : firstWriteStream; } cancel = cancelablePump(options.tarTransform ? [ packStream, options.tarTransform, getDest() ] : [ packStream, getDest() ], err => { if (err) { observer.error(err); return; } observer.complete(); }); }); }); return function cancelCompression() { if (cancel) { cancel(); } }; }); };
{ "content_hash": "c30af41974beb5e7dd2436bb5ff2870f", "timestamp": "", "source": "github", "line_count": 198, "max_line_length": 118, "avg_line_length": 29.45959595959596, "alnum_prop": 0.578090176581519, "repo_name": "joegesualdo/dotfiles", "id": "6c3ed904080d7f02e870a7bb6e6dc0a092cceef8", "size": "5833", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "config/yarn/global/node_modules/file-to-tar/index.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "712" }, { "name": "CoffeeScript", "bytes": "386" }, { "name": "Elm", "bytes": "332785" }, { "name": "JavaScript", "bytes": "115864" }, { "name": "Ruby", "bytes": "5718" }, { "name": "Shell", "bytes": "24929" }, { "name": "Vim script", "bytes": "90842" } ], "symlink_target": "" }
package org.apache.geode.management.internal.cli.commands;

import static java.util.stream.Collectors.toSet;
import static org.apache.geode.distributed.ConfigurationProperties.STATISTIC_ARCHIVE_FILE;
import static org.apache.geode.management.internal.cli.commands.ExportLogsCommand.ONLY_DATE_FORMAT;
import static org.assertj.core.api.Assertions.assertThat;

import java.io.File;
import java.io.IOException;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

import com.google.common.collect.Sets;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;

import org.apache.geode.distributed.ConfigurationProperties;
import org.apache.geode.management.internal.cli.util.CommandStringBuilder;
import org.apache.geode.test.dunit.rules.ClusterStartupRule;
import org.apache.geode.test.dunit.rules.MemberVM;
import org.apache.geode.test.junit.rules.GfshCommandRule;
import org.apache.geode.test.junit.rules.serializable.SerializableTemporaryFolder;

/**
 * Distributed-test base verifying gfsh's {@code export logs} command against a small cluster
 * (one locator with the HTTP service enabled, one server with statistics sampling on).
 * Subclasses reuse the cluster/connection rules and the protected helpers below.
 */
public class ExportLogsStatsDistributedTestBase {

  // Starts the dunit cluster once per class; withLogFile() makes members write log files
  // so there is something for "export logs" to collect.
  @ClassRule
  public static ClusterStartupRule lsRule = new ClusterStartupRule().withLogFile();

  @ClassRule
  public static GfshCommandRule connector = new GfshCommandRule();

  @Rule
  public SerializableTemporaryFolder tempFolderRule = new SerializableTemporaryFolder();

  // Populated in beforeClass; kept protected so subclasses can extend the expectation set.
  protected static Set<String> expectedZipEntries = new HashSet<>();
  protected static MemberVM locator;

  @BeforeClass
  public static void beforeClass() {
    // start the locator in vm0 and then connect to it over http
    locator = lsRule.startLocatorVM(0, l -> l.withHttpService());

    // The server archives statistics to statistics.gfs so export can pick them up.
    Properties serverProperties = new Properties();
    serverProperties.setProperty(ConfigurationProperties.STATISTIC_SAMPLING_ENABLED, "true");
    serverProperties.setProperty(STATISTIC_ARCHIVE_FILE, "statistics.gfs");
    lsRule.startServerVM(1, serverProperties, locator.getPort());

    expectedZipEntries = Sets.newHashSet(
        "locator-0" + File.separator + "locator-0.log",
        "server-1" + File.separator + "server-1.log",
        "server-1" + File.separator + "statistics.gfs");
  }

  /** Connects the gfsh rule to the locator unless a connection already exists. */
  protected void connectIfNeeded() throws Exception {
    if (!connector.isConnected()) {
      connector.connect(locator);
    }
  }

  @Test
  public void testExportLogsAndStats() throws Exception {
    connectIfNeeded();
    String tempFolder = tempFolderRule.getRoot().getAbsolutePath();
    connector.executeAndAssertThat("export logs --dir=" + tempFolder).statusIsSuccess();
    String zipPath = getZipPathFromCommandResult(connector.getGfshOutput());
    Set<String> actualZipEntries = getZipEntries(zipPath);

    Set<String> expectedFiles = Sets.newHashSet(
        "locator-0" + File.separator + "locator-0.log",
        "server-1" + File.separator + "server-1.log",
        "server-1" + File.separator + "statistics.gfs");
    assertThat(actualZipEntries).containsAll(expectedFiles);

    // remove pulse.log if present
    actualZipEntries =
        actualZipEntries.stream().filter(x -> !x.endsWith("pulse.log")).collect(toSet());
    assertThat(actualZipEntries).hasSize(3);
  }

  @Test
  public void testExportLogsOnly() throws Exception {
    connectIfNeeded();
    String tempFolder = tempFolderRule.getRoot().getAbsolutePath();
    connector.executeAndAssertThat("export logs --logs-only --dir=" + tempFolder).statusIsSuccess();
    String zipPath = getZipPathFromCommandResult(connector.getGfshOutput());
    Set<String> actualZipEntries = getZipEntries(zipPath);

    // --logs-only must exclude the statistics archive.
    Set<String> expectedFiles = Sets.newHashSet(
        "locator-0" + File.separator + "locator-0.log",
        "server-1" + File.separator + "server-1.log");
    assertThat(actualZipEntries).containsAll(expectedFiles);

    // remove pulse.log if present
    actualZipEntries =
        actualZipEntries.stream().filter(x -> !x.endsWith("pulse.log")).collect(toSet());
    assertThat(actualZipEntries).hasSize(2);
  }

  @Test
  public void testExportStatsOnly() throws Exception {
    connectIfNeeded();
    String tempFolder = tempFolderRule.getRoot().getAbsolutePath();
    connector.executeAndAssertThat("export logs --stats-only --dir=" + tempFolder)
        .statusIsSuccess();
    String zipPath = getZipPathFromCommandResult(connector.getGfshOutput());
    Set<String> actualZipEntries = getZipEntries(zipPath);

    // --stats-only must yield exactly the server's statistics archive, nothing else.
    Set<String> expectedFiles = Sets.newHashSet("server-1" + File.separator + "statistics.gfs");
    assertThat(actualZipEntries).isEqualTo(expectedFiles);
  }

  @Test
  public void startAndEndDateCanExcludeLogs() throws Exception {
    connectIfNeeded();
    // A start time of "tomorrow" must match no log file, so the command errors out.
    ZonedDateTime now = LocalDateTime.now().atZone(ZoneId.systemDefault());
    ZonedDateTime tomorrow = now.plusDays(1);

    DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern(ONLY_DATE_FORMAT);

    CommandStringBuilder commandStringBuilder = new CommandStringBuilder("export logs");
    commandStringBuilder.addOption("start-time", dateTimeFormatter.format(tomorrow));
    commandStringBuilder.addOption("log-level", "debug");

    connector.executeAndAssertThat(commandStringBuilder.toString()).statusIsError()
        .containsOutput("No files to be exported");
  }

  @Test
  public void testExportedZipFileTooBig() throws Exception {
    connectIfNeeded();
    // 10k is far below the size of the collected logs, so the export must be rejected.
    connector.executeAndAssertThat("export logs --file-size-limit=10k").statusIsError();
  }

  /** Strips the fixed gfsh banner from the command output, leaving the zip file path. */
  protected String getZipPathFromCommandResult(String message) {
    return message.replaceAll("Logs exported to the connected member's file system: ", "").trim();
  }

  /**
   * Returns the entry names contained in the given zip file, minus any views.log entries.
   * Fix: the ZipFile is now opened in try-with-resources — the previous version leaked the
   * open file handle, which on Windows also prevents the temp folder from being deleted.
   */
  private static Set<String> getZipEntries(String zipFilePath) throws IOException {
    try (ZipFile zipFile = new ZipFile(zipFilePath)) {
      // The stream must be consumed before the ZipFile is closed, hence collecting inside try.
      return zipFile.stream().map(ZipEntry::getName)
          .filter(x -> !x.endsWith("views.log")).collect(Collectors.toSet());
    }
  }
}
{ "content_hash": "d721ecdcfa7794a1a99f6de8f09d5e03", "timestamp": "", "source": "github", "line_count": 151, "max_line_length": 100, "avg_line_length": 39.80794701986755, "alnum_prop": 0.7507902179337881, "repo_name": "davebarnes97/geode", "id": "27032e5397b2693cdbdffd3ff10da90658d5483b", "size": "6800", "binary": false, "copies": "2", "ref": "refs/heads/develop", "path": "geode-dunit/src/main/java/org/apache/geode/management/internal/cli/commands/ExportLogsStatsDistributedTestBase.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "106708" }, { "name": "Dockerfile", "bytes": "17835" }, { "name": "Go", "bytes": "1205" }, { "name": "Groovy", "bytes": "38590" }, { "name": "HTML", "bytes": "3855237" }, { "name": "Java", "bytes": "31895961" }, { "name": "JavaScript", "bytes": "1781602" }, { "name": "Python", "bytes": "30033" }, { "name": "Ruby", "bytes": "6698" }, { "name": "Shell", "bytes": "190751" } ], "symlink_target": "" }
package com.guoxiaoxing.music.widget; import android.content.Context; import android.util.AttributeSet; import android.widget.ImageView; public class SquareImageView extends ImageView { public SquareImageView(Context context) { super(context); } public SquareImageView(Context context, AttributeSet attrs) { super(context, attrs); } public SquareImageView(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(widthMeasureSpec, heightMeasureSpec); setMeasuredDimension(getMeasuredWidth(), getMeasuredWidth()); } }
{ "content_hash": "4feba8b4b92daa9f4bfb6de6e4545362", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 79, "avg_line_length": 26.962962962962962, "alnum_prop": 0.728021978021978, "repo_name": "guoxiaoxing/material-design-music-player", "id": "4b2425caaaa17f01488b967b875d24c805fa1a7f", "size": "728", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "music/src/main/java/com/guoxiaoxing/music/widget/SquareImageView.java", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "1090599" } ], "symlink_target": "" }
package org.apache.airavata.gfac.core.monitor;

import org.apache.airavata.common.logger.AiravataLogger;
import org.apache.airavata.common.logger.AiravataLoggerFactory;
import org.apache.airavata.commons.gfac.type.HostDescription;
import org.apache.airavata.gfac.core.context.JobExecutionContext;
import org.apache.airavata.model.workspace.experiment.JobState;

import java.sql.Timestamp;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

/*
 This is the object which contains the data to identify a particular
 Job to start the monitoring
*/
public class MonitorID {
    private final static AiravataLogger logger = AiravataLoggerFactory.getLogger(MonitorID.class);

    private String userName;

    private Timestamp jobStartedTime;

    private Timestamp lastMonitored;

    private HostDescription host;

    // Arbitrary monitoring metadata. Lazily created by addParameter; the previous
    // version never initialized this field, so addParameter/getParameter threw a
    // NullPointerException unless setParameters had been called first.
    private Map<String, Object> parameters;

    private String experimentID;

    private String workflowNodeID;

    private String taskID;

    private String jobID;

    private String jobName;

    // Number of consecutive UNKNOWN status reports received; see setStatus.
    private int failedCount = 0;

    private JobState state;

    private JobExecutionContext jobExecutionContext;

    public MonitorID() {
    }

    /**
     * Copy constructor. Copies the identifying fields and stamps a fresh start time.
     * NOTE(review): parameters, state, failedCount and jobExecutionContext are
     * deliberately not copied here — confirm callers rely on that before changing.
     */
    public MonitorID(MonitorID monitorID){
        this.host = monitorID.getHost();
        this.jobStartedTime = new Timestamp((new Date()).getTime());
        this.userName = monitorID.getUserName();
        this.jobID = monitorID.getJobID();
        this.taskID = monitorID.getTaskID();
        this.experimentID = monitorID.getExperimentID();
        this.workflowNodeID = monitorID.getWorkflowNodeID();
        this.jobName = monitorID.getJobName();
    }

    /**
     * Creates an identifier for a job from its individual identifying parts.
     * The job start time is set to "now".
     */
    public MonitorID(HostDescription host, String jobID, String taskID, String workflowNodeID,
                     String experimentID, String userName, String jobName) {
        this.host = host;
        this.jobStartedTime = new Timestamp((new Date()).getTime());
        this.userName = userName;
        this.jobID = jobID;
        this.taskID = taskID;
        this.experimentID = experimentID;
        this.workflowNodeID = workflowNodeID;
        this.jobName = jobName;
    }

    /**
     * Creates an identifier from an execution context, pulling host, user and the
     * experiment/node/task ids out of it. Job id and name may be absent if the job
     * has not been created yet; that case is logged and the fields stay null.
     */
    public MonitorID(JobExecutionContext jobExecutionContext) {
        this.jobExecutionContext = jobExecutionContext;
        host = jobExecutionContext.getApplicationContext().getHostDescription();
        userName = jobExecutionContext.getExperiment().getUserName();
        taskID = jobExecutionContext.getTaskData().getTaskID();
        experimentID = jobExecutionContext.getExperiment().getExperimentID();
        workflowNodeID = jobExecutionContext.getWorkflowNodeDetails().getNodeInstanceId();// at this point we only have one node todo: fix this
        try {
            jobID = jobExecutionContext.getJobDetails().getJobID();
            jobName = jobExecutionContext.getJobDetails().getJobName();
        }catch(NullPointerException e){
            logger.error("There is not job created at this point");
            // the job id/name are simply left unset
        }
    }

    public HostDescription getHost() {
        return host;
    }

    public void setHost(HostDescription host) {
        this.host = host;
    }

    public Timestamp getLastMonitored() {
        return lastMonitored;
    }

    public void setLastMonitored(Timestamp lastMonitored) {
        this.lastMonitored = lastMonitored;
    }

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public String getJobID() {
        return jobID;
    }

    public void setJobID(String jobID) {
        this.jobID = jobID;
    }

    public Timestamp getJobStartedTime() {
        return jobStartedTime;
    }

    public void setJobStartedTime(Timestamp jobStartedTime) {
        this.jobStartedTime = jobStartedTime;
    }

    /**
     * Stores a monitoring parameter, creating the backing map on first use.
     * Fix: previously threw NullPointerException when setParameters had not been
     * called beforehand, because the map was never initialized.
     */
    public void addParameter(String key, Object value) {
        if (this.parameters == null) {
            this.parameters = new HashMap<String, Object>();
        }
        this.parameters.put(key, value);
    }

    /**
     * Returns the parameter stored under {@code key}, or null if it is absent
     * or no parameters have been stored yet (previously this threw a
     * NullPointerException in the latter case).
     */
    public Object getParameter(String key) {
        if (this.parameters == null) {
            return null;
        }
        return this.parameters.get(key);
    }

    public Map<String, Object> getParameters() {
        return parameters;
    }

    public void setParameters(Map<String, Object> parameters) {
        this.parameters = parameters;
    }

    public String getExperimentID() {
        return experimentID;
    }

    public void setExperimentID(String experimentID) {
        this.experimentID = experimentID;
    }

    public String getTaskID() {
        return taskID;
    }

    public void setTaskID(String taskID) {
        this.taskID = taskID;
    }

    public int getFailedCount() {
        return failedCount;
    }

    public void setFailedCount(int failedCount) {
        this.failedCount = failedCount;
    }

    public JobState getStatus() {
        return state;
    }

    /**
     * Updates the job state with special handling for fast-finishing jobs:
     * on some machines the job state vanishes quickly once the job is done and
     * the monitor then reports UNKNOWN. An UNKNOWN after a known state therefore
     * only bumps failedCount instead of overwriting the last valid state; any
     * valid status resets failedCount to 0 and is stored.
     */
    public void setStatus(JobState status) {
        if (this.state != null && status.equals(JobState.UNKNOWN)) {
            this.failedCount++;
            logger.infoId(this.getJobID(), "{} status came for job {}, Increasing the failed count to: {}.",
                    status.toString(), this.jobID, this.failedCount);
        }else {
            // normal scenario
            logger.infoId(this.getJobID(), "Valid status {} came for job {}, resetting fail count to 0",
                    status.toString(), this.jobID);
            setFailedCount(0);
            this.state = status;
        }
    }

    public String getWorkflowNodeID() {
        return workflowNodeID;
    }

    public void setWorkflowNodeID(String workflowNodeID) {
        this.workflowNodeID = workflowNodeID;
    }

    public JobExecutionContext getJobExecutionContext() {
        return jobExecutionContext;
    }

    public void setJobExecutionContext(JobExecutionContext jobExecutionContext) {
        this.jobExecutionContext = jobExecutionContext;
    }

    public String getJobName() {
        return jobName;
    }

    public void setJobName(String jobName) {
        this.jobName = jobName;
    }
}
{ "content_hash": "e03271f1d2ce7fac6baa87663fd51265", "timestamp": "", "source": "github", "line_count": 208, "max_line_length": 149, "avg_line_length": 29.235576923076923, "alnum_prop": 0.6701200460450584, "repo_name": "glahiru/airavata", "id": "fa4ecd203a77c3333a677fe6d78c337916e76943", "size": "6893", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/MonitorID.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "67381" }, { "name": "C++", "bytes": "2828557" }, { "name": "Java", "bytes": "15148513" }, { "name": "PHP", "bytes": "1418267" }, { "name": "Shell", "bytes": "54877" }, { "name": "XSLT", "bytes": "45917" } ], "symlink_target": "" }
package dynamic

import (
	"errors"
	"fmt"
	"reflect"
	"strings"
	"testing"
	"time"

	"github.com/gogo/protobuf/proto"
	"github.com/gogo/protobuf/protoc-gen-gogo/descriptor"
	"github.com/gogo/protobuf/types"

	"istio.io/api/policy/v1beta1"
	"istio.io/istio/mixer/pkg/lang/compiled"
	"istio.io/pkg/attribute"
)

// checkErrors asserts that gotError matches wantError: either both are nil,
// or both are non-nil and gotError's message contains wantError's message.
// Callers only specify the stable prefix/substring of the expected error text.
func checkErrors(t *testing.T, gotError error, wantError error) {
	wantOk := wantError != nil
	gotOk := gotError != nil
	if wantOk != gotOk {
		t.Fatalf("got: %v, want: %v", gotError, wantError)
	}
	if !gotOk {
		return
	}

	// Both are errors; accept the actual error when it contains the expected
	// substring, since exact messages vary with the failing layer.
	if strings.Contains(gotError.Error(), wantError.Error()) {
		return
	}

	t.Fatalf("%v should contain '%v'", gotError.Error(), wantError.Error())
}

// TestValueTypeEncoder_Errors exercises the failure paths of
// valueTypeEncoderBuilder: builder-time errors (bad attribute, bad message
// type, unsupported input type) and encode-time errors (attribute lookup
// failures and type mismatches in the attribute bag).
func TestValueTypeEncoder_Errors(t *testing.T) {
	compiler := compiled.NewBuilder(StandardVocabulary())
	for _, tst := range []struct {
		input        interface{}
		typeName     string
		builderError error
		encoderError error
		bag          map[string]interface{}
	}{
		{
			input:        "badAttribute",
			builderError: errors.New("unknown attribute"),
		},
		{
			input:        "incorrectMessage",
			typeName:     ".mymessage.com",
			builderError: errors.New("cannot process message of type"),
		},
		{
			input:        "response.size",
			encoderError: errors.New("lookup failed"),
		},
		{
			input:        "test.bool",
			encoderError: errors.New("lookup failed"),
		},
		{
			input:        "test.double",
			encoderError: errors.New("lookup failed"),
		},
		{
			input:        "api.operation",
			encoderError: errors.New("lookup failed"),
		},
		{
			input:        time.Time{},
			builderError: errors.New("unsupported type"),
		},
		{
			input:        "source.ip",
			encoderError: errors.New("incorrect type for IP_ADDRESS"),
			bag: map[string]interface{}{
				"source.ip": "this should be a byte array",
			},
		},
		{
			input:        "request.path",
			builderError: errors.New("incorrect type for .istio.policy.v1beta1.IPAddress"),
			bag: map[string]interface{}{
				"request.path": "this should be a byte array",
			},
			typeName: ".istio.policy.v1beta1.IPAddress",
		},
		{
			input:        "context.timestamp",
			encoderError: errors.New("incorrect type for TIMESTAMP"),
			bag: map[string]interface{}{
				"context.timestamp": []byte{1, 2, 4, 8},
			},
		},
		{
			input:        "context.timestamp",
			encoderError: errors.New("invalid timestamp"),
			bag: map[string]interface{}{
				"context.timestamp": time.Date(20000, 1, 1, 0, 0, 0, 0, time.UTC).UTC(),
			},
		},
		{
			input:        "response.duration",
			encoderError: errors.New("error converting value"),
			bag: map[string]interface{}{
				"response.duration": "invalid",
			},
		},
		{
			input:        "request.headers",
			builderError: errors.New("unsupported type: STRING_MAP"),
			bag: map[string]interface{}{
				"request.headers": map[string]string{
					"user": "me",
				},
			},
		},
		{
			input:        "test.uri",
			encoderError: errors.New("error converting value"),
			bag: map[string]interface{}{
				"test.uri": int64(5),
			},
		},
		{
			input:        "test.dns_name",
			encoderError: errors.New("error converting value"),
			bag: map[string]interface{}{
				"test.dns_name": int64(5),
			},
		},
		{
			input:        "test.email_address",
			encoderError: errors.New("error converting value"),
			bag: map[string]interface{}{
				"test.email_address": int64(5),
			},
		},
	} {
		t.Run(fmt.Sprintf("%v", tst.input), func(t *testing.T) {
			vt := valueTypeName
			fd := &descriptor.FieldDescriptorProto{TypeName: &vt}
			if tst.typeName != "" {
				fd.TypeName = &tst.typeName
			}
			enc, err := valueTypeEncoderBuilder(nil, fd, tst.input, compiler)
			checkErrors(t, err, tst.builderError)
			if enc == nil {
				// Builder failed (as expected for builder-error cases);
				// there is nothing to encode.
				return
			}

			bag := attribute.GetMutableBagForTesting(tst.bag)
			var ba []byte
			_, err = enc.Encode(bag, ba)
			checkErrors(t, err, tst.encoderError)
		})
	}
}

// TestValueTypeEncoder checks the success path: each input (literal or
// attribute expression) must encode to the proto wire form of the expected
// v1beta1.Value, length-prefixed with a varint.
// NOTE: all composite literals below use keyed fields consistently (go vet's
// `composites` check flags positional literals for types from other packages).
func TestValueTypeEncoder(t *testing.T) {
	compiler := compiled.NewBuilder(StandardVocabulary())

	now := time.Now()
	ts, err := types.TimestampProto(now)
	if err != nil {
		t.Fatalf("invalid time: %v", err)
	}

	for _, tst := range []struct {
		input    interface{}
		output   v1beta1.Value
		typeName string
		bag      map[string]interface{}
	}{
		{
			input:  1,
			output: v1beta1.Value{Value: &v1beta1.Value_Int64Value{Int64Value: 1}},
		},
		{
			input:  "test.i64",
			output: v1beta1.Value{Value: &v1beta1.Value_Int64Value{Int64Value: 1}},
			bag: map[string]interface{}{
				"test.i64": int64(1),
			},
		},
		{
			input:  "'astring'",
			output: v1beta1.Value{Value: &v1beta1.Value_StringValue{StringValue: "astring"}},
		},
		{
			input:  "api.operation",
			output: v1beta1.Value{Value: &v1beta1.Value_StringValue{StringValue: "astring"}},
			bag: map[string]interface{}{
				"api.operation": "astring",
			},
		},
		{
			input:  3.14,
			output: v1beta1.Value{Value: &v1beta1.Value_DoubleValue{DoubleValue: 3.14}},
		},
		{
			input:  "test.double",
			output: v1beta1.Value{Value: &v1beta1.Value_DoubleValue{DoubleValue: 3.14}},
			bag: map[string]interface{}{
				"test.double": 3.14,
			},
		},
		{
			input:  false,
			output: v1beta1.Value{Value: &v1beta1.Value_BoolValue{BoolValue: false}},
		},
		{
			input:  "test.bool",
			output: v1beta1.Value{Value: &v1beta1.Value_BoolValue{BoolValue: false}},
			bag: map[string]interface{}{
				"test.bool": false,
			},
		},
		{
			input: "response.time",
			output: v1beta1.Value{Value: &v1beta1.Value_TimestampValue{
				TimestampValue: &v1beta1.TimeStamp{Value: ts}}},
			bag: map[string]interface{}{
				"response.time": now,
			},
		},
		{
			input: "source.ip",
			output: v1beta1.Value{Value: &v1beta1.Value_IpAddressValue{
				IpAddressValue: &v1beta1.IPAddress{Value: []byte{1, 2, 4, 8}}}},
			bag: map[string]interface{}{
				"source.ip": []byte{1, 2, 4, 8},
			},
		},
		{
			input: "response.duration",
			output: v1beta1.Value{Value: &v1beta1.Value_DurationValue{
				DurationValue: &v1beta1.Duration{Value: types.DurationProto(time.Minute)}}},
			bag: map[string]interface{}{
				"response.duration": time.Minute,
			},
		},
		{
			input:  "test.uri",
			output: v1beta1.Value{Value: &v1beta1.Value_UriValue{UriValue: &v1beta1.Uri{Value: "/health"}}},
			bag: map[string]interface{}{
				"test.uri": "/health",
			},
		},
		{
			input:  "test.dns_name",
			output: v1beta1.Value{Value: &v1beta1.Value_DnsNameValue{DnsNameValue: &v1beta1.DNSName{Value: "a.b.c.d"}}},
			bag: map[string]interface{}{
				"test.dns_name": "a.b.c.d",
			},
		},
		{
			input: "test.email_address",
			output: v1beta1.Value{Value: &v1beta1.Value_EmailAddressValue{
				EmailAddressValue: &v1beta1.EmailAddress{Value: "[email protected]"}}},
			bag: map[string]interface{}{
				"test.email_address": "[email protected]",
			},
		},
	} {
		t.Run(fmt.Sprintf("%v", tst.input), func(t *testing.T) {
			vt := valueTypeName
			fd := &descriptor.FieldDescriptorProto{TypeName: &vt}
			if tst.typeName != "" {
				fd.TypeName = &tst.typeName
			}
			enc, err := valueTypeEncoderBuilder(nil, fd, tst.input, compiler)
			if err != nil {
				t.Fatalf("unexpected encoder build error: %v", err)
			}

			bag := attribute.GetMutableBagForTesting(tst.bag)
			var ba []byte
			if ba, err = enc.Encode(bag, ba); err != nil {
				t.Fatalf("unexpected encoder error: %v", err)
			}

			// The encoder emits a varint length prefix followed by the
			// marshaled Value message; compare both parts.
			bout, _ := tst.output.Marshal()
			sz, nbytes := proto.DecodeVarint(ba)
			if sz != uint64(len(bout)) || !reflect.DeepEqual(ba[nbytes:], bout) {
				t.Fatalf("encoding differs\n got: %v\nwant: %v", ba, bout)
			}
		})
	}
}
{ "content_hash": "0d1813b46399a4d80ef36c611fb32584", "timestamp": "", "source": "github", "line_count": 298, "max_line_length": 104, "avg_line_length": 24.93959731543624, "alnum_prop": 0.6184068891280947, "repo_name": "geeknoid/istio", "id": "347efc7ad154377aa5a72acf911dffa6bdc25c89", "size": "8024", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "mixer/pkg/protobuf/yaml/dynamic/valueTypeEncoder_test.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "3464" }, { "name": "Go", "bytes": "11069243" }, { "name": "HTML", "bytes": "36270" }, { "name": "JavaScript", "bytes": "1491" }, { "name": "Makefile", "bytes": "81541" }, { "name": "Python", "bytes": "12859" }, { "name": "Ruby", "bytes": "4321" }, { "name": "Shell", "bytes": "298984" }, { "name": "Smarty", "bytes": "23998" } ], "symlink_target": "" }
package com.google.cloud.texttospeech.v1.stub.samples;

// [START texttospeech_v1_generated_TextToSpeechStubSettings_ListVoices_sync]
import com.google.cloud.texttospeech.v1.stub.TextToSpeechStubSettings;
import java.time.Duration;

/**
 * Sample: configure a custom retry policy for the ListVoices RPC on the
 * Text-to-Speech stub settings.
 */
public class SyncListVoices {

  public static void main(String[] args) throws Exception {
    syncListVoices();
  }

  public static void syncListVoices() throws Exception {
    // This snippet has been automatically generated and should be regarded as a code template only.
    // It will require modifications to work:
    // - It may require correct/in-range values for request initialization.
    // - It may require specifying regional endpoints when creating the service client as shown in
    // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
    TextToSpeechStubSettings.Builder settingsBuilder = TextToSpeechStubSettings.newBuilder();

    // Take the current retry settings for listVoices, override only the total
    // timeout (30 seconds), and install the rebuilt settings.
    settingsBuilder
        .listVoicesSettings()
        .setRetrySettings(
            settingsBuilder.listVoicesSettings().getRetrySettings().toBuilder()
                .setTotalTimeout(Duration.ofSeconds(30))
                .build());

    // Freeze the builder into an immutable settings object.
    TextToSpeechStubSettings stubSettings = settingsBuilder.build();
  }
}
// [END texttospeech_v1_generated_TextToSpeechStubSettings_ListVoices_sync]
{ "content_hash": "a5ec002e9184c92fda13d157e81f2c93", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 100, "avg_line_length": 42.90625, "alnum_prop": 0.7560087399854334, "repo_name": "googleapis/java-texttospeech", "id": "6c5307ca4355e90e45ce3d71fd5f8105c8842b46", "size": "1968", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "samples/snippets/generated/com/google/cloud/texttospeech/v1/stub/texttospeechstubsettings/listvoices/SyncListVoices.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "801" }, { "name": "Java", "bytes": "1032682" }, { "name": "Python", "bytes": "788" }, { "name": "Shell", "bytes": "22872" } ], "symlink_target": "" }
#region auto-generated FILE INFORMATION

// ==============================================
// This file is distributed under the MIT License
// ==============================================
//
// Filename: TextSeparator.cs
// Version:  2020-01-29 16:17
//
// Copyright (c) 2020, Si13n7 Developments(tm)
// All rights reserved.
// ______________________________________________

#endregion

namespace SilDev
{
    /// <summary>
    ///     Separator characters as constant <see cref="char"/> values.
    /// </summary>
    public static class TextSeparatorChar
    {
        /// <summary>
        ///     The Boundary Neutral character [BN].
        /// </summary>
        public const char BoundaryNeutral = '\u200B';

        /// <summary>
        ///     The Carriage Return character [CR].
        /// </summary>
        public const char CarriageReturn = '\r';

        /// <summary>
        ///     The Common Number Separator character [CS].
        /// </summary>
        public const char CommonNumberSeparator = '\u00a0';

        /// <summary>
        ///     The Form Feed character [FF].
        /// </summary>
        public const char FormFeed = '\f';

        /// <summary>
        ///     The Horizontal Tab character [TAB].
        /// </summary>
        public const char HorizontalTab = '\t';

        /// <summary>
        ///     The Line Feed character [LF].
        /// </summary>
        public const char LineFeed = '\n';

        /// <summary>
        ///     The Line Separator character.
        /// </summary>
        public const char LineSeparator = '\u2028';

        /// <summary>
        ///     The Next Line character [NEL].
        /// </summary>
        public const char NextLine = '\u0085';

        /// <summary>
        ///     The Paragraph Separator character [B].
        /// </summary>
        public const char ParagraphSeparator = '\u2029';

        /// <summary>
        ///     The Space character.
        /// </summary>
        public const char Space = ' ';

        /// <summary>
        ///     The Vertical Tab character [VT].
        /// </summary>
        public const char VerticalTab = '\v';

        /// <summary>
        ///     A sequence containing every line separator character.
        /// </summary>
        public static readonly char[] AllNewLineChars =
        {
            LineFeed,
            VerticalTab,
            FormFeed,
            CarriageReturn,
            NextLine,
            BoundaryNeutral,
            LineSeparator,
            ParagraphSeparator
        };

        /// <summary>
        ///     A sequence containing every whitespace character.
        /// </summary>
        public static readonly char[] AllWhiteSpaceChars =
        {
            LineFeed,
            HorizontalTab,
            VerticalTab,
            FormFeed,
            CarriageReturn,
            Space,
            NextLine,
            CommonNumberSeparator,
            BoundaryNeutral,
            LineSeparator,
            ParagraphSeparator
        };
    }

    /// <summary>
    ///     Separator characters as constant <see cref="string"/> values.
    /// </summary>
    public static class TextSeparatorString
    {
        /// <summary>
        ///     The Boundary Neutral string [BN].
        /// </summary>
        public const string BoundaryNeutral = "\u200B";

        /// <summary>
        ///     The Carriage Return string [CR].
        /// </summary>
        public const string CarriageReturn = "\r";

        /// <summary>
        ///     The Common Number Separator string [CS].
        /// </summary>
        public const string CommonNumberSeparator = "\u00a0";

        /// <summary>
        ///     The Form Feed string [FF].
        /// </summary>
        public const string FormFeed = "\f";

        /// <summary>
        ///     The Horizontal Tab string [TAB].
        /// </summary>
        public const string HorizontalTab = "\t";

        /// <summary>
        ///     The Line Feed string [LF].
        /// </summary>
        public const string LineFeed = "\n";

        /// <summary>
        ///     The Line Separator string.
        /// </summary>
        public const string LineSeparator = "\u2028";

        /// <summary>
        ///     The Next Line string [NEL].
        /// </summary>
        public const string NextLine = "\u0085";

        /// <summary>
        ///     The Paragraph Separator string [B].
        /// </summary>
        public const string ParagraphSeparator = "\u2029";

        /// <summary>
        ///     The Space string.
        /// </summary>
        public const string Space = " ";

        /// <summary>
        ///     The Vertical Tab string [VT].
        /// </summary>
        public const string VerticalTab = "\v";

        /// <summary>
        ///     Carriage Return [CR] followed by Line Feed [LF] (the Windows
        ///     line ending).
        /// </summary>
        public const string WindowsDefault = "\r\n";

        /// <summary>
        ///     A sequence containing every line separator string.
        /// </summary>
        public static readonly string[] AllNewLineStrings =
        {
            LineFeed,
            VerticalTab,
            FormFeed,
            CarriageReturn,
            NextLine,
            BoundaryNeutral,
            LineSeparator,
            ParagraphSeparator
        };

        /// <summary>
        ///     A sequence containing every whitespace string.
        /// </summary>
        public static readonly string[] AllWhiteSpaceStrings =
        {
            LineFeed,
            HorizontalTab,
            VerticalTab,
            FormFeed,
            CarriageReturn,
            Space,
            NextLine,
            CommonNumberSeparator,
            BoundaryNeutral,
            LineSeparator,
            ParagraphSeparator
        };
    }
}
{ "content_hash": "8cdb29ea6cc777cf49451520f212152d", "timestamp": "", "source": "github", "line_count": 210, "max_line_length": 82, "avg_line_length": 26.485714285714284, "alnum_prop": 0.4703344120819849, "repo_name": "Si13n7/SilDev.CSharpLib", "id": "39f100866eb509b865060f67f6f4a5cb872e7f28", "size": "5564", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/SilDev/TextSeparator.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "1693108" }, { "name": "Smalltalk", "bytes": "46494" } ], "symlink_target": "" }
// Copyright(c) 2011 John Clayton - http://codemonkeylabs.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

namespace SlideShowPro.Director
{
    /// <summary>
    ///     Plain data holder for a Director user record. All members are
    ///     simple read/write auto-properties with no behavior attached.
    /// </summary>
    public class User
    {
        // Count of content items associated with this user.
        public int ContentCount { get; set; }

        // The user that created this record.
        public User Creator { get; set; }

        // Name shown for this user.
        public string DisplayName { get; set; }

        // First name.
        public string First { get; set; }

        // Numeric identifier of the user.
        public long Id { get; set; }

        // Last name.
        public string Last { get; set; }

        // Number of photos (semantics defined by the Director API).
        public int Photos { get; set; }

        // Profile text/reference as returned by the API.
        public string Profile { get; set; }

        // NOTE(review): public-facing variant of this user record,
        // presumably — confirm against the Director API docs.
        public User Public { get; set; }

        // Login name of the user.
        public string Username { get; set; }

        // The user that last updated this record.
        public User Updater { get; set; }
    }
}
{ "content_hash": "36f62c21ac0e3d9317a20daf3c5d85e1", "timestamp": "", "source": "github", "line_count": 41, "max_line_length": 75, "avg_line_length": 28.48780487804878, "alnum_prop": 0.6549657534246576, "repo_name": "jsclayton/slideshowpro-director", "id": "8d91d67fa1416424f21dfc66702d93d804fd6cc8", "size": "1170", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "source/SlideShowPro.Director/User.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "27156" } ], "symlink_target": "" }
// Copyright 2016 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // clang-format off import {sendWithPromise} from 'chrome://resources/js/cr.m.js'; // clang-format on export interface StartupPageInfo { modelIndex: number; title: string; tooltip: string; url: string; } export interface StartupUrlsPageBrowserProxy { loadStartupPages(): void; useCurrentPages(): void; /** @return Whether the URL is valid. */ validateStartupPage(url: string): Promise<boolean>; /** * @return Whether the URL was actually added, or ignored because it was * invalid. */ addStartupPage(url: string): Promise<boolean>; /** * @return Whether the URL was actually edited, or ignored because it was * invalid. */ editStartupPage(modelIndex: number, url: string): Promise<boolean>; removeStartupPage(index: number): void; } export class StartupUrlsPageBrowserProxyImpl implements StartupUrlsPageBrowserProxy { loadStartupPages() { chrome.send('onStartupPrefsPageLoad'); } useCurrentPages() { chrome.send('setStartupPagesToCurrentPages'); } validateStartupPage(url: string) { return sendWithPromise('validateStartupPage', url); } addStartupPage(url: string) { return sendWithPromise('addStartupPage', url); } editStartupPage(modelIndex: number, url: string) { return sendWithPromise('editStartupPage', modelIndex, url); } removeStartupPage(index: number) { chrome.send('removeStartupPage', [index]); } static getInstance(): StartupUrlsPageBrowserProxy { return instance || (instance = new StartupUrlsPageBrowserProxyImpl()); } static setInstance(obj: StartupUrlsPageBrowserProxy) { instance = obj; } } let instance: StartupUrlsPageBrowserProxy|null = null;
{ "content_hash": "8053b4273a96224c43c92e9277b71d30", "timestamp": "", "source": "github", "line_count": 73, "max_line_length": 75, "avg_line_length": 25.26027397260274, "alnum_prop": 0.7190889370932755, "repo_name": "nwjs/chromium.src", "id": "505b5976efea5e61bc66351c9329554ff41f68e2", "size": "1844", "binary": false, "copies": "1", "ref": "refs/heads/nw70", "path": "chrome/browser/resources/settings/on_startup_page/startup_urls_page_browser_proxy.ts", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
<?php # vim:sw=4:ts=4:et:nowrap /* SEARCHENGINE class 2004-05-26 [email protected] Example usage: include_once INCLUDESPATH."easyparliament/searchengine.php"; $searchengine = new SEARCHENGINE($searchstring); $description = $searchengine->query_description(); $short_description = $searchengine->query_description_short(); $count = $searchengine->run_count(); // $first_result begins at 0 $searchengine->run_search($first_result, $results_per_page); $gids = $searchengine->get_gids(); $relevances = $searchengine->get_relevances(); $bestpos = $searchengine->position_of_first_word($body); $extract = $searchengine->highlight($extract); */ include_once INCLUDESPATH . 'dbtypes.php'; if (defined('XAPIANDB') && XAPIANDB) { if (file_exists('/usr/local/share/php5/xapian.php')) include_once '/usr/local/share/php5/xapian.php'; if (file_exists('/usr/local/share/xapian-bindings/php5/xapian.php')) include_once '/usr/local/share/xapian-bindings/php5/xapian.php'; } global $xapiandb; class SEARCHENGINE { function SEARCHENGINE ($query) { if (!defined('XAPIANDB') || !XAPIANDB) return null; $this->query = $query; $this->stemmer = new XapianStem('english'); $this->enquire = null; // Any characters other than this are treated as, basically, white space // (apart from quotes and minuses, special case below) // The colon is in here for prefixes speaker:10043 and so on. $this->wordchars = "A-Za-z0-9:"; // An array of normal words. $this->words = array(); // All quoted phrases, as an (array of (arrays of words in each phrase)). $this->phrases = array(); // Items prefixed with a colon (speaker:10024) as an (array of (name, value)) $this->prefixed = array(); // Words you don't want $this->excluded = array(); // Stemmed words // doesn't work yet // $this->rough = array(); // Split words up into individual words, and quoted phrases preg_match_all('/(' . '"|' . # match either a quote, or... '(?:(?<![' .$this->wordchars. '])-)?' . # optionally a - (exclude) # if at start of word (i.e. 
not preceded by a word character, in # which case it is probably a hyphenated-word) '['.$this->wordchars.']+' . # followed by a string of word-characters ')/', $query, $all_words); if ($all_words) { $all_words = $all_words[0]; } else { $all_words = array(); } $in_quote = false; foreach ($all_words as $word) { if ($word == '"') { $in_quote = !$in_quote; if ($in_quote) { array_push($this->phrases, array()); } continue; } if ($word == '') { continue; } if (strpos($word, ':') !== false) { $items = split(":", strtolower($word)); $type = $items[0]; $value = join(":", array_slice($items,1)); if ($type == "section") { if ($value == "debates" || $value == "debate") $value = 1; elseif ($value == 'whall' || $value == 'westminster' || $value == 'westminhall') $value = 2; elseif ($value == "wrans" || $value == "wran") $value = 3; elseif ($value == 'wms' || $value == 'statements' || $value == 'statement') $value = 4; elseif ($value == 'lordsdebates' || $value == 'lords') $value = 101; elseif ($value == 'ni') $value = 5; elseif ($value == 'pbc' || $value == 'standing') $value = 6; $type = "major"; } if ($type == "groupby") { if ($value == "date" || $value == "day") $value = "day"; if ($value == "debates" || $value == "debate" || $value == "department" || $value == "departments" || $value == "dept") $value = "debate"; if ($value == "speech" || $value == "speeches") $value = "speech"; } array_push($this->prefixed, array($type, $value)); } elseif (strpos($word, '-') !== false) { array_push($this->excluded, str_replace("-", "", strtolower($word))); } /*else if (strpos($word, '~') !== false) { array_push($this->rough, str_replace("~", "", strtolower($word))); } */ elseif ($in_quote) { array_push($this->phrases[count($this->phrases) - 1], strtolower($word)); } else { array_push($this->words, strtolower($word)); } } twfy_debug("SEARCH", "words: " . var_export($this->words, true)); twfy_debug("SEARCH", "phrases: " . var_export($this->phrases, true)); twfy_debug("SEARCH", "prefixed: " . 
var_export($this->prefixed, true)); twfy_debug("SEARCH", "excluded: " . var_export($this->excluded, true)); // twfy_debug("SEARCH", "rough: " . var_export($this->rough, true)); } function make_phrase($phrasearray) { return '"' . join(' ', $phrasearray) . '"'; } function query_description_internal($long) { global $PAGE, $hansardmajors; if (!defined('XAPIANDB') || !XAPIANDB) return ''; $description = ""; if (count($this->words) > 0) { if ($long and $description == "") { $description .= " containing"; } $description .= " the ". make_plural("word", count($this->words)); $description .= " '"; if (count($this->words) > 2) { $description .= join("', '", array_slice($this->words, 0, -2)); $description .= "', '"; $description .= $this->words[count($this->words)-2] . "', and '" . $this->words[count($this->words)-1]; } elseif (count($this->words) == 2) { $description .= $this->words[0] . "' and '" . $this->words[1]; } else { $description .= $this->words[0]; } $description .= "'"; } if (count($this->phrases) > 0) { if ($description == "") { if ($long) { $description .= " containing"; } } else { $description .= " and"; } $description .= " the ". make_plural("phrase", count($this->phrases)) . " "; $description .= join(', ', array_map(array($this, "make_phrase"), $this->phrases)); } if (count($this->excluded) > 0) { if (count($this->words) > 0 or count($this->phrases) > 0) { $description .= " but not"; } else { $description .= " excluding"; } $description .= " the ". make_plural("word", count($this->excluded)); $description .= " '" . join(' ', $this->excluded) . "'"; } /* if (count($this->rough) > 0) { if ($description == "") { if ($long) { $description .= " containing "; } } $description .= " roughly words '" . join(' ', $this->rough) . 
"'"; } */ $major = array(); $speaker = array(); foreach( $this->prefixed as $items ) { if ($items[0] == 'speaker') { $member = new MEMBER(array('person_id' => $items[1])); $name = $member->full_name(); $speaker[] = $name; } elseif ($items[0] == 'major') { if (isset($hansardmajors[$items[1]]['title'])) { $major[] = $hansardmajors[$items[1]]['title']; } else { $PAGE->error_message("Unknown major section '$items[1]' ignored"); } } elseif ($items[0] == 'groupby') { if ($items[1] == 'day') { $description .= ' grouped by day'; } elseif ($items[1] == 'debate') { $description .= ' grouped by debate/department'; } elseif ($items[1] == 'speech') { $description .= ' showing all speeches'; } else { $PAGE->error_message("Unknown group by '$items[1]' ignored"); } } elseif ($items[0] == "bias") { list($weight, $halflife) = explode(":", $items[1]); $description .= " bias by $weight halflife $halflife seconds"; } elseif ($items[0] == 'date') { $description .= ' spoken on ' . $items[1]; } elseif ($items[0] == 'batch') { # silently ignore, as description goes in email alerts #$description .= ' in search batch ' . $items[1]; } else { $PAGE->error_message("Unknown search prefix '$items[0]' ignored"); } } if (sizeof($speaker)) $description .= ' by ' . join(' or ', $speaker); if (sizeof($major)) $description .= ' in ' . join(' or ', $major); return trim($description); } // Return textual description of search function query_description_short() { return $this->query_description_internal(false); } // Return textual description of search function query_description_long() { return $this->query_description_internal(true); } // Return stem of a word function stem($word) { return $this->stemmer->stem_word(strtolower($word)); } // Internal use mainly - you probably want query_description. Converts // parsed form of query that PHP knows into a full textual form again (for // feeding to Xapian's queryparser). 
function query_remade() { $remade = array(); foreach( $this->phrases as $phrase ) { $remade[] = '"' . join(' ', $phrase) . '"'; } if ($this->words) { $remade = array_merge($remade, $this->words); } $prefixes = array(); foreach( $this->prefixed as $items ) { if (!isset($prefixes[$items[0]])) $prefixes[$items[0]] = array(); if ($items[0] != 'groupby' && $items[0] != 'bias') { $prefixes[$items[0]][] = $items[0] . ':' . $items[1]; } } foreach ($prefixes as $prefix) { if (count($prefix)) $remade[] = '(' . join(' OR ', $prefix) . ')'; } $query = trim(join(' AND ', $remade)); if ($this->excluded) { $query .= ' NOT (' . join(' AND ', $this->excluded) . ')'; } // $remade .= ' ' . join(' ', array_map(array($this, "stem"), $this->rough)); return $query; } // Perform partial query to get a count of number of matches function run_count () { if (!defined('XAPIANDB') || !XAPIANDB) return null; $start = getmicrotime(); global $xapiandb; if (!$xapiandb) { $xapiandb = new XapianDatabase(XAPIANDB); } if (!$this->enquire) { $this->enquire = new XapianEnquire($xapiandb); } $queryparser = new XapianQueryParser(); $queryparser->set_stemming_strategy(QueryParser_STEM_NONE); $queryparser->set_default_op(Query_OP_AND); $queryparser->add_prefix("speaker", "speaker:"); $queryparser->add_prefix("major", "major:"); $queryparser->add_prefix('date', 'date:'); $queryparser->add_prefix('batch', 'batch:'); twfy_debug("SEARCH", "query remade -- ". $this->query_remade()); // We rebuild (with query_remade) our query and feed that text string to // the query parser. This is because the error handling in the query parser // is a bit knackered, and we want to be sure our highlighting etc. exactly // matches. XXX don't need to do this for more recent Xapians $query = $queryparser->parse_query($this->query_remade()); twfy_debug("SEARCH", "queryparser description -- " . 
$query->get_description()); $this->enquire->set_query($query); // Set collapsing and sorting global $PAGE; $collapsed = false; foreach( $this->prefixed as $items ) { if ($items[0] == 'groupby') { $collapsed = true; if ($items[1] == 'day') $this->enquire->set_collapse_key(2); else if ($items[1] == 'debate') $this->enquire->set_collapse_key(3); else if ($items[1] == 'speech') ; // no collapse key else $PAGE->error_message("Unknown group by '$items[1]' ignored"); } elseif ($items[0] == 'bias') { list($weight, $halflife) = explode(":", $items[1]); $this->enquire->set_bias($weight, intval($halflife)); } elseif ($items[0] == 'speaker') { # Don't do any collapsing if we're searching for a person's speeches $collapsed = true; } } // default to grouping by subdebate, i.e. by page if (!$collapsed) $this->enquire->set_collapse_key(7); $matches = $this->enquire->get_mset(0, 500); // Take either: 1) the estimate which is sometimes too large or 2) the // size which is sometimes too low (it is limited to the 500 in the line // above). We get the exact mset we need later, according to which page // we are on. if ($matches->size() < 500) { $count = $matches->size(); } else { $count = $matches->get_matches_estimated(); } $duration = getmicrotime() - $start; twfy_debug ("SEARCH", "Search count took $duration seconds."); return $count; } // Perform the full search... 
// Perform the full search, populating $this->gids, $this->relevances and
// (when sorting by creation time) $this->created.
// run_count() must have been called first to set up $this->enquire.
//   $first_result      - zero-based offset of the first match wanted
//   $results_per_page  - maximum number of matches to fetch
//   $sort_order        - 'relevance' (default), 'date' or 'created'
function run_search ($first_result, $results_per_page, $sort_order='relevance') {
    $start = getmicrotime();
    // Sort on a stored value slot, tie-breaking on relevance; slot 0 holds
    // the date and slot 6 the created timestamp (TODO: confirm slot
    // numbering against the indexer).
    switch ($sort_order) {
        case 'date':
            $this->enquire->set_sort_by_value_then_relevance(0, true);
            break;
        case 'created':
            $this->enquire->set_sort_by_value_then_relevance(6, true);
            break; // FIX: was falling through to the (empty) default case
        default:
            // do nothing, default ordering is by relevance
            break;
    }
    $matches = $this->enquire->get_mset($first_result, $results_per_page);
    $this->gids = array();
    $this->created = array();
    $this->relevances = array();
    $iter = $matches->begin();
    $end = $matches->end();
    while (!$iter->equals($end)) {
        $relevancy = $iter->get_percent();
        $weight = $iter->get_weight();
        $doc = $iter->get_document();
        $gid = $doc->get_data();
        if ($sort_order=='created') {
            array_push($this->created, $doc->get_value(6));
        }
        twfy_debug("SEARCH", "gid: $gid relevancy: $relevancy% weight: $weight");
        array_push($this->gids, "uk.org.publicwhip/".$gid);
        array_push($this->relevances, $relevancy);
        $iter->next();
    }
    $duration = getmicrotime() - $start;
    twfy_debug ("SEARCH", "Run search took $duration seconds.");
}

// ... use these to get the results of the last run_search()
function get_gids() { return $this->gids; }
function get_relevances() { return $this->relevances; }
function get_createds() { return $this->created; }

// Puts HTML highlighting round all the matching words in the text
function highlight($body) {
    // Contents will be used in preg_replace() to highlight the search terms.
    $findwords = array();
    $replacewords = array();
    foreach ($this->words as $word) {
        if (ctype_digit($word)) {
            // Match both "1234" and its thousands-separated form "1,234".
            array_push($findwords, "/\b($word|" . number_format($word) . ")\b/");
        } else {
            array_push($findwords, "/\b($word)\b/i");
        }
        array_push($replacewords, "<span class=\"hi\">\\1</span>");
    }
    foreach( $this->phrases as $phrase ) {
        // A phrase matches its words separated by runs of non-word characters.
        // FIX: join() argument order corrected to (glue, pieces) -- the
        // legacy swapped order is a fatal error in PHP 8.
        $phrasematch = join('[^'.$this->wordchars.']+', $phrase);
        array_push($findwords, "/\b($phrasematch)\b/i");
        $replacewords[] = "<span class=\"hi\">\\1</span>";
    }
    // Highlight search words.
    $hlbody = preg_replace($findwords, $replacewords, $body);
    // Remove any highlighting within HTML.
    $hlbody = preg_replace('#<(a|phrase)\s([^>]*?)<span class="hi">(.*?)</span>([^>]*?)">#', "<\\1 \\2\\3\\4\">", $hlbody);
    $hlbody = preg_replace('#<(/?)<span class="hi">a</span>([^>]*?)>#', "<\\1a\\2>", $hlbody); # XXX Horrible hack
    // Collapse duplicates
    $hlbody = preg_replace("#</span>(\W+)<span class=\"hi\">#", "\\1", $hlbody);
    return $hlbody;
}

// Find the position of the first of the search words/phrases in $body.
// Phrases take priority; bare words are only considered when no phrase
// matched.  Returns 0 when nothing matches at all.
function position_of_first_word($body) {
    $lcbody = ' ' . strtolower($body) . ' '; // spaces to make regexp mapping easier
    $pos = -1;
    // look for phrases
    foreach( $this->phrases as $phrase ) {
        // FIX: join() argument order corrected to (glue, pieces) for PHP 8.
        $phrasematch = join('[^'.$this->wordchars.']+', $phrase);
        if (preg_match('/([^'.$this->wordchars.']' . $phrasematch . '[^'.$this->wordchars. '])/', $lcbody, $matches)) {
            $wordpos = strpos( $lcbody, $matches[0] );
            if ($wordpos) {
                if ( ($wordpos < $pos) || ($pos==-1) ) {
                    $pos = $wordpos;
                }
            }
        }
    }
    // only look for earlier words if phrases weren't found
    if ($pos == -1) {
        foreach( $this->words as $word ) {
            if (ctype_digit($word))
                $word = '(?:'.$word.'|'.number_format($word).')';
            if (preg_match('/([^'.$this->wordchars.']' . $word . '[^'.$this->wordchars. '])/', $lcbody, $matches)) {
                $wordpos = strpos( $lcbody, $matches[0] );
                if ($wordpos) {
                    if ( ($wordpos < $pos) || ($pos==-1) ) {
                        $pos = $wordpos;
                    }
                }
            }
        }
    }
    if ($pos == -1) {
        $pos = 0;
    }
    return $pos;
}

// (Removed here: two large blocks of commented-out legacy code -- an old
// stemming-based highlighter, and an experimental direct Xapian query
// builder that never worked with the old PHP bindings.  See version
// control history if they are ever needed again.)
}

global $SEARCHENGINE;
$SEARCHENGINE = null;

// Search for $search and aggregate the matching speeches by speaker.
//   $search - user query string
//   $house  - optional house filter on the member table (1 adds an ' MP'
//             suffix to names, so presumably the Commons -- confirm)
// Returns an array with 'pagetitle', 'speakers' (keyed by person id, sorted
// by descending match count), 'party_count', and optionally 'limit_reached';
// or with 'error' set when there are no results.
function search_by_usage($search, $house = 0) {
    $data = array();
    // NOTE(review): $SEARCHENGINE here is a *local* variable (no `global`
    // declaration inside the function), so the file-level $SEARCHENGINE is
    // never actually set by this call -- confirm that is intended.
    $SEARCHENGINE = new SEARCHENGINE($search);
    $data['pagetitle'] = $SEARCHENGINE->query_description_short();
    // Re-run grouped by individual speech so each match is one speech.
    $SEARCHENGINE = new SEARCHENGINE($search . ' groupby:speech');
    $count = $SEARCHENGINE->run_count();
    if ($count <= 0) {
        $data['error'] = 'No results';
        return $data;
    }
    $SEARCHENGINE->run_search(0, 10000, 'date');
    $gids = $SEARCHENGINE->get_gids();
    if (count($gids) <= 0) {
        $data['error'] = 'No results';
        return $data;
    }
    if (count($gids) == 10000)
        $data['limit_reached'] = true;

    # Fetch all the speakers of the results, count them up and get min/max date usage
    $speaker_count = array();
    $mindate = array();
    $maxdate = array();
    $gids = join('","', $gids);
    $db = new ParlDB;
    $q = $db->query('SELECT gid,speaker_id,hdate FROM hansard WHERE gid IN ("' . $gids . '")');
    for ($n=0; $n<$q->rows(); $n++) {
        $gid = $q->field($n, 'gid');
        $speaker_id = $q->field($n, 'speaker_id'); # This is member ID
        $hdate = $q->field($n, 'hdate');
        if (!isset($speaker_count[$speaker_id])) {
            $speaker_count[$speaker_id] = 0;
            // Sentinels outside any real sitting date range.
            $maxdate[$speaker_id] = '1001-01-01';
            $mindate[$speaker_id] = '9999-12-31';
        }
        $speaker_count[$speaker_id]++;
        if ($hdate < $mindate[$speaker_id]) $mindate[$speaker_id] = $hdate;
        if ($hdate > $maxdate[$speaker_id]) $maxdate[$speaker_id] = $hdate;
    }

    # Fetch details of all the speakers
    $pids = array();
    $speakers = array();
    if (count($speaker_count)) {
        $speaker_ids = join(',', array_keys($speaker_count));
        // The LEFT JOIN means a member appears once per ministerial office.
        $q = $db->query('SELECT member_id, person_id, title,first_name,last_name,constituency,house,party, moffice_id, dept, position, from_date, to_date, left_house FROM member LEFT JOIN moffice ON member.person_id = moffice.person WHERE member_id IN (' . $speaker_ids . ') ' . ($house ? " AND house=$house" : '') . ' ORDER BY left_house DESC');
        for ($n=0; $n<$q->rows(); $n++) {
            $mid = $q->field($n, 'member_id');
            if (!isset($pids[$mid])) {
                $title = $q->field($n, 'title');
                $first = $q->field($n, 'first_name');
                $last = $q->field($n, 'last_name');
                $cons = $q->field($n, 'constituency');
                $house = $q->field($n, 'house');
                $party = $q->field($n, 'party');
                $full_name = ucfirst(member_full_name($house, $title, $first, $last, $cons));
                $pid = $q->field($n, 'person_id');
                $pids[$mid] = $pid;
                $speakers[$pid]['house'] = $house;
                $speakers[$pid]['left'] = $q->field($n, 'left_house');
            }
            // FIX: always look $pid up for *this* row's member.  Previously
            // $pid kept its value from the last first-seen member, so office
            // rows of an already-seen member could be attributed to the
            // wrong person when their rows were not adjacent.
            $pid = $pids[$mid];
            $dept = $q->field($n, 'dept');
            $posn = $q->field($n, 'position');
            $moffice_id = $q->field($n, 'moffice_id');
            // Only list offices currently held.
            if ($dept && $q->field($n, 'to_date') == '9999-12-31')
                $speakers[$pid]['office'][$moffice_id] = prettify_office($posn, $dept);
            if (!isset($speakers[$pid]['name'])) {
                $speakers[$pid]['name'] = $full_name . ($house==1?' MP':'');
                $speakers[$pid]['party'] = $party;
            }
        }
    }
    // Pseudo-speaker 0 collects headings and other unattributed text.
    $pids[0] = 0;
    $speakers[0] = array('party'=>'', 'name'=>'Headings, procedural text, etc.', 'house'=>0, 'count'=>0);

    // Accumulate per-person and per-party totals and date ranges.
    $party_count = array();
    $ok = 0;
    foreach ($speaker_count as $speaker_id => $count) {
        if (!isset($pids[$speaker_id])) continue;
        $pid = $pids[$speaker_id];
        if (!isset($speakers[$pid]['pmindate'])) {
            $speakers[$pid]['count'] = 0;
            $speakers[$pid]['pmaxdate'] = '1001-01-01';
            $speakers[$pid]['pmindate'] = '9999-12-31';
            $ok = 1;
        }
        if (!isset($party_count[$speakers[$pid]['party']]))
            $party_count[$speakers[$pid]['party']] = 0;
        $speakers[$pid]['count'] += $count;
        $party_count[$speakers[$pid]['party']] += $count;
        if ($mindate[$speaker_id] < $speakers[$pid]['pmindate']) $speakers[$pid]['pmindate'] = $mindate[$speaker_id];
        if ($maxdate[$speaker_id] > $speakers[$pid]['pmaxdate']) $speakers[$pid]['pmaxdate'] = $maxdate[$speaker_id];
    }
    // uasort() callback: order speakers by descending match count.
    // FIX: guarded with function_exists() -- a named function declared
    // inside a function is global in PHP, so a second call to
    // search_by_usage() used to die with a "cannot redeclare" fatal error.
    if (!function_exists('sort_by_count')) {
        function sort_by_count($a, $b) {
            if ($a['count'] > $b['count']) return -1;
            if ($a['count'] < $b['count']) return 1;
            return 0;
        }
    }
    if ($speakers[0]['count']==0) unset($speakers[0]);
    uasort($speakers, 'sort_by_count');
    arsort($party_count);
    if (!$ok) {
        $data['error'] = 'No results';
        return $data;
    }
    $data['party_count'] = $party_count;
    $data['speakers'] = $speakers;
    return $data;
}
{ "content_hash": "4c4c9f7d8ad174d912e1894bfa87f902", "timestamp": "", "source": "github", "line_count": 635, "max_line_length": 158, "avg_line_length": 40.77952755905512, "alnum_prop": 0.4847653987256227, "repo_name": "NathanaelB/twfy", "id": "221c00d3a904592279fed9395036f68fa42400d1", "size": "25895", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "www/includes/easyparliament/searchengine.php", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "3470" }, { "name": "CSS", "bytes": "23351" }, { "name": "PHP", "bytes": "974840" }, { "name": "Perl", "bytes": "171410" }, { "name": "Python", "bytes": "12569" }, { "name": "Shell", "bytes": "8531" } ], "symlink_target": "" }
export ANDROID_COMPONENTS=build-tools-25.0.1,android-24,extra-android-support,extra-google-google_play_services,extra-google-m2repository,extra-android-m2repository export ANDROID_LICENSES=android-sdk-license-c81a61d9 export XAMARIN_ANDROID_VERSION=7.3.1-2 export XAMARIN_IOS_VERSION=10.10.0.36 export MONO_ARCHIVE_VERSION=5.0.1 export MONO_FRAMEWORK_VERSION=5.0.1.1 # # Download and install Mono and Xamarin # echo Download and install Mono wget -nc -P downloads "https://download.mono-project.com/archive/${MONO_ARCHIVE_VERSION}/macos-10-universal/MonoFramework-MDK-${MONO_FRAMEWORK_VERSION}.macos10.xamarin.universal.pkg" sudo installer -pkg "downloads/MonoFramework-MDK-${MONO_FRAMEWORK_VERSION}.macos10.xamarin.universal.pkg" -target / echo Download and install Xamarin.iOS wget -nc -P downloads "https://dl.xamarin.com/MonoTouch/Mac/xamarin.ios-${XAMARIN_IOS_VERSION}.pkg" sudo installer -pkg "downloads/xamarin.ios-${XAMARIN_IOS_VERSION}.pkg" -target / #echo Download and install Xamarin.Android #curl -L https://raw.github.com/embarkmobile/android-sdk-installer/version-2/android-sdk-installer | bash /dev/stdin --install=$ANDROID_COMPONENTS --accept=$ANDROID_LICENSES #source ~/.android-sdk-installer/env #wget -nc -P downloads "https://dl.xamarin.com/MonoforAndroid/Mac/xamarin.android-${XAMARIN_ANDROID_VERSION}.pkg" #sudo installer -pkg "downloads/xamarin.android-${XAMARIN_ANDROID_VERSION}.pkg" -target /
{ "content_hash": "ec1e5612343c0879d2617e1ef1995cde", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 182, "avg_line_length": 64.54545454545455, "alnum_prop": 0.7943661971830986, "repo_name": "DreamTeamMobile/DT.Xamarin.DGActivityIndicators", "id": "0dd67efa33db92835f6616cb13ac7a18278e7c6b", "size": "1433", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "build-scripts/travis-before-build.sh", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "366" }, { "name": "C#", "bytes": "13095" }, { "name": "Shell", "bytes": "2279" } ], "symlink_target": "" }
package com.xantoria.flippy.utils

import org.scalatest.Suites

/** Aggregate ScalaTest suite for the utils package (currently just [[NetSpec]]). */
class UtilsSuite extends Suites(
  new NetSpec
)
{ "content_hash": "fd5d916b949cc2223c869aab37dfe87e", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 33, "avg_line_length": 16.142857142857142, "alnum_prop": 0.8053097345132744, "repo_name": "giftig/flippy", "id": "da44e72b4055b22afadfe72308a761510bf21bd0", "size": "113", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "core/src/test/scala/com/xantoria/flippy/utils/UtilsSuite.scala", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "5300" }, { "name": "HTML", "bytes": "1245" }, { "name": "JavaScript", "bytes": "35706" }, { "name": "Scala", "bytes": "88829" }, { "name": "Shell", "bytes": "502" } ], "symlink_target": "" }
#ifndef LP_BLD_INIT_H
#define LP_BLD_INIT_H


#include "pipe/p_compiler.h"
#include "util/u_pointer.h" // for func_pointer
#include "lp_bld.h"

#include <llvm-c/ExecutionEngine.h>


/* Bundle of per-module LLVM state used while generating and JIT-compiling
 * code: the module itself, its execution engine/target/context, plus the
 * pass manager and IR builder.
 */
struct gallivm_state
{
   LLVMModuleRef module;           /* LLVM module being built */
   LLVMExecutionEngineRef engine;  /* JIT execution engine */
   LLVMTargetDataRef target;       /* target data layout */
   LLVMPassManagerRef passmgr;     /* optimisation pass manager */
   LLVMContextRef context;         /* owning LLVM context */
   LLVMBuilderRef builder;         /* IR instruction builder */
   struct lp_generated_code *code; /* generated code -- lifetime presumably tied
                                      to this state; confirm in the .c file */
   unsigned compiled;              /* presumably set/counted once the module has
                                      been compiled -- confirm in the .c file */
};


/* One-time global initialisation; call before using any gallivm function. */
void
lp_build_init(void);


/* Create a new compilation state (named for debugging purposes). */
struct gallivm_state *
gallivm_create(const char *name);

/* Destroy a state created with gallivm_create(), releasing LLVM resources. */
void
gallivm_destroy(struct gallivm_state *gallivm);

/* Free the IR after compilation, keeping the generated machine code. */
void
gallivm_free_ir(struct gallivm_state *gallivm);

/* Verify a generated function's IR (presumably aborts/logs on invalid IR --
 * confirm in the .c file). */
void
gallivm_verify_function(struct gallivm_state *gallivm,
                        LLVMValueRef func);

/* Run the passes and JIT-compile the whole module. */
void
gallivm_compile_module(struct gallivm_state *gallivm);

/* Look up the native code address for a compiled function. */
func_pointer
gallivm_jit_function(struct gallivm_state *gallivm,
                     LLVMValueRef func);

/* Set the alignment attribute on a load/store instruction. */
void
lp_set_load_alignment(LLVMValueRef Inst,
                       unsigned Align);

void
lp_set_store_alignment(LLVMValueRef Inst,
                       unsigned Align);

#endif /* !LP_BLD_INIT_H */
{ "content_hash": "eefcc5e1c18396be33b91fd2decf0cb3", "timestamp": "", "source": "github", "line_count": 59, "max_line_length": 54, "avg_line_length": 18.610169491525422, "alnum_prop": 0.7058287795992714, "repo_name": "execunix/vinos", "id": "2e32cf8b077f4eca49b34a84880a7cbdcc7d4366", "size": "2420", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "xsrc/external/mit/MesaLib/dist/src/gallium/auxiliary/gallivm/lp_bld_init.h", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
package simple.view.implementation; import javafx.scene.Node; import javafx.scene.control.Button; import simple.view.IViewManager; import simple.view.SimpleAbstractMenu; /** * * Organizes and manages the game window, including starting/pausing and displaying menu. * */ public class SimpleGameMenu extends SimpleAbstractMenu{ public SimpleGameMenu(IViewManager manager, double width, double height, String title) { super(manager, width, height, title); generateOptions(createNewGameButton(),createLoadGameButton(), createHelpButton()); } private Node createNewGameButton() { Button newGame = createButton("New Game"); newGame.setOnAction(e -> launchGame()); return newGame; } private Node createHelpButton() { Button newGame = createButton("Help"); //set action to load game return newGame; } private Node createLoadGameButton() { Button newGame = createButton("Load Game"); //set action to launch help file return newGame; } }
{ "content_hash": "3317b525795889201d2265bb509dee1b", "timestamp": "", "source": "github", "line_count": 39, "max_line_length": 90, "avg_line_length": 24.923076923076923, "alnum_prop": 0.7530864197530864, "repo_name": "nbv3/voogasalad_CS308", "id": "f3389e3c15cb255de0f243f8a9beac3df67840fe", "size": "972", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "working/simple/view/implementation/SimpleGameMenu.java", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "6508" }, { "name": "Java", "bytes": "539042" } ], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.6.0_26) on Mon Oct 07 06:41:30 UTC 2013 --> <META http-equiv="Content-Type" content="text/html; charset=UTF-8"> <TITLE> Uses of Class org.apache.hadoop.util.bloom.DynamicBloomFilter (Apache Hadoop Main 2.2.0 API) </TITLE> <META NAME="date" CONTENT="2013-10-07"> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../stylesheet.css" TITLE="Style"> <SCRIPT type="text/javascript"> function windowTitle() { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class org.apache.hadoop.util.bloom.DynamicBloomFilter (Apache Hadoop Main 2.2.0 API)"; } } </SCRIPT> <NOSCRIPT> </NOSCRIPT> </HEAD> <BODY BGCOLOR="white" onload="windowTitle();"> <HR> <!-- ========= START OF TOP NAVBAR ======= --> <A NAME="navbar_top"><!-- --></A> <A HREF="#skip-navbar_top" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_top_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../org/apache/hadoop/util/bloom/DynamicBloomFilter.html" title="class in org.apache.hadoop.util.bloom"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT 
CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;PREV&nbsp; &nbsp;NEXT</FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../../index.html?org/apache/hadoop/util/bloom//class-useDynamicBloomFilter.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="DynamicBloomFilter.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_top"></A> <!-- ========= END OF TOP NAVBAR ========= --> <HR> <CENTER> <H2> <B>Uses of Class<br>org.apache.hadoop.util.bloom.DynamicBloomFilter</B></H2> </CENTER> No usage of org.apache.hadoop.util.bloom.DynamicBloomFilter <P> <HR> <!-- ======= START OF BOTTOM NAVBAR ====== --> <A NAME="navbar_bottom"><!-- --></A> <A HREF="#skip-navbar_bottom" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_bottom_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A 
HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../org/apache/hadoop/util/bloom/DynamicBloomFilter.html" title="class in org.apache.hadoop.util.bloom"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;PREV&nbsp; &nbsp;NEXT</FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../../index.html?org/apache/hadoop/util/bloom//class-useDynamicBloomFilter.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="DynamicBloomFilter.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_bottom"></A> <!-- ======== END OF 
BOTTOM NAVBAR ======= --> <HR> Copyright &#169; 2013 <a href="http://www.apache.org">Apache Software Foundation</a>. All Rights Reserved. </BODY> </HTML>
{ "content_hash": "f4827f79aa745b006cf3d0b54d562772", "timestamp": "", "source": "github", "line_count": 145, "max_line_length": 235, "avg_line_length": 43.317241379310346, "alnum_prop": 0.6177360292946983, "repo_name": "ashish-17/playback", "id": "fcfbe3e1b43a5c0516958ce654c174d6ab439683", "size": "6281", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "hadoop-2.2.0/share/doc/hadoop/api/org/apache/hadoop/util/bloom/class-use/DynamicBloomFilter.html", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "85328" }, { "name": "C", "bytes": "24796" }, { "name": "C++", "bytes": "16604" }, { "name": "CSS", "bytes": "394460" }, { "name": "Groff", "bytes": "25822" }, { "name": "HTML", "bytes": "58267561" }, { "name": "Java", "bytes": "78980" }, { "name": "JavaScript", "bytes": "13300" }, { "name": "Python", "bytes": "1038" }, { "name": "Shell", "bytes": "135543" }, { "name": "TeX", "bytes": "27734" }, { "name": "XSLT", "bytes": "20437" } ], "symlink_target": "" }
[Cowboy original example](https://github.com/ninenines/cowboy/tree/master/examples/cookie)

## Build

`$ rebar3 compile`

## Run

`rebar3 shell`

## Build and run

`rebar3 compile && rebar3 shell`

Point your browser to http://localhost:1234

## How to work with the demo

1. Build and run.
2. Point your browser to http://localhost:1234.
3. Inspect the response with your browser's developer tools.

## How to switch between the Cowboy and Cowmachine working flows

Note that there are two working flows (Cowboy and Cowmachine). By default the flow is selected at random. To use only the Cowboy flow, for instance, edit `main.erl`: find the `cowboy_options(Dispatch)` function and set the `TypeOfCallingCowmachine` value to `1` there.

## How to stop the application

Type the `q().` command and press `Enter`.
{ "content_hash": "fcc10bdd1f0f9e7b38a46bdb6bdad228", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 100, "avg_line_length": 24.25, "alnum_prop": 0.7371134020618557, "repo_name": "zotonic/cowmachine", "id": "18fec26ea04671216fab6a4f0871fbbb7f1f0816", "size": "795", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "examples/cookie/README.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Erlang", "bytes": "169008" }, { "name": "Makefile", "bytes": "842" }, { "name": "Shell", "bytes": "279" } ], "symlink_target": "" }
use std::{ borrow::Cow, cmp::min, io::{self}, }; use saturating::Saturating as S; use crate::{ binlog::{ consts::{BinlogVersion, EventType, IncidentType, UnknownIncidentType}, BinlogCtx, BinlogEvent, BinlogStruct, }, io::ParseBuf, misc::raw::{bytes::U8Bytes, int::*, RawBytes, RawConst}, proto::{MyDeserialize, MySerialize}, }; use super::BinlogEventHeader; /// Used to log an out of the ordinary event that occurred on the master. /// /// It notifies the slave that something happened on the master that might cause data /// to be in an inconsistent state. #[derive(Debug, Clone, Eq, PartialEq, Hash)] pub struct IncidentEvent<'a> { incident_type: RawConst<LeU16, IncidentType>, message: RawBytes<'a, U8Bytes>, } impl<'a> IncidentEvent<'a> { pub fn new(incident_type: IncidentType, message: impl Into<Cow<'a, [u8]>>) -> Self { Self { incident_type: RawConst::new(incident_type as u16), message: RawBytes::new(message), } } /// Returns the `incident_type` value, if it's valid. pub fn incident_type(&self) -> Result<IncidentType, UnknownIncidentType> { self.incident_type.get() } /// Returns the raw `message` value. pub fn message_raw(&'a self) -> &'a [u8] { self.message.as_bytes() } /// Returns `message` value as a string (lossy converted). pub fn message(&'a self) -> Cow<'a, str> { self.message.as_str() } /// Sets the `incident_type` value. pub fn with_incident_type(mut self, incident_type: IncidentType) -> Self { self.incident_type = RawConst::new(incident_type as u16); self } /// Sets the `message` value. 
pub fn with_message(mut self, message: impl Into<Cow<'a, [u8]>>) -> Self { self.message = RawBytes::new(message); self } pub fn into_owned(self) -> IncidentEvent<'static> { IncidentEvent { incident_type: self.incident_type, message: self.message.into_owned(), } } } impl<'de> MyDeserialize<'de> for IncidentEvent<'de> { const SIZE: Option<usize> = None; type Ctx = BinlogCtx<'de>; fn deserialize(_ctx: Self::Ctx, buf: &mut ParseBuf<'de>) -> io::Result<Self> { Ok(Self { incident_type: buf.parse(())?, message: buf.parse(())?, }) } } impl MySerialize for IncidentEvent<'_> { fn serialize(&self, buf: &mut Vec<u8>) { self.incident_type.serialize(&mut *buf); self.message.serialize(&mut *buf); } } impl<'a> BinlogEvent<'a> for IncidentEvent<'a> { const EVENT_TYPE: EventType = EventType::INCIDENT_EVENT; } impl<'a> BinlogStruct<'a> for IncidentEvent<'a> { fn len(&self, _version: BinlogVersion) -> usize { let mut len = S(0); len += S(2); len += S(1); len += S(min(self.message.0.len(), u8::MAX as usize)); min(len.0, u32::MAX as usize - BinlogEventHeader::LEN) } }
{ "content_hash": "d8189756afa6aacc4e04cff1af289955", "timestamp": "", "source": "github", "line_count": 107, "max_line_length": 88, "avg_line_length": 28.02803738317757, "alnum_prop": 0.5975325108369457, "repo_name": "blackbeam/rust_mysql_common", "id": "5bb86eccd153e1e73d898181e4d54b6070566a9d", "size": "3377", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/binlog/events/incident_event.rs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C++", "bytes": "1467" }, { "name": "Rust", "bytes": "809933" } ], "symlink_target": "" }
module GoToSymbolFunction_Pattern_CaretOnVariable where test :: Int test = let (seven, eight) = (7,8) in s<caret>even + 1
{ "content_hash": "62809f7cf517a80b5950816cae560526", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 55, "avg_line_length": 25.8, "alnum_prop": 0.6976744186046512, "repo_name": "charleso/intellij-haskforce", "id": "e21836e3c139bf8a8d80ad699c86d1333fdbb773", "size": "129", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "tests/gold/codeInsight/GoToSymbolFunction_Pattern_CaretOnVariable.hs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "435" }, { "name": "Haskell", "bytes": "109431" }, { "name": "Java", "bytes": "1295286" }, { "name": "Lex", "bytes": "44643" }, { "name": "Scala", "bytes": "53347" }, { "name": "Shell", "bytes": "585" } ], "symlink_target": "" }