Column         Type        Stats
content        string      lengths 4 to 1.04M
lang           string      358 distinct values
score          int64       0 to 5
repo_name      string      lengths 5 to 114
repo_path      string      lengths 4 to 229
repo_licenses  sequence    lengths 1 to 8
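The columns above describe a code-corpus preview: one row per source file, carrying the file text (content), a language label (lang), a 0-5 quality score, and repository metadata (repo_name, repo_path, repo_licenses). As a minimal sketch of how rows with this schema could be loaded and filtered, assuming the corpus is published in a Hugging Face `datasets`-compatible format -- the dataset identifier, split name, and filter thresholds below are placeholders, not values taken from this preview:

# Minimal sketch; "user/code-corpus", the split, and the thresholds are hypothetical.
from datasets import load_dataset

ds = load_dataset("user/code-corpus", split="train", streaming=True)

keep_licenses = {"MIT", "Apache-2.0", "BSD-3-Clause"}
for row in ds:
    # Columns follow the schema above: content, lang, score, repo_name, repo_path, repo_licenses.
    if row["score"] >= 4 and keep_licenses & set(row["repo_licenses"]):
        print(row["lang"], row["repo_name"], row["repo_path"], len(row["content"]))
        break  # stop after the first matching record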
make or := ? : (A : Set)(B : Set) -> Set ;
make or-left := ? : (A : Set)(B : Set)(a : A) -> or A B ;
make or-right := ? : (A : Set)(B : Set)(b : B) -> or A B ;
make or-elim := ? : (A : Set)(B : Set) -> or A B -> (C : Set) -> (A -> C) -> (B -> C) -> C;
make goal : (P : Set)(Q : Set) -> or P Q -> or Q P ;
lambda P ;
lambda Q ;
lambda PorQ ;
elim or-elim P Q PorQ ;
give \ P Q p _ -> or-right Q P p ;
give \ P Q q _ -> or-left Q P q ;
lang: PigLatin | score: 4 | repo: mietek/epigram | path: test/Elim1.pig | licenses: ["MIT"]
printf(fmt cstring) int #Foreign("printf") #VarArgs

main() {
	n := 7
	a := 1_u
	b := 1_u
	for i := 1; i < n {
		temp := a
		a += b
		b = temp
	}
	printf("%d\n", a)
}
lang: mupad | score: 4 | repo: jturner/muon | path: examples/fib_iterative.mu | licenses: ["MIT"]
/****************************************************************************** * Copyright 2018 The Apollo Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the License); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an AS IS BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *****************************************************************************/ #include "modules/perception/camera/test/camera_lib_calibrator_laneline_app_util.h" #include <fstream> #include "modules/perception/common/i_lib/core/i_basic.h" namespace apollo { namespace perception { namespace obstacle { // void change_suffix(boost::filesystem::path file_path, std::string suffix, // std::string *file_path_changed) { // boost::filesystem::path file_name_tmp = file_path; // file_name_tmp.replace_extension(suffix); // *file_path_changed = file_name_tmp.string(); // } // void change_suffix(std::string file_path, std::string suffix, // std::string *file_path_changed) { // boost::filesystem::path path = file_path; // change_suffix(path, suffix, file_path_changed); // } bool load_filename(std::string path, std::string suffix, std::vector<std::string> *name_list) { assert(name_list != nullptr); name_list->clear(); boost::filesystem::directory_iterator end_itr; boost::filesystem::directory_iterator iter(path); boost::filesystem::path file; while (iter != end_itr) { file = *iter; if (!suffix.empty()) { std::string extension = file.extension().string(); std::transform(extension.begin(), extension.end(), extension.begin(), ::tolower); if (extension != suffix) { continue; } } std::string filename = file.string(); name_list->push_back(filename); iter++; } if (name_list->size() == 0) { return false; } std::sort(name_list->begin(), name_list->end()); // in dictionary order return true; } // bool load_ref_camera_p_mat(const std::string &filename, float p_mat[12]) { // std::fstream fin(filename); // if (!fin.is_open()) { // std::cerr << "Fail to load the camera p matrix: " << filename << // std::endl; // return false; // } // fin >> p_mat[0] >> p_mat[1] >> p_mat[2] >> p_mat[3] >> p_mat[4] >> p_mat[5] // >> // p_mat[6] >> p_mat[7] >> p_mat[8] >> p_mat[9] >> p_mat[10] >> p_mat[11]; // fin.close(); // return true; // } bool load_ref_camera_k_mat(const std::string &filename, float k_mat[9], int *w, int *h) { std::fstream fin(filename); if (!fin.is_open()) { std::cerr << "Fail to load the camera k matrix: " << filename << std::endl; return false; } float wh_flt[2] = {0}; fin >> wh_flt[0] >> wh_flt[1]; *w = common::IRound(wh_flt[0]); *h = common::IRound(wh_flt[1]); fin >> k_mat[0] >> k_mat[1] >> k_mat[2] >> k_mat[3] >> k_mat[4] >> k_mat[5] >> k_mat[6] >> k_mat[7] >> k_mat[8]; fin.close(); return true; } // void draw_2d_bbox(cv::Mat *image, float left, float top, float right, // float bottom, const cv::Scalar &color) { // cv::rectangle(*image, cvPoint(common::IRound(left), common::IRound(top)), // cvPoint(common::IRound(right), // common::IRound(bottom)), color, 2, // 1, // 0); // } // void draw_2d_face(cv::Mat *image, const float corners_2d[16], // const int idx_points[4], const cv::Scalar &color) { // for (int i = 0; i < 4; ++i) { // int i_cur2 = 
idx_points[i] << 1; // int i_next2 = idx_points[((i + 1) % 4)] << 1; // cv::line(*image, cvPoint(common::IRound(corners_2d[i_cur2]), // common::IRound(corners_2d[i_cur2 + 1])), // cvPoint(common::IRound(corners_2d[i_next2]), // common::IRound(corners_2d[i_next2 + 1])), // color, 2, 8, 0); // } // } void write_text_on_image(cv::Mat *image, float left, float top, const char *text, const CvFont &font, const cv::Scalar &color) { IplImage ipl_img = *image; cvPutText(&ipl_img, text, cvPoint(common::IRound(left), common::IRound(top)), &font, color); } // void add_noise_to_vector_radius(float *x, int n, float radius, bool set_seed) // { // unsigned int seed = time(NULL); // for (int i = 0; i < n; ++i) { // float dx = radius * (rand_r(&seed) / RAND_MAX - 0.5f) * 2; // /* // std::cout << "|i, dx: " << i << ", " << dx; // */ // x[i] += dx; // } // // std::cout << std::endl; // } // void add_noise_to_vector_ratio(float *x, int n, float ratio, bool set_seed) { // unsigned int seed = time(NULL); // for (int i = 0; i < n; ++i) { // float radius = fabs(x[i]) * ratio; // float dx = radius * (rand_r(&seed) / RAND_MAX - 0.5f) * 2; // /* // std::cout << "|i, dx: " << i << ", " << dx; // */ // x[i] += dx; // } // // std::cout << std::endl; // } } // namespace obstacle } // namespace perception } // namespace apollo
lang: C++ | score: 4 | repo: seeclong/apollo | path: modules/perception/camera/test/camera_lib_calibrator_laneline_app_util.cc | licenses: ["Apache-2.0"]
import "mekanoobject" import "mekanopolygon" class MekanoObjectPolygonal : MekanoObject { private: List<MekanoPolygon> m_Polygons { }; public: property List<MekanoPolygon> polygons { get { return m_Polygons; } } void addPolygon(MekanoPolygon polygon) { m_Polygons.Add(polygon); } ~MekanoObjectPolygonal() { m_Polygons.Free(); } void computePoints() { for(poly : m_Polygons) { for(p : poly.points) { p.lastLocalPosition = p.localPosition; p.computeLocalPosition(); } } } void draw(MekanoDisplay display) { MekanoObject::draw(display); for(p : m_Polygons) display.drawPolygon(m_Position, p); } bool isInside(Vector2D v) { Vector2D localposition; float radius = boundingRadius; localposition.subtract(v, position); if(radius) if(localposition.length > radius) return false; for(p : m_Polygons; p.isInside(localposition)) return true; return false; } }
lang: eC | score: 4 | repo: N-eil/ecere-sdk | path: samples/guiAndGfx/mekano/mekanoobjectpolygonal.ec | licenses: ["BSD-3-Clause"]
.video-js .vjs-audio-button .vjs-icon-placeholder {
  @extend .vjs-icon-audio;
}

.video-js .vjs-audio-button + .vjs-menu .vjs-main-desc-menu-item .vjs-menu-item-text .vjs-icon-placeholder {
  vertical-align: middle;
  display: inline-block;
  margin-bottom: -0.1em;
}

// Mark a main-desc-menu-item (main + description) item with a trailing Audio Description icon
.video-js .vjs-audio-button + .vjs-menu .vjs-main-desc-menu-item .vjs-menu-item-text .vjs-icon-placeholder:before {
  font-family: VideoJS;
  content: " \f11d";
  font-size: 1.5em;
  line-height: inherit;
}
lang: SCSS | score: 3 | repo: TradeCast/video.js | path: src/css/components/_audio.scss | licenses: ["Apache-2.0"]
// https://tools.ietf.org/html/rfc7617 Basic Authentication sig BasicChallenge extends Challenge { realm : Realm, charset : lone Charset } { name = "Basic" (RealmParameter & parameters).realm = Realm one charset implies (CharsetParameter & parameters).charset = charset } sig BasicCredentials extends Credentials { user_id : String, password : String, charset : lone Charset } { name = "Basic" let s = user_id.cat[":"].cat[password], c = one charset implies charset else OTHER_CHARSET, p = (Token68Parameter & parameters ){ p.value = c.binary[s] } } fun String.cat[ other : String ] : String { other // wrong! but cannot concatenate } // https://tools.ietf.org/html/rfc7235 Authentication one sig SC_UNAUTHORIZED_401 extends ResponseCode {} sig AuthorizationServer extends Server { protectionSpaces : set Realm } abstract sig Challenge extends AuthScheme {} abstract sig Credentials extends AuthScheme {} sig WWWAuthenticate extends Header { challenges : seq Challenge } { name = "WWW-Authenticate" some challenges } sig Authorization extends Header { credentials : Credentials } { name = "Authorization" } abstract sig AuthScheme { name : String, parameters : set Parameter } { some (Token68Parameter & parameters) implies one parameters } abstract sig Parameter { } sig Binary { } abstract sig Token68Parameter extends Parameter { value : Binary } abstract sig AuthParam extends Parameter { name : String } sig Realm {} sig RealmParameter extends AuthParam { realm : Realm } { name = "realm" } abstract sig Charset { maps : String -> Binary } fun Charset.binary[ s : String ] : Binary { this.maps[s] } one sig ASCII extends Charset {} one sig ISO8859 extends Charset {} one sig UTF16 extends Charset {} one sig UTF8 extends Charset {} one sig OTHER_CHARSET extends Charset {} sig CharsetParameter extends AuthParam { charset : Charset } { name = "charset" } fact WWWAuthenticateChallengeResponse { all r : HttpResponse | r.response = SC_UNAUTHORIZED_401 implies some (r.headers.elems & WWWAuthenticate ) } // https://tools.ietf.org/html/rfc7230 (HTTP 1.1) and further sig Server {} sig Path {} one sig EmptyPath extends Path { } sig URI { host : Server, path : Path } enum Method { GET, POST, PUT, DELETE, PATCH, OPTIONS, HEAD } enum ResponseCode { SC_OK_200, SC_NOT_FOUND_404, SC_TEMP_REDIRECT_302 } abstract sig Body {} sig HttpRequest { method : Method, url : URI, headers : seq Header, body : lone Body } sig HttpResponse { response : ResponseCode, headers : seq Header, payload : lone Body, } abstract sig Header { name : String } fact fixup { all a : Authorization | a in HttpRequest.headers.elems all a : WWWAuthenticate | a in HttpResponse.headers.elems all b : Credentials | b in Authorization.credentials all b : Challenge | b in WWWAuthenticate.challenges.elems all b : Body | lone r : HttpRequest | r.body = b } run {} for 3
lang: Alloy | score: 4 | repo: esb-dev/models | path: ietf-rfcs/rfc7617-BasicAuth/basic-auth.als | licenses: ["Apache-2.0"]
"use strict"; /** @type {import("../../../../").Configuration} */ module.exports = { optimization: { sideEffects: true, usedExports: true, innerGraph: true, splitChunks: { cacheGroups: { forceMerge: { test: /shared/, enforce: true, name: "shared", chunks: "all" } } } }, module: { rules: [ { test: /dep/, sideEffects: false } ] } };
lang: JavaScript | score: 4 | repo: 1shenxi/webpack | path: test/hotCases/conditional-runtime/accept-conditional/webpack.config.js | licenses: ["MIT"]
// @strict: true

// The type below should be invariant in T but is measured as covariant because
// we don't analyze recursive references.

interface Foo1<T> {
    x: T;
    y: Foo1<(arg: T) => void>;
}

declare const f10: Foo1<string>;
const f11: Foo1<'a'> = f10;
const f12: Foo1<unknown> = f10;

// The type below is invariant in T and is measured as such.

interface Foo2<T> {
    x: T;
    y: { x: (arg: T) => void, y: Foo2<(arg: T) => void>; }
}

declare const f20: Foo2<string>;
const f21: Foo2<'a'> = f20;
const f22: Foo2<unknown> = f20;

// The type below should be invariant in T but is measured as covariant because
// we don't analyze recursive references.

type Foo3<T> = {
    x: T;
    y: Foo3<(arg: T) => void>;
}

declare const f30: Foo3<string>;
const f31: Foo3<'a'> = f30;
const f32: Foo3<unknown> = f30;

// The type below is invariant in T and is measured as such.

type Foo4<T> = {
    x: T;
    y: { x: (arg: T) => void, y: Foo4<(arg: T) => void>; }
}

declare const f40: Foo4<string>;
const f41: Foo4<'a'> = f40;
const f42: Foo4<unknown> = f40;

// Repro from #3580

interface Fn<A, B> {
    (a: A): B;
    then<C>(next: Fn<B, C>): Fn<A, C>;
}

declare const fn: Fn<string, number>;

// Contravariant in A
const fn1: Fn<unknown, number> = fn;  // Error
const fn2: Fn<'a', number> = fn;

// Covariant in B
const fn3: Fn<string, unknown> = fn;
const fn4: Fn<string, 0> = fn;  // Error

// Repro from #39947

interface I<Dummy, V> {
    c: C<Dummy, V>;
}

class C<Dummy, V> {
    declare sub: I<Dummy, V>;
    declare covariance: V;
}

const c1: C<unknown, string> = new C<unknown, number>();  // Error
lang: TypeScript | score: 5 | repo: monciego/TypeScript | path: tests/cases/compiler/varianceMeasurement.ts | licenses: ["Apache-2.0"]
package inline

class A {
    var z = 0

    inline var f: Int
        get() = z + 1
        set(p: Int) {
            z = p + 1
        }
}
lang: Groff | score: 4 | repo: AndrewReitz/kotlin | path: jps-plugin/testData/incremental/pureKotlin/inlinePropertyInClass/inline.kt.new.2 | licenses: ["ECL-2.0", "Apache-2.0"]
<?xml version="1.0" encoding="UTF-8"?> <!-- ******************************************************************* --> <!-- --> <!-- Copyright IBM Corp. 2010, 2014 --> <!-- --> <!-- Licensed under the Apache License, Version 2.0 (the "License"); --> <!-- you may not use this file except in compliance with the License. --> <!-- You may obtain a copy of the License at: --> <!-- --> <!-- http://www.apache.org/licenses/LICENSE-2.0 --> <!-- --> <!-- Unless required by applicable law or agreed to in writing, software --> <!-- distributed under the License is distributed on an "AS IS" BASIS, --> <!-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or --> <!-- implied. See the License for the specific language governing --> <!-- permissions and limitations under the License. --> <!-- --> <!-- ******************************************************************* --> <!-- DO NOT EDIT. THIS FILE IS GENERATED. --> <faces-config> <faces-config-extension> <namespace-uri>http://www.ibm.com/xsp/coreex</namespace-uri> <default-prefix>xe</default-prefix> </faces-config-extension> <group> <group-type>com.ibm.xsp.extlib.group.Node</group-type> <property> <description>%property.node.descr%</description> <display-name>%property.node.name%</display-name> <property-name>node</property-name> <property-class>java.lang.String</property-class> <property-extension> <designer-extension> <editor>com.ibm.xsp.extlib.designer.tooling.editor.XPageControlIDEditor</editor> </designer-extension> </property-extension> </property> <property> <description>%property.var.descr%</description> <display-name>%property.var.name%</display-name> <property-name>var</property-name> <property-class>java.lang.String</property-class> <property-extension> <designer-extension> <tags> not-server-variable-name </tags> </designer-extension> </property-extension> </property> <property> <description>%property.attributes.descr%</description> <display-name>%property.attributes.name%</display-name> <property-name>attributes</property-name> <property-class>java.util.ArrayList</property-class> <property-extension> <collection-property>true</collection-property> <property-item-class>com.ibm.xsp.complex.Parameter</property-item-class> <property-add-method>addAttribute</property-add-method> <allow-run-time-binding>false</allow-run-time-binding> </property-extension> </property> <group-extension> <designer-extension> <tags> group-in-complex </tags> </designer-extension> </group-extension> </group> <group> <group-type>com.ibm.xsp.extlib.group.FadeEffect</group-type> <property> <description>%property.easing.descr%</description> <display-name>%property.easing.name%</display-name> <property-name>easing</property-name> <property-class>java.lang.String</property-class> <property-extension> <designer-extension> <tags> todo </tags> </designer-extension> </property-extension> </property> <property> <description>%property.duration.descr%</description> <display-name>%property.duration.name%</display-name> <property-name>duration</property-name> <property-class>int</property-class> <property-extension> <default-value>-1</default-value> </property-extension> </property> <group-extension> <designer-extension> <tags> group-in-complex </tags> </designer-extension> </group-extension> </group> <complex-type> <description>%complex-type.dojoFadeOut.descr%</description> <display-name>%complex-type.dojoFadeOut.name%</display-name> <complex-id>com.ibm.xsp.extlib.actions.client.dojo.FadeOutAction</complex-id> 
<complex-class>com.ibm.xsp.extlib.actions.client.dojo.FadeOutAction</complex-class> <group-type-ref>com.ibm.xsp.extlib.group.Node</group-type-ref> <group-type-ref>com.ibm.xsp.extlib.group.FadeEffect</group-type-ref> <complex-extension> <base-complex-id>simpleActionInterface</base-complex-id> <tag-name>dojoFadeOut</tag-name> <designer-extension> <action-type>client</action-type> <category>%complex-category.dojo_effects%</category> </designer-extension> </complex-extension> </complex-type> <complex-type> <description>%complex-type.dojoFadeIn.descr%</description> <display-name>%complex-type.dojoFadeIn.name%</display-name> <complex-id>com.ibm.xsp.extlib.actions.client.dojo.FadeInAction</complex-id> <complex-class>com.ibm.xsp.extlib.actions.client.dojo.FadeInAction</complex-class> <group-type-ref>com.ibm.xsp.extlib.group.Node</group-type-ref> <group-type-ref>com.ibm.xsp.extlib.group.FadeEffect</group-type-ref> <complex-extension> <base-complex-id>simpleActionInterface</base-complex-id> <tag-name>dojoFadeIn</tag-name> <designer-extension> <action-type>client</action-type> <category>%complex-category.dojo_effects%</category> </designer-extension> </complex-extension> </complex-type> <complex-type> <description>%complex-type.dojoAnimationProps.descr%</description> <display-name>%complex-type.dojoAnimationProps.name%</display-name> <complex-id>com.ibm.xsp.extlib.actions.client.dojo.AnimationProps</complex-id> <complex-class>com.ibm.xsp.extlib.actions.client.dojo.AnimationProps</complex-class> <property> <description>%property.name.descr%</description> <display-name>%property.name.name%</display-name> <property-name>name</property-name> <property-class>java.lang.String</property-class> </property> <property> <description>%property.start.descr%</description> <display-name>%property.start.name%</display-name> <property-name>start</property-name> <property-class>java.lang.String</property-class> </property> <property> <description>%property.end.descr%</description> <display-name>%property.end.name%</display-name> <property-name>end</property-name> <property-class>java.lang.String</property-class> </property> <property> <description>%property.unit.descr%</description> <display-name>%property.unit.name%</display-name> <property-name>unit</property-name> <property-class>java.lang.String</property-class> <property-extension> <designer-extension> <editor>com.ibm.workplace.designer.property.editors.comboParameterEditor</editor> <editor-parameter> em ex in cm mm pt pc px </editor-parameter> </designer-extension> </property-extension> </property> <complex-extension> <tag-name>dojoAnimationProps</tag-name> </complex-extension> </complex-type> <complex-type> <description>%complex-type.dojoAnimateProperty.descr%</description> <display-name>%complex-type.dojoAnimateProperty.name%</display-name> <complex-id>com.ibm.xsp.extlib.actions.client.dojo.AnimatePropertyAction</complex-id> <complex-class>com.ibm.xsp.extlib.actions.client.dojo.AnimatePropertyAction</complex-class> <group-type-ref>com.ibm.xsp.extlib.group.Node</group-type-ref> <group-type-ref>com.ibm.xsp.extlib.group.FadeEffect</group-type-ref> <property> <description>%property.repeat.descr%</description> <display-name>%property.repeat.name%</display-name> <property-name>repeat</property-name> <property-class>int</property-class> </property> <property> <description>%property.rate.descr%</description> <display-name>%property.rate.name%</display-name> <property-name>rate</property-name> <property-class>int</property-class> <property-extension> 
<default-value>-1</default-value> </property-extension> </property> <property> <description>%property.delay.descr%</description> <display-name>%property.delay.name%</display-name> <property-name>delay</property-name> <property-class>int</property-class> </property> <property> <description>%property.properties.descr%</description> <display-name>%property.properties.name%</display-name> <property-name>properties</property-name> <property-class>java.util.List</property-class> <property-extension> <allow-run-time-binding>false</allow-run-time-binding> <collection-property>true</collection-property> <property-item-class>com.ibm.xsp.extlib.actions.client.dojo.AnimationProps</property-item-class> <property-add-method>addProperty</property-add-method> <pass-through>false</pass-through> </property-extension> </property> <complex-extension> <base-complex-id>simpleActionInterface</base-complex-id> <tag-name>dojoAnimateProperty</tag-name> <designer-extension> <action-type>client</action-type> <category>%complex-category.dojo_effects%</category> </designer-extension> </complex-extension> </complex-type> <complex-type> <description>%complex-type.dojofxWipeIn.descr%</description> <display-name>%complex-type.dojofxWipeIn.name%</display-name> <complex-id>com.ibm.xsp.extlib.actions.client.dojo.fx.WipeInAction</complex-id> <complex-class>com.ibm.xsp.extlib.actions.client.dojo.fx.WipeInAction</complex-class> <group-type-ref>com.ibm.xsp.extlib.group.Node</group-type-ref> <group-type-ref>com.ibm.xsp.extlib.group.FadeEffect</group-type-ref> <complex-extension> <base-complex-id>simpleActionInterface</base-complex-id> <tag-name>dojofxWipeIn</tag-name> <designer-extension> <action-type>client</action-type> <category>%complex-category.dojo_effects%</category> </designer-extension> </complex-extension> </complex-type> <complex-type> <description>%complex-type.dojofxWipeOut.descr%</description> <display-name>%complex-type.dojofxWipeOut.name%</display-name> <complex-id>com.ibm.xsp.extlib.actions.client.dojo.fx.WipeOutAction</complex-id> <complex-class>com.ibm.xsp.extlib.actions.client.dojo.fx.WipeOutAction</complex-class> <group-type-ref>com.ibm.xsp.extlib.group.Node</group-type-ref> <group-type-ref>com.ibm.xsp.extlib.group.FadeEffect</group-type-ref> <complex-extension> <base-complex-id>simpleActionInterface</base-complex-id> <tag-name>dojofxWipeOut</tag-name> <designer-extension> <action-type>client</action-type> <category>%complex-category.dojo_effects%</category> </designer-extension> </complex-extension> </complex-type> <complex-type> <description>%complex-type.dojofxSlideTo.descr%</description> <display-name>%complex-type.dojofxSlideTo.name%</display-name> <complex-id>com.ibm.xsp.extlib.actions.client.dojo.fx.SlideToAction</complex-id> <complex-class>com.ibm.xsp.extlib.actions.client.dojo.fx.SlideToAction</complex-class> <group-type-ref>com.ibm.xsp.extlib.group.Node</group-type-ref> <group-type-ref>com.ibm.xsp.extlib.group.FadeEffect</group-type-ref> <property> <description>%property.left.descr%</description> <display-name>%property.left.name%</display-name> <property-name>left</property-name> <property-class>int</property-class> <property-extension> <default-value>-2147483648</default-value> </property-extension> </property> <property> <description>%property.top.descr%</description> <display-name>%property.top.name%</display-name> <property-name>top</property-name> <property-class>int</property-class> <property-extension> <default-value>-2147483648</default-value> </property-extension> </property> 
<complex-extension> <base-complex-id>simpleActionInterface</base-complex-id> <tag-name>dojofxSlideTo</tag-name> <designer-extension> <action-type>client</action-type> <category>%complex-category.dojo_effects%</category> </designer-extension> </complex-extension> </complex-type> </faces-config>
lang: XPages | score: 3 | repo: jesse-gallagher/XPagesExtensionLibrary | path: extlib/lwp/product/runtime/eclipse/plugins/com.ibm.xsp.extlib.controls/src/com/ibm/xsp/extlib/config/extlib-dojo-fx-actions.xsp-config | licenses: ["Apache-2.0"]
#!/usr/bin/osascript

# Required parameters:
# @raycast.schemaVersion 1
# @raycast.title Copy Foreground Mail Deeplink
# @raycast.mode compact

# Optional parameters:
# @raycast.icon 📧
# @raycast.packageName Mail

# Documentation:
# @raycast.description Copies the foreground Mail deeplink
# @raycast.author Jesse Claven
# @raycast.authorURL https://github.com/jesse-c

tell application "System Events"
	set frontmostApp to name of application processes whose frontmost is true
end tell

# https://apple.stackexchange.com/questions/122630/applescript-comparing-variable-to-string-is-failing/122631#122631
if frontmostApp as string is equal to "Mail" then
	# https://daringfireball.net/2007/12/message_urls_leopard_mail
	tell application "Mail"
		set _sel to get selection
		set _links to {}

		repeat with _msg in _sel
			set _messageURL to "message://%3c" & _msg's message id & "%3e"
			set end of _links to _messageURL
		end repeat

		set AppleScript's text item delimiters to return
		set the clipboard to (_links as string)

		log "Copied email deeplink"
	end tell
else
	log "Foreground app was " & frontmostApp & ", not Mail"
end if
lang: AppleScript | score: 5 | repo: daviddzhou/script-commands | path: commands/apps/mail/copy-foreground-mail-deeplink.applescript | licenses: ["MIT"]
<?xml version="1.0" encoding="utf-8"?> <Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="4.0"> <PropertyGroup> <ProductVersion>3.5</ProductVersion> <RootNamespace>Sugar.Test</RootNamespace> <ProjectGuid>{9f930a22-be43-4d5a-83f6-4afc91282e23}</ProjectGuid> <OutputType>Exe</OutputType> <AssemblyName>Sugar.Test</AssemblyName> <AllowGlobals>False</AllowGlobals> <AllowLegacyWith>False</AllowLegacyWith> <AllowLegacyOutParams>False</AllowLegacyOutParams> <AllowLegacyCreate>False</AllowLegacyCreate> <AllowUnsafeCode>False</AllowUnsafeCode> <Configuration Condition="'$(Configuration)' == ''">Release</Configuration> <TargetFrameworkVersion>v4.0</TargetFrameworkVersion> <Name>Sugar.Echoes.Test</Name> <DefaultUses /> <StartupClass /> <InternalAssemblyName /> <ApplicationIcon /> <TargetFrameworkProfile /> </PropertyGroup> <PropertyGroup Condition=" '$(Configuration)' == 'Debug' "> <Optimize>False</Optimize> <OutputPath>bin\Debug\.NET\</OutputPath> <DefineConstants>DEBUG;TRACE;</DefineConstants> <GeneratePDB>True</GeneratePDB> <GenerateMDB>True</GenerateMDB> <CaptureConsoleOutput>False</CaptureConsoleOutput> <StartMode>Project</StartMode> <CpuType>anycpu</CpuType> <RuntimeVersion>v25</RuntimeVersion> <XmlDoc>False</XmlDoc> <XmlDocWarningLevel>WarningOnPublicMembers</XmlDocWarningLevel> <EnableUnmanagedDebugging>False</EnableUnmanagedDebugging> </PropertyGroup> <PropertyGroup Condition=" '$(Configuration)' == 'Release' "> <Optimize>true</Optimize> <OutputPath>.\bin\Release\.NET</OutputPath> <GeneratePDB>False</GeneratePDB> <GenerateMDB>False</GenerateMDB> <EnableAsserts>False</EnableAsserts> <TreatWarningsAsErrors>False</TreatWarningsAsErrors> <CaptureConsoleOutput>False</CaptureConsoleOutput> <StartMode>Project</StartMode> <RegisterForComInterop>False</RegisterForComInterop> <CpuType>anycpu</CpuType> <RuntimeVersion>v25</RuntimeVersion> <XmlDoc>False</XmlDoc> <XmlDocWarningLevel>WarningOnPublicMembers</XmlDocWarningLevel> <EnableUnmanagedDebugging>False</EnableUnmanagedDebugging> </PropertyGroup> <ItemGroup> <Reference Include="mscorlib" /> <Reference Include="RemObjects.Elements.EUnit" /> <Reference Include="System" /> <Reference Include="System.Configuration" /> <Reference Include="System.Data" /> </ItemGroup> <ItemGroup> <Compile Include="Main\Echoes\Program.pas" /> <Compile Include="Properties\AssemblyInfo.pas" /> </ItemGroup> <ItemGroup> <Folder Include="Main\" /> <Folder Include="Main\Echoes\" /> <Folder Include="Properties\" /> </ItemGroup> <ItemGroup> <ProjectReference Include="..\Sugar.Data\Sugar.Data.Echoes.oxygene"> <Name>Sugar.Data.Echoes</Name> <Project>{77ba48da-3022-4e3c-ab2e-885ff84b5efe}</Project> <Private>True</Private> <HintPath>..\Sugar.Data\bin\.NET\Sugar.Data.dll</HintPath> </ProjectReference> <ProjectReference Include="..\Sugar\Sugar.Echoes.oxygene"> <Name>Sugar.Echoes</Name> <Project>{79301a0c-1f95-4fb0-9605-207e288c6171}</Project> <Private>True</Private> <HintPath>..\Sugar\bin\.NET\Sugar.dll</HintPath> </ProjectReference> </ItemGroup> <Import Project="$(MSBuildExtensionsPath)\RemObjects Software\Oxygene\RemObjects.Oxygene.Echoes.targets" /> <PropertyGroup> <PreBuildEvent /> </PropertyGroup> <Import Project="..\Sugar.Tests\Sugar.Shared.Test.projitems" Label="Shared" /> </Project>
lang: Oxygene | score: 3 | repo: mosh/sugar | path: Sugar.Tests/Sugar.Echoes.Test.oxygene | licenses: ["BSD-3-Clause"]
package org.jetbrains.kotlin.aspects.refactoring;

import org.aspectj.lang.annotation.SuppressAjWarnings;
import org.eclipse.jdt.core.IMember;
import org.eclipse.jdt.internal.corext.refactoring.Checks;
import org.eclipse.ltk.core.refactoring.RefactoringStatus;
import org.jetbrains.kotlin.core.resolve.lang.java.structure.EclipseJavaElementUtil;

public aspect KotlinRefactoringChecksAspect {
    pointcut checkIfCuBroken(IMember member) :
        args(member) && execution(RefactoringStatus Checks.checkIfCuBroken(IMember));

    // Disable checking of compilation unit for Kotlin files
    @SuppressAjWarnings({"adviceDidNotMatch"})
    RefactoringStatus around(IMember member) : checkIfCuBroken(member) {
        if (EclipseJavaElementUtil.isKotlinLightClass(member)) {
            return new RefactoringStatus();
        }

        return proceed(member);
    }
}
lang: AspectJ | score: 4 | repo: jan-zajic/kotlin-eclipse | path: kotlin-eclipse-aspects/src/org/jetbrains/kotlin/aspects/refactoring/KotlinRefactoringChecksAspect.aj | licenses: ["Apache-2.0"]
HALVE(I) ;I should be an integer QUIT I\2 DOUBLE(I) ;I should be an integer QUIT I*2 ISEVEN(I) ;I should be an integer QUIT '(I#2) E2(M,N) New W,A,E,L Set W=$Select($Length(M)>=$Length(N):$Length(M)+2,1:$L(N)+2),A=0,L=0,A(L,1)=M,A(L,2)=N Write "Multiplying two numbers:" For Write !,$Justify(A(L,1),W),?W,$Justify(A(L,2),W) Write:$$ISEVEN(A(L,1)) ?(2*W)," Struck" Set:'$$ISEVEN(A(L,1)) A=A+A(L,2) Set L=L+1,A(L,1)=$$HALVE(A(L-1,1)),A(L,2)=$$DOUBLE(A(L-1,2)) Quit:A(L,1)<1 Write ! For E=W:1:(2*W) Write ?E,"=" Write !,?W,$Justify(A,W),! Kill W,A,E,L Q
lang: M | score: 4 | repo: LaudateCorpus1/RosettaCodeData | path: Task/Ethiopian-multiplication/MUMPS/ethiopian-multiplication.mumps | licenses: ["Info-ZIP"]
import {Component, NgModule} from '@angular/core';

@Component({selector: 'my-component', template: `<div [class]="myClassExp"></div>`})
export class MyComponent {
  myClassExp = {'foo': true}
}

@NgModule({declarations: [MyComponent]})
export class MyModule {
}
lang: TypeScript | score: 4 | repo: John-Cassidy/angular | path: packages/compiler-cli/test/compliance/test_cases/r3_view_compiler_styling/class_bindings/class_binding.ts | licenses: ["MIT"]
= PublifyTextfilterCode

This project rocks and uses MIT-LICENSE.
lang: RDoc | score: 0 | repo: project-kotinos/publify___publify | path: publify_textfilter_code/README.rdoc | licenses: ["MIT"]
<%
  comment = if current.respond_to? :comment_location then
              current.comment_location
            else
              current.comment
            end

  table = current.parse(comment).table_of_contents
  if table.length > 1 then %>
<div id="table-of-contents">
  <nav class="section">
    <h3 class="section-header">Table of Contents</h3>
    <ul>
    <% table.each do |heading| %>
      <li><a href="#<%= heading.aref %>"><%= heading.plain_html %></a>
    <% end %>
    </ul>
  </nav>
</div>
<% end %>
lang: RHTML | score: 3 | repo: prathamesh-sonpatki/jruby | path: lib/ruby/2.0/rdoc/generator/template/darkfish/_sidebar_table_of_contents.rhtml | licenses: ["Ruby", "Apache-2.0"]
;; test_nu.nu ;; tests for basic Nu functionality. ;; ;; Copyright (c) 2008 Issac Trotts (class TestNu is NuTestCase (- (id) testThrow* is (assert_throws "NuFunkyException" (throw* "NuFunkyException" "Something funky happened."))) (- (id) testSymbol? is (assert_true (symbol? 'a)) (assert_true (symbol? 'ab)) (assert_false (symbol? 1)) (assert_false (symbol? "a")) (assert_false (symbol? nil)) (assert_false (symbol? '(a b)))) (- (id) testAtom? is (assert_true (atom? 'a)) (assert_true (atom? nil)) (assert_true (atom? 1)) (assert_true (atom? "")) ;; debatable (assert_true (atom? "a")) ;; debatable (assert_true (atom? 'a')) (assert_false (atom? '(1))) (assert_false (atom? '(array 1)))) (- (id) testZero? is (assert_true (zero? 0)) (assert_true (zero? (- 2 2))) (assert_false (zero? 1)) (assert_false (zero? nil)) (assert_false (zero? (+ 1 1)))) (- (id) testAssert is (assert t) (assert (eq 1 (+ 0 1))) (assert_throws "NuAssertionFailure" (do () (assert nil))) (assert_throws "NuAssertionFailure" (do () (assert (eq 0 1))))))
lang: Nu | score: 4 | repo: mattbeshara/nu | path: test/test_nu.nu | licenses: ["Apache-2.0"]
#![deny(rustdoc::broken_intra_doc_links)]

#[derive(Debug)]
/// Link to [`S::fmt`]
//~^ ERROR unresolved link
pub struct S;

pub mod inner {
    use std::fmt::Debug;
    use super::S;

    /// Link to [`S::fmt`]
    pub fn f() {}
}

pub mod ambiguous {
    use std::fmt::{Display, Debug};
    use super::S;

    /// Link to [`S::fmt`]
    pub fn f() {}
}
lang: Rust | score: 3 | repo: mbc-git/rust | path: src/test/rustdoc-ui/assoc-item-not-in-scope.rs | licenses: ["ECL-2.0", "Apache-2.0", "MIT-0", "MIT"]
// RUN: %target-swift-frontend -O %s -emit-sil -o /dev/null

public struct S {
  let args: [Substring]
  let arg: Substring

  enum Error: Swift.Error {
    case Case
  }

  public init(arg: String) throws {
    args = arg.split(separator: "\n")
    guard args.count > 0 else {
      throw Error.Case
    }
    let parts = args[0].split(separator: " ")
    guard parts.count > 2 else {
      throw Error.Case
    }
    self.arg = parts[1]
  }
}
lang: Swift | score: 4 | repo: gandhi56/swift | path: test/SILOptimizer/destroy_hoisting_crash.swift | licenses: ["Apache-2.0"]
<html>
<body>
<script>
function out(id, msg) {
  var result = document.createElement('h1');
  result.setAttribute('id', id);
  result.innerHTML = msg;
  document.body.appendChild(result);
}
window.addEventListener('message', function(e) {
  console.log("==> SUCCESS");
  out('result', e.data);
});
</script>
<iframe src="http://localhost:{port}" nwfaketop id="iframe_a"></iframe>
<script>
document.write('<h1 id="res1">Node is ' + (typeof nw === 'undefined' ? 'DISABLED': 'ENABLED') + '</h1>');
</script>
</body>
</html>
lang: Smarty | score: 3 | repo: frank-dspeed/nw.js | path: test/sanity/issue5781-X-Frame-Options/index.tpl | licenses: ["MIT"]
: main
    #1 "me" rmatch pop
    #1 "here" rmatch pop
    #1 "home" rmatch pop
    #1 "test" rmatch pop
    #1 "John_Doe" rmatch pop
;
lang: MUF | score: 0 | repo: revarbat/mufsim | path: tests/rmatch.muf | licenses: ["BSD-2-Clause"]
****************************************************************** * Author: Lauryn Brown * Date: 2017 * Purpose: COBOL Common Lisp Interpreter * Tectonics: cobc ****************************************************************** IDENTIFICATION DIVISION. PROGRAM-ID. CISP. ENVIRONMENT DIVISION. INPUT-OUTPUT SECTION. FILE-CONTROL. SELECT TESTS-FILE ASSIGN TO "..\test\tests-lists.txt" ORGANIZATION IS LINE SEQUENTIAL. DATA DIVISION. FILE SECTION. FD TESTS-FILE. 01 LISP-TEST-FILE-NAME PIC X(100). WORKING-STORAGE SECTION. 01 WS-CMD-LINE. 02 WS-CMD-LINE-VAL PIC X(100). 02 WS-CMD-LINE-NUM-AGRS PIC 9(4). ***************************************** * WS Shared with LOGGER SubRoutine ***************************************** 01 WS-LOG-OPERATION-FLAG PIC X(5). 01 WS-LOG-RECORD. 02 WS-LOG-RECORD-FUNCTION-NAME PIC X(40). 02 WS-LOG-RECORD-MESSAGE PIC X(100). ***************************************** * WS Shared with TOKENIZER, LISP SubRoutine ***************************************** *****IF WS-SYMBOL-LENGTH CHANGED HERE PLEASE CHANGE IN TOKENIZER, LISP 01 WS-LISP-FILE-NAME PIC X(100). 78 WS-SYMBOL-LENGTH VALUE 100. 01 WS-LISP-SYMBOLS. 02 WS-SYMBOL-TABLE-SIZE PIC 9(4). 02 WS-SYMBOL PIC X(50) OCCURS WS-SYMBOL-LENGTH TIMES. 02 WS-SYMBOL-LEN PIC 9(2) OCCURS WS-SYMBOL-LENGTH TIMES. PROCEDURE DIVISION. MAIN-PROCEDURE. PERFORM INIT-LOGGER-PROCEDURE. PERFORM READ-CMD-LINE-PROCEDURE. PERFORM TOKENIZE-LISP-PROCEDURE. PERFORM EVALUTE-LISP-PROCEDURE. PERFORM CLOSE-LOGGER-PROCEDURE. GOBACK. READ-CMD-LINE-PROCEDURE. ********* Read the lisp file name and save to working storage ACCEPT WS-CMD-LINE-NUM-AGRS FROM ARGUMENT-NUMBER. ACCEPT WS-CMD-LINE-VAL FROM ARGUMENT-VALUE. MOVE WS-CMD-LINE-VAL TO WS-LISP-FILE-NAME. MOVE "ADD" TO WS-LOG-OPERATION-FLAG. MOVE "CISP:READ-CMD-LINE-PROCEDURE" TO WS-LOG-RECORD-FUNCTION-NAME. MOVE "Reading commandline argument" TO WS-LOG-RECORD-MESSAGE. CALL 'LOGGER' USING WS-LOG-OPERATION-FLAG, WS-LOG-RECORD. TOKENIZE-LISP-PROCEDURE. ********* Tokenize the Lisp string MOVE "ADD" TO WS-LOG-OPERATION-FLAG. MOVE "TOKENIZER" TO WS-LOG-RECORD-FUNCTION-NAME. MOVE "Starting Tokenizer" TO WS-LOG-RECORD-MESSAGE. CALL 'LOGGER' USING WS-LOG-OPERATION-FLAG, WS-LOG-RECORD. CALL "TOKENIZER" USING WS-LISP-FILE-NAME, WS-SYMBOL-LENGTH, WS-LISP-SYMBOLS. EVALUTE-LISP-PROCEDURE. ********* Evalute lisp MOVE "ADD" TO WS-LOG-OPERATION-FLAG. MOVE "LISP" TO WS-LOG-RECORD-FUNCTION-NAME. MOVE "Starting Lisp Evalutation" TO WS-LOG-RECORD-MESSAGE. CALL 'LOGGER' USING WS-LOG-OPERATION-FLAG, WS-LOG-RECORD. CALL "LISP" USING WS-LISP-SYMBOLS. INIT-LOGGER-PROCEDURE. MOVE "OPEN" TO WS-LOG-OPERATION-FLAG. CALL 'LOGGER' USING WS-LOG-OPERATION-FLAG, WS-LOG-RECORD. CLOSE-LOGGER-PROCEDURE. MOVE "CLOSE" TO WS-LOG-OPERATION-FLAG. CALL 'LOGGER' USING WS-LOG-OPERATION-FLAG, WS-LOG-RECORD. END PROGRAM CISP.
lang: COBOL | score: 4 | repo: aanunez/Cisp | path: cisp.cbl | licenses: ["MIT"]
// deno-lint-ignore-file
/// <reference no-default-lib="true"/>
/// <reference lib="dom" />
/// <reference lib="deno.ns" />

export const div = document.createElement("div");
div.innerHTML = `<span>Hello World!</span>`;

console.log(Deno.args);
lang: TypeScript | score: 3 | repo: Preta-Crowz/deno | path: cli/tests/tsc2/file_libref.ts | licenses: ["MIT"]
(ns wisp.test.escodegen (:require [wisp.test.util :refer [is thrown?]] [wisp.src.sequence :refer [concat cons vec take first rest second third list list? count drop lazy-seq? seq nth map]] [wisp.src.runtime :refer [subs = dec identity keys nil? vector? string? dec re-find satisfies?]] [wisp.src.compiler :refer [compile]] [wisp.src.reader :refer [read* read-from-string] :rename {read-from-string read-string}] [wisp.src.ast :refer [meta name pr-str symbol]])) (defprotocol INope (nope? [self])) (is (thrown? (nope? 1) #"method") "protocol isn't implemented") (is (not (satisfies? INope js/Number)) "number doesn't satisfies INope") (deftype Nope [x] INope (nope? [_] true)) (is (Nope. 1) "Can be instantiated") (is (satisfies? INope (Nope.)) "satisfies protocol") (is (nope? (Nope.)) "implements protocol method") (extend-type number INope (nope? [x] true)) (is (satisfies? INope 4) "numbers implement protocol") (is (nope? 3) "numbers implement protocol") (is (not (satisfies? INope "foo")) "strings do not satisfy") (extend-type default INope (nope? [_] false)) (is (satisfies? INope "foo") "everything satisfies protocol now") (is (= (nope? "foo") false) "default implementation") (is (= (nope? 3) true) "overriden implementation") (is (= (nope? true) false) "default implementation") (defprotocol IType (-type [x])) (defn satisfaction [protocol] {:nil (satisfies? protocol nil) :boolean (satisfies? protocol true) :number (satisfies? protocol 1) :string (satisfies? protocol "foo") :pattern (satisfies? protocol #"foo") :fn (satisfies? protocol (fn [x] x)) :vector (satisfies? protocol [1 2 3]) :object (satisfies? protocol {})}) (is (= (satisfaction IType) {:nil false :boolean false :number false :string false :pattern false :fn false :vector false :object false}) "no types satisfy protocol") (extend-type nil IType (-type [_] :nil)) (is (= (satisfaction IType) {:nil true :boolean false :number false :string false :pattern false :fn false :vector false :object false}) "only nil satisfyies protocol") (extend-type boolean IType (-type [_] :boolean)) (is (= (satisfaction IType) {:nil true :boolean true :number false :string false :pattern false :fn false :vector false :object false}) "nil & booleans satisfyies protocol") (extend-type number IType (-type [_] :number)) (is (= (satisfaction IType) {:nil true :boolean true :number true :string false :pattern false :fn false :vector false :object false}) "nil, booleans & numbers satisfyies protocol") (extend-type string IType (-type [_] :string)) (is (= (satisfaction IType) {:nil true :boolean true :number true :string true :pattern false :fn false :vector false :object false}) "nil, booleans, numbers & strings satisfyies protocol") (extend-type re-pattern IType (-type [_] :pattern)) (is (= (satisfaction IType) {:nil true :boolean true :number true :string true :pattern true :fn false :vector false :object false}) "nil, booleans, numbers, strings & patterns satisfyies protocol") (extend-type function IType (-type [_] :function)) (is (= (satisfaction IType) {:nil true :boolean true :number true :string true :pattern true :fn true :vector false :object false}) "nil, booleans, numbers, strings, patterns & functions satisfyies protocol") (extend-type vector IType (-type [_] :vector)) (is (= (satisfaction IType) {:nil true :boolean true :number true :string true :pattern true :fn true :vector true :object false}) "nil, booleans, numbers, strings, patterns, functions & vectors satisfyies protocol") (extend-type default IType (-type [_] :default)) (is (= (satisfaction IType) 
{:nil true :boolean true :number true :string true :pattern true :fn true :vector true :object true}) "all types satisfyies protocol") (is (= (-type nil) :nil)) (is (= (-type true) :boolean)) (is (= (-type false) :boolean)) (is (= (-type 1) :number)) (is (= (-type 0) :number)) (is (= (-type 17) :number)) (is (= (-type "hello") :string)) (is (= (-type "") :string)) (is (= (-type #"foo") :pattern)) (is (= (-type (fn [x] x)) :function)) (is (= (-type #(inc %)) :function)) (is (= (-type []) :vector)) (is (= (-type [1]) :vector)) (is (= (-type [1 2 3]) :vector)) (is (= (-type {}) :default)) (is (= (-type {:a 1}) :default)) (defprotocol IFoo (foo? [x])) (is (= (satisfaction IFoo) {:nil false :boolean false :number false :string false :pattern false :fn false :vector false :object false}) "no types satisfyies protocol") (extend-type default IFoo (foo? [_] false)) (is (= (satisfaction IFoo) {:nil true :boolean true :number true :string true :pattern true :fn true :vector true :object true}) "all types satisfy protocol") (defprotocol IBar (bar? [x])) (extend-type js/Object IBar (bar? [_] true)) (is (= (satisfaction IBar) {:nil false :boolean false :number false :string false :pattern false :fn false :vector false :object true}) "only objects satisfy protocol") (extend-type js/Number IBar (bar? [_] true)) (is (= (satisfaction IBar) {:nil false :boolean false :number true :string false :pattern false :fn false :vector false :object true}) "only objects & numbers satisfy protocol") (extend-type js/String IBar (bar? [_] true)) (is (= (satisfaction IBar) {:nil false :boolean false :number true :string true :pattern false :fn false :vector false :object true}) "only objects, numbers & strings satisfy protocol") (extend-type js/Boolean IBar (bar? [_] true)) (is (= (satisfaction IBar) {:nil false :boolean true :number true :string true :pattern false :fn false :vector false :object true}) "only objects, numbers, strings & booleans satisfy protocol") (extend-type js/Function IBar (bar? [_] true)) (is (= (satisfaction IBar) {:nil false :boolean true :number true :string true :pattern false :fn true :vector false :object true}) "only objects, numbers, strings, booleans & functions satisfy protocol") (extend-type js/Array IBar (bar? [_] true)) (is (= (satisfaction IBar) {:nil false :boolean true :number true :string true :pattern false :fn true :vector true :object true}) "only objects, numbers, strings, booleans, functions & array satisfy protocol") (extend-type js/RegExp IBar (bar? [_] true)) (is (= (satisfaction IBar) {:nil false :boolean true :number true :string true :pattern true :fn true :vector true :object true}) "only objects, numbers, strings, booleans, functions & patterns satisfy protocol")
lang: wisp | score: 5 | repo: rcarmo/wisp | path: test/protocols.wisp | licenses: ["BSD-3-Clause"]
include($$PWD/common.pri)

QT += sql

SRC_FOLDER = $$PWD/../src
LIBS_FOLDER = $$PWD/../libs

INCLUDEPATH *= $$SRC_FOLDER

include($$LIBS_FOLDER/vtextedit/src/editor/editor_export.pri)
include($$LIBS_FOLDER/vtextedit/src/libs/syntax-highlighting/syntax-highlighting_export.pri)
include($$LIBS_FOLDER/QHotkey/QHotkey_export.pri)

include($$SRC_FOLDER/utils/utils.pri)
include($$SRC_FOLDER/export/export.pri)
include($$SRC_FOLDER/search/search.pri)
include($$SRC_FOLDER/snippet/snippet.pri)
include($$SRC_FOLDER/imagehost/imagehost.pri)
include($$SRC_FOLDER/task/task.pri)
include($$SRC_FOLDER/core/core.pri)
include($$SRC_FOLDER/widgets/widgets.pri)
include($$SRC_FOLDER/unitedentry/unitedentry.pri)
lang: QMake | score: 2 | repo: tamlok/vnote | path: tests/commonfull.pri | licenses: ["MIT"]
-- reads luac listings and reports global variable usage
-- lines where a global is written to are marked with "*"
-- typical usage: luac -p -l file.lua | lua globals.lua | sort | lua table.lua

while 1 do
 local s=io.read()
 if s==nil then break end
 local ok,_,l,op,g=string.find(s,"%[%-?(%d*)%]%s*([GS])ETGLOBAL.-;%s+(.*)$")
 if ok then
  if op=="S" then op="*" else op="" end
  io.write(g,"\t",l,op,"\n")
 end
end
lang: Lua | score: 4 | repo: tomliugen/tomliugen-redis-3.2.2-rc | path: deps/lua/test/globals.lua | licenses: ["BSD-3-Clause"]
#!/bin/bash
set -eux -o pipefail

source "/Users/distiller/project/env"
mkdir -p "$PYTORCH_FINAL_PACKAGE_DIR"

# For some reason `unbuffer` breaks if we change the PATH here, so we
# write a script with the PATH change in it and unbuffer the whole
# thing
build_script="$workdir/build_script.sh"
touch "$build_script"
chmod +x "$build_script"

# Build
cat >"$build_script" <<EOL
export PATH="$workdir/miniconda/bin:$PATH"
if [[ "$CIRCLE_BRANCH" == "nightly" ]]; then
  export USE_PYTORCH_METAL_EXPORT=1
  export USE_COREML_DELEGATE=1
fi
if [[ "$PACKAGE_TYPE" == conda ]]; then
  "$workdir/builder/conda/build_pytorch.sh"
else
  export TORCH_PACKAGE_NAME="$(echo $TORCH_PACKAGE_NAME | tr '-' '_')"
  "$workdir/builder/wheel/build_wheel.sh"
fi
EOL
unbuffer "$build_script" | ts
lang: Shell | score: 3 | repo: xiaohanhuang/pytorch | path: .circleci/scripts/binary_macos_build.sh | licenses: ["Intel"]
#Signature file v4.1 #Version 2.48 CLSS public java.lang.Object cons public init() meth protected java.lang.Object clone() throws java.lang.CloneNotSupportedException meth protected void finalize() throws java.lang.Throwable meth public boolean equals(java.lang.Object) meth public final java.lang.Class<?> getClass() meth public final void notify() meth public final void notifyAll() meth public final void wait() throws java.lang.InterruptedException meth public final void wait(long) throws java.lang.InterruptedException meth public final void wait(long,int) throws java.lang.InterruptedException meth public int hashCode() meth public java.lang.String toString() CLSS public final org.netbeans.modules.maven.indexer.api.ui.ArtifactViewer fld public final static java.lang.String HINT_ARTIFACT = "art" fld public final static java.lang.String HINT_DEPENDENCIES = "dep" fld public final static java.lang.String HINT_GRAPH = "grf" fld public final static java.lang.String HINT_PROJECT = "prj" meth public static void showArtifactViewer(org.apache.maven.artifact.Artifact,java.util.List<org.apache.maven.artifact.repository.ArtifactRepository>,java.lang.String) meth public static void showArtifactViewer(org.netbeans.modules.maven.indexer.api.NBVersionInfo) supr java.lang.Object CLSS public abstract interface org.netbeans.modules.maven.indexer.spi.ui.ArtifactNodeSelector meth public abstract void select(org.netbeans.modules.maven.indexer.api.NBVersionInfo) CLSS public abstract interface org.netbeans.modules.maven.indexer.spi.ui.ArtifactViewerFactory meth public abstract org.openide.util.Lookup createLookup(org.apache.maven.artifact.Artifact,java.util.List<org.apache.maven.artifact.repository.ArtifactRepository>) anno 0 org.netbeans.api.annotations.common.NonNull() anno 1 org.netbeans.api.annotations.common.NonNull() anno 2 org.netbeans.api.annotations.common.NullAllowed() meth public abstract org.openide.util.Lookup createLookup(org.netbeans.api.project.Project) anno 0 org.netbeans.api.annotations.common.CheckForNull() anno 1 org.netbeans.api.annotations.common.NonNull() meth public abstract org.openide.util.Lookup createLookup(org.netbeans.modules.maven.indexer.api.NBVersionInfo) anno 0 org.netbeans.api.annotations.common.NonNull() anno 1 org.netbeans.api.annotations.common.NonNull() meth public abstract org.openide.windows.TopComponent createTopComponent(org.openide.util.Lookup) anno 0 org.netbeans.api.annotations.common.NonNull() anno 1 org.netbeans.api.annotations.common.NonNull() CLSS public abstract interface org.netbeans.modules.maven.indexer.spi.ui.ArtifactViewerPanelProvider meth public abstract org.netbeans.core.spi.multiview.MultiViewDescription createPanel(org.openide.util.Lookup)
lang: Standard ML | score: 3 | repo: timfel/netbeans | path: java/maven.indexer.ui/nbproject/org-netbeans-modules-maven-indexer-ui.sig | licenses: ["Apache-2.0"]
module System.Text.Json.Tests.FSharp.RecordTests open System.Text.Json open System.Text.Json.Serialization open System.Text.Json.Tests.FSharp.Helpers open Xunit type MyRecord = { Name : string MiddleName : string option LastName : string Age : int IsActive : bool } with static member Value = { Name = "John" ; MiddleName = None ; LastName = "Doe" ; Age = 34 ; IsActive = true } static member ExpectedJson = """{"Name":"John","MiddleName":null,"LastName":"Doe","Age":34,"IsActive":true}""" [<Fact>] let ``Support F# record serialization``() = let actualJson = JsonSerializer.Serialize(MyRecord.Value) Assert.Equal(MyRecord.ExpectedJson, actualJson) [<Fact>] let ``Support F# record deserialization``() = let result = JsonSerializer.Deserialize<MyRecord>(MyRecord.ExpectedJson) Assert.Equal(MyRecord.Value, result) [<Struct>] type MyStructRecord = { Name : string MiddleName : string option LastName : string Age : int IsActive : bool } with static member Value = { Name = "John" ; MiddleName = None ; LastName = "Doe" ; Age = 34 ; IsActive = true } static member ExpectedJson = """{"Name":"John","MiddleName":null,"LastName":"Doe","Age":34,"IsActive":true}""" [<Fact>] let ``Support F# struct record serialization``() = let actualJson = JsonSerializer.Serialize(MyStructRecord.Value) Assert.Equal(MyStructRecord.ExpectedJson, actualJson) [<Fact>] let ``Support F# struct record deserialization``() = let result = JsonSerializer.Deserialize<MyStructRecord>(MyStructRecord.ExpectedJson) Assert.Equal(MyStructRecord.Value, result)
lang: F# | score: 4 | repo: pyracanda/runtime | path: src/libraries/System.Text.Json/tests/System.Text.Json.FSharp.Tests/RecordTests.fs | licenses: ["MIT"]
// Copyright 2010-2016 RethinkDB, all rights reserved.
#ifndef RDB_PROTOCOL_GEO_LON_LAT_TYPES_HPP_
#define RDB_PROTOCOL_GEO_LON_LAT_TYPES_HPP_

#include <utility>
#include <vector>

#include "rpc/serialize_macros.hpp"

struct lon_lat_point_t {
    double longitude;
    double latitude;

    lon_lat_point_t() { }
    lon_lat_point_t(double lon, double lat) : longitude(lon), latitude(lat) { }

    bool operator==(const lon_lat_point_t &other) const {
        return (longitude == other.longitude) && (latitude == other.latitude);
    }
    bool operator!=(const lon_lat_point_t &other) const {
        return !(*this == other);
    }
};

RDB_DECLARE_SERIALIZABLE(lon_lat_point_t);

typedef std::vector<lon_lat_point_t> lon_lat_line_t;

#endif  // RDB_PROTOCOL_GEO_LON_LAT_TYPES_HPP_
lang: C++ | score: 4 | repo: zadcha/rethinkdb | path: src/rdb_protocol/geo/lon_lat_types.hpp | licenses: ["Apache-2.0"]
testbean.name=p3TestBean
from.p3=p3Value
lang: INI | score: 0 | repo: spreoW/spring-framework | path: spring-context/src/test/resources/org/springframework/context/annotation/p3.properties | licenses: ["Apache-2.0"]
constant TMPDIR = "/tmp/emotes"; //The Pike downloader doesn't like some of the HTTPS servers //(possibly a cert issue). Easier to deal with it using curl. string low_download(string url) { //Use the internal downloader: //return Protocols.HTTP.get_url_data(url); //Use curl: return Process.run(({"curl", url}))->stdout; //Or use wget, or Python + requests, or whatever else. } //Download with a local cache. The cache is never expired. string download(string url, string fn) { fn = TMPDIR + "/" + replace(fn, "/", "SLASH"); if (string data = Stdio.read_file(fn)) return data; string data = low_download(url); Stdio.write_file(fn, data); return data; } enum {CODE, IMG, CMP, AVG, CMPAVG, DENSITY, PERM, USAGE}; array parse_image(mapping em, int permanent) { string data = download("https://static-cdn.jtvnw.net/emoticons/v1/" + em->id + "/2.0", em->code); Image.Image img = Image.PNG.decode(data); Image.Image alpha = Image.PNG.decode_alpha(data); //Put the image onto a neutral background using its alpha channel //~ img = Image.Image(img->xsize(), img->ysize(), 204, 204, 204)->paste_mask(img, alpha); //Or onto its average img = Image.Image(img->xsize(), img->ysize(), @(array(int))img->average())->paste_mask(img, alpha); object cmp = img->rgb_to_hsv(); //Or rgb_to_yuv(), or just same as img return ({ //Must match the enum above em->code, img, cmp, (array(int))img->average(), (array(int))cmp->average(), (int)alpha->average()[0], permanent, 0, //Usage counter (mutable) }); } //Figure out a relative score (lower is better) for an image //1) Permanent is better than ephemeral //2) Not yet used is WAY better than previously used //3) Find the one closest to the target pixel colour //~ constant W_R = 87, W_G = 127, W_B = 41; //Using RGB but scaling as per the grey() method //~ constant W_R = 1, W_G = 1, W_B = 1; //Flat scaling //~ constant W_R = 2, W_G = 4, W_B = 3; //Alternate but still simple scaling //~ constant W_R = 1, W_G = 2, W_B = 1; //When using YUV (brightness in green, colour components in red and blue) //~ constant W_R = 3, W_G = 1, W_B = 3; //YUV favouring the colour above the brightness constant W_R = 5, W_G = 2, W_B = 2; //HSV favouring the hue int score_image(array image, int r, int g, int b, int fast) { int score = 0; Image.Image img = image[CMP]; int w = img->xsize(), h = img->ysize(); if (fast == 2) write("==> %O\n", image[CODE]); if (fast) { //Fast mode - pretend the image is a solid block of its average colour [int rr, int gg, int bb] = image[CMPAVG]; score += W_R * (rr-r) ** 2 * w * h; score += W_G * (gg-g) ** 2 * w * h; score += W_B * (bb-b) ** 2 * w * h; } else { for (int x = 0; x < w; ++x) for (int y = 0; y < h; ++y) { [int rr, int gg, int bb] = img->getpixel(x, y); score += W_R * (rr-r) ** 2; score += W_G * (gg-g) ** 2; score += W_B * (bb-b) ** 2; } } if (!image[PERM]) score += w * h * 500000; //That's about half the maximum distance for each colour angle score += image[USAGE] * w * h * 1000000; //~ score += (256-image[DENSITY]) * w * h * 100000; if (fast == 2) { if (!score) score = 1; [int rr, int gg, int bb] = image[CMPAVG]; int red = W_R * (rr-r) ** 2 * w * h; int grn = W_G * (gg-g) ** 2 * w * h; int blu = W_B * (bb-b) ** 2 * w * h; int perm = image[PERM] ? 
0 : w * h * 5000000; int usage = image[USAGE] * w * h * 10000000; write("R: 87 * (%3d-%3d) ** 2 * %d * %d = %d => %d\n", rr, r, w, h, red, red * 100 / score); write("G: 127 * (%3d-%3d) ** 2 * %d * %d = %d => %d\n", gg, g, w, h, grn, grn * 100 / score); write("B: 41 * (%3d-%3d) ** 2 * %d * %d = %d => %d\n", bb, b, w, h, blu, blu * 100 / score); write("Perm: %d => %d\n", perm, perm * 100 / score); write("Usage: %d => %d\n", usage, usage * 100 / score); write("Total: %d => %d\n", score, (red+grn+blu+perm+usage) * 100 / score); } return score; } int main(int argc, array(string) argv) { if (argc < 2) exit(1, "USAGE: pike %s emotename\n", argv[0]); Stdio.mkdirhier(TMPDIR, 0700); write("Fetching... "); mapping emotes = Standards.JSON.decode(low_download("https://sikorsky.rosuav.com/emotes?format=json")); //write("\rFetching %d emotes... ", sizeof(emotes->ephemeral) + sizeof(emotes->permanent)); //need to drill another level in to get a counter array images = ({ }); //~ foreach (emotes->ephemeral; string channel; array em) images += parse_image(em[*], 0); foreach (emotes->permanent; string channel; array em) images += parse_image(em[*], 1); //~ images = parse_image(emotes->permanent->rosuav[*], 1); //~ images += parse_image(emotes->permanent->stephenangelico[*], 1); //Okay. Now we have a ton of images. //Pick the best based on a score (see score_image). //To do this perfectly, we would have to look at every image and every pixel in that //image, and find the distance-squared to the three dimensional location of the colour //that we want. But simplifying it can be done by taking the average colour of each //image, and then ranking the images based on that. write("%d loaded.\n", sizeof(images)); array base; foreach (images, array im) if (im[CODE] == argv[1]) {base = im; break;} if (!base) exit(1, "Emote %O not found, or unavailable\n", argv[1]); Image.Image base_img = base[IMG]; Image.Image base_cmp = base[CMP]; int w = base_img->xsize(), h = base_img->ysize(); Image.Image target = Image.Image(w * w, h * h, 204, 204, 204); //~ [int r, int g, int b] = base_img->getpixel(w/2, h/2); //~ [int r, int g, int b] = (array(int))base_img->average(); //~ array(int) scores = score_image(images[*], r, g, b, 2); //~ return 1; for (int x = 0; x < w; ++x) for (int y = 0; y < h; ++y) { array pixel = base_img->getpixel(x, y); array(int) scores = score_image(images[*], @pixel, 1); //Caution: can segfault some versions of Pike //[int r, int g, int b] = base_cmp->getpixel(x, y); //array(int) scores = score_image(images[*], r, g, b, 1); //But avoiding @ is safe on all versions. array imgs = images + ({ }); sort(scores, imgs); //~ write("%{%d %}|%{ %d%}\n", scores[..2], scores[<2..]); //Having scored every image using the fast algorithm, we now refine it //by scoring the best handful using the more accurate algorithm. //~ imgs = imgs[..5]; //Adjust the limit to taste //~ scores = score_image(imgs[*], r, g, b, 0); //~ sort(scores, imgs); //~ write("%O %O %O\n", scores[0], scores[-1], imgs[0]); target->paste(imgs[0][IMG], x * w, y * h); //~ imgs[0][USAGE]++; //Increment the usage counter to deprefer that one } write("%O\n", target); Stdio.write_file("meta-emote.png", Image.PNG.encode(target)); }
Pike
4
stephenangelico/shed
emote-image.pike
[ "MIT" ]
class <%= application_name.camelize %>.<%= class_name %>Service extends Ember.Service
EmberScript
2
JakeKaad/ember-rails
lib/generators/templates/service.em
[ "MIT" ]
-@val skill: model.Skill -@val s: skinny.Skinny %h3 #{s.i18n.get("skill.detail")} %hr - for (notice <- s.flash.notice) %p(class="alert alert-info") #{notice} %table(class="table table-bordered") %thead %tr %th #{s.i18n.get("skill.id")} %th #{skill.id} %tr %th #{s.i18n.get("skill.name")} %td #{skill.name} %hr %div(class="form-actions") %a(class="btn btn-default" href={uri("/skills")}) #{s.i18n.get("backToList")} %a(href={uri("/skills/"+skill.id+"/edit")} class="btn btn-info") #{s.i18n.get("edit")} %a(data-method="delete" data-confirm={s.i18n.get("skill.delete.confirm")} href={uri("/skills/"+skill.id)} rel="nofollow" class="btn btn-danger") #{s.i18n.get("delete")}
Scaml
4
tototoshi/skinny-framework
example/src/main/webapp/WEB-INF/views/skills/show.html.scaml
[ "MIT" ]
#!/bin/bash

# See https://github.com/gohugoio/hugo/issues/8955
objdump -T dist/hugo_extended_linux_linux_amd64/hugo | grep -E -q 'GLIBC_2.2[0-9]'
RESULT=$?
if [ $RESULT -eq 0 ]; then
    echo "Found GLIBC_2.2x in Linux binary, this will not work in older Vercel/Netlify images.";
    exit -1;
fi
Shell
4
graemephi/hugo
goreleaser-hook-post-linux.sh
[ "Apache-2.0" ]
r := RECORD UNSIGNED id; STRING name; END; streamed dataset(r) myDataset(unsigned numRows, boolean isLocal = false, unsigned numParallel = 0) := EMBED(C++ : activity, local(isLocal), parallel(numParallel)) static const char * const names[] = {"Gavin","John","Bart"}; static const unsigned numNames = (unsigned)(sizeof(names) / sizeof(names[0])); #body class MyStreamInlineDataset : public RtlCInterface, implements IRowStream { public: MyStreamInlineDataset(IEngineRowAllocator * _resultAllocator, unsigned _first, unsigned _last) : resultAllocator(_resultAllocator), first(_first), last(_last) { current = first; } RTLIMPLEMENT_IINTERFACE virtual const void *nextRow() override { if (current >= last) return nullptr; unsigned id = current++; unsigned curName = id % numNames; const char * name = names[curName]; size32_t lenName = strlen(name); RtlDynamicRowBuilder rowBuilder(resultAllocator); unsigned len = sizeof(__int64) + sizeof(size32_t) + lenName; byte * row = rowBuilder.ensureCapacity(len, NULL); *(__uint64 *)(row) = id; *(size32_t *)(row + sizeof(__uint64)) = lenName; memcpy(row+sizeof(__uint64)+sizeof(size32_t), name, lenName); return rowBuilder.finalizeRowClear(len); } virtual void stop() override { current = (unsigned)-1; } protected: Linked<IEngineRowAllocator> resultAllocator; unsigned current; unsigned first; unsigned last; }; unsigned numRows = numrows; unsigned numSlaves = activity->numSlaves(); unsigned numParallel = numSlaves * activity->numStrands(); unsigned rowsPerPart = (numRows + numParallel - 1) / numParallel; unsigned thisSlave = activity->querySlave(); unsigned thisIndex = thisSlave * activity->numStrands() + activity->queryStrand(); unsigned first = thisIndex * rowsPerPart; unsigned last = first + rowsPerPart; if (first > numRows) first = numRows; if (last > numRows) last = numRows; return new MyStreamInlineDataset(_resultAllocator, first, last); ENDEMBED; //Global activity - fixed number of rows output(myDataset(10)); //Local version of the activity output(count(myDataset(10, isLocal := true)) = CLUSTERSIZE * 10); //Check that stranding (if implemented) still generates unique records output(COUNT(DEDUP(myDataset(1000, numParallel := 5), id, ALL))); r2 := RECORD UNSIGNED id; DATASET(r) child; END; //Check that the activity can also be executed in a child query output(DATASET(10, TRANSFORM(r2, SELF.id := COUNTER; SELF.child := myDataset(COUNTER)))); //Test stranding inside a child query output(DATASET(10, TRANSFORM(r2, SELF.id := COUNTER; SELF.child := myDataset(COUNTER, NumParallel := 3))));
ECL
5
davidarcher/HPCC-Platform
testing/regress/ecl/embedactivity1.ecl
[ "Apache-2.0" ]
<div>
    <span>This is text with curly brackets {}</span>
</div>
Latte
0
timfel/netbeans
php/php.latte/test/unit/data/testfiles/lexer/top/testIssue230530.latte
[ "Apache-2.0" ]
module MsfDataService
  def get_msf_version
    raise 'MsfDataService#get_msf_version is not implemented'
  end
end
Ruby
3
OsmanDere/metasploit-framework
lib/metasploit/framework/data_service/stubs/msf_data_service.rb
[ "BSD-2-Clause", "BSD-3-Clause" ]
/** * */ import Util; import OpenApi; import OpenApiUtil; import EndpointUtil; extends OpenApi; init(config: OpenApi.Config){ super(config); @endpointRule = ''; checkConfig(config); @endpoint = getEndpoint('nbftestpop', @regionId, @endpointRule, @network, @suffix, @endpointMap, @endpoint); } function getEndpoint(productId: string, regionId: string, endpointRule: string, network: string, suffix: string, endpointMap: map[string]string, endpoint: string) throws: string{ if (!Util.empty(endpoint)) { return endpoint; } if (!Util.isUnset(endpointMap) && !Util.empty(endpointMap[regionId])) { return endpointMap[regionId]; } return EndpointUtil.getEndpointRules(productId, regionId, endpointRule, network, suffix); } model MutilComplexRequest { arg0?: string(name='arg0'), arg1?: int32(name='arg1'), arg2?: { stringValue?: string(name='stringValue'), intValue?: int32(name='intValue'), booleanValue?: boolean(name='booleanValue'), integerValue?: int32(name='integerValue'), longValue?: long(name='longValue'), secondInput?: { secondString?: string(name='secondString'), thirdList?: [ { thirdInt?: int32(name='thirdInt'), thirdString?: string(name='thirdString'), } ](name='thirdList'), secondInt?: int32(name='secondInt'), }(name='secondInput'), }(name='arg2'), } model MutilComplexShrinkRequest { arg0?: string(name='arg0'), arg1?: int32(name='arg1'), arg2Shrink?: string(name='arg2'), } model MutilComplexResponseBody = { stringValue?: string(name='stringValue'), secondOutput?: { thirdList?: [ { thirdInt?: int32(name='thirdInt'), thirdString?: string(name='thirdString'), } ](name='thirdList'), }(name='secondOutput'), intValue?: int32(name='intValue'), booleanValue?: boolean(name='booleanValue'), integerValue?: int32(name='integerValue'), longValue?: long(name='longValue'), } model MutilComplexResponse = { headers: map[string]string(name='headers'), body: MutilComplexResponseBody(name='body'), } async function mutilComplexWithOptions(tmpReq: MutilComplexRequest, runtime: Util.RuntimeOptions): MutilComplexResponse { Util.validateModel(tmpReq); var request = new MutilComplexShrinkRequest{}; OpenApiUtil.convert(tmpReq, request); if (!Util.isUnset(tmpReq.arg2)) { request.arg2Shrink = OpenApiUtil.arrayToStringWithSpecifiedStyle(tmpReq.arg2, 'arg2', 'json'); } var req = new OpenApi.OpenApiRequest{ body = Util.toMap(request), }; return doRPCRequest('MutilComplex', '2021-08-06_15-12-55-420', 'HTTPS', 'POST', 'AK', 'json', req, runtime); } async function mutilComplex(request: MutilComplexRequest): MutilComplexResponse { var runtime = new Util.RuntimeOptions{}; return mutilComplexWithOptions(request, runtime); }
Tea
4
aliyun/alibabacloud-sdk
nbftestpop-20210806_151255420/main.tea
[ "Apache-2.0" ]
""" Disposable.constructor before block inside block Disposable.Dispose after block """ import BooCompiler.Tests.SupportingClasses from BooCompiler.Tests d = Disposable() print("before block") using d: print("inside block") print("after block") assert d is not null
Boo
3
popcatalin81/boo
tests/testcases/macros/using-3.boo
[ "BSD-3-Clause" ]
// // Moments.mm // // Created by Giles Payne on 2019/10/09. // #import "Moments.h" @implementation Moments { cv::Moments native; } -(cv::Moments&)nativeRef { return native; } - (double)m00 { return native.m00; } - (void)setM00:(double)val { native.m00 = val; } - (double)m10 { return native.m10; } - (void)setM10:(double)val { native.m10 = val; } - (double)m01 { return native.m01; } - (void)setM01:(double)val { native.m01 = val; } - (double)m20 { return native.m20; } - (void)setM20:(double)val { native.m20 = val; } - (double)m11 { return native.m11; } - (void)setM11:(double)val { native.m11 = val; } - (double)m02 { return native.m02; } - (void)setM02:(double)val { native.m02 = val; } - (double)m30 { return native.m30; } - (void)setM30:(double)val { native.m30 = val; } - (double)m21 { return native.m21; } - (void)setM21:(double)val { native.m21 = val; } - (double)m12 { return native.m12; } - (void)setM12:(double)val { native.m12 = val; } - (double)m03 { return native.m03; } - (void)setM03:(double)val { native.m03 = val; } - (double)mu20 { return native.mu20; } - (void)setMu20:(double)val { native.mu20 = val; } - (double)mu11 { return native.mu11; } - (void)setMu11:(double)val { native.mu11 = val; } - (double)mu02 { return native.mu02; } - (void)setMu02:(double)val { native.mu02 = val; } - (double)mu30 { return native.mu30; } - (void)setMu30:(double)val { native.mu30 = val; } - (double)mu21 { return native.mu21; } - (void)setMu21:(double)val { native.mu21 = val; } - (double)mu12 { return native.mu12; } - (void)setMu12:(double)val { native.mu12 = val; } - (double)mu03 { return native.mu03; } - (void)setMu03:(double)val { native.mu03 = val; } - (double)nu20 { return native.nu20; } - (void)setNu20:(double)val { native.nu20 = val; } - (double)nu11 { return native.nu11; } - (void)setNu11:(double)val { native.nu11 = val; } - (double)nu02 { return native.nu02; } - (void)setNu02:(double)val { native.nu02 = val; } - (double)nu30 { return native.nu30; } - (void)setNu30:(double)val { native.nu30 = val; } - (double)nu21 { return native.nu21; } - (void)setNu21:(double)val { native.nu21 = val; } - (double)nu12 { return native.nu12; } - (void)setNu12:(double)val { native.nu12 = val; } - (double)nu03 { return native.nu03; } - (void)setNu03:(double)val { native.nu03 = val; } -(instancetype)initWithM00:(double)m00 m10:(double)m10 m01:(double)m01 m20:(double)m20 m11:(double)m11 m02:(double)m02 m30:(double)m30 m21:(double)m21 m12:(double)m12 m03:(double)m03 { self = [super init]; if (self) { self.m00 = m00; self.m10 = m10; self.m01 = m01; self.m20 = m20; self.m11 = m11; self.m02 = m02; self.m30 = m30; self.m21 = m21; self.m12 = m12; self.m03 = m03; [self completeState]; } return self; } -(instancetype)init { return [self initWithM00:0 m10:0 m01:0 m20:0 m11:0 m02:0 m30:0 m21:0 m12:0 m03:0]; } -(instancetype)initWithVals:(NSArray<NSNumber*>*)vals { self = [super init]; if (self) { [self set:vals]; } return self; } +(instancetype)fromNative:(cv::Moments&)moments { return [[Moments alloc] initWithM00:moments.m00 m10:moments.m10 m01:moments.m01 m20:moments.m20 m11:moments.m11 m02:moments.m02 m30:moments.m30 m21:moments.m21 m12:moments.m12 m03:moments.m03]; } -(void)set:(NSArray<NSNumber*>*)vals { self.m00 = (vals != nil && vals.count > 0) ? vals[0].doubleValue : 0; self.m10 = (vals != nil && vals.count > 1) ? vals[1].doubleValue : 0; self.m01 = (vals != nil && vals.count > 2) ? vals[2].doubleValue : 0; self.m20 = (vals != nil && vals.count > 3) ? vals[3].doubleValue : 0; self.m11 = (vals != nil && vals.count > 4) ? 
vals[4].doubleValue : 0; self.m02 = (vals != nil && vals.count > 5) ? vals[5].doubleValue : 0; self.m30 = (vals != nil && vals.count > 6) ? vals[6].doubleValue : 0; self.m21 = (vals != nil && vals.count > 7) ? vals[7].doubleValue : 0; self.m12 = (vals != nil && vals.count > 8) ? vals[8].doubleValue : 0; self.m03 = (vals != nil && vals.count > 9) ? vals[9].doubleValue : 0; [self completeState]; } -(void)completeState { double cx = 0, cy = 0; double mu20, mu11, mu02; double inv_m00 = 0.0; if (abs(self.m00) > 0.00000001) { inv_m00 = 1. / self.m00; cx = self.m10 * inv_m00; cy = self.m01 * inv_m00; } // mu20 = m20 - m10*cx mu20 = self.m20 - self.m10 * cx; // mu11 = m11 - m10*cy mu11 = self.m11 - self.m10 * cy; // mu02 = m02 - m01*cy mu02 = self.m02 - self.m01 * cy; self.mu20 = mu20; self.mu11 = mu11; self.mu02 = mu02; // mu30 = m30 - cx*(3*mu20 + cx*m10) self.mu30 = self.m30 - cx * (3 * mu20 + cx * self.m10); mu11 += mu11; // mu21 = m21 - cx*(2*mu11 + cx*m01) - cy*mu20 self.mu21 = self.m21 - cx * (mu11 + cx * self.m01) - cy * mu20; // mu12 = m12 - cy*(2*mu11 + cy*m10) - cx*mu02 self.mu12 = self.m12 - cy * (mu11 + cy * self.m10) - cx * mu02; // mu03 = m03 - cy*(3*mu02 + cy*m01) self.mu03 = self.m03 - cy * (3 * mu02 + cy * self.m01); double inv_sqrt_m00 = sqrt(abs(inv_m00)); double s2 = inv_m00*inv_m00, s3 = s2*inv_sqrt_m00; self.nu20 = self.mu20*s2; self.nu11 = self.mu11*s2; self.nu02 = self.mu02*s2; self.nu30 = self.mu30*s3; self.nu21 = self.mu21*s3; self.nu12 = self.mu12*s3; self.nu03 = self.mu03*s3; } - (NSString *)description { return [NSString stringWithFormat:@"Moments [ \nm00=%lf, \nm10=%lf, m01=%lf, \nm20=%lf, m11=%lf, m02=%lf, \nm30=%lf, m21=%lf, m12=%lf, m03=%lf, \nmu20=%lf, mu11=%lf, mu02=%lf, \nmu30=%lf, mu21=%lf, mu12=%lf, mu03=%lf, \nnu20=%lf, nu11=%lf, nu02=%lf, \nnu30=%lf, nu21=%lf, nu12=%lf, nu03=%lf, \n]", self.m00, self.m10, self.m01, self.m20, self.m11, self.m02, self.m30, self.m21, self.m12, self.m03, self.mu20, self.mu11, self.mu02, self.mu30, self.mu21, self.mu12, self.mu03, self.nu20, self.nu11, self.nu02, self.nu30, self.nu21, self.nu12, self.nu03]; } @end
Objective-C++
4
artun3e/opencv
modules/imgproc/misc/objc/common/Moments.mm
[ "BSD-3-Clause" ]
module Hasura.RQL.DDL.Schema.Source where import Control.Lens (at, (.~), (^.)) import Control.Monad.Trans.Control (MonadBaseControl) import Data.Aeson import Data.Aeson qualified as J import Data.Aeson.TH import Data.Has import Data.HashMap.Strict qualified as HM import Data.HashMap.Strict.InsOrd qualified as OMap import Data.Text.Extended import Hasura.Base.Error import Hasura.EncJSON import Hasura.Logging qualified as L import Hasura.Prelude import Hasura.RQL.DDL.Deps import Hasura.RQL.DDL.Schema.Common import Hasura.RQL.Types import Hasura.SQL.AnyBackend qualified as AB import Hasura.Server.Logging (MetadataLog (..)) -------------------------------------------------------------------------------- -- Add source data AddSource b = AddSource { _asName :: !SourceName, _asConfiguration :: !(SourceConnConfiguration b), _asReplaceConfiguration :: !Bool } instance (Backend b) => FromJSON (AddSource b) where parseJSON = withObject "AddSource" $ \o -> AddSource <$> o .: "name" <*> o .: "configuration" <*> o .:? "replace_configuration" .!= False runAddSource :: forall m b. (MonadError QErr m, CacheRWM m, MetadataM m, BackendMetadata b) => AddSource b -> m EncJSON runAddSource (AddSource name sourceConfig replaceConfiguration) = do sources <- scSources <$> askSchemaCache metadataModifier <- MetadataModifier <$> if HM.member name sources then if replaceConfiguration then pure $ metaSources . ix name . toSourceMetadata @b . smConfiguration .~ sourceConfig else throw400 AlreadyExists $ "source with name " <> name <<> " already exists" else do let sourceMetadata = mkSourceMetadata @b name sourceConfig pure $ metaSources %~ OMap.insert name sourceMetadata buildSchemaCacheFor (MOSource name) metadataModifier pure successMsg -------------------------------------------------------------------------------- -- Rename source data RenameSource = RenameSource { _rmName :: !SourceName, _rmNewName :: !SourceName } $(deriveFromJSON hasuraJSON ''RenameSource) runRenameSource :: forall m. (MonadError QErr m, CacheRWM m, MetadataM m) => RenameSource -> m EncJSON runRenameSource RenameSource {..} = do sources <- scSources <$> askSchemaCache unless (HM.member _rmName sources) $ throw400 NotExists $ "Could not find source with name " <>> _rmName when (HM.member _rmNewName sources) $ throw400 AlreadyExists $ "Source with name " <> _rmNewName <<> " already exists" let metadataModifier = MetadataModifier $ metaSources %~ renameBackendSourceMetadata _rmName _rmNewName buildSchemaCacheFor (MOSource _rmNewName) metadataModifier pure successMsg where renameBackendSourceMetadata :: SourceName -> SourceName -> OMap.InsOrdHashMap SourceName BackendSourceMetadata -> OMap.InsOrdHashMap SourceName BackendSourceMetadata renameBackendSourceMetadata oldKey newKey m = case OMap.lookup oldKey m of Just val -> OMap.insert newKey (AB.mapBackend val (renameSource newKey)) . OMap.delete oldKey $ m Nothing -> m renameSource :: forall b. SourceName -> SourceMetadata b -> SourceMetadata b renameSource newName metadata = metadata {_smName = newName} -------------------------------------------------------------------------------- -- Drop source data DropSource = DropSource { _dsName :: !SourceName, _dsCascade :: !Bool } deriving (Show, Eq) instance FromJSON DropSource where parseJSON = withObject "DropSource" $ \o -> DropSource <$> o .: "name" <*> o .:? "cascade" .!= False runDropSource :: forall m r. 
( MonadError QErr m, CacheRWM m, MonadIO m, MonadBaseControl IO m, MetadataM m, MonadReader r m, Has (L.Logger L.Hasura) r ) => DropSource -> m EncJSON runDropSource (DropSource name cascade) = do sc <- askSchemaCache logger <- asks getter let sources = scSources sc case HM.lookup name sources of Just backendSourceInfo -> AB.dispatchAnyBackend @BackendMetadata backendSourceInfo $ dropSource logger sc Nothing -> do metadata <- getMetadata void $ onNothing (metadata ^. metaSources . at name) $ throw400 NotExists $ "source with name " <> name <<> " does not exist" if cascade then -- Without sourceInfo we can't cascade, so throw an error throw400 Unexpected $ "source with name " <> name <<> " is inconsistent" else -- Drop source from metadata buildSchemaCacheFor (MOSource name) dropSourceMetadataModifier pure successMsg where dropSource :: forall b. (BackendMetadata b) => L.Logger L.Hasura -> SchemaCache -> SourceInfo b -> m () dropSource logger sc sourceInfo = do let sourceConfig = _siConfiguration sourceInfo let indirectDeps = mapMaybe getIndirectDep $ getDependentObjs sc (SOSource name) when (not cascade && indirectDeps /= []) $ reportDepsExt (map (SOSourceObj name . AB.mkAnyBackend) indirectDeps) [] metadataModifier <- execWriterT $ do mapM_ (purgeDependentObject name >=> tell) indirectDeps tell dropSourceMetadataModifier buildSchemaCacheFor (MOSource name) metadataModifier -- We only log errors that arise from 'postDropSourceHook' here, and not -- surface them as end-user errors. See comment -- https://github.com/hasura/graphql-engine/issues/7092#issuecomment-873845282 runExceptT (postDropSourceHook @b sourceConfig) >>= either logDropSourceHookError pure where logDropSourceHookError err = let msg = "Error executing cleanup actions after removing source '" <> toTxt name <> "'. Consider cleaning up tables in hdb_catalog schema manually." in L.unLogger logger $ MetadataLog L.LevelWarn msg (J.toJSON err) getIndirectDep :: SchemaObjId -> Maybe (SourceObjId b) getIndirectDep = \case SOSourceObj s o -> if s == name then Nothing else -- consider only *this* backend specific dependencies AB.unpackAnyBackend o _ -> Nothing dropSourceMetadataModifier = MetadataModifier $ metaSources %~ OMap.delete name
Haskell
5
andykcom/graphql-engine
server/src-lib/Hasura/RQL/DDL/Schema/Source.hs
[ "Apache-2.0", "MIT" ]
# Check the handling of "continue-after-failure".

# Check that we cancel the build immediately by default.
#
# RUN: rm -rf %t.build
# RUN: mkdir -p %t.build
# RUN: cp %s %t.build/build.ninja
# RUN: %{llbuild} ninja build --jobs 1 --chdir %t.build --no-db &> %t1.out || true
# RUN: %{FileCheck} --check-prefix CHECK-DEFAULT --input-file %t1.out %s
#
# CHECK-DEFAULT: [{{.*}}/{{.*}}] false
# CHECK-DEFAULT: stopping build due to command failures

# Check that we honor -k 2.
#
# RUN: rm -rf %t.build
# RUN: mkdir -p %t.build
# RUN: cp %s %t.build/build.ninja
# RUN: %{llbuild} ninja build --jobs 1 --chdir %t.build --no-db -k 2 &> %t2.out || true
# RUN: %{FileCheck} --check-prefix CHECK-TWO --input-file %t2.out %s
#
# CHECK-TWO: [{{.*}}/{{.*}}] false
# CHECK-TWO: [{{.*}}/{{.*}}] false
# CHECK-TWO: stopping build due to command failures

# Check that we honor -k 0.
#
# RUN: rm -rf %t.build
# RUN: mkdir -p %t.build
# RUN: cp %s %t.build/build.ninja
# RUN: %{llbuild} ninja build --jobs 1 --chdir %t.build --no-db -k 0 &> %t3.out || true
# RUN: %{FileCheck} --check-prefix CHECK-UNLIMITED --input-file %t3.out %s
#
# CHECK-DAG-UNLIMITED: [{{.*}}/{{.*}}] echo > output-0
# CHECK-DAG-UNLIMITED: [{{.*}}/{{.*}}] echo > output-5
# CHECK-UNLIMITED: build had 2 command failures

rule CAT
  command = cat ${in} > ${out}
rule ECHO
  command = echo > ${out}
rule FALSE
  command = false

build output-0: ECHO
build output-1: FALSE
build output-2: ECHO
build output-3: FALSE
build output-4: ECHO
build output-5: ECHO
build output: CAT output-0 output-1 output-2 output-3 output-4 output-5
Ninja
5
uraimo/swift-llbuild
tests/Ninja/Build/continue-on-failure.ninja
[ "Apache-2.0" ]
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.models.expressions import Col, Func


class PostGISSchemaEditor(DatabaseSchemaEditor):
    geom_index_type = 'GIST'
    geom_index_ops_nd = 'GIST_GEOMETRY_OPS_ND'
    rast_index_template = 'ST_ConvexHull(%(expressions)s)'

    sql_alter_column_to_3d = "ALTER COLUMN %(column)s TYPE %(type)s USING ST_Force3D(%(column)s)::%(type)s"
    sql_alter_column_to_2d = "ALTER COLUMN %(column)s TYPE %(type)s USING ST_Force2D(%(column)s)::%(type)s"

    def geo_quote_name(self, name):
        return self.connection.ops.geo_quote_name(name)

    def _field_should_be_indexed(self, model, field):
        if getattr(field, 'spatial_index', False):
            return True
        return super()._field_should_be_indexed(model, field)

    def _create_index_sql(self, model, *, fields=None, **kwargs):
        if fields is None or len(fields) != 1 or not hasattr(fields[0], 'geodetic'):
            return super()._create_index_sql(model, fields=fields, **kwargs)

        field = fields[0]
        expressions = None
        opclasses = None
        if field.geom_type == 'RASTER':
            # For raster fields, wrap index creation SQL statement with ST_ConvexHull.
            # Indexes on raster columns are based on the convex hull of the raster.
            expressions = Func(Col(None, field), template=self.rast_index_template)
            fields = None
        elif field.dim > 2 and not field.geography:
            # Use "nd" ops which are fast on multidimensional cases
            opclasses = [self.geom_index_ops_nd]
        name = kwargs.get('name')
        if not name:
            name = self._create_index_name(model._meta.db_table, [field.column], '_id')

        return super()._create_index_sql(
            model,
            fields=fields,
            name=name,
            using=' USING %s' % self.geom_index_type,
            opclasses=opclasses,
            expressions=expressions,
        )

    def _alter_column_type_sql(self, table, old_field, new_field, new_type):
        """
        Special case when dimension changed.
        """
        if not hasattr(old_field, 'dim') or not hasattr(new_field, 'dim'):
            return super()._alter_column_type_sql(table, old_field, new_field, new_type)

        if old_field.dim == 2 and new_field.dim == 3:
            sql_alter = self.sql_alter_column_to_3d
        elif old_field.dim == 3 and new_field.dim == 2:
            sql_alter = self.sql_alter_column_to_2d
        else:
            sql_alter = self.sql_alter_column_type
        return (
            (
                sql_alter % {
                    "column": self.quote_name(new_field.column),
                    "type": new_type,
                },
                [],
            ),
            [],
        )
Python
4
KaushikSathvara/django
django/contrib/gis/db/backends/postgis/schema.py
[ "BSD-3-Clause", "0BSD" ]
1
/*
** comment **
*/
Objective-J
0
justinmann/sj
tests/comment3.sj
[ "Apache-2.0" ]
const createTestCases = require("../_helpers/createTestCases");

module.exports = createTestCases({
	nothing: {
		usedExports: [],
		expect: {
			"./test": []
		}
	},
	nonEval: {
		usedExports: ["x"],
		expect: {
			"./test": ["a"]
		}
	},
	directEval: {
		usedExports: ["y"],
		expect: {
			"./test": ["a", "b", "c"]
		}
	},
	indirectEval: {
		usedExports: ["z"],
		expect: {
			"./test": ["a", "b", "c"]
		}
	}
});
JavaScript
3
fourstash/webpack
test/configCases/inner-graph/eval-bailout/webpack.config.js
[ "MIT" ]
################################################################ ## Title: Data Science For Database Professionals ## Description:: Data Exploration ## Author: Microsoft ################################################################ #################################################################################################### ##Compute context #################################################################################################### connection_string <- "Driver=SQL Server;Server=.;Database=telcoedw2;Trusted_Connection=yes;" sql <- RxInSqlServer(connectionString = connection_string, autoCleanup = FALSE, consoleOutput = TRUE) local <- RxLocalParallel() rxOptions(reportProgress = 0) #################################################################################################### ##Connect to the data #################################################################################################### rxSetComputeContext(local) ##SQL data source myDataTb <- RxSqlServerData( connectionString = connection_string, table = "edw_cdr", colInfo = col_info) rxGetInfo(myDataTb, getVarInfo = T, numRows = 3) ##Data frame myData <- rxDataStep(myDataTb, overwrite = TRUE) str(myData) #################################################################################################### ##Data exploration and visualization on the data frame myData #################################################################################################### #pie chart of customer churn library(ggplot2) ggplot(data = myData, aes(x = factor(1), fill = churn)) + geom_bar(width = 1) + coord_polar(theta = "y") #density plot of age ggplot(data = myData, aes(x = age)) + geom_density(fill = "salmon", bw = "SJ", colour = NA) + geom_rug(colour = "salmon") + theme_minimal() #boxplot of age by churn ggplot(data = myData, aes(x = reorder(churn, - age), y = age, colour = churn)) + geom_boxplot() + labs(x = "churn", y = "age") + theme_minimal() + theme(legend.position = "none") #density plot of annualincome ggplot(data = myData, aes(x = annualincome)) + geom_density(fill = "salmon", bw = "SJ", colour = NA) + geom_rug(colour = "salmon") + theme_minimal() #boxplot of annualincome by churn ggplot(data = myData, aes(x = reorder(churn, - annualincome), y = annualincome, colour = churn)) + geom_boxplot() + labs(x = "churn", y = "annualincome") + theme_minimal() + theme(legend.position = "none") #impact of education level on churn library(dplyr) myData %>% group_by(month, education) %>% summarize(countofchurn = sum(as.numeric(churn))) %>% ggplot(aes(x = month, y = countofchurn, group = education, fill = education)) + geom_bar(stat = "identity", position = position_dodge()) + labs(x = "month", y = "Counts of churn") + theme_minimal() #impact of callfailure rate (%) on churn myData %>% group_by(month, callfailurerate) %>% summarize(countofchurn = sum(as.numeric(churn)) ) %>% ggplot(aes(x = month, y = countofchurn, group = factor(callfailurerate), fill = factor(callfailurerate))) + geom_bar(stat = "identity", position = position_dodge()) + labs(x = "month", y = "Counts of churn") + theme_minimal() #count of customers by % callsoutsidenetwork myData %>% group_by(percentagecalloutsidenetwork) %>% summarize(countofcustomer = length(unique(customerid))) %>% ggplot(aes(x = percentagecalloutsidenetwork, y = countofcustomer)) + geom_bar(stat = "identity", fill = "orange") + labs(x = "% callsoutsidenetwork", y = "Counts of Customerid") + theme_minimal() #count of customers by calldroprate myData %>% group_by(calldroprate) %>% 
summarize(countofcustomer = length(unique(customerid))) %>% ggplot(aes(x = calldroprate, y = countofcustomer)) + geom_bar(stat = "identity", fill = "steelblue") + labs(x = "% calldroprate", y = "Counts of Customerid") + theme_minimal() # Comparing the proportions of customer churn at individual state for several education levels library(GGally) proportions <- myData %>% group_by(state, education) %>% summarise(countofchurn = sum(as.numeric(churn))) %>% ungroup() %>% group_by(state) %>% summarise(Prop_BA = sum(countofchurn[education == "Bachelor or equivalent"]) / sum(countofchurn), Prop_HS = sum(countofchurn[education == "High School or below"]) / sum(countofchurn), Prop_MA = sum(countofchurn[education == "Master or equivalent"]) / sum(countofchurn)) ggpairs(proportions, columns = 2:ncol(proportions)) + theme_bw() #################################################################################################### ##Data exploration and visualization on the SQL data source myDataTb #################################################################################################### rxSetComputeContext(sql) #Counts(Percentages) of customers churned or non-churned (Pareto Chart) tmp <- rxCube( ~ churn, myDataTb, means = FALSE) Results_df <- rxResultsDF(tmp) library(qcc) CountOfChurn <- setNames(as.numeric(Results_df[, 2]), Results_df[, 1]) par(oma = c(2, 2, 2, 2)) pareto <- pareto.chart(CountOfChurn, xlab = "Churn", ylab = "Counts", ylab2 = "Cumulative Percentage", cex.names = 0.5, las = 1, col = heat.colors(length(CountOfChurn)), plot = TRUE) #Counts of customer over age/annualincome by churn rxHistogram( ~ age | churn, myDataTb) rxHistogram(~ annualincome | churn, myDataTb) #Counts of churned customer by age and state (Interactive HeatMap) tmp <- rxCrossTabs(N(churn) ~ F(age):state, myDataTb, means = FALSE) Results_df <- rxResultsDF(tmp, output = "sums") colnames(Results_df) <- substring(colnames(Results_df), 7) library(Rcpp) library(d3heatmap) d3heatmap(data.matrix(Results_df[, -1]), scale = "none", labRow = Results_df[, 1], dendrogram = "none", color = cm.colors(255))
R
5
manikanth/sql-server-samples
samples/features/r-services/telco-customer-churn/R/TelcoChurn-DataExploration.r
[ "MIT" ]
#pragma semicolon 1
#pragma newdecls required

#include <sourcemod>

public void OnPluginStart()
{
	PrintToServer("Hello world!");
}
SourcePawn
3
PushpneetSingh/Hello-world
SourcePawn/hello-world.sp
[ "MIT" ]
data Nat := ('zero : Nat) ; ('suc : Nat -> Nat) ;

let A (m : Nat)(n : Nat) : Nat ;
<= Nat.Ind m ;
define A 'zero n := 'suc n ;
<= Nat.Ind n ;
define A ('suc m) 'zero := A m ('suc 'zero) ;
define A ('suc m) ('suc n) := A m (A ('suc m) n) ;
give \ n -> _ ;
root ;

elab A 'zero 'zero ;
elab A ('suc 'zero) 'zero ;
elab A ('suc ('suc 'zero)) 'zero ;
elab A ('suc ('suc ('suc 'zero))) 'zero ;
elab A ('suc ('suc ('suc ('suc 'zero)))) 'zero ;
elab A 'zero ('suc 'zero) ;
elab A ('suc 'zero) ('suc 'zero) ;
elab A ('suc ('suc 'zero)) ('suc 'zero) ;
elab A ('suc ('suc ('suc 'zero))) ('suc 'zero) ;
elab A 'zero ('suc ('suc 'zero)) ;
elab A ('suc 'zero) ('suc('suc 'zero)) ;
elab A ('suc ('suc 'zero)) ('suc('suc 'zero)) ;
elab A ('suc ('suc ('suc 'zero))) ('suc('suc 'zero)) ;
PigLatin
3
mietek/epigram
test/Ackermann2.pig
[ "MIT" ]
#pragma once

void schedule_restart_as_elevated(bool openSettings);
void schedule_restart_as_non_elevated();

bool is_restart_scheduled();
bool restart_if_scheduled();

bool restart_same_elevation();
C
4
szlatkow/PowerToys
src/runner/restart_elevated.h
[ "MIT" ]
publishing_version = 1.0-5.3-1.6.255-SNAPSHOT
kotlin.mpp.enableGranularSourceSetsMetadata=true
kotlin.native.enableDependencyPropagation=false
INI
1
margarita-nedzelska-sonarsource/kotlin
libraries/tools/kotlin-gradle-plugin-integration-tests/src/test/resources/testProject/hierarchical-mpp-multi-modules/gradle.properties
[ "ECL-2.0", "Apache-2.0" ]
\* graph.shen --- a library for graph definition and manipulation Copyright (C) 2011, Eric Schulte *** License: Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. *** Commentary: Graphs are represented as two dictionaries one for vertices and one for edges. It is important to note that the dictionary implementation used is able to accept arbitrary data structures as keys. This structure technically encodes hypergraphs (a generalization of graphs in which each edge may contain any number of vertices). Examples of a regular graph G and a hypergraph H with the corresponding data structure are given below. 
--G=<graph Vertices Edges>------------------------------------------------ Vertices Edges ---------- ------- +----Graph G-----+ hash | key -> value hash | key -> value | | -----+------>-------- -----+-------->--------- | a---b---c g | 1 | a -> [1] 1 | [a b] -> [1 2] | | | | 2 | b -> [1 2 3] 2 | [b c] -> [2 3] | d---e---f | 3 | c -> [2 4] 3 | [b d] -> [2 4] | | 4 | d -> [3 5] 4 | [c e] -> [3 5] +----------------+ 5 | e -> [4 5 6] 5 | [d e] -> [4 5] 6 | f -> [6] 6 | [e f] -> [5 6] 7 | g -> [] --H=<graph Vertices Edges>------------------------------------------------ Vertices Edges ---------- ------- hash | key -> value hash | key -> value +-- Hypergraph H----+ -----+------>-------- -----+-------->--------- | | 1 | a -> [1] 1 | [a b [1 2 | +------+ | 2 | b -> [1] | c d -> 3 4 | +------+------+ | 3 | c -> [1] | e f] 5 6] | |a b c |d e f | | 4 | d -> [1 2] | | +------+------+ | 5 | e -> [1 2] 2 | [d e [4 5 | |g h i | j | 6 | f -> [1 2] | f g -> 6 7 | +------+ | 7 | g -> [2] | h i] 8 9] | | 8 | h -> [2] +-------------------+ 9 | i -> [2] 10 | j -> [] --G=<graph Vertices Edges>-------Graph with associated edge/vertex data--------- Vertices Edges ---------- ------- +----Graph G-----+ hash | key -> value hash | key -> value | 4 6 7 | -----+------>-------- -----+-------->--------- |0a---b---c g | 1 | a -> (@p 0 [1]) 1 | [a b] -> (@p 4 [1 2]) | 1| 3| | 2 | b -> [1 2 3] 2 | [b c] -> (@p 6 [2 3]) | d---e---f | 3 | c -> [2 4] 3 | [b d] -> (@p 1 [2 4]) | 2 5 | 4 | d -> [3 5] 4 | [c e] -> (@p 3 [3 5]) +----------------+ 5 | e -> [4 5 6] 5 | [d e] -> (@p 2 [4 5]) 6 | f -> [6] 6 | [e f] -> (@p 5 [5 6]) 7 | g -> (@p 7 []) V = # of vertices E = # of edges M = # of vertex edge associations size = size of all vertices + all vertices stored in Vertices dict M * sizeof(int) * 4 + indices into Vertices & Edge dicts V * sizeof(dict entry) + storage in the Vertex dict E * sizeof(dict entry) + storage in the Edge dict 2 * sizeof(dict) the Vertices and Edge dicts *** Code: *\ (require dict) (require sequence) (datatype graph Vertices : dictionary; Edges : dictoinary; =================== (vector symbol Vertices Edges);) (package graph- [graph graph? vertices edges add-vertex add-edge has-edge? has-vertex? edges-for neighbors connected-to connected? connected-components vertex-partition bipartite? \* included from the sequence library\ *\ take drop take-while drop-while range flatten filter complement seperate zip indexed reduce mapcon partition partition-with unique frequencies shuffle pick remove-first interpose subset? cartesian-product \* included from the dict library\ *\ dict? dict dict-> <-dict contents key? keys vals dictionary make-dict] (define graph? X -> (= graph (<-address X 0))) (define make-graph \* create a graph with specified sizes for the vertex dict and edge dict *\ {number --> number --> graph} Vertsize Edgesize -> (let Graph (absvector 3) (do (address-> Graph 0 graph) (address-> Graph 1 (make-dict Vertsize)) (address-> Graph 2 (make-dict Edgesize)) Graph))) (defmacro graph-macro \* return a graph taking optional sizes for the vertex and edge dicts *\ [graph] -> [make-graph 1024 1024] [graph N] -> [make-graph N 1024] [graph N M] -> [make-graph N M]) (define vert-dict Graph -> (<-address Graph 1)) (define edge-dict Graph -> (<-address Graph 2)) (define vertices {graph --> (list A)} Graph -> (keys (vert-dict Graph))) (define edges {graph --> (list (list A))} Graph -> (keys (edge-dict Graph))) (define get-data Value V -> (if (tuple? 
Value) (fst Value) (error (make-string "no data for ~S~%" V)))) (define vertex-data Graph V -> (get-data (<-dict (vert-dict Graph) V) V)) (define edge-data Graph V -> (get-data (<-dict (edge-dict Graph) V) V)) (define resolve {(vector (list A)) --> (@p number number) --> A} Vector (@p Index Place) -> (nth (+ 1 Place) (<-vector Vector Index))) (define resolve-vert {graph --> (@p number number) --> A} Graph Place -> (resolve (<-address (vert-dict Graph) 2) Place)) (define resolve-edge {graph --> (@p number number) --> A} Graph Place -> (resolve (<-address (edge-dict Graph) 2) Place)) (define edges-for {graph --> A --> (list (list A))} Graph Vert -> (let Val (trap-error (<-dict (vert-dict Graph) Vert) (/. E [])) Edges (if (tuple? Val) (snd Val) Val) (map (lambda X (fst (resolve-edge Graph X))) Val))) (define add-vertex-w-data \* add a vertex to a graph *\ {graph --> A --> B --> A} G V Data -> (do (dict-> (vert-dict G) V (@p Data (edges-for G V))) V)) (define add-vertex-w/o-data \* add a vertex to a graph *\ {graph --> A --> B --> A} G V -> (do (dict-> (vert-dict G) V (edges-for G V)) V)) (defmacro add-vertex-macro [add-vertex G V] -> [add-vertex-w/o-data G V] [add-vertex G V D] -> [add-vertex-w-data G V D]) (define update-vert \* in a dict, add an edge to a vertex's edge list *\ {vector --> (@p number number) --> A --> number} Vs Edge V -> (let Store (<-address Vs 2) N (hash V (limit Store)) VertLst (trap-error (<-vector Store N) (/. E [])) Contents (trap-error (<-dict Vs V) (/. E [])) (do (dict-> Vs V (if (tuple? Contents) (@p (fst Contents) (adjoin Edge (snd Contents))) (adjoin Edge Contents))) (@p N (length VertLst))))) (define update-edges-vertices \* add an edge to a graph *\ {graph --> (list A) --> (list A)} Graph Edge -> (let Store (<-address (edge-dict Graph) 2) EdgeID (hash Edge (limit Store)) EdgeLst (trap-error (<-vector Store EdgeID) (/. E [])) (map (update-vert (vert-dict Graph) (@p EdgeID (length EdgeLst))) Edge))) (define add-edge-w-data G E D -> (do (dict-> (edge-dict G) E (@p D (update-edges-vertices G E))) E)) (define add-edge-w/o-data G E -> (do (dict-> (edge-dict G) E (update-edges-vertices G E)) E)) (defmacro add-edge-macro [add-edge G E] -> [add-edge-w/o-data G E] [add-edge G E V] -> [add-edge-w-data G E V]) (define has-edge? {graph --> (list A) --> boolean} Graph Edge -> (key? (edge-dict Graph) Edge)) (define has-vertex? {graph --> A --> boolean} Graph Vertex -> (key? (vert-dict Graph) Vertex)) (define neighbors \* Return the neighbors of a vertex *\ {graph --> A --> (list A)} Graph Vert -> (unique (mapcon (remove-first Vert) (edges-for Graph Vert)))) (define connected-to- {graph --> (list A) --> (list A) --> (list A)} Graph [] Already -> Already Graph New Already -> (let Reachable (unique (mapcon (neighbors Graph) New)) New (difference Reachable Already) (connected-to- Graph New (append New Already)))) (define connected-to \* return all vertices connected to the given vertex, including itself *\ {graph --> A --> (list A)} Graph V -> (connected-to- Graph [V] [V])) (define connected? \* return if a graph is fully connected *\ {graph --> boolean} Graph -> (reduce (/. V Acc (and Acc (subset? (vertices Graph) (connected-to Graph V)))) true (vertices Graph))) (define connected-components- \* given a graph return a list of connected components *\ {graph --> (list A) --> (list (list A)) --> (list graph)} Graph [] _ -> [] Graph VS [] -> (map (/. 
V (let Component (graph 1 0) (do (add-vertex Component V) Component))) VS) Graph [V|VS] ES -> (let Con-verts (connected-to Graph V) Con-edges (filter (/. E (subset? E Con-verts)) ES) Component (graph (length Con-verts) (length Con-edges)) (do (map (add-edge-w/o-data Component) Con-edges) (cons Component (connected-components- Graph (difference VS Con-verts) (difference ES Con-edges)))))) (define connected-components {graph --> (list graph)} Graph -> (connected-components- Graph (vertices Graph) (edges Graph))) (define place-vertex \* given a graph, vertex and list of partitions, partition the vertex *\ {graph --> A --> (list (list A)) --> (list (list A))} Graph V [] -> (if (element? V (neighbors Graph V)) (simple-error (make-string "self-loop ~S, no vertex partition" V)) [[V]]) Graph V [C|CS] -> (let Neighbors (neighbors Graph V) (if (element? V Neighbors) (simple-error (make-string "self-loop ~S, no vertex partition" V)) (if (empty? (intersection C Neighbors)) [[V|C]|CS] [C|(place-vertex Graph V CS)])))) (define vertex-partition \* partition the vertices of a graph *\ {graph --> (list (list A))} Graph -> (reduce (place-vertex Graph) [] (vertices Graph))) (define bipartite? \* check if a graph is bipartite *\ {graph --> boolean} Graph -> (= 2 (length (vertex-partition Graph)))) ) \* simple tests (set g (graph)) (add-edge (value g) [chris patton]) (add-edge (value g) [eric chris]) (add-vertex (value g) nobody) (has-edge? (value g) [patton chris]) (edges-for (value g) chris) (neighbors (value g) chris) (neighbors (value g) nobody) (connected-to (value g) chris) (connected? (value g)) (connected-components (value g)) <- fail when package wrapper is used (map (function vertices) (connected-components (value g))) *\
Shen
5
JavascriptID/sourcerer-app
src/test/resources/samples/langs/Shen/graph.shen
[ "MIT" ]
$TTL 300
;@ IN CF_REDIRECT test.foo.com,https://goo.com/$1
;@ IN CF_TEMP_REDIRECT test.foo.com,https://goo.com/$1
DNS Zone
2
IT-Sumpfling/dnscontrol
pkg/js/parse_tests/011-cfRedirect/foo.com.zone
[ "MIT" ]
sleep 2
t api system button shutter press
sleep 1
t api system button shutter release
AGS Script
1
waltersgrey/autoexechack
HERO4/OneButtonMode/autoexec.ash
[ "MIT" ]
---
layout: foo
---
Test <a href="https://prettier.io">abc</a>.
HTML
0
toplenboren/prettier
tests/html/front-matter/issue-9042.html
[ "MIT" ]
import createSvgIcon from './utils/createSvgIcon'; import { jsx as _jsx } from "react/jsx-runtime"; export default createSvgIcon( /*#__PURE__*/_jsx("path", { d: "m14.09 7.51 1.2-2c.19-.32.19-.71 0-1.03l-1.2-2c-.18-.3-.51-.48-.86-.48h-2.45c-.35 0-.68.18-.86.49l-1.2 2c-.19.32-.19.71 0 1.03l1.2 2c.17.3.5.48.85.48h2.45c.36 0 .69-.18.87-.49zM9.91 9.49l-1.2 2c-.19.32-.19.71 0 1.03l1.2 2c.18.3.51.49.86.49h2.46c.35 0 .68-.18.86-.49l1.2-2c.19-.32.19-.71 0-1.03l-1.2-2c-.18-.31-.51-.49-.86-.49h-2.46c-.35 0-.68.18-.86.49zm7.1 2.02h2.45c.35 0 .68-.18.86-.49l1.2-2c.19-.32.19-.71 0-1.03l-1.2-2c-.18-.3-.51-.49-.86-.49h-2.45c-.35 0-.68.18-.86.49l-1.2 2c-.19.32-.19.71 0 1.03l1.2 2c.19.31.51.49.86.49zm2.46 1h-2.46c-.35 0-.68.18-.86.49l-1.2 2c-.19.32-.19.71 0 1.03l1.2 2c.18.3.51.49.86.49h2.46c.35 0 .68-.18.86-.49l1.2-2c.19-.32.19-.71 0-1.03l-1.2-2c-.18-.3-.51-.49-.86-.49zM7.84 11.03l1.2-2c.19-.32.19-.71 0-1.03l-1.2-2c-.18-.3-.5-.49-.85-.49H4.53c-.35 0-.68.19-.86.49l-1.2 2c-.19.32-.19.71 0 1.03l1.2 2c.18.3.51.49.86.49h2.45c.36-.01.68-.19.86-.49zm-.85 1.48H4.53c-.35 0-.68.18-.86.49l-1.2 2c-.19.32-.19.71 0 1.03l1.2 2c.18.3.51.49.86.49h2.46c.35 0 .68-.18.86-.49l1.2-2c.19-.32.19-.71 0-1.03l-1.2-2c-.19-.3-.51-.49-.86-.49zm2.92 3.98-1.2 2c-.19.32-.19.71 0 1.03l1.2 2c.18.3.51.49.86.49h2.46c.35 0 .68-.18.86-.49l1.2-2c.19-.32.19-.71 0-1.03l-1.2-2c-.18-.3-.51-.49-.86-.49h-2.46c-.35 0-.68.18-.86.49z" }), 'HiveRounded');
JavaScript
3
dany-freeman/material-ui
packages/mui-icons-material/lib/esm/HiveRounded.js
[ "MIT" ]
#![crate_type = "staticlib"]

extern crate b;
extern crate a;
Rust
1
Eric-Arellano/rust
src/test/run-make-fulldeps/issue-37893/c.rs
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
module M {
  module K { }

  import this.this.K;
}
Chapel
1
jhh67/chapel
test/visibility/import/relative/import-relative-error2.chpl
[ "ECL-2.0", "Apache-2.0" ]
[OPTIONS]
Auto Index=Yes
Compatibility=1.1 or later
Compiled file=<%= File.basename(package.meta_chm.file) %>
Contents file=contents.hhc
Display compile progress=No
Default topic=index.html
Language=0x409 English (United States)
Title=<%= package.meta_chm.title %>
Full-text search=Yes
Index file=index.hhk

[FILES]
<% package.meta_chm.files.each do |file| %>
<%= file %>
<% end %>

[MERGE FILES]
<% package.meta_chm.merge_files.each do |merge_file| %>
<%= merge_file %>
<% end %>

[INFOTYPES]
RHTML
2
i-s-o/rubyinstaller
resources/chm/project.hhp.rhtml
[ "BSD-3-Clause" ]
// MIR for `BOP` 0 mir_map

static BOP: &i32 = {
    let mut _0: &i32;                    // return place in scope 0 at $DIR/const-promotion-extern-static.rs:16:13: 16:17
    let _1: &i32;                        // in scope 0 at $DIR/const-promotion-extern-static.rs:16:20: 16:23
    let _2: i32;                         // in scope 0 at $DIR/const-promotion-extern-static.rs:16:21: 16:23

    bb0: {
        StorageLive(_1);                 // scope 0 at $DIR/const-promotion-extern-static.rs:16:20: 16:23
        StorageLive(_2);                 // scope 0 at $DIR/const-promotion-extern-static.rs:16:21: 16:23
        _2 = const 13_i32;               // scope 0 at $DIR/const-promotion-extern-static.rs:16:21: 16:23
        _1 = &_2;                        // scope 0 at $DIR/const-promotion-extern-static.rs:16:20: 16:23
        _0 = &(*_1);                     // scope 0 at $DIR/const-promotion-extern-static.rs:16:20: 16:23
        StorageDead(_1);                 // scope 0 at $DIR/const-promotion-extern-static.rs:16:22: 16:23
        return;                          // scope 0 at $DIR/const-promotion-extern-static.rs:16:1: 16:24
    }
}
Mirah
3
mbc-git/rust
src/test/mir-opt/const_promotion_extern_static.BOP.mir_map.0.mir
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
// SPDX-License-Identifier: MIT
// OpenZeppelin Contracts v4.4.1 (interfaces/IERC3156.sol)

pragma solidity ^0.8.0;

import "./IERC3156FlashBorrower.sol";
import "./IERC3156FlashLender.sol";
Solidity
2
mennat1/simple-yield-farm-truffle-version
node_modules/@openzeppelin/contracts/interfaces/IERC3156.sol
[ "MIT" ]
#!/bin/sh # # Copyright 2020 PingCAP, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -eu check_cluster_version 4 0 0 "incremental restore" || exit 0 DB_NAME=incr for backend in importer local; do run_sql "DROP DATABASE IF EXISTS incr;" run_lightning --backend $backend for tbl in auto_random pk_auto_inc rowid_uk_inc uk_auto_inc; do run_sql "SELECT count(*) from incr.$tbl" check_contains "count(*): 3" done for tbl in auto_random pk_auto_inc rowid_uk_inc uk_auto_inc; do if [ "$tbl" = "auto_random" ]; then run_sql "SELECT id & b'000001111111111111111111111111111111111111111111111111111111111' as inc FROM incr.$tbl" else run_sql "SELECT id as inc FROM incr.$tbl" fi check_contains 'inc: 1' check_contains 'inc: 2' check_contains 'inc: 3' done for tbl in pk_auto_inc rowid_uk_inc; do run_sql "SELECT group_concat(v) from incr.$tbl group by 'all';" check_contains "group_concat(v): a,b,c" done run_sql "SELECT sum(pk) from incr.uk_auto_inc;" check_contains "sum(pk): 6" # incrementally import all data in data1 run_lightning --backend $backend -d "tests/$TEST_NAME/data1" for tbl in auto_random pk_auto_inc rowid_uk_inc uk_auto_inc; do run_sql "SELECT count(*) from incr.$tbl" check_contains "count(*): 6" done for tbl in auto_random pk_auto_inc rowid_uk_inc uk_auto_inc; do if [ "$tbl" = "auto_random" ]; then run_sql "SELECT id & b'000001111111111111111111111111111111111111111111111111111111111' as inc FROM incr.$tbl" else run_sql "SELECT id as inc FROM incr.$tbl" fi check_contains 'inc: 4' check_contains 'inc: 5' check_contains 'inc: 6' done for tbl in pk_auto_inc rowid_uk_inc; do run_sql "SELECT group_concat(v) from incr.$tbl group by 'all';" check_contains "group_concat(v): a,b,c,d,e,f" done run_sql "SELECT sum(pk) from incr.uk_auto_inc;" check_contains "sum(pk): 21" done
Shell
4
Howie59/tidb
br/tests/lightning_incremental/run.sh
[ "Apache-2.0", "BSD-3-Clause" ]
mod foo {
    enum Bar {
        Baz { a: isize },
    }
}

fn f(b: foo::Bar) {
    //~^ ERROR enum `Bar` is private
    match b {
        foo::Bar::Baz { a: _a } => {} //~ ERROR enum `Bar` is private
    }
}

fn main() {}
Rust
3
mbc-git/rust
src/test/ui/structs/struct-variant-privacy.rs
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
MACCHINA(7) # NAME macchina - themeing. # SYNOPSIS *$XDG_CONFIG_HOME/macchina/themes*, *~/.config/macchina/themes*. # DESCRIPTION Themes are your interface to customizing all visual aspects of macchina. # GENERAL OPTIONS ## spacing Defines the amount of spacing to leave between the separator and the content besides it, e.g. spacing = 1 ## padding Defines the amount of padding to leave between the content and its surroundings, e.g. padding = 0 ## hide_ascii Disables the rendering of ASCII, whether it be built-in or custom, e.g. hide_ascii = false ## prefer_small_ascii For built-in ASCII, always use smaller variants, e.g. prefer_small_ascii = true ## separator Defines the glyph to use for the separator, e.g. separator = "-->" ## key_color Defines the color of the keys. Takes hexadecimal/indexed/predefined color names, where casing is insensitive e.g. - key_color = "#00FF00" or - key_color = "046" or - key_color = "Green" ## separator_color Defines the color of the separator. Takes hexadecimal/indexed/predefined color names, where casing is insensitive e.g. - separator_color = "#00FF00" or - separator_color = "046" or - separator_color = "Green" # PALETTE SECTION This section is entirely optional, omitting it from the configuration hides the palette completely. ## type Defines the glyph to use for the palette. You should append a space to leave some room between the glyphs. Accepted values: - "Dark" - "Light" - "Full" ## glyph Defines the glyph to use for the palette. You should append a space to leave some room between each glyph, e.g. glyph = "() " ## visible Defines whether to show or hide the palette, e.g. visible = true # BAR SECTION Bars are a good way to visualize your system usage without seeing the statistics, they range from 0% to 100%. This section is entirely optional, omitting it from the configuration hides bars completely. ## glyph Defines the glyph to use for all bars, e.g. glyph = "o" ## symbol_open Defines the character to use for opening delimiters. Be sure to surround the value with single quotes and not double quotes, e.g. symbol_open = '(' ## symbol_close Defines the character to use for closing delimiters. Be sure to surround the value with single quotes and not double quotes, e.g. symbol_close = ')' ## visible Defines whether to show or hide the bars, e.g. visible = true ## hide_delimiters Defines whether to show or hide the bars delimiters, i.e. the characters that surround the bars themselves, e.g. hide_delimiters = false # BOX SECTION The box component renders a box that surrounds your system information. ## title Defines the title of the box, e.g. title = "Hydrogen" ## border Defines the type of border to use for the box. Accepted values: - border = "plain" or - border = "thick" or - border = "rounded" or - border = "double" ## visible Defines whether to show or hide the box, e.g. visible = true # BOX.INNER_MARGIN SECTION ## x Defines the horizontal margin to leave between the content and the box, e.g. x = 2 ## y Defines the vertical margin to leave between the content and the box, e.g. y = 1 # CUSTOM_ASCII SECTION This section can be used to specify your own ASCII. ANSI escape sequences are supported. ## color Defines the color of the ASCII. Takes hexadecimal/indexed/predefined color names, where casing is insensitive e.g. - separator_color = "#00FF00" or - separator_color = "046" or - separator_color = "Green" ## path Defines the path to a file on your filesystem which contains the ASCII art you want to display, e.g. 
path = "~/ascii/arch_linux" # RANDOMIZE SECTION This section is used to randomize color selection. ## key_color Defines whether to randomize the color of the keys, e.g. key_color = true ## separator_color Defines whether to randomize the color of the separator, e.g. separator_color = true ## pool Defines the pool of colors from which to pick a random color, e.g. Accepted values: - pool = "hexadecimal" or - pool = "indexed" or - pool = "base" If "hexadecimal" is specified, you'll get a random color ranging from #000000 to #FFFFFF If "indexed" is specified, you'll get a random color ranging from 0 to 255 If "base" is specified, you'll get random color from the following set of colors: "Black", "White", "Red", "Green", "Blue", "Yellow", "Magenta" and "Cyan". # KEYS SECTION This section provides the options needed to customize the text of each readout's key. ## host Defines the text of the Host readout, e.g. host = "Host" ## kernel Defines the text of the Kernel readout, e.g. kernel = "Kernel" ## os Defines the text of the OperatingSystem readout, e.g. os = "OS" ## machine Defines the text of the Machine readout, e.g. machine= "Machine" ## de Defines the text of the DesktopEnvironment readout, e.g. de = "DE" ## wm Defines the text of the WindowManager readout, e.g. wm = "WM" ## distro Defines the text of the Distribution readout, e.g. distro = "Distro" ## terminal Defines the text of the Terminal readout, e.g. terminal = "Term" ## shell Defines the text of the Shell readout, e.g. shell = "Shell" ## packages Defines the text of the Packages readout, e.g. packages = "Packages" ## uptime Defines the text of the Uptime readout, e.g. uptime = "Uptime" ## local_ip Defines the text of the LocalIP readout, e.g. local_ip = "Local IP" ## memory Defines the text of the Memory readout, e.g. memory = "Memory" ## battery Defines the text of the Battery readout, e.g. battery = "Battery" ## backlight Defines the text of the Backlight readout, e.g. backlight = "Brightness" ## resolution Defines the text of the Resolution readout, e.g. resolution = "Resolution" ## cpu Defines the text of the Processor readout, e.g. cpu = "CPU" ## cpu_load Defines the text of the ProcessorLoad readout, e.g. cpu_load = "CPU %" # SEE ALSO macchina(1)
SuperCollider
4
Macchina-CLI/macchina
doc/macchina.7.scd
[ "MIT" ]
# Copyright 2019 gRPC authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # distutils: language=c++ from libc cimport string from libc.stdlib cimport malloc from libcpp.string cimport string as cppstring cdef grpc_error_handle grpc_error_none(): return <grpc_error_handle>0 cdef grpc_error_handle socket_error(str syscall, str err): error_str = "{} failed: {}".format(syscall, err) error_bytes = str_to_bytes(error_str) return grpc_socket_error(error_bytes) cdef resolved_addr_to_tuple(grpc_resolved_address* address): cdef cppstring res_str port = grpc_sockaddr_get_port(address) res_str = grpc_sockaddr_to_string(address, False) byte_str = _decode(res_str) if byte_str.endswith(':' + str(port)): byte_str = byte_str[:(0 - len(str(port)) - 1)] byte_str = byte_str.lstrip('[') byte_str = byte_str.rstrip(']') byte_str = '{}'.format(byte_str) return byte_str, port cdef sockaddr_to_tuple(const grpc_sockaddr* address, size_t length): cdef grpc_resolved_address c_addr string.memcpy(<void*>c_addr.addr, <void*> address, length) c_addr.len = length return resolved_addr_to_tuple(&c_addr) cdef sockaddr_is_ipv4(const grpc_sockaddr* address, size_t length): cdef grpc_resolved_address c_addr string.memcpy(<void*>c_addr.addr, <void*> address, length) c_addr.len = length return grpc_sockaddr_get_uri_scheme(&c_addr) == b'ipv4' cdef grpc_resolved_addresses* tuples_to_resolvaddr(tups): cdef grpc_resolved_addresses* addresses tups_set = set((tup[4][0], tup[4][1]) for tup in tups) addresses = <grpc_resolved_addresses*> malloc(sizeof(grpc_resolved_addresses)) addresses.naddrs = len(tups_set) addresses.addrs = <grpc_resolved_address*> malloc(sizeof(grpc_resolved_address) * len(tups_set)) i = 0 for tup in set(tups_set): hostname = str_to_bytes(tup[0]) grpc_string_to_sockaddr(&addresses.addrs[i], hostname, tup[1]) i += 1 return addresses
Cython
4
warlock135/grpc
src/python/grpcio/grpc/_cython/_cygrpc/iomgr.pyx.pxi
[ "Apache-2.0" ]
export { default } from './StepLabel';
export { default as stepLabelClasses } from './stepLabelClasses';
export * from './stepLabelClasses';
JavaScript
3
good-gym/material-ui
packages/material-ui/src/StepLabel/index.js
[ "MIT" ]
extends ../base-flat.jade
Jade
0
cihatislamdede/codecombat
app/templates/vue-base/base-flat.jade
[ "CC-BY-4.0", "MIT" ]
%%

%unicode 5.1
%public
%class UnicodeBlocks_f
%type int
%standalone

%include ../../resources/common-unicode-enumerated-property-java

%%

\p{General Category:This will never be a general category property name} { setCurCharPropertyValue("Not a general category property name"); }

<<EOF>> { printOutput(); return 1; }
JFlex
2
Mivik/jflex
testsuite/testcases/src/test/cases/unicode-general-category/unicode-general-category-f.flex
[ "BSD-3-Clause" ]
body {
  font: 100% Helvetica, sans-serif;
  color: #333;
}

nav ul {
  margin: 0;
  padding: 0;
  list-style: none;
}

nav li {
  display: inline-block;
}

nav a {
  display: block;
  padding: 6px 12px;
  text-decoration: none;
}
CSS
3
FlyAboveGrass/wepy
packages/compiler-sass/test/fixtures/css/basic.scss.css
[ "BSD-3-Clause" ]
# Copyright (c) 1999 Boulder Real Time Technologies, Inc. # # This software module is wholly owned by Boulder Real Time # Technologies, Inc. Any use of this software module without # express written permission from Boulder Real Time Technologies, # Inc. is prohibited. use Getopt::Std ; if ( ! getopts('dD:p:v') ) { die ( "Usage: $0 [-dv] [-D dir] [-p pf] [files]\n" ) ; } use Datascope ; use Mail::Internet ; if ( $opt_D ) { chdir $opt_D ; } $VersionId = "GSE2.1" ; $Pf = $opt_p ? $opt_p : "autodrm" ; $Environment{'receipt'} = $receipt = now() ; @request = <> ; close STDIN ; if ( ! -d "tmp" ) { mkdir "tmp", 0775 || die ( "can't create working directory 'tmp'\n" ) ; } $LOCKFILE = "tmp/lockfile" ; open(LOCK, ">$LOCKFILE" ) || die ( "Can't open autodrm lock file $LOCKFILE" ) ; flock (LOCK, 2) || die ( "Can't lock $LOCKFILE for sequential autodrm processing" ) ; $request = Mail::Internet->new(\@request) ; chomp($from = $request->get('From' )) ; $Environment{'e-mail'} = $from ; $Environment{'msg_type'} = "request" ; $Environment{'msg_id'} = "no-msg-id" ; $Environment{'source_code'} = "no-source-code" ; $Environment{'ftp'} = 0 ; $Environment{'to'} = $receipt ; $Environment{'time_stamp'} = 0 ; $bull_type = pfget($Pf, "default_bulletin_type" ) ; if ( $bull_type ne "" ) { $Environment{'bull_type'} = $bull_type ; } $Environment{'max_email_size'} = pfget($Pf, "max_email_size" ) ; $Environment{'max_ftp_size'} = pfget($Pf, "max_ftp_size" ) ; $Reference_coordinate_system = pfget($Pf, "reference_coordinate_system" ) ; $reply_id_format = pfget($Pf, "reply_id_format") ; $reply_id_timezone = pfget($Pf, "reply_id_timezone") ; $Environment{'reply_id'} = $reply_id = epoch2str($receipt, $reply_id_format, $reply_id_timezone) ; $return_address = $Environment{'return_address'} = pfget($Pf, "return_address" ) ; $Network = pfget($Pf, "Network" ) ; $No_loopback = '(/=-#$%^+@.*.@+^%$#-=\)' ; @std_preface = (" This response was generated by Antelope autodrm", ' from Boulder Real Time Technologies, Inc.', '', ' For instructions in the use of this server, please', ' reply to this mail with the message "please help".', "\t$No_loopback\t", ) ; $Prefix = " ->" ; if ( ! $opt_d || ! -e "/dev/tty" || ! -t STDIN ) { $Log = &log_output(pfget($Pf, "error-log") ) ; } check_for_loopback(@request) ; if ( grep (/^DATA_TYPE\s|^MSG_TYPE\s+DATA/i, @request ) ) { $dir = pfget($Pf, "incoming_replies_directory" ) ; if ( $dir ne "" ) { open ( REPLY, ">$dir/$reply_id" ) ; print REPLY @request ; close REPLY ; $merge_db = pfget ($Pf, "incoming_database" ) ; $program = pfget ( $Pf, "incoming_program" ) ; if ( $merge_db ne "" && $program ne "" ) { system ( "$program $dir/$reply_id $merge_db" ) ; } $result = -99 ; &log_reply($request, $receipt, $reply_id, $reply, $result ) ; } } else { if ( pfget($Pf, "save_requests" )) { if ( ! -d requests ) { mkdir "requests", 0775 ; } open ( REQUEST, ">requests/$reply_id" ) ; print REQUEST @request ; close REQUEST ; } if ( $from ne "" && &ok($from) ) { ($result, $reply) = form_reply ( $request, $reply_id ) ; $reply->replace('To',$Environment{'e-mail'}); $reply->replace('From',$Environment{'return_address'}); if ( defined $Environment{'subject'} ) { $reply->replace('Subject', $Environment{'subject'}); } else { $reply->replace('Subject', "Antelope AutoDRM Response : $Environment{'msg_id'}" ) ; } if ( $opt_d ) { $reply->print(\*STDERR) ; } else { @recipients = $reply->smtpsend() ; if ( @recipients == 0 ) { print STDERR "failed to send reply to $Environment{'e-mail'}\n" ; } if ( pfget($Pf, "save_replies" )) { if ( ! 
-d replies ) { mkdir "replies", 0775 ; } open ( REQUEST, ">replies/$reply_id" ) ; $reply->print(\*REQUEST) ; print REQUEST @request ; close REQUEST ; } } &log_reply($request, $receipt, $reply_id, $reply, $result ) ; } else { print STDERR "mail from $from rejected\n" ; foreach $_ (@request) { if ( /MSG_ID\s+(\S+)\s*(\S+)?/i ) { $Environment{'msg_id'} = $1 ; $Environment{'source_code'} = $2 ; } } $reply = Mail::Internet->new([]) ; $result = -1 ; &log_reply($request, $receipt, $reply_id, $reply, $result ) ; } } check_diskspace(pfget($Pf, "minimum_free_space" )) ; cleanup_logs() ; flock(LOCK, 8); # unlock lock file exit(0) ; sub check_for_loopback { my (@input) = @_ ; if ( grep(index($_,$No_loopback)>=0, @input)) { my $msg = "The following request was rejected as loopback mail:\n" ; print STDERR $msg ; unshift ( @input, $msg ) ; chomp(@input) ; &notify_operator( \@input ) ; $reply = Mail::Internet->new([]) ; $result = -1 ; &log_reply($request, $receipt, $reply_id, $reply, $result ) ; exit(0) ; } if ((@input > 10 && grep (/^BEGIN\b/i, @input) < 1)) { my $msg = "The following request was rejected as misdirected or possibly spam:\n" ; print STDERR $msg ; unshift ( @input, $msg ) ; chomp(@input) ; &notify_operator( \@input ) ; $reply = Mail::Internet->new([]) ; $result = -1 ; &log_reply($request, $receipt, $reply_id, $reply, $result ) ; exit(0) ; } } sub check_diskspace { my ($disk_minspace) = @_ ; open ( DF, "/bin/df -k .|" ) ; my $result = "" ; while ( <DF> ) { next if /^Filesystem/ ; $result .= " " . $_ ; } my ( $filesystem, $kbytes, $used, $avail, $capacity, $mountpt) = split(' ', $result) ; if ( $avail < $disk_minspace ) { my @msg = ( "autodrm disk partition has only $avail kbytes free", "It should have $disk_minspace kbytes free" ) ; &notify_operator( \@msg ) ; } } sub cleanup_logs { my $request_expiration_age = pfget($Pf, "request_expiration_age" ) ; my $reply_expiration_age = pfget($Pf, "reply_expiration_age" ) ; my @old = () ; if ( -d "requests" ) { open ( OLD, "cd requests ; find . -xdev -type f -mtime +$request_expiration_age|" ) ; while ( <OLD> ) { chomp; push(@old, "requests/$_") ; } if ( @old > 0 ) { print STDERR "deleting old requests: @old\n" ; unlink @old ; } } @old = () ; if ( -d "replies" ) { open ( OLD, "cd replies ; find . -xdev -type f -mtime +$reply_expiration_age|" ) ; while ( <OLD> ) { chomp; push(@old, "replies/$_") ; } if ( @old > 0 ) { print STDERR "deleting old replies: @old\n" ; unlink @old ; } } } sub form_reply { my ( $request, $reply_id) = @_ ; my @reply = () ; my $reply_preface = pfget($Pf, "reply_preface" ) ; my @reply_preface = split('\n', $reply_preface) ; push (@reply, @reply_preface ) ; push (@reply, "") ; push (@reply, @std_preface ) ; push (@reply, "") ; push (@reply, "BEGIN $VersionID") ; push (@reply, "MSG_TYPE DATA") ; my $source_code = pfget($Pf, "source_code") ; push (@reply, "MSG_ID $reply_id $source_code") ; ($result, $logref, $resultref) = &process_body ( $request->body) ; push (@reply, "REF_ID $Environment{'msg_id'} $Environment{'source_code'}") ; push (@reply, "" ) ; # append Log output if ( $result != 0 ) { push (@reply, "DATA_TYPE ERROR_LOG GSE2.1" ) ; push (@reply, "$Prefix Some problems occurred during processing." ) ; } else { push (@reply, "DATA_TYPE LOG GSE2.1" ) ; push (@reply, "$Prefix Your request was processed successfully." ) ; } push (@reply, "$Prefix Following is a log of your request." 
) ; push (@reply, @$logref ) ; # append results push (@reply, @$resultref) ; # end with STOP push (@reply, "\nSTOP" ) ; grep (s/$/\n/, @reply ) ; my $reply = $request->reply() ; my $ref = $reply->body() ; @$ref = @reply ; return ($result, $reply) ; } sub log_reply { my ( $request, $receipt, $reply_id, $reply, $result ) = @_ ; my $in_kbytes = &kbytes ( $request->body ) ; my $out_kbytes = &kbytes ( $reply->body ) ; my $database = pfget($Pf, "log_database" ) ; my $schema = pfget($Pf, "log_schema" ) ; if ( ! -e $database ) { open ( DB, ">$database" ) ; print DB "#\nschema $schema\n" ; close DB ; } my @db = dbopen ( $database, "r+" ) ; @db = dblookup ( @db, 0, "log", 0, 0 ) ; my $code ; if ( $result == 0 ) { $code = 's' ; } elsif ( $result == -99 ) { $code = 'd' ; } elsif ( $result > 0 ) { $code = 'e' ; } else { $code = 'r' ; } $db[3] = dbaddnull(@db) ; dbputv (@db, "email", $Environment{'e-mail'}, "msg_id", $Environment{'msg_id'}, "msg_type", substr($Environment{'msg_type'}, 0, 1), "receipt", $receipt, "version", $Environment{'version'}, "source_code", $Environment{'source_code'}, "in_kbytes", $in_kbytes, "result", $code, "rsp_id", $reply_id, "out_kbytes", $out_kbytes, "disposal", now() ) ; } sub kbytes { my ( $ref ) = @_ ; my ($line, $cnt) ; $cnt = 0 ; foreach $line ( @$ref ) { $cnt += length ( $line ) ; } return $cnt/1024. ; } sub process_body { my ( $ref ) = @_ ; my @body = @$ref ; my @log = () ; my @result = () ; my $result = 0 ; my $process = 0 ; my $start_command_time ; my @empty = () ; $Database = pfget($Pf, "database_for_requests" ) ; @Db = dbopen ( $Database, "r" ) ; my (@db, $n) ; foreach $table ( qw(site sensor sitechan instrument) ) { @db = dblookup (@Db, 0, $table, 0, 0 ) ; $n = dbquery (@db, "dbRECORD_COUNT" ) ; if ( $n < 1 ) { push(@log, errlog("Database is incorrect or not present: no $table table") ) ; $result++ ; $Notify_operator = 1; } } push (@log, "" ) if $result ; while ( $_ = shift @body ) { chomp; push (@log, "% $_") ; if (/^BEGIN\s*(\S+)?/i ) { $Environment{'version'} = $1 ; $process = 1 ; } elsif (/^STOP/i ) { $process = 0 ; } elsif ( /\b(HELP|GUIDE|INFOR\w*)\b/i && ! /^(MSG_ID)/) { &send_help() ; push (@log, "$Prefix sent guide to $Environment{'e-mail'}\n" ) ; } elsif ( /^(E-MAIL|EMAIL|E_MAIL)\s+(.*)/i ) { $Environment{'e-mail'} = $2 ; } if ( $opt_V ) { if ( $process ) { print STDERR "-> $_\n" ; } else { print STDERR "## $_\n" ; } } if ( $process ) { $start_command_time = now() ; $subresult = 0 ; $logref = \@empty ; $resultref = \@empty ; if ( /^%/ | /^\s*$/ ) { next ; # skip comments and blank lines } elsif ( /^(BEGIN|STOP|E-MAIL|EMAIL|E_MAIL|HELP)/i ) { next ; # handled above # Environment Variables } elsif ( /MSG_TYPE\s+(\S+)/i ) { $Environment{'msg_type'} = $1 ; } elsif ( /MSG_ID\s+(\S+)?\s+(\S+)/i ) { $Environment{'msg_id'} = $1 ; $Environment{'source_code'} = $2 ; } elsif ( /REF_ID\s+(\S+)?\s+(\S+)/i ) { $Environment{'ref_id'} = $1 ; $Environment{'ref_source_code'} = $2 ; } elsif ( /^TIME\s+(.*)\s*\bTO\b\s*(.*)/i ) { $Environment{'from'} = str2epoch($1) ; $Environment{'to'} = str2epoch($2) ; &push_timerange(\@log) ; } elsif ( /^TIME\s+(.*)/i ) { $Environment{'from'} = $1 ; &push_timerange(\@log) ; } elsif ( /^LAT\s+(.*)?\s*TO\b\s*(.*)?/i ) { $Environment{'low_lat'} = $1 ? $1 : -90.0 ; $Environment{'high_lat'} = $2 ? $2 : 90.0 ; push (@log, "$Prefix Latitude range is $Environment{'low_lat'} to $Environment{'high_lat'} degrees." ) ; } elsif ( /^LON(G)?\s+(.*)?\s*TO\b\s*(.*)?/i ) { $Environment{'low_lon'} = $2 ? $2 : -180.0 ; $Environment{'high_lon'} = $3 ? 
$3 : 180.0 ; push (@log, "$Prefix Longitude range is $Environment{'low_lon'} to $Environment{'high_lon'} degrees." ) ; } elsif ( /^EVENT_STA_DIST\s+(.*)?\s*TO\b\s*(.*)?/i ) { $Environment{'low_dist'} = $1 ? $1 : 0.0 ; $Environment{'high_dist'} = $2 ? $2 : 180.0 ; push (@log, "$Prefix Event to station distance range is $Environment{'low_dist'} to $Environment{'high_dist'} degrees." ) ; } elsif ( /^DEPTH\s+(.*)?\s*TO\b\s*(.*)?/i ) { $Environment{'shallow'} = $1 ? $1 : 0.0 ; $Environment{'deep'} = $2 ? $2 : 6400.0 ; push (@log, "$Prefix Depth range is $Environment{'shallow'} to $Environment{'deep'} kilometers deep." ) ; } elsif ( /^DEPTH_MINUS_ERROR\s+(.*)?\s*TO\b\s*(.*)?/i ) { $Environment{'shallow_90'} = $1 ? $1 : 0.0 ; $Environment{'deep_90'} = $2 ? $2 : 6400.0 ; push (@log, "$Prefix 90% probability depth range is $Environment{'shallow_90'} to $Environment{'deep_90'} kilometers deep." ) ; } elsif ( /^MAG\s+(.*)?\s*TO\b\s*(.*)?/i ) { $Environment{'low_mag'} = $1 ? $1 : -10.0 ; $Environment{'high_mag'} = $2 ? $2 : 20.0 ; push (@log, "$Prefix Magnitude range is $Environment{'low_mag'} to $Environment{'high_mag'}." ) ; } elsif ( /^MAG_TYPE\s+(.*)/i ) { @list = to_list($1) ; my @bad = grep (!/^(mb|Ms|ML)$/, @list) ; if ( @bad > 0 ) { push (@log, &errlog("magnitude types '@bad' not recognized")); } my $i ; my @ok=() ; foreach $i ( @list ) { push @ok, "mb" if ( $i eq "mb" ) ; push @ok, "ms" if ( $i eq "Ms" ) ; push @ok, "ml" if ( $i eq "ML" ) ; } $Environment{'mag_type'} = join(' ', @ok) ; push (@log, "$Prefix Magnitude types are $Environment{'mag_type'}." ) ; } elsif ( /^MB_MINUS_MS\s+(.*)?\s*TO\b\s*(.*)?/i ) { $Environment{'low_mag_diff'} = $1 ? $1 : 0.0 ; $Environment{'high_mag_diff'} = $2 ? $2 : 20.0 ; push (@log, "$Prefix Difference between mb and Ms Magnitudes range is $Environment{'low_mag_diff'} to $Environment{'high_mag_diff'}." ) ; } elsif ( /^NET_LIST\s+(.*)/i ) { @list = to_list($1) ; $Environment{'net_list'} = join(' ', @list) ; push (@log, "$Prefix networks to include are $Environment{'net_list'}." ) ; } elsif ( /^STA_LIST\s+(.*)/i ) { @list = to_list($1) ; $Environment{'sta_list'} = join(' ', @list) ; push (@log, "$Prefix stations to include are $Environment{'sta_list'}." ) ; } elsif ( /^CHAN_LIST\s+(.*)/i ) { @list = to_list($1) ; $Environment{'chan_list'} = join(' ', @list) ; push (@log, "$Prefix channels to include are $Environment{'chan_list'}." ) ; } elsif ( /^BEAM_LIST\s+(.*)/i ) { @list = to_list($1) ; $Environment{'beam_list'} = join(' ', @list) ; push (@log, "$Prefix beams to include are $Environment{'beam_list'}." ) ; } elsif ( /^AUX_LIST\s+(.*)/i ) { @list = to_list($1) ; $Environment{'aux_list'} = join(' ', @list) ; push (@log, "$Prefix auxiliary ids to include are $Environment{'aux_list'}." ) ; } elsif ( /^BULL_TYPE\s+(\S+)/i ) { $Environment{'bull_type'} = $1 ; } elsif ( /^GROUP_BULL_LIST\s+(\S+)/i ) { @list = to_list($1) ; $Environment{'group_bull_list'} = join(' ', @list) ; } elsif ( /^ARRIVAL_LIST\s+(.*)/i ) { @list = to_list($1) ; $Environment{'arrival_list'} = join(' ', @list) ; push (@log, "$Prefix arrival ids to include are $Environment{'arrival_list'}." ) ; } elsif ( /^ORIGIN_LIST\s+(.*)/i ) { @list = to_list($1) ; $Environment{'origin_list'} = join(' ', @list) ; push (@log, "$Prefix origin ids to include are $Environment{'origin_list'}." ) ; } elsif ( /^EVENT_LIST\s+(.*)/i ) { @list = to_list($1) ; $Environment{'event_list'} = join(' ', @list) ; push (@log, "$Prefix event ids to include are $Environment{'event_list'}." 
) ; } elsif ( /^COMM_LIST\s+(.*)/i ) { @list = to_list($1) ; $Environment{'comm_list'} = join(' ', @list) ; push (@log, "$Prefix communication links to include are $Environment{'comm_list'}." ) ; } elsif ( /^RELATIVE_TO\s+(\S+)/i ) { $Environment{'relative_to'} = $1 ; } elsif ( /^TIME_STAMP/i ) { $Environment{'time_stamp'} = 1 ; } elsif ( /^FTP\s+(\S+)/i ) { $Environment{'e-mail'} = $1 ; $Environment{'ftp'} = 1 ; } elsif ( /^FTP/i ) { $Environment{'ftp'} = 1 ; # Unimplemented commands } elsif ( /^INTER\s+(\S+)/i ) { $result++ ; push (@log, &errlog("ftp transfer to $1 not supported")); $Environment{'ftp'} = 1 ; # Actual data returned } elsif ( /^DATA_TYPE\s+(\S+)?\s*(\S+)?/i ) { $type = $1 ; $format = $2 ; ($subresult, $logref, $resultref) = &data_type($type,$format) ; # Actual data requests } elsif ( /^WAVEFORM\b\s*(.*)/i ) { ($subresult, $logref, $resultref) = &waveform($1) ; } elsif ( /^WAVEF\s+(.*)/i ) { $save = $Environment{'sta_list'} ; $Environment{'sta_list'} = $1 ; ($subresult, $logref, $resultref) = &waveform($Environment{'version'}) ; $Environment{'sta_list'} = $save ; } elsif ( /^STATION\b\s*(.*)?/i ) { ($subresult, $logref, $resultref) = &station($1) ; } elsif ( /^CHANNEL\b\s*(.*)?/i ) { ($subresult, $logref, $resultref) = &channel($1) ; } elsif ( /^RESPONSE\b\s*(.*)?/i ) { ($subresult, $logref, $resultref) = &response($1) ; } elsif ( /^BULLETIN\b\s*(.*)?/i ) { ($subresult, $logref, $resultref) = &bulletin($1) ; } elsif ( /^ORIGIN\b\s*(.*)?/i ) { ($subresult, $logref, $resultref) = &origin($1) ; } elsif ( /^ARRIVAL\b\s*(.*)?/i ) { ($subresult, $logref, $resultref) = &arrival($1) ; } elsif ( /^DETEC\S*\b\s*(.*)?/i ) { ($subresult, $logref, $resultref) = &detections($1) ; } elsif ( /^CALIB\b\s*(.*)?/i ) { $save = $Environment{'sta_list'} ; $Environment{'sta_list'} = $1 ; ($subresult, $logref, $resultref) = &response($Environment{'version'}); $Environment{'sta_list'} = $save ; } elsif ( /^OUTAGE\b\s*(.*)?/i ) { ($subresult, $logref, $resultref) = &outage($1) ; } elsif ( /^(TITLE|SUBJE\S*)\s+(.*)/i ) { $Environment{'subject'} = $2 ; # Anything else (an error) } else { $result++ ; push (@log, &errlog("'$_' <not recognized>" ) ); ($cmd, $rest) = split (' ', $_, 2 ) ; $cmd = lc($cmd) ; if ( $ref = pfget($Pf, "examples{$cmd}")) { if ( /^\s+/ ) { push (@log, "$Prefix requests must start in column one.\n" ) ; } push (@log, "$Prefix The correct syntax for $cmd is:" ) ; if ( !ref($ref) ) { push (@log, "$Prefix '$ref'" ) ; } elsif ( ref($ref) eq "SCALAR" ) { push (@log, "$Prefix '$$ref'" ) ; } elsif ( ref($ref) eq "ARRAY" ) { @examples = @$ref ; foreach $example ( @examples ) { push (@log, "$Prefix '$example'" ) ; } } } $line = pop(@log) ; $line .= "\n" ; push(@log, $line) ; } if ( @$logref + @$resultref > 0 || $subresult != 0 ) { if ( $Environment{'time_stamp'} ) { push (@result, "\ntime_stamp " . epoch2str($start_command_time,"%Y/%m/%d %H:%M:%S","")); } $result += $subresult ; push(@log, @$logref) ; if ( @$resultref > 0 ) { $result_kbytes = &kbytes ( @$resultref ) + &kbytes(@result) ; if ( $result_kbytes < $Environment{'max_email_size'}) { push(@result, "" ) ; push(@result, @$resultref) ; } else { push (@log, "$Prefix maximum message size $Environment{'max_email_size'} exceeded" ) ; push (@log, "$Prefix result data omitted") ; $Excessive_size = 1 ; } } } } } if ( $Environment{'time_stamp'} ) { push (@result, "\ntime_stamp " . epoch2str(now(),"%Y/%m/%d %H:%M:%S","")); } &notify_operator( \@log ) if $Notify_operator ; push ( @log, "$Prefix The operator has been alerted to the errors above." 
) if $Notify_operator ; return ($result, \@log, \@result ) ; } sub ok { my ( $from ) = @_ ; my $ref = pfget($Pf, "allow") ; my @allow = @$ref ; my $ref = pfget($Pf, "deny") ; my @deny = @$ref ; my $re ; my $ok = 1 ; if ( @allow > 0 ) { $ok = 0 ; foreach $re (@allow) { if ( $from =~ /$re/i ) { $ok = 1 ; last ; } } } foreach $re (@deny) { if ( $from =~ /$re/i ) { $ok = 0 ; last ; } } return $ok ; } sub send_help { my $help = pfget($Pf, "help_msg" ) ; open ( HELP, $help ) || &notify_operator ( "Can't open '$help'" ) ; my $mail = Mail::Internet->new(\*HELP) ; $mail->replace('To',$Environment{'e-mail'}); $mail->replace('From',$Environment{'return_address'}); $mail->replace('Subject', "User's Guide to Antelope_autodrm" ) ; if ( $opt_d ) { print STDERR ">> would send help response as follows:\n" ; $mail->print(\*STDERR) ; print STDERR ">>\n" ; } else { my @recipients = $mail->smtpsend() ; if ( @recipients == 0 ) { print STDERR "failed to send help to $Environment{'e-mail'}\n" ; } } } sub notify_operator { my ( $msg ) = @_ ; my $time_str = epoch2str($receipt, "%a %B %d, %Y %H:%M:%S", "" ) ; my @body = () ; push (@body, "An error occurred while processing request $Environment{'msg_id'}\n"); push (@body, "received from $Environment{'e-mail'} at $time_str\n\n" ) ; if ( ref($msg) eq "ARRAY" ) { push (@body, "\nThe resulting error log is:\n" ); push (@body, "\n---------------------------------------\n") ; my @log = @$msg ; grep(s/$/\n/, @log) ; grep(s/^/> /, @log) ; push (@body, @log) ; push (@body, "---------------------------------------\n\n") ; } else { push (@body, " $msg\n\n" ) ; } push (@body, "The return reply id is $Environment{'reply_id'}\n") ; my $note = Mail::Internet->new(\@body) ; my $operator = pfget($Pf, "operator" ) ; $note->replace ('To', $operator ) ; $note->replace('From',$Environment{'return_address'}); $note->replace('Subject', "autodrm: errors during request processing" ) ; if ( $opt_d ) { print STDERR "\nSend following error message to '$operator'\n" ; print STDERR "***********************************************\n" ; $note->print(\*STDERR) ; print STDERR "***********************************************\n\n" ; } else { @recipients = $note->smtpsend() ; if ( @recipients == 0 ) { $note->replace ('To', "root" ) ; $note->replace ('Cc', "mailer-daemon" ) ; @recipients = $note->smtpsend() ; } } } sub log_output { my ($log) = @_ ; if ( ! 
$opt_d ) { open(STDOUT, ">$log") || die "Can't redirect stdout"; open(STDERR, ">&STDOUT") || die "Can't dup stdout"; select(STDERR); $| = 1; # make unbuffered select(STDOUT); $| = 1; # make unbuffered } return $log ; } sub errlog { my ($msg) = @_ ; return "$Prefix *** $msg ***" ; } sub list2re { my ($list) = @_ ; if ( $list ne "*" ) { $list =~ s/ /|/g ; $list =~ s/\*/.*/g ; } else { $list = undef ; } return $list ; } sub data_type { my ( $type, $format) = @_ ; my $result = 1 ; my @log = ( &errlog("DATA_TYPE '$type' not implemented") ) ; my @result = () ; return ($result, \@log, \@result ) ; } sub data_for { my ( $sta_chan, $record, $reflog, $refresult, @dbwf ) = @_ ; if ( $record >= 0 ) { push(@$refresult, "WID2 $sta_chan" ) ; push(@$refresult, "STA2" ) ; if ( /relative_to/ ) { push(@$refresult, "EID2" ) ; } if ( /beam/ ) { push(@$refresult, "BEA2" ) ; } push(@$refresult, "DAT2" ) ; push(@$refresult, "CHK2" ) ; } else { push(@$refresult, "OUT2 $sta_chan" ) ; push(@$refresult, "STA2" ) ; } } sub subset_net { my (@db) = @_ ; my ($re, $n) ; if ( defined $Environment{'net_list'} ) { $re = list2re($Environment{'net_list'}) ; if ( $Network !~ /$re/ ) { $db[1] = -102 ; } } return @db ; } sub subset_sta { my (@db) = @_ ; my ($re, $n) ; if ( defined $Environment{'sta_list'} ) { $re = list2re($Environment{'sta_list'}) ; @db = dbsubset(@db, "sta =~/$re/" ) if $re ; } return @db ; } sub subset_chan { my (@db) = @_ ; my ($re, $n) ; if ( defined $Environment{'chan_list'} ) { $re = list2re($Environment{'chan_list'}) ; @db = dbsubset(@db, "chan =~/$re/" ) if $re ; } return @db ; } sub subset_lat { my (@db) = @_ ; my ($lo, $hi, $subset, $n) ; if ( defined $Environment{'low_lat'} ) { $lo = $Environment{'low_lat'} ; $hi = $Environment{'high_lat'} ; $subset = "lat >= $lo && lat <= $hi" ; @db = dbsubset(@db, $subset) ; } return @db ; } sub subset_lon { my (@db) = @_ ; my ($lo, $hi, $subset, $n) ; if ( defined $Environment{'low_lon'} ) { $lo = $Environment{'low_lon'} ; $hi = $Environment{'high_lon'} ; if ( $lo < $hi ) { $subset = "lon >= $lo && lon <= $hi" ; } else { $subset = "(lon >= $lo && lon < 180) || (lon > -180 && lon < $hi)" ; } @db = dbsubset(@db, $subset) ; } return @db ; } sub subset_date_or_now { my (@db) = @_ ; my ($from, $to, $n) ; if ( defined $Environment{'from'} ) { $from = $Environment{'from'} ; $to = $Environment{'to'} ; } else { $from = time() - 86400 ; $to = time() ; } $from = &yearday($from) ; $to = &yearday($to) ; @db = dbsubset(@db, "(ondate>= $from && (offdate<= $to|| offdate==NULL)) || ($from >=ondate && ($from <offdate || offdate == NULL))" ) ; return @db ; } sub subset_time_or_now { my (@db) = @_ ; my ($from, $to, $n) ; if ( defined $Environment{'from'} ) { $from = $Environment{'from'} ; $to = $Environment{'to'} ; } else { $to = $from = time() ; $from -= 86400 ; } @db = dbsubset(@db, "(time>= $from && time<= $to) || ($from >=time && $from <endtime)" ) ; return @db ; } sub subset_date { my (@db) = @_ ; my ($from, $to, $n) ; if ( defined $Environment{'from'} ) { $from = $Environment{'from'} ; $to = $Environment{'to'} ; $from = &yearday($from) ; $to = &yearday($to) ; @db = dbsubset(@db, "(ondate>= $from && (offdate<= $to|| offdate==NULL)) || ($from >=ondate && ($from <offdate || offdate == NULL))" ) ; } return @db ; } sub subset_time { my (@db) = @_ ; my ($from, $to, $n) ; if ( defined $Environment{'from'} ) { $from = $Environment{'from'} ; $to = $Environment{'to'} ; @db = dbsubset(@db, "(time>= $from && time<= $to) || ($from >=time && $from <endtime)" ) ; } return @db ; } 
#----------------------------------------------------------------- # Request implementation: # each routine returns three results: # result-code which is the number of errors which occurred # \@log any log messages # \@result any results # # Global variables which can be used: # @Db : the input database # Database : the name of the input database # Reference_coordinate_system : coordinate system used for lat/lon # %Environment : values from the request. #----------------------------------------------------------------- sub origin { my ( $version ) = @_ ; my $result = 1 ; my @log = ( &errlog("not implemented") ) ; my @result = () ; return ($result, \@log, \@result ) ; } sub arrival { my ( $version ) = @_ ; my $result = 1 ; my @log = ( &errlog("not implemented") ) ; my @result = () ; return ($result, \@log, \@result ) ; } sub detection { my ( $version ) = @_ ; my $result = 1 ; my @log = ( &errlog("not implemented") ) ; my @result = () ; return ($result, \@log, \@result ) ; } sub alert { my ( $version ) = @_ ; my $result = 1 ; my @log = ( &errlog("not implemented") ) ; my @result = () ; return ($result, \@log, \@result ) ; } sub ppick { my ( $version ) = @_ ; my $result = 1 ; my @log = ( &errlog("not implemented") ) ; my @result = () ; return ($result, \@log, \@result ) ; } sub avail { my ( $version ) = @_ ; my $result = 1 ; my @log = ( &errlog("not implemented") ) ; my @result = () ; return ($result, \@log, \@result ) ; } sub slist { my ( $version ) = @_ ; my $result = 1 ; my @log = ( &errlog("not implemented") ) ; my @result = () ; return ($result, \@log, \@result ) ; } sub fmttime { my ($time) = @_ ; return epoch2str($time, "%a %B %d, %Y %H:%M:%S", "UTC" ) ; } sub push_timerange { my ( $ref ) = @_ ; my $from = &fmttime($Environment{'from'}) ; my $to = &fmttime($Environment{'to'}) ; push (@$ref, " $Prefix Time range is $from" ) ; push (@$ref, " $Prefix to $to\n" ) ; } sub to_list { my ( $line ) = @_ ; $line =~ s/,/ /g ; $line =~ s/\s+/ /g ; return split ( ' ', $line) ; }
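# --- Illustrative sketch only; not part of the original autodrm script. ---
# The comment block above documents the handler contract: each request
# routine returns (error-count, \@log, \@result) and may consult @Db,
# $Database and %Environment.  A hypothetical handler following that
# contract could look like the stub below; the routine name and the
# specific query are assumptions for illustration, modeled on the existing
# subroutines rather than taken from the original source.
sub station_count_example {
    my ( $version ) = @_ ;
    my $result = 0 ;
    my @log = () ;
    my @result = () ;
    # count site records in the request database, as process_body does
    my @db = dblookup ( @Db, 0, "site", 0, 0 ) ;
    my $n = dbquery ( @db, "dbRECORD_COUNT" ) ;
    if ( $n < 1 ) {
        push ( @log, &errlog("no site records available") ) ;
        $result++ ;
    } else {
        push ( @log, "$Prefix found $n site records." ) ;
        push ( @result, "DATA_TYPE LOG $VersionId" ) ;
        push ( @result, "station count: $n" ) ;
    }
    return ($result, \@log, \@result ) ;
}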
Perl
4
jreyes1108/antelope_contrib
bin/rt/autodrm/autodrm.xpl
[ "BSD-2-Clause", "MIT" ]
package com.baeldung.guava.mathutils; import static org.junit.Assert.*; import java.math.RoundingMode; import com.google.common.math.LongMath; import org.junit.Test; public class GuavaLongMathUnitTest { @Test public void whenPerformBinomialOnTwoLongValues_shouldReturnResultIfUnderLong() { long result = LongMath.binomial(6, 3); assertEquals(20L, result); } @Test public void whenProformCeilPowOfTwoLongValues_shouldReturnResult() { long result = LongMath.ceilingPowerOfTwo(20L); assertEquals(32L, result); } @Test public void whenCheckedAddTwoLongValues_shouldAddThemAndReturnTheSumIfNotOverflow() { long result = LongMath.checkedAdd(1L, 2L); assertEquals(3L, result); } @Test(expected = ArithmeticException.class) public void whenCheckedAddTwoLongValues_shouldThrowArithmeticExceptionIfOverflow() { LongMath.checkedAdd(Long.MAX_VALUE, 100L); } @Test public void whenCheckedMultiplyTwoLongValues_shouldMultiplyThemAndReturnTheResultIfNotOverflow() { long result = LongMath.checkedMultiply(3L, 2L); assertEquals(6L, result); } @Test(expected = ArithmeticException.class) public void whenCheckedMultiplyTwoLongValues_shouldThrowArithmeticExceptionIfOverflow() { LongMath.checkedMultiply(Long.MAX_VALUE, 100L); } @Test public void whenCheckedPowTwoLongValues_shouldPowThemAndReturnTheResultIfNotOverflow() { long result = LongMath.checkedPow(2L, 3); assertEquals(8L, result); } @Test(expected = ArithmeticException.class) public void gwhenCheckedPowTwoLongValues_shouldThrowArithmeticExceptionIfOverflow() { LongMath.checkedPow(Long.MAX_VALUE, 100); } @Test public void whenCheckedSubstractTwoLongValues_shouldSubstractThemAndReturnTheResultIfNotOverflow() { long result = LongMath.checkedSubtract(4L, 1L); assertEquals(3L, result); } @Test(expected = ArithmeticException.class) public void gwhenCheckedSubstractTwoLongValues_shouldThrowArithmeticExceptionIfOverflow() { LongMath.checkedSubtract(Long.MAX_VALUE, -100); } @Test public void whenDivideTwoLongValues_shouldDivideThemAndReturnTheResultForCeilingRounding() { long result = LongMath.divide(10L, 3L, RoundingMode.CEILING); assertEquals(4L, result); } @Test public void whenDivideTwoLongValues_shouldDivideThemAndReturnTheResultForFloorRounding() { long result = LongMath.divide(10L, 3L, RoundingMode.FLOOR); assertEquals(3L, result); } @Test(expected = ArithmeticException.class) public void whenDivideTwoLongValues_shouldThrowArithmeticExceptionIfRoundingNotDefinedButNecessary() { LongMath.divide(10L, 3L, RoundingMode.UNNECESSARY); } @Test public void whenFactorailLong_shouldFactorialThemAndReturnTheResultIfInIntRange() { long result = LongMath.factorial(5); assertEquals(120L, result); } @Test public void whenFactorailLong_shouldFactorialThemAndReturnIntMaxIfNotInIntRange() { long result = LongMath.factorial(Integer.MAX_VALUE); assertEquals(Long.MAX_VALUE, result); } @Test public void whenFloorPowerOfLong_shouldReturnValue() { long result = LongMath.floorPowerOfTwo(30L); assertEquals(16L, result); } @Test public void whenGcdOfTwoLongs_shouldReturnValue() { long result = LongMath.gcd(30L, 40L); assertEquals(10L, result); } @Test public void whenIsPowOfLong_shouldReturnTrueIfPowerOfTwo() { boolean result = LongMath.isPowerOfTwo(16L); assertTrue(result); } @Test public void whenIsPowOfLong_shouldReturnFalseeIfNotPowerOfTwo() { boolean result = LongMath.isPowerOfTwo(20L); assertFalse(result); } @Test public void whenIsPrineOfLong_shouldReturnFalseeIfNotPrime() { boolean result = LongMath.isPrime(20L); assertFalse(result); } @Test public void 
whenLog10LongValues_shouldLog10ThemAndReturnTheResultForCeilingRounding() { int result = LongMath.log10(30L, RoundingMode.CEILING); assertEquals(2, result); } @Test public void whenLog10LongValues_shouldog10ThemAndReturnTheResultForFloorRounding() { int result = LongMath.log10(30L, RoundingMode.FLOOR); assertEquals(1, result); } @Test(expected = ArithmeticException.class) public void whenLog10LongValues_shouldThrowArithmeticExceptionIfRoundingNotDefinedButNecessary() { LongMath.log10(30L, RoundingMode.UNNECESSARY); } @Test public void whenLog2LongValues_shouldLog2ThemAndReturnTheResultForCeilingRounding() { int result = LongMath.log2(30L, RoundingMode.CEILING); assertEquals(5, result); } @Test public void whenLog2LongValues_shouldog2ThemAndReturnTheResultForFloorRounding() { int result = LongMath.log2(30L, RoundingMode.FLOOR); assertEquals(4, result); } @Test(expected = ArithmeticException.class) public void whenLog2LongValues_shouldThrowArithmeticExceptionIfRoundingNotDefinedButNecessary() { LongMath.log2(30L, RoundingMode.UNNECESSARY); } @Test public void whenMeanTwoLongValues_shouldMeanThemAndReturnTheResult() { long result = LongMath.mean(30L, 20L); assertEquals(25L, result); } @Test public void whenModLongAndIntegerValues_shouldModThemAndReturnTheResult() { int result = LongMath.mod(30L, 4); assertEquals(2, result); } @Test public void whenModTwoLongValues_shouldModThemAndReturnTheResult() { long result = LongMath.mod(30L, 4L); assertEquals(2L, result); } @Test public void whenPowTwoLongValues_shouldPowThemAndReturnTheResult() { long result = LongMath.pow(6L, 4); assertEquals(1296L, result); } @Test public void whenSaturatedAddTwoLongValues_shouldAddThemAndReturnTheResult() { long result = LongMath.saturatedAdd(6L, 4L); assertEquals(10L, result); } @Test public void whenSaturatedAddTwoLongValues_shouldAddThemAndReturnIntMaxIfOverflow() { long result = LongMath.saturatedAdd(Long.MAX_VALUE, 1000L); assertEquals(Long.MAX_VALUE, result); } @Test public void whenSaturatedAddTwoLongValues_shouldAddThemAndReturnIntMinIfUnderflow() { long result = LongMath.saturatedAdd(Long.MIN_VALUE, -1000); assertEquals(Long.MIN_VALUE, result); } @Test public void whenSaturatedMultiplyTwoLongValues_shouldMultiplyThemAndReturnTheResult() { long result = LongMath.saturatedMultiply(6L, 4L); assertEquals(24L, result); } @Test public void whenSaturatedMultiplyTwoLongValues_shouldMultiplyThemAndReturnIntMaxIfOverflow() { long result = LongMath.saturatedMultiply(Long.MAX_VALUE, 1000L); assertEquals(Long.MAX_VALUE, result); } @Test public void whenSaturatedPowTwoLongValues_shouldPowThemAndReturnTheResult() { long result = LongMath.saturatedPow(6L, 2); assertEquals(36L, result); } @Test public void whenSaturatedPowTwoLongValues_shouldPowThemAndReturnIntMaxIfOverflow() { long result = LongMath.saturatedPow(Long.MAX_VALUE, 2); assertEquals(Long.MAX_VALUE, result); } @Test public void whenSaturatedPowTwoLongValues_shouldPowThemAndReturnIntMinIfUnderflow() { long result = LongMath.saturatedPow(Long.MIN_VALUE, 3); assertEquals(Long.MIN_VALUE, result); } @Test public void whenSaturatedSubstractTwoLongValues_shouldSubstractThemAndReturnTheResult() { long result = LongMath.saturatedSubtract(6L, 2L); assertEquals(4L, result); } @Test public void whenSaturatedSubstractTwoLongValues_shouldSubstractwThemAndReturnIntMaxIfOverflow() { long result = LongMath.saturatedSubtract(Long.MAX_VALUE, -2L); assertEquals(Long.MAX_VALUE, result); } @Test public void 
whenSaturatedSubstractTwoLongValues_shouldSubstractThemAndReturnIntMinIfUnderflow() { long result = LongMath.saturatedSubtract(Long.MIN_VALUE, 3L); assertEquals(Long.MIN_VALUE, result); } @Test public void whenSqrtLongValues_shouldSqrtThemAndReturnTheResultForCeilingRounding() { long result = LongMath.sqrt(30L, RoundingMode.CEILING); assertEquals(6L, result); } @Test public void whenSqrtLongValues_shouldSqrtThemAndReturnTheResultForFloorRounding() { long result = LongMath.sqrt(30L, RoundingMode.FLOOR); assertEquals(5L, result); } @Test(expected = ArithmeticException.class) public void whenSqrtLongValues_shouldThrowArithmeticExceptionIfRoundingNotDefinedButNecessary() { LongMath.sqrt(30L, RoundingMode.UNNECESSARY); } }
Java
5
DBatOWL/tutorials
guava-modules/guava-utilities/src/test/java/com/baeldung/guava/mathutils/GuavaLongMathUnitTest.java
[ "MIT" ]
import * as React from 'react';
import Result from '..';

describe('Result.typescript', () => {
  it('status', () => {
    const result = (
      <>
        <Result status="404" title="404" subTitle="Sorry, the page you visited does not exist." />
        <Result status={404} title="404" subTitle="Sorry, the page you visited does not exist." />
      </>
    );
    expect(result).toBeTruthy();
  });
});
TypeScript
3
chnliquan/ant-design
components/result/__tests__/type.test.tsx
[ "MIT" ]
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.javascript2.jade.editor.lexer; import org.netbeans.spi.lexer.LexerInput; import org.netbeans.spi.lexer.LexerRestartInfo; %% %public %final %class JadeColoringLexer %type JadeTokenId %unicode %caseless %char %{ private LexerInput input; private boolean canFollowTag = false; int parenBalance = 1; int braceBalance = 0; int bracketBalance = 0; int indent = 0; int eolPosition = 0; boolean dotAfterTag = false; int blockIndent = -1; boolean hasCssId = false; int lastReaded = 0; boolean continueJS = false; boolean inString = false; int whereToGo = 0; private static enum TAG_TYPE { OTHER, SCRIPT, STYLE}; TAG_TYPE lastTag = TAG_TYPE.OTHER; public JadeColoringLexer(LexerRestartInfo info) { this.input = info.input(); if(info.state() != null) { //reset state setState((LexerState)info.state()); } else { //initial state zzState = zzLexicalState = YYINITIAL; } } public LexerState getState() { if (zzState == YYINITIAL && zzLexicalState == YYINITIAL) { return null; } return new LexerState(zzState, zzLexicalState, canFollowTag, indent, hasCssId, lastTag, braceBalance, parenBalance, bracketBalance); } public void setState(LexerState state) { this.zzState = state.zzState; this.zzLexicalState = state.zzLexicalState; this.canFollowTag = state.canFollowTag; this.indent = state.indent; this.hasCssId = state.hasCssId; this.lastTag = state.lastTag; this.braceBalance = state.braceBalance; this.parenBalance = state.parenBalance; this.bracketBalance = state.bracketBalance; } public JadeTokenId nextToken() throws java.io.IOException { JadeTokenId token = yylex(); return token; } public static final class LexerState { /** the current state of the DFA */ final int zzState; /** the current lexical state */ final int zzLexicalState; final boolean canFollowTag; /** indent of the new line */ final int indent; final boolean hasCssId; /** last readed tag to switch embeding of js , css or html*/ final TAG_TYPE lastTag; /** balance of brances */ final int braceBalance; final int parenBalance; final int bracketBalance; LexerState (int zzState, int zzLexicalState, boolean canFollowTag, int indent, boolean hasCssId, TAG_TYPE lastTag, int braceBalance, int parenBalance, int bracketBalance) { this.zzState = zzState; this.zzLexicalState = zzLexicalState; this.canFollowTag = canFollowTag; this.indent = indent; this.hasCssId = hasCssId; this.lastTag = lastTag; this.braceBalance = braceBalance; this.parenBalance = parenBalance; this.bracketBalance = bracketBalance; } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final LexerState other = (LexerState) obj; if (this.zzState != other.zzState) { return false; } if (this.zzLexicalState != other.zzLexicalState) 
{ return false; } if (this.canFollowTag != other.canFollowTag) { return false; } if (this.hasCssId != other.hasCssId) { return false; } if (this.indent != other.indent) { return false; } if (this.lastTag != other.lastTag) { return false; } if ((this.braceBalance != other.braceBalance) || (this.parenBalance != other.parenBalance) || (this.bracketBalance != other.bracketBalance)) { return false; } return true; } @Override public int hashCode() { int hash = 7; hash = 31 * hash + this.zzState; hash = 31 * hash + this.zzLexicalState; hash = 31 * hash + (this.canFollowTag ? 0 : 1); hash = 31 * hash + (this.hasCssId ? 0 : 1); hash = 31 * hash + this.indent; hash = 31 * hash + this.lastTag.hashCode(); hash = 31 * hash + this.braceBalance; hash = 31 * hash + this.parenBalance; hash = 31 * hash + this.bracketBalance; return hash; } @Override public String toString() { return "LexerState{" + "zzState=" + zzState + ", zzLexicalState=" + zzLexicalState + '}'; } } JadeTokenId getTokenIdFromTagType (TAG_TYPE tagType, JadeTokenId defaultId) { switch (tagType) { case SCRIPT: return JadeTokenId.JAVASCRIPT; case STYLE: return JadeTokenId.CSS; default: return defaultId; } } // End user code boolean checkEndJS(int tokenLength, char ch) { if (!continueJS && ((ch == ')' && parenBalance == 0) || (ch != ')' && parenBalance == 1)) && braceBalance == 0 && bracketBalance == 0) { if (lastReaded > 0 && ((tokenLength - lastReaded) > 0)) { yypushback(tokenLength - lastReaded); yybegin(HTML_ATTRIBUTE); return true; } } lastReaded = tokenLength; continueJS = false; return false; } %} /* states */ %state AFTER_EOL %state DOCTYPE %state AFTER_DOCTYPE %state DOCTYPE_STRING %state DOCTYPE_STRING_END %state AFTER_TAG %state AFTER_CODE_DELIMITER %state AFTER_CODE_DELIMITER_WITH_BLOCK_EXPANSION %state IN_COMMENT %state IN_COMMENT_AFTER_EOL %state IN_UNBUFFERED_COMMENT %state IN_UNBUFFERED_COMMENT_AFTER_EOL %state TEXT_LINE %state IN_PLAIN_TEXT_LINE %state IN_PLAIN_TEXT_BLOCK %state IN_PLAIN_TEXT_BLOCK_AFTER_EOL %state AFTER_PLAIN_TEXT_BLOCK_DELIMITER %state HTML_ATTRIBUTE %state HTML_ATTRIBUTE_VALUE %state JAVASCRIPT_VALUE %state JAVASCRIPT %state JAVASCRIPT_LINE %state JAVASCRIPT_EXPRESSION %state JAVASCRIPT_WITH_BLOCK_EXPANSION %state JS_SSTRING %state JS_STRING %state FILEPATH %state IN_FILTER_BLOCK %state IN_FILTER_BLOCK_AFTER_EOL %state AFTER_INCLUDE %state AFTER_BLOCK %state AFTER_COLON_IN_TAG %state AFTER_EACH %state JAVASCRIPT_AFTER_EACH %state AFTER_MIXIN %state MIXIN_ARGUMENTS %state AFTER_PLUS_MIXIN %state MIXIN_CALL_ARGUMENT %state AFTER_ATTRIBUTES /* base structural elements */ AnyChar = (.|[\n]) h = [0-9a-f] nonascii = [\200-\377] unicode = \\{h}{1,6}(\r\n|[ \t\r\n\f])? escape = {unicode}|\\[ -~\200-\377] nmstart = [_a-zA-Z]|{nonascii}|{escape} nmchar = [_a-zA-Z0-9-]|{nonascii}|{escape} HtmlString = [<] [^"\r"|"\n"|"\r\n"|">"|"*"]* [>]? 
HtmlIdentifierPart = [[:letter:][:digit:]]+[[:letter:][:digit:]\-]* HtmlIdentifier = {HtmlIdentifierPart}({HtmlIdentifierPart})* CssIdentifier = -?{nmstart}{nmchar}* LineTerminator = \r|\n|\r\n StringCharacter = [^\r\n\"\\] | \\{LineTerminator} WS = [ \t\f\u00A0\u000B] WhiteSpace = [ \t\f\u00A0\u000B]+ Input = [^\r\n \t\f\u00A0\u000B]+ IdentifierPart = [:jletterdigit:] Identifier = [:jletter:]{IdentifierPart}* MixinName = {CssIdentifier} Comment = "//" UnbufferedComment = "//-" %% /* TODO: - TagInterPolation http://jade-lang.com/reference/interpolation/ - check interpolation in the text block - mixin default atributes - default attributes at all */ <YYINITIAL> { {AnyChar} { yypushback(1); indent = 0; yybegin(AFTER_EOL); } } <AFTER_EOL> { /* doctype */ "doctype" { yybegin(AFTER_DOCTYPE); return JadeTokenId.DOCTYPE; } "if" { yybegin(AFTER_CODE_DELIMITER); return JadeTokenId.KEYWORD_IF;} "else" { return JadeTokenId.KEYWORD_ELSE;} "unless" { yybegin(AFTER_CODE_DELIMITER); return JadeTokenId.KEYWORD_UNLESS;} "each" { yybegin(AFTER_EACH); return JadeTokenId.KEYWORD_EACH;} "in" { yybegin(AFTER_CODE_DELIMITER); return JadeTokenId.KEYWORD_IN;} "for" { yybegin(AFTER_EACH); return JadeTokenId.KEYWORD_FOR;} "while" { yybegin(AFTER_CODE_DELIMITER); return JadeTokenId.KEYWORD_WHILE;} "case" { yybegin(AFTER_CODE_DELIMITER); return JadeTokenId.KEYWORD_CASE;} "when" { yybegin(AFTER_CODE_DELIMITER_WITH_BLOCK_EXPANSION); return JadeTokenId.KEYWORD_WHEN;} "default" { yybegin(AFTER_TAG); // handling : after the keyword return JadeTokenId.KEYWORD_DEFAULT;} "block" { yybegin(AFTER_BLOCK); return JadeTokenId.KEYWORD_BLOCK;} "extends" { yybegin(FILEPATH); return JadeTokenId.KEYWORD_EXTENDS;} "include" { yybegin(AFTER_INCLUDE); return JadeTokenId.KEYWORD_INCLUDE;} "mixin" { yybegin(AFTER_MIXIN); return JadeTokenId.KEYWORD_MIXIN; } "+" { yybegin(AFTER_PLUS_MIXIN); return JadeTokenId.OPERATOR_PLUS; } "-"|"="|"!=" { yybegin(AFTER_CODE_DELIMITER); return JadeTokenId.CODE_DELIMITER; } {WhiteSpace} { indent = tokenLength; return JadeTokenId.WHITESPACE; } "script" { yybegin(AFTER_TAG); dotAfterTag = true; hasCssId = false; lastTag = TAG_TYPE.SCRIPT; return JadeTokenId.TAG ;} "style" { yybegin(AFTER_TAG); dotAfterTag = true; hasCssId = false; lastTag = TAG_TYPE.STYLE; return JadeTokenId.TAG ;} {HtmlIdentifier} { yybegin(AFTER_TAG); dotAfterTag = true; hasCssId = false; lastTag = TAG_TYPE.OTHER; return JadeTokenId.TAG ;} {LineTerminator} { indent = 0; return JadeTokenId.EOL; } {UnbufferedComment} { yybegin(IN_UNBUFFERED_COMMENT); return JadeTokenId.UNBUFFERED_COMMENT_DELIMITER; } {Comment} { yybegin(IN_COMMENT); return JadeTokenId.COMMENT_DELIMITER; } [#\.!] { hasCssId = false; yypushback(1); yybegin(AFTER_TAG); } "|" { yybegin(IN_PLAIN_TEXT_LINE); return JadeTokenId.PLAIN_TEXT_DELIMITER; } ":"{Input} { yybegin (IN_FILTER_BLOCK); blockIndent = -1; return JadeTokenId.FILTER; } "<" { yybegin(IN_PLAIN_TEXT_LINE); } "&" { yybegin(IN_PLAIN_TEXT_LINE); } . { return JadeTokenId.UNKNOWN;} } /* TODO - this rure shold be rewrite. I don't like it. 
Mainly because the dot after tag handling*/ <AFTER_TAG> { "#"{CssIdentifier} { if (!hasCssId) { hasCssId = true; return JadeTokenId.CSS_ID; } else { // only one css id is allowed in tag return JadeTokenId.UNKNOWN; } } "\."{CssIdentifier} { return JadeTokenId.CSS_CLASS; } "(" { yybegin(HTML_ATTRIBUTE); return JadeTokenId.BRACKET_LEFT_PAREN; } ":" { yybegin(AFTER_COLON_IN_TAG); return JadeTokenId.OPERATOR_COLON; } {WhiteSpace} { yybegin(TEXT_LINE); return JadeTokenId.WHITESPACE; } {LineTerminator} { yybegin(AFTER_EOL); indent = 0; if (tokenLength > 0) { return JadeTokenId.EOL; } } "="|"!=" { yybegin(AFTER_CODE_DELIMITER); return JadeTokenId.CODE_DELIMITER; } "/" { return JadeTokenId.OPERATOR_DIVISION;} "\." { yybegin(AFTER_PLAIN_TEXT_BLOCK_DELIMITER); return JadeTokenId.PLAIN_TEXT_DELIMITER; } "#{"|"!{" { yypushback(2); yybegin(JAVASCRIPT_EXPRESSION); whereToGo = TEXT_LINE; } "&attributes" { yybegin(AFTER_ATTRIBUTES); return JadeTokenId.ATTRIBUTE; } . { yybegin(TEXT_LINE); } } <AFTER_COLON_IN_TAG> { {WhiteSpace} { return JadeTokenId.WHITESPACE; } {HtmlIdentifier} { yybegin(AFTER_TAG); dotAfterTag = true; hasCssId = false; return JadeTokenId.TAG ;} {LineTerminator} { yybegin(AFTER_EOL); indent = 0; return JadeTokenId.EOL; } . { return JadeTokenId.UNKNOWN; } } <TEXT_LINE> { [#!]"{" { yypushback(2); yybegin(JAVASCRIPT_EXPRESSION); whereToGo = TEXT_LINE; if (tokenLength > 2) { return JadeTokenId.TEXT; } } {LineTerminator} { yypushback(1); yybegin(AFTER_EOL); indent = 0; if (tokenLength -1 > 0) { return JadeTokenId.TEXT; } } {AnyChar} { } } <HTML_ATTRIBUTE> { {HtmlIdentifier} { return JadeTokenId.ATTRIBUTE; } "=" { yybegin(HTML_ATTRIBUTE_VALUE); return JadeTokenId.OPERATOR_ASSIGNMENT; } "!=" { yybegin(HTML_ATTRIBUTE_VALUE); return JadeTokenId.OPERATOR_NOT_EQUALS; } "," { return JadeTokenId.OPERATOR_COMMA; } {LineTerminator} { return JadeTokenId.EOL; } {WhiteSpace} { return JadeTokenId.WHITESPACE; } ")" { yybegin(AFTER_TAG); return JadeTokenId.BRACKET_RIGHT_PAREN;} . { return JadeTokenId.UNKNOWN;} } <HTML_ATTRIBUTE_VALUE> { {WhiteSpace} { return JadeTokenId.WHITESPACE; } {LineTerminator} { return JadeTokenId.EOL; } {AnyChar} { parenBalance = 1; lastReaded = bracketBalance = braceBalance = 0; yypushback(1); yybegin(JAVASCRIPT_VALUE);} } <AFTER_ATTRIBUTES> { "(" { parenBalance = 1; lastReaded = bracketBalance = braceBalance = 0; yybegin(JAVASCRIPT_VALUE); return JadeTokenId.BRACKET_LEFT_PAREN; } {LineTerminator} { yybegin(AFTER_EOL); return JadeTokenId.EOL; } . { yybegin(AFTER_TAG); return JadeTokenId.UNKNOWN;} } <AFTER_EACH> { {WhiteSpace} { return JadeTokenId.WHITESPACE; } {LineTerminator} { yybegin(AFTER_EOL); return JadeTokenId.EOL; } {AnyChar} { yypushback(1); yybegin(JAVASCRIPT_AFTER_EACH); } } <JAVASCRIPT_AFTER_EACH> { {LineTerminator} { yybegin(AFTER_EOL); return JadeTokenId.EOL; } {WS}*"in"({LineTerminator}|{WS}+) { int delta = tokenLength - lastReaded; if (delta > 0) { yypushback(delta); yybegin(AFTER_EOL); if (tokenLength > delta) { return JadeTokenId.JAVASCRIPT; } } yypushback(tokenLength); yybegin(AFTER_EOL); } {AnyChar} { lastReaded = tokenLength; } } <JAVASCRIPT_VALUE> { \' { yybegin(JS_SSTRING); } \" { yybegin(JS_STRING); } [\+\-\.&\*/%|=!]"="? 
{ continueJS = true; lastReaded = tokenLength; } "[" { braceBalance++; lastReaded = tokenLength; } "]" { braceBalance--; lastReaded = tokenLength; } "{" { bracketBalance++; lastReaded = tokenLength; } "}" { bracketBalance--; lastReaded = tokenLength; } "(" { parenBalance++; lastReaded = tokenLength;} ")" { parenBalance--; if (checkEndJS(tokenLength, (char)zzInput)) { return JadeTokenId.JAVASCRIPT; } } {WS}+ { } "," { if (checkEndJS(tokenLength, (char)zzInput)) { return JadeTokenId.JAVASCRIPT; } } {HtmlIdentifier} { if (zzInput == ')') parenBalance--; if (checkEndJS(tokenLength, (char)zzInput)) { return JadeTokenId.JAVASCRIPT; } if (zzInput == ')') parenBalance++; // ned to return back } {AnyChar} { lastReaded = tokenLength; /*continueJS = false;*/} } <JS_STRING> { \" { continueJS = false; lastReaded = tokenLength; yybegin(JAVASCRIPT_VALUE); } "\\\"" { } {LineTerminator} { yypushback(1); yybegin(AFTER_EOL); if (tokenLength -1 > 0) { return JadeTokenId.UNKNOWN; } } {AnyChar} { } } <JS_SSTRING> { \' { continueJS = false; lastReaded = tokenLength; yybegin(JAVASCRIPT_VALUE); } "\\'" { } {LineTerminator} { yypushback(1); yybegin(AFTER_EOL); if (tokenLength -1 > 0) { return JadeTokenId.UNKNOWN; } } {AnyChar} { } } <AFTER_INCLUDE> { ":"{Input} { return JadeTokenId.FILTER; } {AnyChar} { yypushback(1); yybegin(FILEPATH); } } <AFTER_BLOCK> { {WhiteSpace} { return JadeTokenId.WHITESPACE; } {Input} { yybegin(TEXT_LINE); return JadeTokenId.BLOCK_NAME;} {LineTerminator} { yybegin(AFTER_EOL); return JadeTokenId.EOL; } } <JAVASCRIPT> { [\"'{}(),\n\r] { switch (zzInput) { case '(': parenBalance++; break; case '{': braceBalance++; break; case '}': braceBalance--; break; case ')': parenBalance--; break; case ',': case '\r': case '\n': if (parenBalance == 1 && braceBalance == 0) { parenBalance = 0; } break; } if (parenBalance == 0 && braceBalance == 0) { yypushback(1); yybegin(HTML_ATTRIBUTE); parenBalance = 1; if (tokenLength > 1) { return JadeTokenId.JAVASCRIPT; } } } {AnyChar} {} } <JAVASCRIPT_EXPRESSION> { [#!]"{" { braceBalance = 1; return JadeTokenId.EXPRESSION_DELIMITER_OPEN; } "{" { braceBalance++; } "}" { braceBalance--; if (braceBalance == 0) { yypushback(1); if (tokenLength > 1) { return JadeTokenId.JAVASCRIPT; } } else if (braceBalance == -1) { yybegin(whereToGo); return JadeTokenId.EXPRESSION_DELIMITER_CLOSE; } } {LineTerminator} { yypushback(1); yybegin(AFTER_EOL); if (tokenLength - 1 > 0) { return JadeTokenId.JAVASCRIPT; } } . { } } <JAVASCRIPT_WITH_BLOCK_EXPANSION> { ":" { yypushback(1); yybegin(AFTER_TAG); } [^:\r\n]+ { return JadeTokenId.JAVASCRIPT; } {LineTerminator} { yybegin(AFTER_EOL); indent = 0; return JadeTokenId.EOL; } } <JAVASCRIPT_LINE> { .+ { return JadeTokenId.JAVASCRIPT; } {LineTerminator} { yybegin(AFTER_EOL); indent = 0; return JadeTokenId.EOL; } } <AFTER_CODE_DELIMITER_WITH_BLOCK_EXPANSION> { {WhiteSpace} { return JadeTokenId.WHITESPACE; } {AnyChar} { yypushback(1); yybegin(JAVASCRIPT_WITH_BLOCK_EXPANSION); } } <AFTER_CODE_DELIMITER> { {WhiteSpace} { return JadeTokenId.WHITESPACE; } {AnyChar} { yypushback(1); yybegin(JAVASCRIPT_LINE); } } <IN_PLAIN_TEXT_LINE> { [#!]"{" { yypushback(2); yybegin(JAVASCRIPT_EXPRESSION); whereToGo = IN_PLAIN_TEXT_LINE; if (tokenLength > 2) { return getTokenIdFromTagType(lastTag, JadeTokenId.PLAIN_TEXT); } } {LineTerminator} { yypushback(1); yybegin(AFTER_EOL); if (tokenLength - 1 > 0 ) { return getTokenIdFromTagType(lastTag, JadeTokenId.PLAIN_TEXT); } } . 
{ } } <AFTER_PLAIN_TEXT_BLOCK_DELIMITER> { {WhiteSpace} { return JadeTokenId.WHITESPACE; } .* { // the text will not be renedered return JadeTokenId.UNKNOWN; } {LineTerminator} { blockIndent = -1; eolPosition = 0; yybegin(IN_PLAIN_TEXT_BLOCK_AFTER_EOL); return JadeTokenId.EOL; } } <IN_PLAIN_TEXT_BLOCK> { [#!]"{" { yypushback(2); yybegin(JAVASCRIPT_EXPRESSION); whereToGo = IN_PLAIN_TEXT_BLOCK; if (tokenLength > 2) { return JadeTokenId.TEXT; } } {LineTerminator} { yybegin(IN_PLAIN_TEXT_BLOCK_AFTER_EOL); eolPosition = tokenLength; } . { } } <IN_PLAIN_TEXT_BLOCK_AFTER_EOL> { {WhiteSpace} { int currentIndent = tokenLength - eolPosition; if (currentIndent <= indent) { // the block has to have one more space than the tag yybegin(AFTER_EOL); indent = currentIndent; if (tokenLength > currentIndent) { yypushback(currentIndent); return getTokenIdFromTagType(lastTag, JadeTokenId.PLAIN_TEXT); } else { return JadeTokenId.WHITESPACE; } } if (blockIndent < 0) { blockIndent = currentIndent; } if (blockIndent > currentIndent) { yypushback(currentIndent); yybegin(AFTER_EOL); if (tokenLength > currentIndent) { return getTokenIdFromTagType(lastTag, JadeTokenId.PLAIN_TEXT); } } yybegin(IN_PLAIN_TEXT_BLOCK); } {LineTerminator} {} . { yypushback(1); yybegin(AFTER_EOL); indent = 0; if (tokenLength > 1) { return getTokenIdFromTagType(lastTag, JadeTokenId.PLAIN_TEXT); } } } <IN_FILTER_BLOCK> { [#!]"{" { yypushback(2); yybegin(JAVASCRIPT_EXPRESSION); whereToGo = IN_FILTER_BLOCK; if (tokenLength > 2) { return JadeTokenId.FILTER_TEXT; } } {LineTerminator} { yybegin(IN_FILTER_BLOCK_AFTER_EOL); eolPosition = tokenLength; } . {} } <IN_FILTER_BLOCK_AFTER_EOL> { {WhiteSpace} { int indentInBlock = tokenLength - eolPosition; if (blockIndent < 0) { blockIndent = indentInBlock; } if (blockIndent > indentInBlock) { yypushback(indentInBlock); yybegin(AFTER_EOL); if (tokenLength > indentInBlock) { return JadeTokenId.FILTER_TEXT; } } yybegin(IN_FILTER_BLOCK); } {LineTerminator} {} . { yypushback(1); yybegin(AFTER_EOL); if (tokenLength - 1 > 0) { return JadeTokenId.FILTER_TEXT; } } } <AFTER_MIXIN> { {WhiteSpace} { return JadeTokenId.WHITESPACE; } {LineTerminator} { yybegin(AFTER_EOL); return JadeTokenId.EOL; } {MixinName} { return JadeTokenId.MIXIN_NAME; } "(" { yybegin(MIXIN_ARGUMENTS); return JadeTokenId.BRACKET_LEFT_PAREN;} . { return JadeTokenId.UNKNOWN; } } <MIXIN_ARGUMENTS> { {WhiteSpace} { return JadeTokenId.WHITESPACE; } {Identifier} { return JadeTokenId.IDENTIFIER; } "," { return JadeTokenId.OPERATOR_COMMA; } "..." { return JadeTokenId.OPERATOR_REST_ARGUMENTS; } ")" { return JadeTokenId.BRACKET_RIGHT_PAREN;} {LineTerminator} { yybegin(AFTER_EOL); return JadeTokenId.EOL; } {AnyChar} { // expect declaration of parameters return JadeTokenId.UNKNOWN; } } <AFTER_PLUS_MIXIN> { {WhiteSpace} { return JadeTokenId.WHITESPACE; } {MixinName} { return JadeTokenId.MIXIN_NAME; } "(" { yybegin(MIXIN_CALL_ARGUMENT); parenBalance = 1; braceBalance = 0; return JadeTokenId.BRACKET_LEFT_PAREN;} "," { yybegin(MIXIN_CALL_ARGUMENT); parenBalance = 1; braceBalance = 0; return JadeTokenId.OPERATOR_COMMA; } ")" { yybegin(AFTER_TAG); return JadeTokenId.BRACKET_LEFT_PAREN;} {LineTerminator} { yybegin(AFTER_EOL); return JadeTokenId.EOL; } . 
{ return JadeTokenId.UNKNOWN; } } <MIXIN_CALL_ARGUMENT> { [(){},] { switch (zzInput) { case '(': parenBalance++; break; case '{': braceBalance++; break; case '}': braceBalance--; break; case ')': parenBalance--; break; case ',': if (parenBalance == 1 && braceBalance == 0) { parenBalance = 0; } break; } if (parenBalance == 0 && braceBalance == 0) { yypushback(1); yybegin(AFTER_PLUS_MIXIN); parenBalance = 1; if (tokenLength > 1) { return JadeTokenId.JAVASCRIPT; } } } {AnyChar} {} } /* This is help rule. Read all until end of line and remember the number of read chars. */ <IN_COMMENT> { .* { } {LineTerminator} { yybegin(IN_COMMENT_AFTER_EOL); eolPosition = tokenLength; } } /* Scan the begining of line in commnet. If there is a whitespace, we need to find out, if the indentation says that the commment continues or finished already. */ <IN_COMMENT_AFTER_EOL> { {WhiteSpace} { int indentInComment = tokenLength - eolPosition; if (indent >= indentInComment) { yypushback(indentInComment + 1); // return back also the EOL yybegin(AFTER_EOL); if (tokenLength > (indentInComment + 1)) { return JadeTokenId.COMMENT; } } else { yybegin(IN_COMMENT); } } {LineTerminator} {} . { yypushback(1); yybegin(AFTER_EOL); if (tokenLength > 1) { return JadeTokenId.COMMENT; } } } /* Copy of the normal comment. Just return the appropriate tokens */ <IN_UNBUFFERED_COMMENT> { .* { } {LineTerminator} { yybegin(IN_UNBUFFERED_COMMENT_AFTER_EOL); eolPosition = tokenLength; } } <IN_UNBUFFERED_COMMENT_AFTER_EOL> { {WhiteSpace} { int indentInComment = tokenLength - eolPosition; if (indent >= indentInComment) { yypushback(indentInComment); yybegin(AFTER_EOL); if (tokenLength > indentInComment) { return JadeTokenId.UNBUFFERED_COMMENT; } } else { yybegin(IN_UNBUFFERED_COMMENT); } } {LineTerminator} {} . 
{ yypushback(1); yybegin(AFTER_EOL); if (tokenLength > 1) { return JadeTokenId.UNBUFFERED_COMMENT; } } } <AFTER_DOCTYPE> { {LineTerminator} { yybegin(AFTER_EOL); indent = 0; if (tokenLength > 0) { return JadeTokenId.EOL; } } {WhiteSpace} { return JadeTokenId.WHITESPACE; } {Input} { yybegin(DOCTYPE); return JadeTokenId.DOCTYPE_TEMPLATE; } } <DOCTYPE> { {LineTerminator} { yybegin(AFTER_EOL); indent = 0; if (tokenLength > 0) { return JadeTokenId.EOL; } } ['\"] { yybegin(DOCTYPE_STRING); return JadeTokenId.DOCTYPE_STRING_START;} {WhiteSpace} { return JadeTokenId.WHITESPACE; } [^'\"\r\n \t\f\u00A0\u000B]+ { return JadeTokenId.DOCTYPE_ATTRIBUTE; } } <DOCTYPE_STRING> { {LineTerminator} { yypushback(1); yybegin(DOCTYPE); if (tokenLength > 1) { return JadeTokenId.UNKNOWN; } } [\"'] { yypushback(1); yybegin(DOCTYPE_STRING_END); if (tokenLength > 1) { return JadeTokenId.DOCTYPE_STRING_END; } } [^\"'\r\n]+ { } } <DOCTYPE_STRING_END> { [\"'] { yybegin(DOCTYPE); return JadeTokenId.DOCTYPE_STRING_END;} } <FILEPATH> { {LineTerminator} { yypushback(1); yybegin(AFTER_EOL); if (tokenLength - 1 > 0) { return JadeTokenId.FILE_PATH; } } [^\r\n] { } } <TEXT_LINE><<EOF>> { { if (input.readLength() > 0) { // backup eof input.backup(1); //and return the text as error token return JadeTokenId.TEXT; } else { return null; }} } <IN_UNBUFFERED_COMMENT_AFTER_EOL><<EOF>> { if (input.readLength() > 0) { // backup eof input.backup(1); //and return the text as error token return JadeTokenId.UNBUFFERED_COMMENT; } else { return null; }} <IN_UNBUFFERED_COMMENT><<EOF>> { if (input.readLength() > 0) { // backup eof input.backup(1); //and return the text as error token return JadeTokenId.UNBUFFERED_COMMENT; } else { return null; }} <IN_COMMENT_AFTER_EOL><<EOF>> { if (input.readLength() > 0) { // backup eof input.backup(1); //and return the text as error token return JadeTokenId.COMMENT; } else { return null; }} <IN_COMMENT><<EOF>> { if (input.readLength() > 0) { // backup eof input.backup(1); //and return the text as error token return JadeTokenId.COMMENT; } else { return null; }} <IN_FILTER_BLOCK_AFTER_EOL><<EOF>> { if (input.readLength() > 0) { // backup eof input.backup(1); //and return the text as error token return JadeTokenId.FILTER_TEXT; } else { return null; }} <IN_FILTER_BLOCK><<EOF>> { if (input.readLength() > 0) { // backup eof input.backup(1); //and return the text as error token return JadeTokenId.FILTER_TEXT; } else { return null; }} <IN_PLAIN_TEXT_LINE><<EOF>> { if (input.readLength() > 0 ) { input.backup(1); return getTokenIdFromTagType(lastTag, JadeTokenId.PLAIN_TEXT); } else { return null; } } <IN_PLAIN_TEXT_BLOCK_AFTER_EOL><<EOF>> { if (input.readLength() > 0) { // backup eof input.backup(1); //and return the text as error token return getTokenIdFromTagType(lastTag, JadeTokenId.PLAIN_TEXT); } else { return null; }} <IN_PLAIN_TEXT_BLOCK><<EOF>> { if (input.readLength() > 0) { // backup eof input.backup(1); //and return the text as error token return getTokenIdFromTagType(lastTag, JadeTokenId.PLAIN_TEXT); } else { return null; }} <<EOF>> { if (input.readLength() > 0) { // backup eof input.backup(1); //and return the text as error token return JadeTokenId.UNKNOWN; } else { return null; } }
JFlex
5
timfel/netbeans
webcommon/javascript2.jade/tools/JadeColoringScanner.flex
[ "Apache-2.0" ]
use("ispec") describe(DefaultBehavior, describe("tuple", it("should return the right kind of tuple for up to nine elements", tuple should be same(Tuple) tuple(1,2) should mimic(Tuple Two) tuple(1,2,3) should mimic(Tuple Three) tuple(1,2,3,4) should mimic(Tuple Four) tuple(1,2,3,4,5) should mimic(Tuple Five) tuple(1,2,3,4,5,6) should mimic(Tuple Six) tuple(1,2,3,4,5,6,7) should mimic(Tuple Seven) tuple(1,2,3,4,5,6,7,8) should mimic(Tuple Eight) tuple(1,2,3,4,5,6,7,8,9) should mimic(Tuple Nine) tuple(1,2,3,4,5,6,7,8,9,10) should mimic(Tuple Many) tuple(1,2,3,4,5,6,7,8,9,10,11) should mimic(Tuple Many) ) it("should add accessor methods for Tuple Many tuples", tx = tuple(1,2,3,4,5,6,7,8,"9",42,"blarg") tx _9 should == "9" tx _10 should == 42 tx _11 should == "blarg" ) it("should set the elements correctly", tx = tuple(5,4,3,2,1) tx first should == 5 tx _1 should == 5 tx second should == 4 tx _2 should == 4 tx third should == 3 tx _3 should == 3 tx fourth should == 2 tx _4 should == 2 tx fifth should == 1 tx _5 should == 1 ) ) describe("", it("should return the object sent in to it if one argument is given", x = (42+5) x should == 47 ) it("should return the empty tuple if no arguments are given", =(x, ()) x should be(Tuple) ) it("should delegate to the tuple method for all other cases", x = (1,2,3,4) x should mimic(Tuple Four) ) ) ) describe(Tuple, it("should have the correct kind", Tuple kind should == "Tuple" ) it("should be possible to mimic", x = Tuple mimic x should not be same(Tuple) x should mimic(Tuple) x should have kind("Tuple") ) it("should mimic Comparing", Tuple should mimic(Mixins Comparing) ) describe("<=>", it("should sort based on the elements inside", (tuple <=> tuple) should == 0 (tuple <=> tuple(1,2)) should == -1 (tuple(1,2) <=> tuple) should == 1 (tuple(1,2) <=> tuple(1,2)) should == 0 (tuple(1,2,3) <=> tuple(1,2)) should == 1 (tuple(1,2) <=> tuple(1,2,3)) should == -1 (tuple(1,2,3) <=> tuple(1,3,3)) should == -1 (tuple(1,3,3) <=> tuple(1,2,3)) should == 1 ) ) describe("==", it("should check equality", (tuple == tuple) should be true (tuple == tuple(1,2)) should be false (tuple(1,2) == tuple) should be false (tuple(1,2) == tuple(1,2)) should be true (tuple(1,2,3) == tuple(1,2)) should be false (tuple(1,2) == tuple(1,2,3)) should be false (tuple(1,2,3) == tuple(1,3,3)) should be false (tuple(1,3,3) == tuple(1,2,3)) should be false (tuple(1,tuple(1,2),3) == tuple(1,tuple(1,2),3)) should be true ) ) describe("!=", it("should check inequality", (tuple != tuple) should be false (tuple != tuple(1,2)) should be true (tuple(1,2) != tuple) should be true (tuple(1,2) != tuple(1,2)) should be false (tuple(1,2,3) != tuple(1,2)) should be true (tuple(1,2) != tuple(1,2,3)) should be true (tuple(1,2,3) != tuple(1,3,3)) should be true (tuple(1,3,3) != tuple(1,2,3)) should be true (tuple(1,tuple(1,2),3) != tuple(1,tuple(1,2),3)) should be false ) ) describe("inspect", it("should return something within parenthesis", tuple inspect should == "()" ) it("should return the inspect format of things inside", tuple(method(nil), method(f, f b), fn(a b)) inspect should == "(method(nil), method(f, f b), fn(a b))" ) it("should return the list of elements separated with , ", tuple(1, 2, :foo, "bar") inspect should == "(1, 2, :foo, \"bar\")" ) ) describe("notice", it("should return something within parenthesis", tuple notice should == "()" ) it("should return the notice format of things inside", tuple(method, method, fn) notice should == "(method(...), method(...), fn(...))" ) it("should return the list 
of elements separated with , ", tuple(1, 2, :foo, "bar") notice should == "(1, 2, :foo, \"bar\")" ) ) describe("arity", it("should return the arity of the tuple", tuple arity should == 0 tuple(1,3) arity should == 2 tuple(tuple(1,3), 42, 5) arity should == 3 tuple(1,3,5,7,9,2,4,6,8,111) arity should == 10 ) ) describe("asList", it("should return the elements in the tuple as a list", tuple asList should == [] tuple(1,2,3) asList should == [1,2,3] tuple(3,3) asList should == [3,3] ) ) describe("asTuple", it("should return itself", x = tuple x asTuple should be(x) x = tuple(1,2,3,555) x asTuple should be(x) ) ) describe("Two", it("should mimic Tuple", Tuple Two should mimic(Tuple) ) it("should have the correct kind", Tuple Two should have kind("Tuple Two") ) it("should have accessors for the first two elements", Tuple Two cell?(:first) should be true Tuple Two cell?(:"_1") should be true Tuple Two cell?(:second) should be true Tuple Two cell?(:"_2") should be true ) ) describe("Three", it("should mimic Tuple Two", Tuple Three should mimic(Tuple Two) ) it("should have the correct kind", Tuple Three should have kind("Tuple Three") ) it("should have accessor for the third element", Tuple Three cell?(:third) should be true Tuple Three cell?(:"_3") should be true ) ) describe("Four", it("should mimic Tuple Three", Tuple Four should mimic(Tuple Three) ) it("should have the correct kind", Tuple Four should have kind("Tuple Four") ) it("should have accessor for the fourth element", Tuple Four cell?(:fourth) should be true Tuple Four cell?(:"_4") should be true ) ) describe("Five", it("should mimic Tuple Four", Tuple Five should mimic(Tuple Four) ) it("should have the correct kind", Tuple Five should have kind("Tuple Five") ) it("should have accessor for the fifth element", Tuple Five cell?(:fifth) should be true Tuple Five cell?(:"_5") should be true ) ) describe("Six", it("should mimic Tuple Five", Tuple Six should mimic(Tuple Five) ) it("should have the correct kind", Tuple Six should have kind("Tuple Six") ) it("should have accessor for the sixth element", Tuple Six cell?(:sixth) should be true Tuple Six cell?(:"_6") should be true ) ) describe("Seven", it("should mimic Tuple Six", Tuple Seven should mimic(Tuple Six) ) it("should have the correct kind", Tuple Seven should have kind("Tuple Seven") ) it("should have accessor for the seventh element", Tuple Seven cell?(:seventh) should be true Tuple Seven cell?(:"_7") should be true ) ) describe("Eight", it("should mimic Tuple Seven", Tuple Eight should mimic(Tuple Seven) ) it("should have the correct kind", Tuple Eight should have kind("Tuple Eight") ) it("should have accessor for the eighth element", Tuple Eight cell?(:eighth) should be true Tuple Eight cell?(:"_8") should be true ) ) describe("Nine", it("should mimic Tuple Eight", Tuple Nine should mimic(Tuple Eight) ) it("should have the correct kind", Tuple Nine should have kind("Tuple Nine") ) it("should have accessor for the ninth element", Tuple Nine cell?(:ninth) should be true Tuple Nine cell?(:"_9") should be true ) ) describe("Many", it("should mimic Tuple Nine", Tuple Many should mimic(Tuple Nine) ) it("should have the correct kind", Tuple Many should have kind("Tuple Many") ) ) )
Ioke
5
olabini/ioke
test/tuple_spec.ik
[ "ICU", "MIT" ]
const exports = {} exports.add = fn(a, b) { a + b } exports.sub = fn(a, b) { a - b } exports.mul = fn(a, b) { a * b } exports.div = fn(a, b) { a / b } return exports
Inform 7
2
lfkeitel/nitrogen
testdata/math2/mod.ni
[ "BSD-3-Clause" ]
<!DOCTYPE html> <html> <head> <title>Sample styled page</title> <script>alert('test');</script> <script> var message = "Alert!"; alert(message); </script> </head> <body> <h1>Sample styled page</h1> <p>This page is just a demo.</p> </body> </html>
HTML
3
fuelingtheweb/prettier
tests/html_js/simple.html
[ "MIT" ]
defmodule <%= inspect schema.repo %>.Migrations.Create<%= Macro.camelize(schema.table) %>AuthTables do use Ecto.Migration def change do<%= if Enum.any?(migration.extensions) do %><%= for extension <- migration.extensions do %> <%= extension %><% end %> <% end %> create table(:<%= schema.table %><%= if schema.binary_id do %>, primary_key: false<% end %>) do <%= if schema.binary_id do %> add :id, :binary_id, primary_key: true <% end %> <%= migration.column_definitions[:email] %> add :hashed_password, :string, null: false add :confirmed_at, :naive_datetime timestamps() end create unique_index(:<%= schema.table %>, [:email]) create table(:<%= schema.table %>_tokens<%= if schema.binary_id do %>, primary_key: false<% end %>) do <%= if schema.binary_id do %> add :id, :binary_id, primary_key: true <% end %> add :<%= schema.singular %>_id, references(:<%= schema.table %>, <%= if schema.binary_id do %>type: :binary_id, <% end %>on_delete: :delete_all), null: false <%= migration.column_definitions[:token] %> add :context, :string, null: false add :sent_to, :string timestamps(updated_at: false) end create index(:<%= schema.table %>_tokens, [:<%= schema.singular %>_id]) create unique_index(:<%= schema.table %>_tokens, [:context, :token]) end end
Elixir
4
faheempatel/phoenix
priv/templates/phx.gen.auth/migration.ex
[ "MIT" ]
(set-info :smt-lib-version 2.6) (set-logic QF_IDL) (set-info :source |The Averest Framework (http://www.averest.org)|) (set-info :category "industrial") (set-info :status sat) (declare-fun cvclZero () Int) (declare-fun F0 () Int) (declare-fun F2 () Int) (declare-fun F4 () Int) (declare-fun F6 () Int) (declare-fun F8 () Int) (declare-fun F14 () Int) (declare-fun F16 () Int) (declare-fun F18 () Int) (declare-fun F20 () Int) (declare-fun F22 () Int) (declare-fun P10 () Bool) (declare-fun P12 () Bool) (declare-fun P24 () Bool) (declare-fun P26 () Bool) (declare-fun P28 () Bool) (assert (let ((?v_0 (not P10)) (?v_3 (and P10 P24))) (let ((?v_8 (and ?v_0 ?v_3)) (?v_2 (and P10 P26))) (let ((?v_1 (and ?v_8 ?v_2)) (?v_10 (and P10 ?v_3))) (let ((?v_4 (and ?v_2 ?v_10)) (?v_6 (not ?v_3))) (let ((?v_13 (and ?v_0 ?v_6))) (let ((?v_5 (and ?v_2 ?v_13)) (?v_15 (and P10 ?v_6))) (let ((?v_7 (and ?v_2 ?v_15)) (?v_11 (not ?v_2))) (let ((?v_9 (and ?v_8 ?v_11)) (?v_12 (and ?v_10 ?v_11)) (?v_14 (and ?v_13 ?v_11)) (?v_16 (and ?v_15 ?v_11)) (?v_26 (= (- F2 F0) 0)) (?v_25 (= (- F22 F14) 0)) (?v_23 (= (- F4 F0) 0)) (?v_24 (= (- F22 F16) 0)) (?v_21 (= (- F6 F0) 0)) (?v_22 (= (- F22 F18) 0)) (?v_20 (= (- F8 F0) 0)) (?v_19 (= (- F22 F20) 0))) (let ((?v_17 (or (or (or (or (or (or (or (or (or (or (or (or (or (or (or (and ?v_26 (and ?v_0 ?v_1)) (and (= (- F22 F2) 0) (and P10 ?v_1))) (and (= (- F14 F0) 0) (and ?v_0 ?v_4))) (and ?v_25 (and P10 ?v_4))) (and ?v_23 (and ?v_0 ?v_5))) (and (= (- F22 F4) 0) (and P10 ?v_5))) (and (= (- F16 F0) 0) (and ?v_0 ?v_7))) (and ?v_24 (and P10 ?v_7))) (and ?v_21 (and ?v_0 ?v_9))) (and (= (- F22 F6) 0) (and P10 ?v_9))) (and (= (- F18 F0) 0) (and ?v_0 ?v_12))) (and ?v_22 (and P10 ?v_12))) (and (and ?v_0 ?v_14) ?v_20)) (and (and P10 ?v_14) (= (- F22 F8) 0))) (and (and ?v_0 ?v_16) (= (- F20 F0) 0))) (and (and P10 ?v_16) ?v_19)))) (let ((?v_18 (not (and (not (and P10 P28)) ?v_17)))) (and (and (and (and (and (= (- cvclZero F22) 0) (and (= (- cvclZero F20) 0) (and (= (- cvclZero F18) 0) (and (and (= (- cvclZero F14) 0) (and ?v_0 (not P12))) (= (- cvclZero F16) 0))))) (not P24)) (not P26)) (not P28)) (and (not (or ?v_17 (not (or (and ?v_0 ?v_18) (and P12 ?v_18))))) (or (or (and P10 ?v_19) (and ?v_0 ?v_20)) (or (or (and ?v_0 ?v_21) (and P10 ?v_22)) (or (or (and ?v_0 ?v_23) (and P10 ?v_24)) (or (and P10 ?v_25) (and ?v_0 ?v_26)))))))))))))))))) (check-sat) (exit)
SMT
2
livinlife6751/infer
sledge/test/smt/QF_IDL/Averest/linear_search/LinearSearch_safe_bgmc000.smt2
[ "MIT" ]
// $Id: HalPXA27xGeneralIOM.nc,v 1.6 2008-06-11 00:46:24 razvanm Exp $ /* * IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. By * downloading, copying, installing or using the software you agree to * this license. If you do not agree to this license, do not download, * install, copy or use the software. * * Intel Open Source License * * Copyright (c) 2002 Intel Corporation * All rights reserved. * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * Neither the name of the Intel Corporation nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE INTEL OR ITS * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
* * */ //@author Phil Buonadonna module HalPXA27xGeneralIOM { provides { interface GeneralIO[uint8_t pin]; interface HalPXA27xGpioInterrupt[uint8_t pin]; interface GpioInterrupt[uint8_t pin]; } uses { interface HplPXA27xGPIOPin[uint8_t pin]; } } implementation { async command void GeneralIO.set[uint8_t pin]() { atomic call HplPXA27xGPIOPin.setGPSRbit[pin](); return; } async command void GeneralIO.clr[uint8_t pin]() { atomic call HplPXA27xGPIOPin.setGPCRbit[pin](); return; } async command void GeneralIO.toggle[uint8_t pin]() { atomic { if (call HplPXA27xGPIOPin.getGPLRbit[pin]()) { call HplPXA27xGPIOPin.setGPCRbit[pin](); } else { call HplPXA27xGPIOPin.setGPSRbit[pin](); } } return; } async command bool GeneralIO.get[uint8_t pin]() { bool result; result = call HplPXA27xGPIOPin.getGPLRbit[pin](); return result; } async command void GeneralIO.makeInput[uint8_t pin]() { atomic call HplPXA27xGPIOPin.setGPDRbit[pin](FALSE); return; } async command bool GeneralIO.isInput[uint8_t pin]() { bool result; result = !call HplPXA27xGPIOPin.getGPLRbit[pin](); return result; } async command void GeneralIO.makeOutput[uint8_t pin]() { atomic call HplPXA27xGPIOPin.setGPDRbit[pin](TRUE); return; } async command bool GeneralIO.isOutput[uint8_t pin]() { bool result; result = call HplPXA27xGPIOPin.getGPDRbit[pin](); return result; } async command error_t HalPXA27xGpioInterrupt.enableRisingEdge[uint8_t pin]() { atomic { call HplPXA27xGPIOPin.setGRERbit[pin](TRUE); call HplPXA27xGPIOPin.setGFERbit[pin](FALSE); } return SUCCESS; } async command error_t HalPXA27xGpioInterrupt.enableFallingEdge[uint8_t pin]() { atomic { call HplPXA27xGPIOPin.setGRERbit[pin](FALSE); call HplPXA27xGPIOPin.setGFERbit[pin](TRUE); } return SUCCESS; } async command error_t HalPXA27xGpioInterrupt.enableBothEdge[uint8_t pin]() { atomic { call HplPXA27xGPIOPin.setGRERbit[pin](TRUE); call HplPXA27xGPIOPin.setGFERbit[pin](TRUE); } return SUCCESS; } async command error_t HalPXA27xGpioInterrupt.disable[uint8_t pin]() { atomic { call HplPXA27xGPIOPin.setGRERbit[pin](FALSE); call HplPXA27xGPIOPin.setGFERbit[pin](FALSE); call HplPXA27xGPIOPin.clearGEDRbit[pin](); } return SUCCESS; } async command error_t GpioInterrupt.enableRisingEdge[uint8_t pin]() { return call HalPXA27xGpioInterrupt.enableRisingEdge[pin](); } async command error_t GpioInterrupt.enableFallingEdge[uint8_t pin]() { return call HalPXA27xGpioInterrupt.enableFallingEdge[pin](); } async command error_t GpioInterrupt.disable[uint8_t pin]() { return call HalPXA27xGpioInterrupt.disable[pin](); } async event void HplPXA27xGPIOPin.interruptGPIOPin[uint8_t pin]() { call HplPXA27xGPIOPin.clearGEDRbit[pin](); signal HalPXA27xGpioInterrupt.fired[pin](); signal GpioInterrupt.fired[pin](); return; } default async event void HalPXA27xGpioInterrupt.fired[uint8_t pin]() { return; } default async event void GpioInterrupt.fired[uint8_t pin]() { return; } }
nesC
4
mtaghiza/tinyos-main-1
tos/chips/pxa27x/gpio/HalPXA27xGeneralIOM.nc
[ "BSD-3-Clause" ]
package com.blankj.utilcode.util; import org.junit.Before; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; /** * <pre> * author: Blankj * blog : http://blankj.com * time : 2019/01/04 * desc : test CacheMemoryStaticUtils * </pre> */ public class CacheMemoryStaticUtilsTest extends BaseTest { private CacheMemoryUtils mCacheMemoryUtils = CacheMemoryUtils.getInstance(3); @Before public void setUp() { for (int i = 0; i < 10; i++) { CacheMemoryStaticUtils.put(String.valueOf(i), i); } for (int i = 0; i < 10; i++) { CacheMemoryStaticUtils.put(String.valueOf(i), i, mCacheMemoryUtils); } } @Test public void get() { for (int i = 0; i < 10; i++) { assertEquals(i, CacheMemoryStaticUtils.get(String.valueOf(i))); } for (int i = 0; i < 10; i++) { if (i < 7) { assertNull(CacheMemoryStaticUtils.get(String.valueOf(i), mCacheMemoryUtils)); } else { assertEquals(i, CacheMemoryStaticUtils.get(String.valueOf(i), mCacheMemoryUtils)); } } } @Test public void getExpired() throws Exception { CacheMemoryStaticUtils.put("10", 10, 2 * CacheMemoryUtils.SEC); assertEquals(10, CacheMemoryStaticUtils.get("10")); Thread.sleep(1500); assertEquals(10, CacheMemoryStaticUtils.get("10")); Thread.sleep(1500); assertNull(CacheMemoryStaticUtils.get("10")); } @Test public void getDefault() { assertNull(CacheMemoryStaticUtils.get("10")); assertEquals("10", CacheMemoryStaticUtils.get("10", "10")); } @Test public void getCacheCount() { assertEquals(10, CacheMemoryStaticUtils.getCacheCount()); assertEquals(3, CacheMemoryStaticUtils.getCacheCount(mCacheMemoryUtils)); } @Test public void remove() { assertEquals(0, CacheMemoryStaticUtils.remove("0")); assertNull(CacheMemoryStaticUtils.get("0")); assertNull(CacheMemoryStaticUtils.remove("0")); } @Test public void clear() { CacheMemoryStaticUtils.clear(); CacheMemoryStaticUtils.clear(mCacheMemoryUtils); for (int i = 0; i < 10; i++) { assertNull(CacheMemoryStaticUtils.get(String.valueOf(i))); } for (int i = 0; i < 10; i++) { assertNull(CacheMemoryStaticUtils.get(String.valueOf(i), mCacheMemoryUtils)); } assertEquals(0, CacheMemoryStaticUtils.getCacheCount()); assertEquals(0, CacheMemoryStaticUtils.getCacheCount(mCacheMemoryUtils)); } }
Java
4
YashBangera7/AndroidUtilCode
lib/utilcode/src/test/java/com/blankj/utilcode/util/CacheMemoryStaticUtilsTest.java
[ "Apache-2.0" ]
const std = @import("std"); const testing = std.testing; export fn add(a: i32, b: i32) i32 { return a + b; } test "basic add functionality" { try testing.expect(add(3, 7) == 10); }
Zig
4
lukekras/zig
lib/std/special/init-lib/src/main.zig
[ "MIT" ]
/** * An testing lexical grammar. */ module.exports = { "macros": { "id": "[a-zA-Z0-9_]", }, "startConditions": { "comment": 1, // exclusive }, "rules": [ [["*"], "\\s+", "/*skip whitespace*/"], [["*"], "<<EOF>>", "return 'EOF'"], ["\\d+", "return 'NUMBER'"], ["{id}+", "return 'IDENTIFIER'"], ["\\(", "return '('"], ["\\)", "return ')'"], ["\\+", "return '+'"], ["\\*", "return '*'"], ["\\/\\*", "this.pushState('comment');"], [["comment"], "\\*+\\/", "this.popState();"], [["comment"], "\\d+", "return 'NUMBER_IN_COMMENT'"], [["comment"], "{id}+", "return 'IDENTIFIER_IN_COMMENT'"], ], "options": { "case-insensitive": true, }, };
Lex
5
ruby-on-rust/syntax
src/grammar/__tests__/calc.lex
[ "MIT" ]
module Core impl string { fn len () extern("opal.str") "string.len" -> int fn add (x : string) extern("opal.str") "string.add" -> string fn get (i : int) extern("opal.str") "string.get" -> char fn cmp (x : string) extern("opal.str") "string.cmp" -> int fn equal (x : string) extern("opal.str") "string.equal" -> bool fn find (x : string) extern("opal.str") "string.find" -> int fn slice (start : int, end : int) extern("opal.str") "string.slice" -> string fn to_int () extern("opal.str") "string.to_int" -> int fn to_long () extern("opal.str") "string.to_long" -> long fn to_real () extern("opal.str") "string.to_real" -> real fn empty? () { self.len() == 0 } fn slice_from (n : int) { self.slice(n, self.len()) } fn slice_to (n : int) { self.slice(0, n) } fn mul (n : int) { if n <= 0 { "" } else if n == 1 { self } else { let res = ""; while n > 0 { res = res + self; n = n.pred(); } res } } fn str () { self } } impl int { fn str () extern("opal.str") "int.str" -> string } impl real { fn str () extern("opal.str") "real.str" -> string } impl long { fn str () extern("opal.str") "long.str" -> string } impl char { fn str () { string_of(self, 1) }} impl bool { fn str () { if self { "true" } else { "false" } }} impl unit { fn str () { "()" }} // TODO: tuple(n).str pub fn string_of (c : char, n : int) extern("opal.str") "string_of" -> string impl char { fn space? () extern("opal.str") "char.space?" -> bool fn digit? () extern("opal.str") "char.digit?" -> bool fn alpha? () extern("opal.str") "char.alpha?" -> bool fn ident? () extern("opal.str") "char.ident?" -> bool // TODO: add these // fn upper () extern("opal.str") "char.upper" -> char // fn lower () extern("opal.str") "char.loewr" -> char fn add (n : int) { (self.to_int() + n).to_char() } fn sub (c : char) { self.to_int() - c.to_int() } fn mul (n : int) { string_of(self, n) } }
Opal
5
iitalics/Opal
opal_libs/Core/string.opal
[ "MIT" ]
mask_tests! { masksizex2, 2 } mask_tests! { masksizex4, 4 } mask_tests! { masksizex8, 8 }
Rust
2
ohno418/rust
library/portable-simd/crates/core_simd/tests/mask_ops_impl/masksize.rs
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
/home/spinalvm/hdl/riscv-compliance/work//I-RF_width-01.elf: file format elf32-littleriscv Disassembly of section .text.init: 80000000 <_start>: 80000000: 00001817 auipc a6,0x1 80000004: 00080813 mv a6,a6 80000008: 00100013 li zero,1 8000000c: 00100093 li ra,1 80000010: 00100113 li sp,1 80000014: 00100193 li gp,1 80000018: 00100213 li tp,1 8000001c: 00100293 li t0,1 80000020: 00100313 li t1,1 80000024: 00100393 li t2,1 80000028: 00100413 li s0,1 8000002c: 00100493 li s1,1 80000030: 00100513 li a0,1 80000034: 00100593 li a1,1 80000038: 00100613 li a2,1 8000003c: 00100693 li a3,1 80000040: 00100713 li a4,1 80000044: 00100793 li a5,1 80000048: 01f09093 slli ra,ra,0x1f 8000004c: 0000c463 bltz ra,80000054 <_start+0x54> 80000050: 00000093 li ra,0 80000054: 01f11113 slli sp,sp,0x1f 80000058: 00014463 bltz sp,80000060 <_start+0x60> 8000005c: 00000113 li sp,0 80000060: 01f19193 slli gp,gp,0x1f 80000064: 0001c463 bltz gp,8000006c <_start+0x6c> 80000068: 00000193 li gp,0 8000006c: 01f21213 slli tp,tp,0x1f 80000070: 00024463 bltz tp,80000078 <_start+0x78> 80000074: 00000213 li tp,0 80000078: 01f29293 slli t0,t0,0x1f 8000007c: 0002c463 bltz t0,80000084 <_start+0x84> 80000080: 00000293 li t0,0 80000084: 01f31313 slli t1,t1,0x1f 80000088: 00034463 bltz t1,80000090 <_start+0x90> 8000008c: 00000313 li t1,0 80000090: 01f39393 slli t2,t2,0x1f 80000094: 0003c463 bltz t2,8000009c <_start+0x9c> 80000098: 00000393 li t2,0 8000009c: 01f41413 slli s0,s0,0x1f 800000a0: 00044463 bltz s0,800000a8 <_start+0xa8> 800000a4: 00000413 li s0,0 800000a8: 01f49493 slli s1,s1,0x1f 800000ac: 0004c463 bltz s1,800000b4 <_start+0xb4> 800000b0: 00000493 li s1,0 800000b4: 01f51513 slli a0,a0,0x1f 800000b8: 00054463 bltz a0,800000c0 <_start+0xc0> 800000bc: 00000513 li a0,0 800000c0: 01f59593 slli a1,a1,0x1f 800000c4: 0005c463 bltz a1,800000cc <_start+0xcc> 800000c8: 00000593 li a1,0 800000cc: 01f61613 slli a2,a2,0x1f 800000d0: 00064463 bltz a2,800000d8 <_start+0xd8> 800000d4: 00000613 li a2,0 800000d8: 01f69693 slli a3,a3,0x1f 800000dc: 0006c463 bltz a3,800000e4 <_start+0xe4> 800000e0: 00000693 li a3,0 800000e4: 01f71713 slli a4,a4,0x1f 800000e8: 00074463 bltz a4,800000f0 <_start+0xf0> 800000ec: 00000713 li a4,0 800000f0: 01f79793 slli a5,a5,0x1f 800000f4: 0007c463 bltz a5,800000fc <_start+0xfc> 800000f8: 00000793 li a5,0 800000fc: 00082023 sw zero,0(a6) # 80001000 <codasip_signature_start> 80000100: 00182223 sw ra,4(a6) 80000104: 00282423 sw sp,8(a6) 80000108: 00382623 sw gp,12(a6) 8000010c: 00482823 sw tp,16(a6) 80000110: 00582a23 sw t0,20(a6) 80000114: 00682c23 sw t1,24(a6) 80000118: 00782e23 sw t2,28(a6) 8000011c: 02882023 sw s0,32(a6) 80000120: 02982223 sw s1,36(a6) 80000124: 02a82423 sw a0,40(a6) 80000128: 02b82623 sw a1,44(a6) 8000012c: 02c82823 sw a2,48(a6) 80000130: 02d82a23 sw a3,52(a6) 80000134: 02e82c23 sw a4,56(a6) 80000138: 02f82e23 sw a5,60(a6) 8000013c: 00001097 auipc ra,0x1 80000140: f0408093 addi ra,ra,-252 # 80001040 <test_A2_res> 80000144: 00100813 li a6,1 80000148: 00100893 li a7,1 8000014c: 00100913 li s2,1 80000150: 00100993 li s3,1 80000154: 00100a13 li s4,1 80000158: 00100a93 li s5,1 8000015c: 00100b13 li s6,1 80000160: 00100b93 li s7,1 80000164: 00100c13 li s8,1 80000168: 00100c93 li s9,1 8000016c: 00100d13 li s10,1 80000170: 00100d93 li s11,1 80000174: 00100e13 li t3,1 80000178: 00100e93 li t4,1 8000017c: 00100f13 li t5,1 80000180: 00100f93 li t6,1 80000184: 01f81813 slli a6,a6,0x1f 80000188: 00084463 bltz a6,80000190 <_start+0x190> 8000018c: 00000813 li a6,0 80000190: 01f89893 slli a7,a7,0x1f 80000194: 
0008c463 bltz a7,8000019c <_start+0x19c> 80000198: 00000893 li a7,0 8000019c: 01f91913 slli s2,s2,0x1f 800001a0: 00094463 bltz s2,800001a8 <_start+0x1a8> 800001a4: 00000913 li s2,0 800001a8: 01f99993 slli s3,s3,0x1f 800001ac: 0009c463 bltz s3,800001b4 <_start+0x1b4> 800001b0: 00000993 li s3,0 800001b4: 01fa1a13 slli s4,s4,0x1f 800001b8: 000a4463 bltz s4,800001c0 <_start+0x1c0> 800001bc: 00000a13 li s4,0 800001c0: 01fa9a93 slli s5,s5,0x1f 800001c4: 000ac463 bltz s5,800001cc <_start+0x1cc> 800001c8: 00000a93 li s5,0 800001cc: 01fb1b13 slli s6,s6,0x1f 800001d0: 000b4463 bltz s6,800001d8 <_start+0x1d8> 800001d4: 00000b13 li s6,0 800001d8: 01fb9b93 slli s7,s7,0x1f 800001dc: 000bc463 bltz s7,800001e4 <_start+0x1e4> 800001e0: 00000b93 li s7,0 800001e4: 01fc1c13 slli s8,s8,0x1f 800001e8: 000c4463 bltz s8,800001f0 <_start+0x1f0> 800001ec: 00000c13 li s8,0 800001f0: 01fc9c93 slli s9,s9,0x1f 800001f4: 000cc463 bltz s9,800001fc <_start+0x1fc> 800001f8: 00000c93 li s9,0 800001fc: 01fd1d13 slli s10,s10,0x1f 80000200: 000d4463 bltz s10,80000208 <_start+0x208> 80000204: 00000d13 li s10,0 80000208: 01fd9d93 slli s11,s11,0x1f 8000020c: 000dc463 bltz s11,80000214 <_start+0x214> 80000210: 00000d93 li s11,0 80000214: 01fe1e13 slli t3,t3,0x1f 80000218: 000e4463 bltz t3,80000220 <_start+0x220> 8000021c: 00000e13 li t3,0 80000220: 01fe9e93 slli t4,t4,0x1f 80000224: 000ec463 bltz t4,8000022c <_start+0x22c> 80000228: 00000e93 li t4,0 8000022c: 01ff1f13 slli t5,t5,0x1f 80000230: 000f4463 bltz t5,80000238 <_start+0x238> 80000234: 00000f13 li t5,0 80000238: 01ff9f93 slli t6,t6,0x1f 8000023c: 000fc463 bltz t6,80000244 <_start+0x244> 80000240: 00000f93 li t6,0 80000244: 0100a023 sw a6,0(ra) 80000248: 0110a223 sw a7,4(ra) 8000024c: 0120a423 sw s2,8(ra) 80000250: 0130a623 sw s3,12(ra) 80000254: 0140a823 sw s4,16(ra) 80000258: 0150aa23 sw s5,20(ra) 8000025c: 0160ac23 sw s6,24(ra) 80000260: 0170ae23 sw s7,28(ra) 80000264: 0380a023 sw s8,32(ra) 80000268: 0390a223 sw s9,36(ra) 8000026c: 03a0a423 sw s10,40(ra) 80000270: 03b0a623 sw s11,44(ra) 80000274: 03c0a823 sw t3,48(ra) 80000278: 03d0aa23 sw t4,52(ra) 8000027c: 03e0ac23 sw t5,56(ra) 80000280: 03f0ae23 sw t6,60(ra) 80000284: 00001517 auipc a0,0x1 80000288: d7c50513 addi a0,a0,-644 # 80001000 <codasip_signature_start> 8000028c: 00001597 auipc a1,0x1 80000290: df458593 addi a1,a1,-524 # 80001080 <_end> 80000294: f0100637 lui a2,0xf0100 80000298: f2c60613 addi a2,a2,-212 # f00fff2c <_end+0x700feeac> 8000029c <complience_halt_loop>: 8000029c: 02b50663 beq a0,a1,800002c8 <complience_halt_break> 800002a0: 00c52683 lw a3,12(a0) 800002a4: 00d62023 sw a3,0(a2) 800002a8: 00852683 lw a3,8(a0) 800002ac: 00d62023 sw a3,0(a2) 800002b0: 00452683 lw a3,4(a0) 800002b4: 00d62023 sw a3,0(a2) 800002b8: 00052683 lw a3,0(a0) 800002bc: 00d62023 sw a3,0(a2) 800002c0: 01050513 addi a0,a0,16 800002c4: fd9ff06f j 8000029c <complience_halt_loop> 800002c8 <complience_halt_break>: 800002c8: f0100537 lui a0,0xf0100 800002cc: f2050513 addi a0,a0,-224 # f00fff20 <_end+0x700feea0> 800002d0: 00052023 sw zero,0(a0) Disassembly of section .data: 80001000 <codasip_signature_start>: 80001000: ffff 0xffff 80001002: ffff 0xffff 80001004: ffff 0xffff 80001006: ffff 0xffff 80001008: ffff 0xffff 8000100a: ffff 0xffff 8000100c: ffff 0xffff 8000100e: ffff 0xffff 80001010: ffff 0xffff 80001012: ffff 0xffff 80001014: ffff 0xffff 80001016: ffff 0xffff 80001018: ffff 0xffff 8000101a: ffff 0xffff 8000101c: ffff 0xffff 8000101e: ffff 0xffff 80001020: ffff 0xffff 80001022: ffff 0xffff 80001024: ffff 0xffff 80001026: ffff 
0xffff 80001028: ffff 0xffff 8000102a: ffff 0xffff 8000102c: ffff 0xffff 8000102e: ffff 0xffff 80001030: ffff 0xffff 80001032: ffff 0xffff 80001034: ffff 0xffff 80001036: ffff 0xffff 80001038: ffff 0xffff 8000103a: ffff 0xffff 8000103c: ffff 0xffff 8000103e: ffff 0xffff 80001040 <test_A2_res>: 80001040: ffff 0xffff 80001042: ffff 0xffff 80001044: ffff 0xffff 80001046: ffff 0xffff 80001048: ffff 0xffff 8000104a: ffff 0xffff 8000104c: ffff 0xffff 8000104e: ffff 0xffff 80001050: ffff 0xffff 80001052: ffff 0xffff 80001054: ffff 0xffff 80001056: ffff 0xffff 80001058: ffff 0xffff 8000105a: ffff 0xffff 8000105c: ffff 0xffff 8000105e: ffff 0xffff 80001060: ffff 0xffff 80001062: ffff 0xffff 80001064: ffff 0xffff 80001066: ffff 0xffff 80001068: ffff 0xffff 8000106a: ffff 0xffff 8000106c: ffff 0xffff 8000106e: ffff 0xffff 80001070: ffff 0xffff 80001072: ffff 0xffff 80001074: ffff 0xffff 80001076: ffff 0xffff 80001078: ffff 0xffff 8000107a: ffff 0xffff 8000107c: ffff 0xffff 8000107e: ffff 0xffff
ObjDump
3
cbrune/VexRiscv
src/test/resources/asm/I-RF_width-01.elf.objdump
[ "MIT" ]
== Words == == Links == == Images == photo.jpg 1
Creole
0
jquorning/ada-wiki
regtests/expect/wiki-collect/img.creole
[ "Apache-2.0" ]
DOUBLE PRECISION FUNCTION rcomp(a,x) C ------------------- C EVALUATION OF EXP(-X)*X**A/GAMMA(A) C ------------------- C RT2PIN = 1/SQRT(2*PI) C ------------------- C .. Scalar Arguments .. DOUBLE PRECISION a,x C .. C .. Local Scalars .. DOUBLE PRECISION rt2pin,t,t1,u C .. C .. External Functions .. DOUBLE PRECISION gam1,gamma,rlog EXTERNAL gam1,gamma,rlog C .. C .. Intrinsic Functions .. INTRINSIC dlog,exp,sqrt C .. C .. Data statements .. DATA rt2pin/.398942280401433D0/ C .. C .. Executable Statements .. C ------------------- rcomp = 0.0D0 IF (a.GE.20.0D0) GO TO 20 t = a*dlog(x) - x IF (a.GE.1.0D0) GO TO 10 rcomp = (a*exp(t))* (1.0D0+gam1(a)) RETURN 10 rcomp = exp(t)/gamma(a) RETURN C 20 u = x/a IF (u.EQ.0.0D0) RETURN t = (1.0D0/a)**2 t1 = (((0.75D0*t-1.0D0)*t+3.5D0)*t-105.0D0)/ (a*1260.0D0) t1 = t1 - a*rlog(u) rcomp = rt2pin*sqrt(a)*exp(t1) RETURN END
FORTRAN
4
Ennosigaeon/scipy
scipy/special/cdflib/rcomp.f
[ "BSD-3-Clause" ]
[Desktop Entry] Type=Application Name=xxh Exec=xxh Comment=xxh on Python {{ python-fullversion }} Icon=python Categories=System; Terminal=true
desktop
1
SBado/xxh
appimage/xxh.desktop
[ "BSD-2-Clause" ]
insert into nested (a) values (1), (10), (100), (1000);
SQL
1
cuishuang/tidb
br/tests/lightning_generated_columns/data/gencol.nested.0.sql
[ "Apache-2.0" ]
# Pass all login requests straight through if (req.url ~ "wp-login") { return (pass); } # Pipe all admin requests directly if (req.url ~ "wp-admin") { return (pipe); } # Pass all requests containing a wp- or wordpress_ cookie # (meaning NO caching for logged in users) if (req.http.Cookie ~ "(^|;\s*)(wp-|wordpress_)") { return (pass); } # Drop *all* cookies sent to Wordpress, if we've gotten this far unset req.http.Cookie; # Try a cache-lookup return (lookup);
VCL
4
ColleenKeegan/genesis-wordpress
provisioning/roles/varnish/files/etc-varnish/conf.d/receive/wordpress.vcl
[ "MIT" ]
#lang scribble/manual @(require scriblib/autobib scribble/core (only-in racket match)) @(provide (all-defined-out)) @(define-cite ~cite citet generate-bibliography #:style number-style) @(abbreviate-given-names #t) @(define rosette:onward13 (make-bib #:title @hyperlink["http://homes.cs.washington.edu/~emina/pubs/rosette.onward13.pdf"]{Growing Solver-Aided Languages with Rosette} #:author (authors "Emina Torlak" "Rastislav Bodik") #:date 2013 #:location "New Ideas, New Paradigms, and Reflections on Programming and Software (Onward!)")) @(define rosette:pldi14 (make-bib #:title @hyperlink["http://homes.cs.washington.edu/~emina/pubs/rosette.pldi14.pdf"]{A Lightweight Symbolic Virtual Machine for Solver-Aided Host Languages} #:author (authors "Emina Torlak" "Rastislav Bodik") #:date 2014 #:location "Programming Language Design and Implementation (PLDI)")) @(define sympro:oopsla18 (make-bib #:title @hyperlink["https://unsat.cs.washington.edu/papers/bornholt-sympro.pdf"]{Finding Code That Explodes Under Symbolic Evaluation} #:author (authors "James Bornholt" "Emina Torlak") #:date 2018 #:location "Object Oriented Programming, Systems, Languages, and Applications (OOPSLA)"))
Racket
4
remysucre/rosette
rosette/guide/scribble/refs.scrbl
[ "BSD-2-Clause" ]
\documentclass{article} \begin{document} \section{Sec} \subsection{Ssec} \paragraph{Par} \section*{Sec} \subsection*{Ssec} \paragraph*{Par} %\section{Sec} \subsection{Ssec} \paragraph{Par} \section*{Sec2} \subsection*{Ssec2} \end{document}
TeX
1
IngoMeyer441/vimtex
test/test-toc/test-starred.tex
[ "MIT" ]
// Convenience script to bump all @babel dependencies of all packages to the latest version const fs = require(`fs`) const execa = require(`execa`) const packages = fs.readdirSync(`./packages`) const versions = {} function getLatestMinor(pkg) { let version if (!versions[pkg]) { version = execa.sync(`npm`, [`show`, pkg, `version`]).stdout // e.g. 7.14.5 -> 7.14.0 const parts = version.split(`.`) parts[parts.length - 1] = 0 version = parts.join(`.`) versions[pkg] = version console.log(`latest ${pkg} minor: `, version) } else { version = versions[pkg] } return version } function replace(deps, library) { if (deps && deps[library]) { deps[library] = `^` + getLatestMinor(library) } } packages.forEach(packageName => { const path = `${process.cwd()}/packages/${packageName}/package.json` fs.readFile(path, (err, json) => { if (err) return const pkg = JSON.parse(json) Object.keys(pkg.dependencies || {}).forEach(dep => { if (dep.startsWith(`@babel/`)) { replace(pkg.dependencies, dep) } }) Object.keys(pkg.devDependencies || {}).forEach(dep => { if (dep.startsWith(`@babel/`)) { replace(pkg.devDependencies, dep) } }) console.log(`updating ${path}`) fs.writeFileSync(path, JSON.stringify(pkg, null, 2) + `\n`) }) })
JavaScript
4
waltercruz/gatsby
scripts/upgrade-babel.js
[ "MIT" ]
CREATE TABLE hdb_catalog.hdb_cron_triggers ( name TEXT PRIMARY KEY, webhook_conf JSON NOT NULL, cron_schedule TEXT NOT NULL, payload JSON, retry_conf JSON, header_conf JSON, include_in_metadata BOOLEAN NOT NULL DEFAULT FALSE, comment TEXT ); CREATE TABLE hdb_catalog.hdb_cron_events ( id TEXT DEFAULT gen_random_uuid() PRIMARY KEY, trigger_name TEXT NOT NULL, scheduled_time TIMESTAMPTZ NOT NULL, additional_payload JSON, status TEXT NOT NULL DEFAULT 'scheduled', tries INTEGER NOT NULL DEFAULT 0, created_at TIMESTAMP DEFAULT NOW(), next_retry_at TIMESTAMPTZ, FOREIGN KEY (trigger_name) REFERENCES hdb_catalog.hdb_cron_triggers(name) ON UPDATE CASCADE ON DELETE CASCADE, CONSTRAINT valid_status CHECK (status IN ('scheduled','locked','delivered','error','dead')) ); CREATE INDEX hdb_cron_event_status ON hdb_catalog.hdb_cron_events (status); CREATE TABLE hdb_catalog.hdb_cron_event_invocation_logs ( id TEXT DEFAULT gen_random_uuid() PRIMARY KEY, event_id TEXT, status INTEGER, request JSON, response JSON, created_at TIMESTAMP DEFAULT NOW(), FOREIGN KEY (event_id) REFERENCES hdb_catalog.hdb_cron_events (id) ON UPDATE CASCADE ON DELETE CASCADE ); CREATE VIEW hdb_catalog.hdb_cron_events_stats AS SELECT ct.name, COALESCE(ce.upcoming_events_count,0) as upcoming_events_count, COALESCE(ce.max_scheduled_time, now()) as max_scheduled_time FROM hdb_catalog.hdb_cron_triggers ct LEFT JOIN ( SELECT trigger_name, count(*) as upcoming_events_count, max(scheduled_time) as max_scheduled_time FROM hdb_catalog.hdb_cron_events WHERE tries = 0 AND status = 'scheduled' GROUP BY trigger_name ) ce ON ct.name = ce.trigger_name; CREATE TABLE hdb_catalog.hdb_scheduled_events ( id TEXT DEFAULT gen_random_uuid() PRIMARY KEY, webhook_conf JSON NOT NULL, scheduled_time TIMESTAMPTZ NOT NULL, retry_conf JSON, payload JSON, header_conf JSON, status TEXT NOT NULL DEFAULT 'scheduled', tries INTEGER NOT NULL DEFAULT 0, created_at TIMESTAMP DEFAULT NOW(), next_retry_at TIMESTAMPTZ, comment TEXT, CONSTRAINT valid_status CHECK (status IN ('scheduled','locked','delivered','error','dead')) ); CREATE INDEX hdb_scheduled_event_status ON hdb_catalog.hdb_scheduled_events (status); CREATE TABLE hdb_catalog.hdb_scheduled_event_invocation_logs ( id TEXT DEFAULT gen_random_uuid() PRIMARY KEY, event_id TEXT, status INTEGER, request JSON, response JSON, created_at TIMESTAMP DEFAULT NOW(), FOREIGN KEY (event_id) REFERENCES hdb_catalog.hdb_scheduled_events (id) ON DELETE CASCADE ON UPDATE CASCADE );
SQL
3
gh-oss-contributor/graphql-engine-1
server/src-rsr/migrations/34_to_35.sql
[ "Apache-2.0", "MIT" ]
dir_guard = @mkdir -p $(@D) FIND := find CXX := g++ CXXFLAGS += -Wall -O3 -std=c++11 LDFLAGS += -lm UNAME := $(shell uname) CUDA_HOME := /usr/local/cuda NVCC := $(CUDA_HOME)/bin/nvcc USE_GPU = 1 ifeq ($(UNAME), Darwin) USE_GPU = 0 FOMP := else LDFLAGS += -fopenmp FOMP := -fopenmp endif ifeq ($(USE_GPU), 1) NVCCFLAGS += --default-stream per-thread LDFLAGS += -L$(CUDA_HOME)/lib64 -lcudart -lcublas -lcurand endif CUDA_ARCH := -gencode arch=compute_60,code=sm_60 -gencode arch=compute_70,code=sm_70 build_root = build ifeq ($(USE_GPU), 1) include_dirs = ./include $(CUDA_HOME)/include else include_dirs = ./include endif CXXFLAGS += $(addprefix -I,$(include_dirs)) -Wno-unused-local-typedef CXXFLAGS += -fPIC cpp_files = $(shell $(FIND) src/lib -name "*.cpp" -print | rev | cut -d"/" -f1 | rev) cxx_obj_files = $(subst .cpp,.o,$(cpp_files)) obj_build_root = $(build_root)/objs objs = $(addprefix $(obj_build_root)/cxx/,$(cxx_obj_files)) ifeq ($(USE_GPU), 1) CXXFLAGS += -DUSE_GPU NVCCFLAGS += -DUSE_GPU NVCCFLAGS += $(addprefix -I,$(include_dirs)) NVCCFLAGS += -std=c++11 --use_fast_math --compiler-options '-fPIC' cu_files = $(shell $(FIND) src/lib -name "*.cu" -printf "%P\n") cu_obj_files = $(subst .cu,.o,$(cu_files)) objs += $(addprefix $(obj_build_root)/cuda/,$(cu_obj_files)) endif DEPS = $(objs:.o=.d) target = $(build_root)/dll/libtree.so target_dep = $(addsuffix .d,$(target)) .PRECIOUS: $(build_root)/lib/%.o all: $(target) $(target) : src/tree_main.cpp $(objs) $(dir_guard) $(CXX) -shared $(CXXFLAGS) -MMD -o $@ $(filter %.cpp %.o, $^) $(LDFLAGS) DEPS += $(target_dep) ifeq ($(USE_GPU), 1) $(obj_build_root)/cuda/%.o: src/lib/%.cu $(dir_guard) $(NVCC) $(NVCCFLAGS) $(CUDA_ARCH) -M $< -o ${@:.o=.d} -odir $(@D) $(NVCC) $(NVCCFLAGS) $(CUDA_ARCH) -c $< -o $@ endif $(obj_build_root)/cxx/%.o: src/lib/%.cpp $(dir_guard) $(CXX) $(CXXFLAGS) -MMD -c -o $@ $(filter %.cpp, $^) $(FOMP) clean: rm -rf $(build_root) -include $(DEPS)
Makefile
4
deepneuralmachine/google-research
bigg/bigg/model/tree_clib/Makefile
[ "Apache-2.0" ]
[Desktop Entry] Name=DevilutionX Hellfire Comment=Diablo 1: Hellfire for GKD350h Exec=devilutionx Terminal=false Type=Application StartupNotify=true Icon=hellfire_32 Categories=games; X-OD-Manual=readme.gcw0.txt X-OD-NeedsDownscaling=true
desktop
1
mewpull/devilutionX
Packaging/OpenDingux/gkd350h-hellfire.desktop
[ "Unlicense" ]
#lang scribble/manual @title[#:style '(unnumbered)]{Example} Content. @section[#:style '(toc unnumbered)]{X} @subsection[#:style '(toc-hidden unnumbered)]{A}
Racket
3
cwebber/scribble
scribble-test/tests/scribble/docs/empty-onthispage.scrbl
[ "Apache-2.0", "MIT" ]
/* * * Created: Jul 30, 2015 * File name: test_cbuff.c * Description: * <INSERT DESCRIPTION HERE> */ #include "../data_structs/circular_buffer/circular_buffer.h" #include "../utils/util.h" #include <stdio.h> //Check that basic construction and destruction works static ch_word test1() { ch_word result = 1; ch_cbuff_t* cb1 = ch_cbuff_new(5,sizeof(i64)); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 0); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 0); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); cbuff_delete(cb1); return result; } //Add a single value to the structure static ch_word test2() { ch_word result = 1; ch_cbuff_t* cb1 = ch_cbuff_new(5,sizeof(i64)); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 0); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 0); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); i64 val = 52; i64* val_p = cbuff_push_back(cb1, &val); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 1); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 1); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); CH_ASSERT(*val_p == val); cbuff_delete(cb1); return result; } //Test the peeking works as expected static ch_word test3() { ch_word result = 1; ch_cbuff_t* cb1 = ch_cbuff_new(5,sizeof(i64)); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 0); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 0); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); i64 val = 52; i64* val_p = cbuff_push_back(cb1, &val); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 1); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 1); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); CH_ASSERT(*val_p == val); val_p = cbuff_peek_front(cb1); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 1); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 1); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); CH_ASSERT(*val_p == val); cbuff_delete(cb1); return result; } //Test the popping works as expected. 
Make sure that push pop push works static ch_word test4() { ch_word result = 1; ch_cbuff_t* cb1 = ch_cbuff_new(5,sizeof(i64)); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 0); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 0); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); i64 val = 52; i64* val_p = cbuff_push_back(cb1, &val); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 1); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 1); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); CH_ASSERT(*val_p == val); cbuff_pop_front(cb1); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 0); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 1); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 1); CH_ASSERT(*val_p == val); i64* val_p2 = cbuff_push_back(cb1, &val); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 1); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 2); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 1); CH_ASSERT(*val_p == val); CH_ASSERT(*val_p2 == val); cbuff_delete(cb1); return result; } //Test pushing a small array static ch_word test5() { ch_word result = 1; ch_cbuff_t* cb1 = ch_cbuff_new(5,sizeof(i64)); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 0); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 0); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); ch_word len = 2; i64 vals[2] = {52,53}; i64* val_p = cbuff_push_back_carray(cb1, vals,&len); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 2); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 2); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); CH_ASSERT(*val_p == vals[0]); CH_ASSERT(len == 2); cbuff_delete(cb1); return result; } //Test pushing a large array static ch_word test6() { ch_word result = 1; ch_cbuff_t* cb1 = ch_cbuff_new(4,sizeof(i64)); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 0); CH_ASSERT(cb1->size == 4); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 0); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); ch_word len = 6; i64 vals[6] = {52,53,54,55,56,57}; i64* val_p = cbuff_push_back_carray(cb1, vals,&len); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 4); CH_ASSERT(cb1->size == 4); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 0); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); CH_ASSERT(*val_p == vals[0]); CH_ASSERT(len == 4); cbuff_delete(cb1); return result; } //Test pushing a large array and then popping everything static ch_word test7() { ch_word result = 1; ch_cbuff_t* cb1 = ch_cbuff_new(4,sizeof(i64)); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 0); CH_ASSERT(cb1->size == 4); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 0); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); ch_word len = 6; i64 vals[6] = {52,53,54,55,56,57}; i64* val_p = cbuff_push_back_carray(cb1, vals,&len); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 4); CH_ASSERT(cb1->size == 4); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 0); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); 
CH_ASSERT(*val_p == vals[0]); CH_ASSERT(len == 4); for(int i = 0; i < len; i++){ cbuff_pop_front(cb1); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == len - 1 - i); CH_ASSERT(cb1->size == 4 ); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 0); CH_ASSERT(cb1->_use_next_index == 0); const ch_word release_idx = (i + 1) % cb1->size; CH_ASSERT(cb1->_release_index == release_idx); } cbuff_delete(cb1); return result; } //Test pushing a small array and then using 1 thing static ch_word test8() { ch_word result = 1; ch_cbuff_t* cb1 = ch_cbuff_new(5,sizeof(i64)); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 0); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 0); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); ch_word len = 2; i64 vals[6] = {52,53,54,55,56,57}; i64* val_p = cbuff_push_back_carray(cb1, vals, &len); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 2); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 2); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); CH_ASSERT(*val_p == vals[0]); CH_ASSERT(len == 2); i64* used = cbuff_use_front(cb1); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 2); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 1); CH_ASSERT(cb1->_add_next_index == 2); CH_ASSERT(cb1->_use_next_index == 1); CH_ASSERT(cb1->_release_index == 0); CH_ASSERT(*used == vals[0]); cbuff_delete(cb1); return result; } //Test pushing a small array and then using all things static ch_word test9() { ch_word result = 1; ch_cbuff_t* cb1 = ch_cbuff_new(5,sizeof(i64)); CH_ASSERT(cb1->count == 0); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 0); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); ch_word len = 2; i64 vals[6] = {52,53,54,55,56,57}; i64* val_p = cbuff_push_back_carray(cb1, vals, &len); CH_ASSERT(cb1->count == 2); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 2); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); CH_ASSERT(*val_p == vals[0]); CH_ASSERT(len == 2); int i = 0; i64* used = cbuff_use_front(cb1); for(; used; used = cbuff_use_front(cb1), i++){ CH_ASSERT(cb1->count == 2); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == i + 1); CH_ASSERT(cb1->_add_next_index == 2); const ch_word use_next = (i + 1) % cb1->size; CH_ASSERT(cb1->_use_next_index == use_next ); CH_ASSERT(cb1->_release_index == 0); CH_ASSERT(*used == vals[i]); } cbuff_delete(cb1); return result; } //Test pushing a small array and then using all things, then releasing static ch_word test10() { //Construct and check the basic conditions ch_word result = 1; ch_cbuff_t* cb1 = ch_cbuff_new(5,sizeof(i64)); CH_ASSERT(cb1->count == 0); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 0); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); //Now push a small array on ch_word len = 2; i64 vals[6] = {52,53,54,55,56,57}; i64* val_p = cbuff_push_back_carray(cb1, vals, &len); CH_ASSERT(cb1->count == 2); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 2); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); CH_ASSERT(*val_p == vals[0]); CH_ASSERT(len == 2); //Now use everything int i = 0; i64* used = cbuff_use_front(cb1); ch_word use_next = 0; for(; used; used = cbuff_use_front(cb1), 
i++){ CH_ASSERT(cb1->count == len); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == i + 1); CH_ASSERT(cb1->_add_next_index == len); use_next = (i + 1) % cb1->size; CH_ASSERT(cb1->_use_next_index == use_next ); CH_ASSERT(cb1->_release_index == 0); CH_ASSERT(*used == vals[i]); } CH_ASSERT( i = 1); CH_ASSERT(used == NULL); //Now pop everything off for(int i = 0; i < len; i++){ cbuff_pop_front(cb1); CH_ASSERT(cb1->count == len - 1 - i); CH_ASSERT(cb1->size == 5 ); CH_ASSERT(cb1->in_use == len - i - 1); CH_ASSERT(cb1->_add_next_index == len); CH_ASSERT(cb1->_use_next_index == use_next); const ch_word release_idx = (i + 1) % cb1->size; CH_ASSERT(cb1->_release_index == release_idx); } used = cbuff_use_front(cb1); CH_ASSERT(used == NULL); cbuff_delete(cb1); return result; } //Test pushing a bigger array and then usse some things, release and try to push and use more static ch_word test11() { //Construct and check the basic conditions ch_word result = 1; ch_cbuff_t* cb1 = ch_cbuff_new(5,sizeof(i64)); CH_ASSERT(cb1->count == 0); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 0); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); //Now push a small array on ch_word len = 3; i64 vals[6] = {52,53,54,55,56,57}; i64* val_p = cbuff_push_back_carray(cb1, vals, &len); CH_ASSERT(cb1->count == 3); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 3); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); CH_ASSERT(*val_p == vals[0]); CH_ASSERT(len == 3); i64* used = cbuff_use_front(cb1); CH_ASSERT(used != NULL); CH_ASSERT(cb1->count == 3); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 1); CH_ASSERT(cb1->_add_next_index == 3); CH_ASSERT(cb1->_use_next_index == 1 ); CH_ASSERT(cb1->_release_index == 0); CH_ASSERT(*used == vals[0]); cbuff_pop_front(cb1); CH_ASSERT(cb1->count == 2); CH_ASSERT(cb1->size == 5 ); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 3); CH_ASSERT(cb1->_use_next_index == 1); CH_ASSERT(cb1->_release_index == 1); cbuff_delete(cb1); return result; } //Test pushing a bigger array and then usse some things, release and try to push and use more static ch_word test12() { //Construct and check the basic conditions ch_word result = 1; ch_cbuff_t* cb1 = ch_cbuff_new(5,sizeof(i64)); CH_ASSERT(cb1->count == 0); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 0); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); //Now push a small array on ch_word len = 1; i64 vals[6] = {52,53,54,55,56,57}; i64* val_p = cbuff_push_back_carray(cb1, vals, &len); CH_ASSERT(cb1->count == 1); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 1); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); CH_ASSERT(*val_p == vals[0]); CH_ASSERT(len == 1); //Use the first item i64* used = cbuff_use_front(cb1); CH_ASSERT(used != NULL); CH_ASSERT(cb1->count == 1); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 1); CH_ASSERT(cb1->_add_next_index == 1); CH_ASSERT(cb1->_use_next_index == 1 ); CH_ASSERT(cb1->_release_index == 0); CH_ASSERT(*used == vals[0]); //Then unuse it cbuff_unuse_front(cb1); CH_ASSERT(cb1->count == 1); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 1); CH_ASSERT(cb1->_use_next_index == 0 ); CH_ASSERT(cb1->_release_index == 0); CH_ASSERT(*used == vals[0]); //Then use it again used = 
cbuff_use_front(cb1); CH_ASSERT(*used == vals[0]); CH_ASSERT(used != NULL); CH_ASSERT(cb1->count == 1); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 1); CH_ASSERT(cb1->_add_next_index == 1); CH_ASSERT(cb1->_use_next_index == 1 ); CH_ASSERT(cb1->_release_index == 0); //Now use another item used = cbuff_use_front(cb1); CH_ASSERT(used == NULL); CH_ASSERT(cb1->count == 1); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 1); CH_ASSERT(cb1->_add_next_index == 1); CH_ASSERT(cb1->_use_next_index == 1 ); CH_ASSERT(cb1->_release_index == 0); //Now we pop what we have used cbuff_pop_front(cb1); CH_ASSERT(cb1->count == 0); CH_ASSERT(cb1->size == 5 ); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 1); CH_ASSERT(cb1->_use_next_index == 1); CH_ASSERT(cb1->_release_index == 1); //Now we add some more len = 6; val_p = cbuff_push_back_carray(cb1, vals, &len); CH_ASSERT(len == 5); CH_ASSERT(cb1->count == 5); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 1); CH_ASSERT(cb1->_use_next_index == 1); CH_ASSERT(cb1->_release_index == 1); CH_ASSERT(*val_p == vals[0]); //And try to use a value used = cbuff_use_front(cb1); CH_ASSERT(*used == vals[0]); CH_ASSERT(used != NULL); CH_ASSERT(cb1->count == 5); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 1); CH_ASSERT(cb1->_add_next_index == 1); CH_ASSERT(cb1->_use_next_index == 2 ); CH_ASSERT(cb1->_release_index == 1); cbuff_delete(cb1); return result; } //Test pushing a bigger array and then usse some things, release and try to push and use more static ch_word test13() { //Construct and check the basic conditions ch_word result = 1; ch_cbuff_t* cb1 = ch_cbuff_new(5,sizeof(i64)); CH_ASSERT(cb1->count == 0); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 0); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); //Now push a small array on ch_word len = 1; i64 vals[6] = {52,53,54,55,56,57}; i64* val_p = cbuff_push_back_carray(cb1, vals, &len); CH_ASSERT(cb1->count == 1); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 1); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); CH_ASSERT(*val_p == vals[0]); CH_ASSERT(len == 1); //Pop that item off cbuff_pop_front(cb1); CH_ASSERT(cb1->count == 0); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 1); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 1); //Try to use an item i64* used = cbuff_use_front(cb1); CH_ASSERT(used == NULL); CH_ASSERT(cb1->count == 0); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 1); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 1); i64 val2 = 8923; i64* val2_p = cbuff_push_back(cb1, &val2); CH_ASSERT(cb1->count == 1); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 2); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 1); CH_ASSERT(*val2_p == val2); CH_ASSERT(len == 1); //Try to use an item used = cbuff_use_front(cb1); CH_ASSERT(used != NULL); CH_ASSERT(cb1->count == 1); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 1); CH_ASSERT(cb1->_add_next_index == 2); CH_ASSERT(cb1->_use_next_index == 2); CH_ASSERT(cb1->_release_index == 1); CH_ASSERT(*used = val2); cbuff_delete(cb1); return result; } //Test pushing a small array and then popping, then pushing big array and checking static ch_word test14() { ch_word 
result = 1; ch_cbuff_t* cb1 = ch_cbuff_new(5,sizeof(i64)); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 0); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 0); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); ch_word len = 1; i64 vals1[6] = {52,53,54,55,56,57}; i64* val1_p = cbuff_push_back_carray(cb1, vals1, &len); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 1); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 1); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 0); CH_ASSERT(*val1_p == vals1[0]); CH_ASSERT(len == 1); cbuff_pop_front(cb1); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 0); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 1); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 1); len = 5; i64 vals2[6] = {2,3,4,5,1}; i64* val2_p = cbuff_push_back_carray(cb1, vals2, &len); CH_ASSERT(cb1->_array != NULL); CH_ASSERT(cb1->count == 5); CH_ASSERT(cb1->size == 5); CH_ASSERT(cb1->in_use == 0); CH_ASSERT(cb1->_add_next_index == 1); CH_ASSERT(cb1->_use_next_index == 0); CH_ASSERT(cb1->_release_index == 1); CH_ASSERT(len == 5); CH_ASSERT(*val2_p == vals2[0]); CH_ASSERT( ((i64*)(cb1->_array->first))[0] == vals2[4]); CH_ASSERT( ((i64*)(cb1->_array->first))[1] == vals2[0]); CH_ASSERT( ((i64*)(cb1->_array->first))[2] == vals2[1]); CH_ASSERT( ((i64*)(cb1->_array->first))[3] == vals2[2]); CH_ASSERT( ((i64*)(cb1->_array->first))[4] == vals2[3]); cbuff_delete(cb1); return result; } int main(int argc, char** argv) { (void)argc; (void)argv; ch_word test_pass = 0; printf("CH Data Structures: Circular Queue Test 01: "); printf("%s", (test_pass = test1()) ? "PASS\n" : "FAIL\n"); if(!test_pass) return 1; printf("CH Data Structures: Circular Queue Test 02: "); printf("%s", (test_pass = test2()) ? "PASS\n" : "FAIL\n"); if(!test_pass) return 1; printf("CH Data Structures: Circular Queue Test 03: "); printf("%s", (test_pass = test3()) ? "PASS\n" : "FAIL\n"); if(!test_pass) return 1; printf("CH Data Structures: Circular Queue Test 04: "); printf("%s", (test_pass = test4()) ? "PASS\n" : "FAIL\n"); if(!test_pass) return 1; printf("CH Data Structures: Circular Queue Test 05: "); printf("%s", (test_pass = test5()) ? "PASS\n" : "FAIL\n"); if(!test_pass) return 1; printf("CH Data Structures: Circular Queue Test 06: "); printf("%s", (test_pass = test6()) ? "PASS\n" : "FAIL\n"); if(!test_pass) return 1; printf("CH Data Structures: Circular Queue Test 07: "); printf("%s", (test_pass = test7()) ? "PASS\n" : "FAIL\n"); if(!test_pass) return 1; printf("CH Data Structures: Circular Queue Test 08: "); printf("%s", (test_pass = test8()) ? "PASS\n" : "FAIL\n"); if(!test_pass) return 1; printf("CH Data Structures: Circular Queue Test 09: "); printf("%s", (test_pass = test9()) ? "PASS\n" : "FAIL\n"); if(!test_pass) return 1; printf("CH Data Structures: Circular Queue Test 10: "); printf("%s", (test_pass = test10()) ? "PASS\n" : "FAIL\n"); if(!test_pass) return 1; printf("CH Data Structures: Circular Queue Test 11: "); printf("%s", (test_pass = test11()) ? "PASS\n" : "FAIL\n"); if(!test_pass) return 1; printf("CH Data Structures: Circular Queue Test 12: "); printf("%s", (test_pass = test12()) ? "PASS\n" : "FAIL\n"); if(!test_pass) return 1; printf("CH Data Structures: Circular Queue Test 13: "); printf("%s", (test_pass = test13()) ? 
"PASS\n" : "FAIL\n"); if(!test_pass) return 1; printf("CH Data Structures: Circular Queue Test 14: "); printf("%s", (test_pass = test14()) ? "PASS\n" : "FAIL\n"); if(!test_pass) return 1; return 0; }
XC
4
mgrosvenor/libchaste
tests/test_cbuff.xc
[ "BSD-3-Clause" ]
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ #include <react/renderer/components/root/RootComponentDescriptor.h> #include <react/renderer/core/PropsParserContext.h> #include <react/renderer/element/ComponentBuilder.h> #include <gtest/gtest.h> #include <react/renderer/element/Element.h> #include <react/renderer/element/testUtils.h> namespace facebook::react { TEST(RootShadowNodeTest, cloneWithLayoutConstraints) { ContextContainer contextContainer{}; PropsParserContext parserContext{-1, contextContainer}; auto builder = simpleComponentBuilder(); std::shared_ptr<RootShadowNode> rootShadowNode; LayoutConstraints defaultLayoutConstraints = {}; auto element = Element<RootShadowNode>().reference(rootShadowNode).tag(1).props([&] { auto sharedProps = std::make_shared<RootProps>(); sharedProps->layoutConstraints = defaultLayoutConstraints; return sharedProps; }); builder.build(element); EXPECT_FALSE(rootShadowNode->getIsLayoutClean()); EXPECT_TRUE(rootShadowNode->layoutIfNeeded()); EXPECT_TRUE(rootShadowNode->getIsLayoutClean()); auto clonedWithDiffentLayoutConstraints = rootShadowNode->clone( parserContext, LayoutConstraints{{0, 0}, {10, 10}}, {}); EXPECT_FALSE(clonedWithDiffentLayoutConstraints->getIsLayoutClean()); EXPECT_TRUE(clonedWithDiffentLayoutConstraints->layoutIfNeeded()); } } // namespace facebook::react
C++
3
bobzhang/react-native
ReactCommon/react/renderer/components/root/tests/RootShadowNodeTest.cpp
[ "CC-BY-4.0", "MIT" ]