repo_name
stringlengths
4
116
path
stringlengths
4
379
size
stringlengths
1
7
content
stringlengths
3
1.05M
license
stringclasses
15 values
JaDogg/__py_playground
reference/sketchbook/lex/deriv2.py
4558
""" Regular expression matching, and incremental DFA construction, using even-more-simplified and memoized Brzozowski derivatives. Rather less clunky, but also less Pythonic, than deriv.py. """ from memo import memoize def match(re, s): for c in s: re = re.deriv(c) # (This redundant test lets us exit early sometimes.) if re is fail: return False return re.nullable def mark(nullable, deriv, tag, args): deriv.nullable = nullable deriv.tag = tag deriv.args = args deriv.deriv = memoize(deriv) return deriv fail = mark(False, lambda c: fail, 'fail', ()) empty = mark(True, lambda c: fail, 'empty', ()) def _lit(literal): return mark(False, lambda c: empty if c == literal else fail, 'lit', literal) class Maker: def __init__(self): self.empty = empty self.lit = memoize(_lit) self.mkalt = memoize(self._alt) self.mkseq = memoize(self._seq) self.mkmany = memoize(self._many) def alt(self, *res): acc = collect_alternatives(res) if len(acc) == 0: return fail if len(acc) == 1: return acc[0] return self.mkalt(frozenset(acc)) def _alt(self, re_set): return mark(any(re.nullable for re in re_set), lambda c: self.alt(*[re.deriv(c) for re in re_set]), 'alt', re_set) def seq(self, *res): if fail in res: return fail res = collect_sequence(res) if len(res) == 0: return empty if len(res) == 1: return res[0] return self.mkseq(res) def _seq(self, res): hd, tl = res[0], res[1:] if hd.nullable: def sequence(c): return self.alt(self.seq(hd.deriv(c), *tl), self.seq(*tl).deriv(c)) else: def sequence(c): return self.seq(hd.deriv(c), *tl) return mark(all(re.nullable for re in res), sequence, 'seq', res) def many(self, re): if re is fail or re is empty: return empty if re.tag is 'many': return re return self.mkmany(re) def _many(self, re): def loop(c): return self.seq(re.deriv(c), loop) return mark(True, loop, 'many', (re,)) def make_scanner(self, whitespace, res): return self.seq(self.many(whitespace), reduce(self.alt, res)) def collect_alternatives(res): acc = [] for re in res: if re.tag is 'alt': 
acc.extend(re.args) elif re is not fail: acc.append(re) return acc def collect_sequence(res): acc = [] for re in res: if re.tag is 'seq': acc.extend(re.args) elif re is not empty: acc.append(re) return tuple(acc) def show(re): if re.tag is 'lit': return repr(re.args) return '%s(%s)' % (re.tag, ', '.join(map(show, re.args))) ## mk = Maker() ## match(fail, '') #. False ## match(empty, '') #. True ## match(empty, 'A') #. False ## match(mk.lit('x'), '') #. False ## match(mk.lit('x'), 'y') #. False ## match(mk.lit('x'), 'x') #. True ## match(mk.lit('x'), 'xx') #. False ### match(mk.lit('abc'), 'abc') ## match(mk.seq(mk.lit('a'), mk.lit('b')), '') #. False ## match(mk.seq(mk.lit('a'), mk.lit('b')), 'ab') #. True ## match(mk.alt(mk.lit('a'), mk.lit('b')), 'b') #. True ## match(mk.alt(mk.lit('a'), mk.lit('b')), 'a') #. True ## match(mk.alt(mk.lit('a'), mk.lit('b')), 'x') #. False ## match(mk.many(mk.lit('a')), '') #. True ## match(mk.many(mk.lit('a')), 'a') #. True ## match(mk.many(mk.lit('a')), 'x') #. False ## match(mk.many(mk.lit('a')), 'aa') #. True ## complicated = mk.seq(mk.many(mk.alt(mk.seq(mk.lit('a'), mk.lit('b')), mk.seq(mk.lit('a'), mk.seq(mk.lit('x'), mk.lit('y'))))), mk.lit('z')) ## match(complicated, '') #. False ## match(complicated, 'z') #. True ## match(complicated, 'abz') #. True ## match(complicated, 'ababaxyab') #. False ## match(complicated, 'ababaxyabz') #. True ## match(complicated, 'ababaxyaxz') #. False ## match(mk.many(mk.many(mk.lit('x'))), 'xxxx') #. True ## match(mk.many(mk.many(mk.lit('x'))), 'xxxxy') #. False ## match(mk.seq(empty, mk.lit('x')), '') #. False ## match(mk.seq(empty, mk.lit('x')), 'x') #. True ## mk.lit('x') is mk.lit('x') #. True ## mk.alt(mk.lit('x'), mk.lit('y')) is mk.alt(mk.lit('x'), mk.lit('y')) #. True ## mk.seq(mk.lit('x'), mk.lit('y')) is mk.seq(mk.lit('x'), mk.lit('y')) #. True ## mk.many(mk.lit('x')) is mk.many(mk.lit('x')) #. True
mit
fingo/urlopia
view.react/src/contexts/request-context/actions/createRequest.js
3092
import {getCurrentUser} from "../../../api/services/session.service"; import {pushSuccessNotification} from "../../../helpers/notifications/Notifications"; import {sendPostRequest} from "../../../helpers/RequestHelper"; import {CREATE_REQUEST_ACTION_PREFIX, CREATE_REQUEST_URL} from "../constants"; export const createRequest = (dispatch, {startDate, endDate, type, occasionalType}, isAdmin) => { dispatch({type: `${CREATE_REQUEST_ACTION_PREFIX}_request`}) sendPostRequest(CREATE_REQUEST_URL, { startDate, endDate, type, occasionalType, }) .then(data => { const action = { type: `${CREATE_REQUEST_ACTION_PREFIX}_success`, payload: { isAdmin, occasionalType, type }, response: data, } dispatch(action) pushNotificationOnSuccess(action) }) .catch(errorMsg => dispatch({ type: `${CREATE_REQUEST_ACTION_PREFIX}_failure`, error: errorMsg, })) } const getNotificationSuffix = (requestType) => { const {ec: isUserEC} = getCurrentUser(); return isUserEC ? getHolidaySuffixFor(requestType) : getBreakSuffixFor(requestType) } const getHolidaySuffixFor = (requestType) => { return requestType === "OCCASIONAL"? "urlop okolicznościowy" : "urlop wypoczynkowy" } const getBreakSuffixFor = (requestType) => { return requestType === "OCCASIONAL"? 
"przerwę okolicznościową" : "przerwę wypoczynkową" } const pushNotificationOnSuccess = action => { const suffix = getNotificationSuffix(action.payload.type) pushSuccessNotification(`Pomyślnie złożono wniosek o ${suffix}`) } export const createRequestReducer = (state, action) => { switch (action.type) { case `${CREATE_REQUEST_ACTION_PREFIX}_request`: { return { ...state, contextError: null } } case `${CREATE_REQUEST_ACTION_PREFIX}_success`: { const {isAdmin, occasionalType} = action.payload; let newCompanyRequests = state.companyRequests.requests if (!occasionalType && isAdmin) { newCompanyRequests = [action.response,...state.companyRequests.requests]; } return { ...state, myRequests: { ...state.myRequests, fetching: false, requests: [action.response, ...state.myRequests.requests], }, companyRequests: { ...state.companyRequests, fetching: false, requests: newCompanyRequests, }, } } case `${CREATE_REQUEST_ACTION_PREFIX}_failure`: { return { ...state, contextError: action.error } } default: { throw new Error(`Unhandled exception type ${action.type}`) } } }
mit
PulsarBlow/SuperMassive
src/SuperMassive/Helpers/XmlHelper.cs
12026
namespace SuperMassive { using System.Diagnostics.CodeAnalysis; using System.Linq; using System; using System.Collections.Specialized; using System.IO; using System.Runtime.Serialization; using System.Text; using System.Xml; using System.Xml.Linq; /// <summary> /// DataContract and other Xml serialization utilities /// </summary> public static class XmlHelper { /// <summary> /// Builds an object of the specified type from the given XML representation that can be passed to the DataContractSerializer /// </summary> /// <param name="xml"></param> /// <param name="type"></param> /// <returns></returns> public static object ReadObject(string xml, Type type) { if (string.IsNullOrEmpty(xml)) throw new ArgumentNullException(nameof(xml)); if (type == null) throw new ArgumentNullException(nameof(type)); var serializer = new DataContractSerializer(type); using var reader = new StringReader(xml); XmlReader xmlReader = new XmlTextReader(reader); return serializer.ReadObject(xmlReader); } /// <summary> /// Builds an object from its XML representation that can be passed to the DataContractSerializer. /// </summary> /// <param name="xml"></param> /// <returns></returns> public static T ReadObject<T>(string xml) { if (string.IsNullOrEmpty(xml)) throw new ArgumentNullException(nameof(xml)); var objResult = ReadObject(xml, typeof(T)); var result = (T) objResult; return result; } /// <summary> /// Read the XML representation of an object from a file. /// </summary> /// <typeparam name="T"></typeparam> /// <param name="fileName"></param> /// <returns></returns> public static T ReadObjectFromFile<T>(string fileName) { var objResult = ReadObjectFromFile(fileName, typeof(T)); var result = (T) objResult; return result; } /// <summary> /// Reads the XML representation of an object from a file. 
/// </summary> /// <param name="fileName"></param> /// <param name="type"></param> /// <exception cref="FileNotFoundException"></exception> /// <returns></returns> public static object ReadObjectFromFile(string fileName, Type type) { if (string.IsNullOrEmpty(fileName)) throw new ArgumentNullException(nameof(fileName)); if (!File.Exists(fileName)) throw new FileNotFoundException("File not found", fileName); using var reader = new StreamReader(fileName, Encoding.UTF8); var xml = reader.ReadToEnd(); return ReadObject(xml, type); } /// <summary> /// Gets the XML representation of the given object by using the DataContracSerializer. /// </summary> /// <typeparam name="T"></typeparam> /// <param name="obj"></param> /// <returns></returns> public static string WriteObject<T>(T obj) { if (obj == null) throw new ArgumentNullException(nameof(obj)); return WriteObject(obj.GetType(), obj); } /// <summary> /// Gets the XML representation of the given object of specified type by using the DataContracSerializer. /// </summary> /// <param name="type"></param> /// <param name="value"></param> /// <returns></returns> public static string WriteObject(Type type, object value) { if (type == null) throw new ArgumentNullException(nameof(type)); if (value == null) throw new ArgumentNullException(nameof(value)); var builder = new StringBuilder(); var serializer = new DataContractSerializer(type); var settings = new XmlWriterSettings { Encoding = Encoding.UTF8, OmitXmlDeclaration = true }; using var xWriter = XmlWriter.Create(builder, settings); serializer.WriteObject(xWriter, value); xWriter.Flush(); return builder.ToString(); } /// <summary> /// Gets the XML representation the given <see cref="NameValueCollection"/> /// </summary> /// <param name="myCollection"></param> /// <param name="rootName"></param> /// <param name="namespaceUri"></param> /// <returns></returns> public static string? 
WriteObject(NameValueCollection myCollection, string rootName, string namespaceUri) { var xmlDocument = WriteObjectToXmlDocument(myCollection, rootName, namespaceUri); return xmlDocument?.InnerXml; } /// <summary> /// Writes the XML representation from the DataContractSerializer /// into the specified filename. If a file at filename /// already exists then an Exception will be thrown. /// </summary> /// <typeparam name="T"></typeparam> /// <param name="fileName"></param> /// <param name="obj"></param> public static void WriteObjectToFile<T>(string fileName, [DisallowNull] T obj) { WriteObjectToFile(fileName, obj.GetType(), obj); } /// <summary> /// Writes the XML representation from the DataContractSerializer /// into the specified filename. If a file at filename /// already exists then an Exception will be thrown. /// </summary> /// <param name="fileName"></param> /// <param name="type"></param> /// <param name="value"></param> public static void WriteObjectToFile(string fileName, Type type, object value) { if (string.IsNullOrEmpty(fileName)) throw new ArgumentNullException(nameof(fileName)); using var writer = new StreamWriter(fileName, false, Encoding.UTF8); writer.Write(WriteObject(type, value)); } /// <summary> /// Writes the XML representation of the <see cref="NameValueCollection"/> into a new <see cref="XmlDocument"/> instance. /// </summary> /// <param name="myCollection"></param> /// <param name="rootName"></param> /// <param name="namespaceUri"></param> /// <returns></returns> public static XmlDocument? WriteObjectToXmlDocument( NameValueCollection myCollection, string rootName, string namespaceUri) { if (string.IsNullOrEmpty(rootName)) rootName = "xmldocument"; var xmlDocument = new XmlDocument(); var xmlRoot = xmlDocument.CreateElement( string.Empty, XmlConvert.EncodeName(rootName.ToLowerInvariant()), namespaceUri); foreach (string? item in myCollection) { string s = item ?? 
string.Empty; var xmlElement = xmlDocument.CreateElement(string.Empty, XmlConvert.EncodeName(s.ToLowerInvariant()), namespaceUri); var xmlCDataSection = xmlDocument.CreateCDataSection(myCollection[item]); xmlElement.AppendChild(xmlCDataSection); xmlRoot.AppendChild(xmlElement); } xmlDocument.AppendChild(xmlRoot); return xmlDocument; } /// <summary> /// Writes the XML representation of the <see cref="NameValueCollection"/> into a new <see cref="XDocument"/> instance. /// </summary> /// <param name="myCollection"></param> /// <param name="rootName"></param> /// <param name="namespaceUri"></param> /// <returns></returns> public static XDocument? WriteObjectToXDocument(NameValueCollection myCollection, string rootName, string namespaceUri) { if (myCollection == null) return null; if (namespaceUri == null) namespaceUri = string.Empty; rootName = string.IsNullOrEmpty(rootName) ? "xdocument" : XmlConvert.EncodeName(rootName.ToLowerInvariant()); XNamespace ns = namespaceUri; var xDocument = new XDocument( new XElement(ns + rootName)); foreach (string? item in myCollection) { string s = item ?? string.Empty; xDocument.Element(ns + rootName) ?.Add( new XElement(ns + XmlConvert.EncodeName(s.ToLowerInvariant()), myCollection[item] != null ? new XCData(myCollection[item]) : null)); } return xDocument; } /// <summary> /// Returns the string content of the given <see cref="XDocument"/>. /// </summary> /// <param name="document"></param> /// <param name="settings"></param> /// <returns></returns> public static string ToString(XDocument document, XmlWriterSettings settings) { if (document == null) return string.Empty; var sb = new StringBuilder(); using var writer = XmlWriter.Create(sb, settings); document.Save(writer); return sb.ToString(); } /// <summary> /// Returns the string content of the given <see cref="XDocument"/>. 
/// </summary> /// <param name="document"></param> /// <returns></returns> public static string ToString(XDocument document) { var settings = new XmlWriterSettings { Encoding = Encoding.UTF8, OmitXmlDeclaration = true }; return ToString(document, settings); } /// <summary> /// Returns the cleaned (no illegal chars) string content of the given <see cref="XDocument"/>. /// </summary> /// <param name="document"></param> /// <returns></returns> public static string? ToCleanString(XDocument document) { if (document == null) return null; var settings = new XmlWriterSettings() { Encoding = Encoding.UTF8, OmitXmlDeclaration = true, CheckCharacters = false }; var sb = new StringBuilder(); using var writer = XmlWriter.Create(sb, settings); document.Save(writer); return SanitizeXmlString(sb.ToString()); } /// <summary> /// Whether a given character is allowed by XML 1.0. /// </summary> public static bool IsLegalXmlChar(int character) { return ( character == 0x9 /* == '\t' == 9 */ || character == 0xA /* == '\n' == 10 */ || character == 0xD /* == '\r' == 13 */ || (character >= 0x20 && character <= 0xD7FF) || (character >= 0xE000 && character <= 0xFFFD) || (character >= 0x10000 && character <= 0x10FFFF) ); } /// <summary> /// Remove illegal XML characters from a string. /// </summary> public static string SanitizeXmlString(string xml) { if (string.IsNullOrEmpty(xml)) { return xml; } var buffer = new StringBuilder(xml.Length); foreach (var c in xml.Where(c => IsLegalXmlChar(c))) { buffer.Append(c); } return buffer.ToString(); } } }
mit
SkillsFundingAgency/das-employerapprenticeshipsservice
src/SFA.DAS.EmployerFinance.UnitTests/Commands/RefreshPaymentDataTests/WhenIHAveCompletedProcessing.cs
7492
using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using MediatR; using Moq; using NServiceBus; using NUnit.Framework; using SFA.DAS.EmployerFinance.Commands.RefreshPaymentData; using SFA.DAS.EmployerFinance.Data; using SFA.DAS.EmployerFinance.Messages.Events; using SFA.DAS.EmployerFinance.Models.Payments; using SFA.DAS.EmployerFinance.Services; using SFA.DAS.NLog.Logger; using SFA.DAS.NServiceBus.Testing.Services; using SFA.DAS.Testing; using SFA.DAS.Validation; namespace SFA.DAS.EmployerFinance.UnitTests.Commands.RefreshPaymentDataTests { [TestFixture] public class RefreshPaymentDataCommandHandlerTests : FluentTest<RefreshPaymentDataCommandHandlerTestsFixture> { [Test] public Task WhenIHaveCompletedProcessing_AndHaveNewPayments() { const long accountId = 999; return RunAsync(f => f .SetAccountId(accountId) .SetPeriodEnd("2017R14") .SetExistingPayments(new List<Guid> { Guid.Parse("953EC305-06FD-483C-A155-50211921143C"), Guid.Parse("963EC305-06FD-483C-A155-50211921143C"), Guid.Parse("973EC305-06FD-483C-A155-50211921143C"), }) .SetIncomingPayments(new List<PaymentDetails> { new PaymentDetails { Id = Guid.NewGuid(), Amount = 99, EmployerAccountId = accountId, } }) , f => f.Handle(), (f, r) => { f.VerifyRefreshPaymentDataCompletedEventIsPublished(true); }); } [Test] public Task WhenIHaveCompletedProcessing_AndHaveReceivedOnlyMatchingPayments() { const long accountId = 999; var paymentGuid = Guid.Parse("953EC305-06FD-483C-A155-50211921143C"); return RunAsync(f => f .SetAccountId(accountId) .SetPeriodEnd("2017R14") .SetExistingPayments(new List<Guid> { paymentGuid, Guid.Parse("963EC305-06FD-483C-A155-50211921143C"), Guid.Parse("973EC305-06FD-483C-A155-50211921143C"), }) .SetIncomingPayments(new List<PaymentDetails> { new PaymentDetails { Id = paymentGuid, Amount = 99, EmployerAccountId = accountId, } }) , f => f.Handle(), (f, r) => { f.VerifyRefreshPaymentDataCompletedEventIsPublished(false); }); } [Test] public Task 
WhenIHaveCompletedProcessing_AndHaveReceievedNoPayments() { const long accountId = 999; return RunAsync(f => f .SetAccountId(accountId) .SetPeriodEnd("2017R14") .SetExistingPayments(new List<Guid> { Guid.Parse("963EC305-06FD-483C-A155-50211921143C"), Guid.Parse("973EC305-06FD-483C-A155-50211921143C"), }) .SetIncomingPayments(new List<PaymentDetails>()) , f => f.Handle(), (f, r) => { f.VerifyRefreshPaymentDataCompletedEventIsPublished(false); }); } [Test] public Task WhenIHaveCompletedProcessing_AndHaveReceievedNullPayments() { const long accountId = 999; return RunAsync(f => f .SetAccountId(accountId) .SetPeriodEnd("2017R14") .SetExistingPayments(new List<Guid> { Guid.Parse("963EC305-06FD-483C-A155-50211921143C"), Guid.Parse("973EC305-06FD-483C-A155-50211921143C"), }) .SetIncomingPayments(null) , f => f.Handle(), (f, r) => { f.VerifyRefreshPaymentDataCompletedEventIsPublished(false); }); } } public class RefreshPaymentDataCommandHandlerTestsFixture : FluentTestFixture { private readonly Mock<IDasLevyRepository> _dasLevyRepository; private readonly Mock<IPaymentService> _paymentService; private readonly RefreshPaymentDataCommandHandler _handler; private readonly TestableEventPublisher eventPublisher; private long _accountId = 999; private string _periodEnd = "2018R12"; public RefreshPaymentDataCommandHandlerTestsFixture() { _dasLevyRepository = new Mock<IDasLevyRepository>(); var logger = new Mock<ILog>(); var mediator = new Mock<IMediator>(); _paymentService = new Mock<IPaymentService>(); var validator = new Mock<IValidator<RefreshPaymentDataCommand>>(); eventPublisher = new TestableEventPublisher(); validator.Setup(x => x.Validate(It.IsAny<RefreshPaymentDataCommand>())) .Returns(new ValidationResult { ValidationDictionary = new Dictionary<string, string>() }); _handler = new RefreshPaymentDataCommandHandler( eventPublisher, validator.Object, _paymentService.Object, _dasLevyRepository.Object, mediator.Object, logger.Object); } public 
RefreshPaymentDataCommandHandlerTestsFixture SetAccountId(long accountId) { _accountId = accountId; return this; } public RefreshPaymentDataCommandHandlerTestsFixture SetPeriodEnd(string periodEnd) { _periodEnd = periodEnd; return this; } public RefreshPaymentDataCommandHandlerTestsFixture SetExistingPayments(List<Guid> guids) { _dasLevyRepository.Setup(x => x.GetAccountPaymentIds(It.IsAny<long>())) .ReturnsAsync(new HashSet<Guid>(guids)); return this; } public RefreshPaymentDataCommandHandlerTestsFixture SetIncomingPayments(List<PaymentDetails> paymentDetails) { _paymentService.Setup(x => x.GetAccountPayments(It.IsAny<string>(), It.IsAny<long>(), It.IsAny<Guid>())) .ReturnsAsync(paymentDetails); return this; } public Task<Unit> Handle() { return _handler.Handle(new RefreshPaymentDataCommand { AccountId = _accountId, PeriodEnd = _periodEnd }); } public void VerifyRefreshPaymentDataCompletedEventIsPublished(bool expectedPaymentProcessedValue) { Assert.IsTrue(eventPublisher.Events.OfType<RefreshPaymentDataCompletedEvent>().Any(e => e.AccountId == _accountId && e.PeriodEnd == _periodEnd && e.PaymentsProcessed == expectedPaymentProcessedValue)); } } }
mit
nestharus/JASS
Antlr/PreAntlr/src/compile/antlr/TestGrammar.java
22556
package compile.antlr; import java.io.FileInputStream; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.lang.reflect.Constructor; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.LinkedList; import java.util.List; import org.antlr.v4.runtime.ANTLRInputStream; import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.CommonToken; import org.antlr.v4.runtime.CommonTokenStream; import org.antlr.v4.runtime.DiagnosticErrorListener; import org.antlr.v4.runtime.Lexer; import org.antlr.v4.runtime.Parser; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.TokenStream; import org.antlr.v4.runtime.atn.PredictionMode; public class TestGrammar { private Class<? extends Lexer> lexerClass; private Class<? extends Parser> parserClass; private Lexer lexer; private Parser parser; private boolean arg_tree = false; private boolean arg_tokens = false; private boolean arg_gui = false; private String arg_ps = null; private String arg_encoding = null; private boolean arg_trace = false; private boolean arg_diagnostics = false; private boolean arg_sll = false; private String arg_channel = null; private String arg_lexer; private String arg_parser; private String arg_package; private List<String> arg_input = new LinkedList<String>(); private CommonTokenStream tokens; private List<Token> tokenList; private String parserRule; private String[] tokenNames; private String[] channelNames = null; private int getValueWidth() { int max = 0; int len; String str; for (Token token : tokenList) { str = token.getText(); str = str.replace("\n", "\\n"); str = str.replace("\t", "\\t"); str = str.replace("\r", "\\r"); ((CommonToken) token).setText(str); len = token.getText().length(); if (len > max) { max = len; } } return max; } private int getTypeWidth() { int max = 0; int type; int len; for (Token token : tokenList) { type = token.getType(); if (type == -1) 
{ len = 3; } else { len = tokenNames[type].length(); } if (len > max) { max = len; } } return max; } private int getChannelWidth() { if (channelNames == null) { return 4; } int max = 0; int len; for (Token token : tokenList) { len = channelNames[token.getChannel()].length(); if (len > max) { max = len; } } return max; } private static void printex(String msg, int maxlen, int spacing) { int strlen = msg == null || msg == ""? 0 : msg.length(); int len = 0; char[] str = msg.toCharArray(); while (len < strlen && len < maxlen) { if (str[len] == '\t' || str[len] == '\r' || str[len] == '\n') { str[len] = ' '; } System.out.print(str[len++]); } while (len++ < maxlen) { System.out.print(' '); } for (int i = spacing; i > 0; --i) { System.out.print(' '); } } private static void printex(char c, int len) { while (len-- > 0) { System.out.print('-'); } } private void printTokens(String tabs) { if (arg_tokens) { tokenList = tokens.getTokens(); final int spacing = 8; final int typeWidth = getTypeWidth(); final int valueWidth = getValueWidth() + 2; final int channelWidth = getChannelWidth(); final int width = typeWidth + valueWidth + channelWidth + spacing + spacing; int type; System.out.print(tabs + "Tokens {\n"); System.out.print(tabs + "\t"); printex("Type", typeWidth, spacing); printex("Value", valueWidth, spacing); printex("Channel", channelWidth, 0); System.out.println(); System.out.print(tabs + "\t"); printex('-', width); System.out.print("\n\n"); for (Token token : tokenList) { type = token.getType(); System.out.print(tabs + "\t"); printex(type == -1? 
"EOF" : tokenNames[type], typeWidth, spacing); printex("|" + token.getText() + "|", valueWidth, spacing); if (channelNames != null) { printex(channelNames[token.getChannel()], channelWidth, 0); } else { printex(Integer.toString(token.getChannel()), channelWidth, 0); } System.out.print('\n'); } System.out.print(tabs + "}\n"); } } private boolean evaluateArgs_assert(String[] args, final int i, final String expected) { if (args[i].equals(expected)) { return true; } System.err.println("Expecting [" + expected + "], got [" + args[i] + "]"); return false; } private void evaluateArgs_error(final String arg, final String expected) { System.err.println("Expecting " + expected + ", got [" + arg + "]"); } private int evaluateArgs_grammar(final String args[], int i) { if (evaluateArgs_assert(args, i, "-grammar")) { ++i; if (i < args.length) { if (args[i].charAt(0) != '-') { arg_lexer = args[i]; arg_parser = args[i]; } else { --i; evaluateArgs_error(args[i], "[grammarName]"); } } else { evaluateArgs_error("nothing", "[grammarName]"); } } return i; } private int evaluateArgs_lexer(final String args[], int i) { if (evaluateArgs_assert(args, i, "-lexer")) { ++i; if (i < args.length) { if (args[i].charAt(0) != '-') { arg_lexer = args[i]; } else { --i; evaluateArgs_error(args[i], "[lexerName]"); } } else { evaluateArgs_error("nothing", "[lexerName]"); } } return i; } private int evaluateArgs_parser(final String args[], int i) { if (evaluateArgs_assert(args, i, "-parser")) { ++i; if (i < args.length) { if (args[i].charAt(0) != '-') { arg_parser = args[i]; } else { --i; evaluateArgs_error(args[i], "[parserName]"); } } else { evaluateArgs_error("nothing", "[parserName]"); } } return i; } private int evaluateArgs_package(final String args[], int i) { if (evaluateArgs_assert(args, i, "-package")) { ++i; if (i < args.length) { if (args[i].charAt(0) != '-') { arg_package = args[i]; } else { --i; evaluateArgs_error(args[i], "[packageName]"); } } else { evaluateArgs_error("nothing", 
"[packageName]"); } } return i; } private int evaluateArgs_encoding(final String args[], int i) { if (evaluateArgs_assert(args, i, "-encoding")) { ++i; if (i < args.length) { if (args[i].charAt(0) != '-') { arg_encoding = args[i]; } else { --i; evaluateArgs_error(args[i], "[encodingName]"); } } else { evaluateArgs_error("nothing", "[encodingName]"); } } return i; } private int evaluateArgs_ps(final String args[], int i) { if (evaluateArgs_assert(args, i, "-ps")) { ++i; if (i < args.length) { if (args[i].charAt(0) != '-') { arg_ps = args[i]; } else { --i; evaluateArgs_error(args[i], "[psName]"); } } else { evaluateArgs_error("nothing", "[psName]"); } } return i; } private int evaluateArgs_channel(final String args[], int i) { if (evaluateArgs_assert(args, i, "-channel")) { ++i; if (i < args.length) { if (args[i].charAt(0) != '-') { arg_channel = args[i]; } else { --i; evaluateArgs_error(args[i], "[channelName]"); } } else { evaluateArgs_error("nothing", "[channelName]"); } } return i; } private int evaluateArgs_tokens(final String args[], int i) { if (evaluateArgs_assert(args, i, "-tokens")) arg_tokens = true; return i; } private int evaluateArgs_tree(final String args[], int i) { if (evaluateArgs_assert(args, i, "-tree")) arg_tree = true; return i; } private int evaluateArgs_gui(final String args[], int i) { if (evaluateArgs_assert(args, i, "-gui")) arg_gui = true; return i; } private int evaluateArgs_trace(final String args[], int i) { if (evaluateArgs_assert(args, i, "-")) arg_trace = true; return i; } private int evaluateArgs_diagnostics(final String args[], int i) { if (evaluateArgs_assert(args, i, "-diagnostics")) arg_diagnostics = true; return i; } private int evaluateArgs_SLL(final String args[], int i) { if (evaluateArgs_assert(args, i, "-SLL")) arg_sll = true; return i; } private int evaluateArgs_input(final String args[], int i) { if (args[i].charAt(0) != '-') { arg_input.add(args[i]); } return i; } private int evaluateArgs_g(final String args[], final 
int i, final int d) { switch (args[i].charAt(d)) { case 'r': return evaluateArgs_grammar(args, i); case 'u': return evaluateArgs_gui(args, i); default: evaluateArgs_error(args[i], "[-grammar] [-gui]"); } return i; } private int evaluateArgs_pa(final String args[], final int i, final int d) { switch (args[i].charAt(d)) { case 'r': return evaluateArgs_parser(args, i); case 'c': return evaluateArgs_package(args, i); default: evaluateArgs_error(args[i], "[-parser] [-package]]"); } return i; } private int evaluateArgs_p(final String args[], final int i, final int d) { switch (args[i].charAt(d)) { case 'a': return evaluateArgs_pa(args, i, d + 1); case 's': return evaluateArgs_ps(args, i); default: evaluateArgs_error(args[i], "[-parser] [-ps] [-package]"); } return i; } private int evaluateArgs_tr(final String args[], final int i, final int d) { switch (args[i].charAt(d)) { case 'e': return evaluateArgs_tree(args, i); case 'a': return evaluateArgs_trace(args, i); default: evaluateArgs_error(args[i], "[-tree] [-trace]"); } return i; } private int evaluateArgs_t(final String args[], final int i, final int d) { switch (args[i].charAt(d)) { case 'o': return evaluateArgs_tokens(args, i); case 'r': return evaluateArgs_tr(args, i, d + 1); default: evaluateArgs_error(args[i], "[-tokens] [-tree] [-trace]"); } return i; } private int evaluateArgs_1(final String args[], final int i, final int d) { switch (args[i].charAt(d)) { case 'g': return evaluateArgs_g(args, i, d + 1); case 'l': return evaluateArgs_lexer(args, i); case 'p': return evaluateArgs_p(args, i, d + 1); case 't': return evaluateArgs_t(args, i, d + 1); case 'e': return evaluateArgs_encoding(args, i); case 'd': return evaluateArgs_diagnostics(args, i); case 's': return evaluateArgs_SLL(args, i); case 'c': return evaluateArgs_channel(args, i); default: evaluateArgs_error(args[i], "[-grammar] [-lexer] [-parser] [-package] [-tokens] [-tree] [-gui] [-trace] [-diagnostics] [-SLL] [-ps] [-encoding]"); } return i; } private int 
evaluateArgs_0(final String args[], final int i, final int d) { if (args[i].length() < 3) { evaluateArgs_error(args[i], "[-grammar] [-lexer] [-parser] [-packcage] [-tokens] [-tree] [-gui] [-trace] [-diagnostics] [-SLL] [-ps] [-encoding]"); return i; } switch (args[i].charAt(d)) { case '-': return evaluateArgs_1(args, i, d + 1); default: return evaluateArgs_input(args, i); } } private void evaluateArgs_len0(String args[]) { if (args.length == 0) { System.err.print("Arguments\n-------------------------------------------------------------------------\n\n"); System.err.println("\t([-grammar grammarName] | [-lexer lexerName] [-parser parserName])"); System.err.println("\t[-package packageName]? [-ps psName]? [-encoding encodingName]? [-channel enumName]?"); System.err.println("\t[-tokens]? [-tree]? [-gui]? [-trace]? [-diagnostics]? [-SLL]?"); System.err.println("\t[input-filename]*"); System.err.print("\nDetails\n---------------------------------------------------------------------------\n\n"); System.err.println("\tA lexer of some sort, be it from -grammar or -lexer, must be passed in\n\n"); System.err.println("\t[-grammar grammarName]\n" + "\n\t\t" + "Will attempt to load both lexer and parser of name [grammarName]" + "\n\t\t" + "The loaded grammar will be the last appearing [-grammar] argument" + "\n\n"); System.err.println("\t[-lexer lexerName]\n" + "\n\t\t" + "Will attempt to load the lexer of name [lexerName]" + "\n\t\t" + "The loaded lexer will be the last appearing [-lexer] argument" + "\n\n"); System.err.println("\t[-parser parserName]\n" + "\n\t\t" + "Will attempt to load the parser of name [parserName]" + "\n\t\t" + "The loaded parser will be the last appearing [-parser] argument" + "\n\n\t\t" + "Requires a lexer" + "\n\n"); System.err.println("\t[-channel enumName]\n" + "\n\t\t" + "Will use supplied [enumName] for channel names in token output" + "\n\t\t" + "Without this, it will use channel ids instead of channel names" + "\n\n\t\tExample: -channel Channel" 
+ "\n\n\t\t\t" + "public static enum Channel {" + "\n\t\t\t\t" + "OUT," + "\n\t\t\t\t" + "WHITESPACE," + "\n\t\t\t\t" + "COMMENTS" + "\n\n\t\t\t\t" + "; public final int value = CHANNEL_INDEX++;" + "\n\t\t\t" + "} private static int CHANNEL_INDEX = 0;" + "\n\n"); System.err.println("\t[-package packageName]\n" + "\n\t\t" + "Will load grammar from package [packageName]" + "\n\t\t" + "Packages may be specifically applied to the parser and lexer as well" + "\n\t\t" + "A package declaration will work with specific lexer and parser package definitions" + "\n\n\t\t" + "Loads myPackage..otherPackage.subPackage.lexerName" + "\n\n\t\t\t" + "-package myPackage.otherPackage -lexer subPackage.lexerName" + "\n\n"); System.err.println("\t[-ps psName]\n" + "\n\t\t" + "generates a visual representation of the parse tree in PostScript and" + "\n\t\t" + "stores it in [psName] (should be of type .ps)" + "\n\n"); System.err.println("\t[-encoding encodingName]\n" + "\n\t\t" + "specifies the input file encoding if the current" + "\n\t\t" + "locale would not read the input properly. For example, need this option" + "\n\t\t" + "to parse a Japanese-encoded XML file" + "\n\n"); System.err.println("\t[-trace]\n" + "\n\t\t" + "prints the rule name and current token upon rule entry and exit" + "\n\n"); System.err.println("\t[-diagnostics]\n" + "\n\t\t" + "turns on diagnostic messages during parsing. This generates messages" + "\n\t\t" + "only for unusual situations such as ambiguous input phrases." 
+ "\n\n"); System.err.println("\t[-SLL]\n" + "\n\t\t" + "uses a faster but slightly weaker parsing strategy" + "\n\n"); System.err.println("\t[input-filename]\n" + "\n\t\t" + "Omitting will read from stdin" + "\n\n"); System.exit(1); } } private void evaluateArgs(String args[]) { evaluateArgs_len0(args); for (int i = 0; i < args.length; ++i) { i = evaluateArgs_0(args, i, 0); } } public TestGrammar(String args[]) { evaluateArgs(args); } private String getLexerName() { if (arg_lexer == null) { System.err.println("Missing lexer"); System.exit(1); } if (arg_package != null) { return arg_package + "." + arg_lexer; } else { return arg_lexer; } } private String getParserName() { if (arg_parser == null) { System.err.println("Missing parser"); System.exit(1); } if (arg_package != null) { return arg_package + "." + arg_parser; } else { return arg_parser; } } private void loadLexer() { String lexerName = getLexerName() + "Lexer"; ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); lexerClass = null; try { lexerClass = classLoader.loadClass(lexerName).asSubclass(Lexer.class); } catch (java.lang.ClassNotFoundException cnfe) { lexerName = arg_lexer; try { lexerClass = classLoader.loadClass(lexerName).asSubclass(Lexer.class); } catch (ClassNotFoundException cnfe2) { System.err.println("Unable to load " + lexerName + " as lexer or parser (file wasn't found)"); System.exit(1); } } try { Constructor<? 
extends Lexer> lexerCtor = lexerClass.getConstructor(CharStream.class); lexer = lexerCtor.newInstance((CharStream) null); } catch (Exception e) { System.exit(1); } tokenNames = lexer.getTokenNames(); if (arg_channel != null) { Class<?> channel = null; try { channel = Class.forName(lexerClass.getName() + "$" + arg_channel); } catch (Exception e) { System.err.println("[" + arg_channel + " is not a declared member enum of @members of " + arg_lexer); System.err.println("Using channel id for -tokens instead of channel names"); } if (channel != null) { if (channel.isEnum()) { if (Modifier.isStatic(channel.getModifiers())) { Object[] enumConstants = channel.getEnumConstants(); if (enumConstants.length != 0) { channelNames = new String[enumConstants.length]; for (int i = 0; i < enumConstants.length; ++i) { channelNames[i] = enumConstants[i].toString(); } } else { System.err.println("[" + arg_channel + "] has no declared channels"); System.err.println("Using channel id for -tokens instead of channel names"); } } else { System.err.println("[" + arg_channel + "] is not a static member of @members of " + arg_lexer); System.err.println("Using channel id for -tokens instead of channel names"); } } else { System.err.println("[" + arg_channel + "] is not a member enum of @members of " + arg_lexer); System.err.println("Using channel id for -tokens instead of channel names"); } } } } private void loadParser() { parserClass = null; parser = null; if (arg_parser != null) { String parserName = getParserName() + "Parser"; ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); try { parserClass = classLoader.loadClass(parserName).asSubclass(Parser.class); } catch (Exception e) { parserName = arg_parser; try { parserClass = classLoader.loadClass(parserName).asSubclass(Parser.class); } catch (ClassNotFoundException cnfe2) { System.err.println("Unable to load " + parserName + " as parser (file wasn't found)"); System.exit(1); } } try { Constructor<? 
extends Parser> parserCtor = parserClass.getConstructor(TokenStream.class); parser = parserCtor.newInstance((TokenStream) null); } catch (Exception e) { } } if (parser != null) { parserRule = parser.getRuleNames()[0]; } } private void process() { loadLexer(); loadParser(); InputStream inputStream; Reader reader; if (arg_input.size() == 0) { inputStream = System.in; reader = null; try { if (arg_encoding != null) { reader = new InputStreamReader(inputStream, arg_encoding); } else { reader = new InputStreamReader(inputStream); } } catch (Exception e) { } if (reader != null) { process(inputStream, reader); } } else { for (String inputFile : arg_input) { inputStream = null; reader = null; try { if (inputFile != null) { inputStream = new FileInputStream(inputFile); } } catch (Exception e) { System.err.println("Could Not Load File [" + inputFile + "]"); } if (inputStream != null) { try { if (arg_encoding != null) { reader = new InputStreamReader(inputStream, arg_encoding); } else { reader = new InputStreamReader(inputStream); } } catch (Exception e) { } if (reader != null) { System.out.print(inputFile + " {\n"); process(inputStream, reader); System.out.print("}\n"); } } } } } private void process(InputStream inputStream, Reader reader) { try { lexer.setInputStream(new ANTLRInputStream(reader)); tokens = new CommonTokenStream(lexer); // tokens = new UnbufferedTokenStream(lexer); if (parser != null) { if (arg_diagnostics) { parser.addErrorListener(new DiagnosticErrorListener()); parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION); } if (arg_tree || arg_gui || arg_ps != null) { parser.setBuildParseTree(true); } if (arg_sll) { parser.getInterpreter().setPredictionMode(PredictionMode.SLL); } parser.setTokenStream(tokens); parser.setTrace(arg_trace); if (arg_tree || arg_gui || arg_ps != null) { try { Method startRule = parserClass.getMethod(parserRule); ParserRuleContext tree = (ParserRuleContext) startRule.invoke(parser, (Object[]) null); if 
(arg_tree) { System.out.println("\tTree {\n\t\t" + tree.toStringTree(parser) + "\n\t}"); } if (arg_gui) { tree.inspect(parser); } if (arg_ps != null) { try { tree.save(parser, arg_ps); } catch (Exception e) { System.out.println("Could not save postscript [" + arg_ps + "]"); } } } catch (Exception e) { System.err.println("Parser has invalid start rule [" + parserRule + "]"); } } } else { tokens.fill(); } printTokens("\t"); } catch (Exception e) { } finally { try { if (reader != null) { reader.close(); } if (inputStream != null) { inputStream.close(); } } catch (Exception e) { } } } public static void main(String args[]) { TestGrammar tester = new TestGrammar(args); tester.process(); } }
mit
aarongraham/mocha-dot-only
lib/onlyCallsSearch.js
211
'use strict';

// NOTE(review): esprima, fs and utils are required but never used below —
// presumably intended for the actual `.only`-search implementation; confirm
// before removing.
var esprima = require('esprima'),
    fs = require('fs'),
    utils = require('./utils.js');

/**
 * Scan a folder for mocha `.only` calls.
 *
 * Currently a stub: it ignores `folderPath` and always reports no findings.
 *
 * @param {string} folderPath - path of the folder to scan (unused for now).
 * @returns {Array} list of findings (always empty in this stub).
 */
function checkFolder(folderPath) {
    return [];
}

module.exports = {
    checkFolder: checkFolder
};
mit
atmanager/atmanager
src/ATManager/AtBundle/Entity/AtServicioTerceroRepository.php
595
<?php

namespace ATManager\AtBundle\Entity;

use Doctrine\ORM\EntityRepository;

/**
 * AtServicioTerceroRepository
 *
 * Custom Doctrine repository for AtServicioTercero entities.
 */
class AtServicioTerceroRepository extends EntityRepository
{
    /**
     * Return the third-party services attached to a given AT.
     *
     * @param mixed $at    AT entity (or identifier) the services belong to
     * @param int   $limit maximum number of rows returned; defaults to 50,
     *                     the previously hard-coded cap, so existing callers
     *                     keep the same behavior
     *
     * @return array list of AtServicioTercero entities
     */
    public function findByServiciosPorAt($at, $limit = 50)
    {
        $em = $this->getEntityManager();

        // DQL with a bound parameter: safe against injection and cacheable.
        $query = $em->createQuery(
            'SELECT s FROM AtBundle:AtServicioTercero s WHERE s.at = :at'
        )->setParameter('at', $at);

        $query->setMaxResults($limit);

        return $query->getResult();
    }
}
mit
nil1990/sheduling_admin
application/config/database.php
4536
<?php
defined('BASEPATH') OR exit('No direct script access allowed');

/*
| -------------------------------------------------------------------
| DATABASE CONNECTIVITY SETTINGS
| -------------------------------------------------------------------
| This file will contain the settings needed to access your database.
|
| For complete instructions please consult the 'Database Connection'
| page of the User Guide.
|
| -------------------------------------------------------------------
| EXPLANATION OF VARIABLES
| -------------------------------------------------------------------
|
|	['dsn']      The full DSN string describing a connection to the database.
|	['hostname'] The hostname of your database server.
|	['username'] The username used to connect to the database
|	['password'] The password used to connect to the database
|	['database'] The name of the database you want to connect to
|	['dbdriver'] The database driver. e.g.: mysqli.
|			Currently supported:
|				 cubrid, ibase, mssql, mysql, mysqli, oci8,
|				 odbc, pdo, postgre, sqlite, sqlite3, sqlsrv
|	['dbprefix'] You can add an optional prefix, which will be added
|				 to the table name when using the Query Builder class
|	['pconnect'] TRUE/FALSE - Whether to use a persistent connection
|	['db_debug'] TRUE/FALSE - Whether database errors should be displayed.
|	['cache_on'] TRUE/FALSE - Enables/disables query caching
|	['cachedir'] The path to the folder where cache files should be stored
|	['char_set'] The character set used in communicating with the database
|	['dbcollat'] The character collation used in communicating with the database
|				 NOTE: For MySQL and MySQLi databases, this setting is only used
| 				 as a backup if your server is running PHP < 5.2.3 or MySQL < 5.0.7
|				 (and in table creation queries made with DB Forge).
| 				 There is an incompatibility in PHP with mysql_real_escape_string() which
| 				 can make your site vulnerable to SQL injection if you are using a
| 				 multi-byte character set and are running versions lower than these.
| 				 Sites using Latin-1 or UTF-8 database character set and collation are unaffected.
|	['swap_pre'] A default table prefix that should be swapped with the dbprefix
|	['encrypt']  Whether or not to use an encrypted connection.
|
|			'mysql' (deprecated), 'sqlsrv' and 'pdo/sqlsrv' drivers accept TRUE/FALSE
|			'mysqli' and 'pdo/mysql' drivers accept an array with the following options:
|
|				'ssl_key'    - Path to the private key file
|				'ssl_cert'   - Path to the public key certificate file
|				'ssl_ca'     - Path to the certificate authority file
|				'ssl_capath' - Path to a directory containing trusted CA certificates in PEM format
|				'ssl_cipher' - List of *allowed* ciphers to be used for the encryption, separated by colons (':')
|				'ssl_verify' - TRUE/FALSE; Whether to verify the server certificate or not ('mysqli' only)
|
|	['compress'] Whether or not to use client compression (MySQL only)
|	['stricton'] TRUE/FALSE - forces 'Strict Mode' connections
|							- good for ensuring strict SQL while developing
|	['ssl_options']	Used to set various SSL options that can be used when making SSL connections.
|	['failover'] array - An array with 0 or more data for connections if the main should fail.
|	['save_queries'] TRUE/FALSE - Whether to "save" all executed queries.
| 				NOTE: Disabling this will also effectively disable both
| 				$this->db->last_query() and profiling of DB queries.
| 				When you run a query, with this setting set to TRUE (default),
| 				CodeIgniter will store the SQL statement for debugging purposes.
| 				However, this may cause high memory usage, especially if you run
| 				a lot of SQL queries ... disable this to avoid that problem.
|
| The $active_group variable lets you choose which connection group to
| make active. By default there is only one group (the 'default' group).
|
| The $query_builder variable lets you determine whether or not to load
| the query builder class.
*/
$active_group = 'default';
$query_builder = TRUE;

$db['default'] = array(
	'dsn'	=> '',
	'hostname' => 'localhost',
	'username' => 'root',
	'password' => '',
	'database' => 'content_scheduling',
	'dbdriver' => 'mysqli',
	'dbprefix' => '',
	'pconnect' => FALSE,
	// Show DB errors everywhere except production.
	'db_debug' => (ENVIRONMENT !== 'production'),
	'cache_on' => FALSE,
	'cachedir' => '',
	'char_set' => 'utf8',
	'dbcollat' => 'utf8_general_ci',
	'swap_pre' => '',
	'encrypt' => FALSE,
	'compress' => FALSE,
	'stricton' => FALSE,
	'failover' => array(),
	'save_queries' => TRUE
);
mit
Innmind/TimeContinuum
src/Earth/Timezone/America/Bahia.php
293
<?php
declare(strict_types = 1);

namespace Innmind\TimeContinuum\Earth\Timezone\America;

use Innmind\TimeContinuum\Earth\Timezone;

/**
 * IANA timezone "America/Bahia".
 *
 * Thin value object: the constructor only pins the timezone identifier;
 * all behavior lives in the parent Timezone class.
 *
 * @psalm-immutable
 */
final class Bahia extends Timezone
{
    public function __construct()
    {
        parent::__construct('America/Bahia');
    }
}
mit
diminishedprime/.org
reading-list/seven_languages_in_seven_weeks/ruby/day_2_floating_down_from_the_sky.rb
3787
# Seven Languages in Seven Weeks — Ruby, Day 2 examples.
# Running this file prints to stdout and (via Person#to_f) writes a small file.

# Returns a lawyer-flavored non-answer when :profession is :lawyer, else true.
def tell_the_truth(options = {})
  if options[:profession] == :lawyer
    'it could be believed that this is almost certainly not false.'
  else
    true
  end
end

tell_the_truth(profession: :lawyer)
# NOTE(review): key below is spelled "prefession" — likely a typo for
# :profession, though the result (true) is the same either way.
tell_the_truth(prefession: :shipbuilder)
tell_the_truth

# Code Blocks and Yield
3.times { puts 'hiya there, kiddo' }
animals = ['lions and', 'tigers and', 'bears', 'oh my']
animals.each { |a| puts a }

# You can pass around blocks
def call_block
  yield
end

def pass_block(&block)
  call_block(&block)
end

pass_block { puts 'Hello, Block' }

# Defining Classes
class Tree
  attr_accessor :children, :node_name

  def initialize(name, children = [])
    @children = children
    @node_name = name
  end

  # Visit this node, then every descendant, with the same block.
  def visit_all(&block)
    visit &block
    children.each { |c| c.visit_all &block }
  end

  # NOTE(review): this yields the Proc object itself (so `puts node` prints
  # a Proc, not the tree node) — quirky but faithful to the book's listing.
  def visit(&block)
    yield block
  end
end

ruby_tree = Tree.new('Ruby', [Tree.new('Reia'), Tree.new('MacRuby')])
puts 'Visiting a node'
ruby_tree.visit { |node| puts node }
puts 'Visiting entire tree'
ruby_tree.visit_all { |node| puts node }

# Writing a Mixin
module ToFile
  # Per-object filename derived from the object id.
  def filename
    "object_#{object_id}.txt"
  end

  # Writes the object's to_s representation to its own file.
  # (Shadows Ruby's built-in Object#to_f float conversion by design of the
  # book example.)
  def to_f
    File.open(filename, 'w') { |f| f.write(to_s) }
  end
end

class Person
  include ToFile
  attr_accessor :name

  def initialize(name)
    @name = name
  end

  def to_s
    name
  end
end

Person.new('matz').to_f

### Problems

# Research

# Find out how to access files with and without code blocks. What is the benefit
# of the code block?
## Looks like the code block will be run and then the file closed for you
## automatically.

# How would you translate a hash to an array? Can you translate arrays to
# hashes?
## I would use some method of each, or each_key to go from hash -> array, going
## from array to hash seems easy with .each_slice(2)

# Can you iterate through a hash?
## Yes

# You can use Ruby arrays as stacks. What other common data structures do arrays
# support?
# queues, Lists

# Do

# Print the contents of an array of sixteen numbers, four numbers at a time,
# using just each. Now, do the same with each_slice in Enumerable.
numbers = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]

# With only each
# FIX: the original declared snake_case names (current_group, so_far) but then
# used camelCase ones (currentGroup, soFar), raising NameError at runtime.
so_far = 0
current_group = []
numbers.each do |num|
  current_group.push(num)
  so_far += 1
  next unless so_far == 4
  puts current_group
  puts ''
  current_group = []
  so_far = 0
end

# With each_slice
numbers.each_slice(4) do |num_group|
  puts num_group
  puts ''
end

# The Tree class was interesting, but it did not allow you to specify a new tree
# with a clean user interface. Let the initializer accept a nested structure of
# hashes. You should be able to specify a tree like this:
# my_tree = { 'grandpa' => { 'dad' =>   { 'child 1' => {},
#                                         'child 2' => {} },
#                            'uncle' => { 'child 3' => {},
#                                         'child 4' => {} } } }
class TreeTwo
  attr_accessor :children, :node_name

  # Accepts a single-root nested hash: { name => { child_name => {...}, ... } }.
  # FIX: children are now built recursively as TreeTwo nodes. The original
  # called Tree.new(value), whose initializer expects (name, children) and
  # would have stored a Hash as the node name; it also silently overwrote
  # @node_name when iterating multi-key hashes.
  def initialize(tree_hash)
    @node_name = tree_hash.keys.first
    @children = (tree_hash[@node_name] || {}).map do |name, subtree|
      TreeTwo.new(name => subtree)
    end
  end

  # Visit this node, then every descendant, with the same block.
  # FIX: the block is forwarded with &block; the original passed the Proc as a
  # positional argument to the zero-arity #visit, raising ArgumentError.
  def visit_all(&block)
    visit(&block)
    children.each { |c| c.visit_all(&block) }
  end

  # Yield this node to the caller's block.
  def visit
    yield self
  end
end

# Write a simple grep that will print the lines of a file having any occurrences
# of a phrase anywhere in that line. You will need to do a simple regular
# expression match and read lines from a file. (This is surprisingly simple in
# Ruby.) If you want, include line numbers.
# phrase = 'hi there'
# currentLine = 0
# File.open('file_name_here.extension', 'r') do |f1|
#   while line = f1.gets
#     currentLine += 1
#     puts line if line.match(phrase)
#   end
# end
mit
mieky/we-love-cake
server.ts
245
// Minimal static-file server for the compiled front-end.
const express = require("express");

// Port comes from the environment when set, otherwise 3000.
const HTTP_PORT = process.env.HTTP_PORT || 3000;

const server = express();

// Serve the built assets from ./build next to this file.
server.use(express.static(__dirname + "/build"));

server.listen(HTTP_PORT, function () {
    console.log(`Listening to port ${HTTP_PORT}...`);
});
mit
anzev/hedwig
run_tests.py
148
"""Discover and run the project's unit tests.

FIX: the original always exited with status 0, even when tests failed,
so CI systems could not detect failures. The result of the run is now
propagated as the process exit code.
"""
import sys
import unittest


def main(start_dir: str = 'tests') -> int:
    """Run every test discovered under *start_dir*.

    Args:
        start_dir: directory to search for tests (default ``'tests'``,
            matching the original hard-coded value).

    Returns:
        ``0`` when all tests pass, ``1`` otherwise.
    """
    suite = unittest.TestLoader().discover(start_dir)
    result = unittest.TextTestRunner(verbosity=2).run(suite)
    # wasSuccessful() is False on any failure or error.
    return 0 if result.wasSuccessful() else 1


if __name__ == '__main__':
    sys.exit(main())
mit
kubal5003/LegoCar
Android/app/src/main/java/pl/earduino/rclimoble/Shapes/SteeringWheel.java
3283
package pl.earduino.rclimoble.Shapes;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLUtils;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.opengles.GL10;

import pl.earduino.rclimoble.R;

/**
 * Textured quad (steering-wheel image) rendered with the fixed-function
 * OpenGL ES 1.x pipeline: a triangle strip of four vertices with a bitmap
 * texture loaded from resources.
 */
public class SteeringWheel {

    // Direct (native-order) buffer holding the quad's vertex coordinates.
    private final FloatBuffer vertexBuffer;
    // private final ShortBuffer drawListBuffer;

    // Handle slot for the single OpenGL texture object.
    private int[] textures = new int[1];

    // number of coordinates per vertex in this array
    private FloatBuffer textureBuffer;  // buffer holding the texture coordinates

    private float texture[] = {
            // Mapping coordinates for the vertices
            0.0f, 1.0f,     // top left     (V2)
            0.0f, 0.0f,     // bottom left  (V1)
            1.0f, 1.0f,     // top right    (V4)
            1.0f, 0.0f      // bottom right (V3)
    };

    private float vertices[] = {
            -1.0f, -1.0f, 0.0f,  // V1 - bottom left
            -1.0f, 1.0f, 0.0f,   // V2 - top left
            1.0f, -1.0f, 0.0f,   // V3 - bottom right
            1.0f, 1.0f, 0.0f     // V4 - top right
    };

    // NOTE(review): drawOrder and color appear unused by draw(), which uses
    // glDrawArrays (no index buffer) and the texture's own colors.
    private final short drawOrder[] = { 0, 1, 2, 0, 2, 3 };  // order to draw vertices

    float color[] = { 0.2f, 0.709803922f, 0.898039216f, 1.0f };

    /**
     * Sets up the drawing object data for use in an OpenGL ES context.
     */
    public SteeringWheel() {
        // Vertex data must live in a direct, native-byte-order buffer for GL.
        ByteBuffer byteBuffer = ByteBuffer.allocateDirect(vertices.length * 4);
        byteBuffer.order(ByteOrder.nativeOrder());
        vertexBuffer = byteBuffer.asFloatBuffer();
        vertexBuffer.put(vertices);
        vertexBuffer.position(0);

        // Same treatment for the texture (UV) coordinates.
        byteBuffer = ByteBuffer.allocateDirect(texture.length * 4);
        byteBuffer.order(ByteOrder.nativeOrder());
        textureBuffer = byteBuffer.asFloatBuffer();
        textureBuffer.put(texture);
        textureBuffer.position(0);
    }

    /**
     * Draws the textured quad. Assumes loadGLTexture() has already been
     * called on a current GL context so textures[0] is a valid texture name.
     */
    public void draw(GL10 gl) {
        gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);

        // Point to our buffers
        gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);

        // Set the face rotation
        gl.glFrontFace(GL10.GL_CW);

        // Point to our vertex buffer
        gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
        gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer);

        // Draw the vertices as triangle strip
        gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, vertices.length / 3);

        //Disable the client state before leaving
        gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
    }

    /**
     * Decodes the steering-wheel bitmap resource and uploads it as the
     * texture used by draw(). Must run on the GL thread with a current
     * context. The bitmap is recycled once uploaded.
     */
    public void loadGLTexture(GL10 gl, Context context) {
        // loading texture
        Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(),
                R.drawable.steering_wheel);

        gl.glGenTextures(1, textures, 0);
        gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);

        // Nearest-neighbor when minifying, linear when magnifying.
        gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
        gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);

        GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);

        // Pixel data now lives on the GPU; free the Java-side copy.
        bitmap.recycle();
    }
}
mit
lmarinov/Exercise-repo
Java_Advanced_2021/src/FunctionalProgramming/Lab/FilterByAge.java
2754
package FunctionalProgramming.Lab; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Scanner; import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Collectors; public class FilterByAge { public static void main(String[] args) { Scanner scanner = new Scanner(System.in); int n = Integer.parseInt(scanner.nextLine()); List<Person> people = new ArrayList<>(); Function<String, Person> parsePerson = str -> { String[] input = str.split(", "); return new Person(input[0], Integer.parseInt(input[1])); }; while (n-- > 0){ Person currentPerson = parsePerson.apply(scanner.nextLine()); people.add(currentPerson); } String condition = scanner.nextLine(); int ageCondition = Integer.parseInt(scanner.nextLine()); String format = scanner.nextLine(); people = condition.equals("older") ? Person.filterPeople(people, p -> p.age >= ageCondition) : Person.filterPeople(people, p -> p.age <= ageCondition); System.out.println(Person.formatPeople(people, getFormatter(format), System.lineSeparator())); } private static Function<Person, String> getFormatter(String format) { switch (format){ case "name": return p -> p.name; case "age": return p -> String.valueOf(p.age); case "name age": return p -> p.name + " - " + p.age; default: throw new IllegalStateException("Unknown format type " + format); } } private static class Person { private String name; private int age; public Person(String name, int age) { this.name = name; this.age = age; } public String getName() { return name; } public int getAge() { return age; } public void setName(String name) { this.name = name; } public void setAge(int age) { this.age = age; } public static List<Person> filterPeople (Collection<Person> people,Predicate<Person> predicate){ return people.stream() .filter(predicate) .collect(Collectors.toList()); } public static String formatPeople(Collection<Person> people, Function<Person, String> formatter, String delimiter){ return 
people.stream() .map(formatter) .collect(Collectors.joining(delimiter)); } } }
mit
mavoweb/mavo
src/backend.github.js
13964
(function($, $$) { let _ = Mavo.Backend.register(class Github extends Mavo.Backend { id = "Github" constructor (url, o) { super(url, o); this.permissions.on(["login", "read"]); this.login(true); } update (url, o) { super.update(url, o); // Extract info for username, repo, branch, filepath from URL let extension = this.format.constructor.extensions[0] || ".json"; this.defaults = { repo: "mv-data", filename: `${this.mavo.id}${extension}` }; this.info = _.parseURL(this.source, this.defaults); // If an author provided backend metadata, use them // since they have higher priority for (const prop in o) { // Skip the format and mavo properties // since they are already updated in the parent's update method if (["format", "mavo"].includes(prop)) { continue; } if (this.info.apiCall === "graphql" && prop === "query") { // It makes sense to set/update the apiData property only for calls with GraphQL. // Otherwise, it will break the Github#get method. this.info.apiData = { query: o.query }; continue; } this.info[prop] = o[prop]; } $.extend(this, this.info); } async get (url) { if (this.isAuthenticated() || !this.path || url) { // Authenticated or raw API call let info = url? _.parseURL(url) : this.info; if (info.apiData) { // GraphQL return this.request(info.apiCall, info.apiData, "POST") .then(response => { if (response.errors?.length) { return Promise.reject(response.errors.map(x => x.message).join("\n")); } return response.data; }); } let isRawAPICall = info.apiParams !== undefined; let responseType = isRawAPICall ? 
"response" : "json"; let req = { responseType, headers: { "Accept": "application/vnd.github.squirrel-girl-preview" } }; let response = await this.request(info.apiCall, {ref:this.branch}, "GET", req); if (isRawAPICall) { // Raw API call let json = await response.json(); let params = new URL(info.apiCall, this.constructor.apiDomain).searchParams; let maxPages = params.get("max_pages") - 1; /* subtract 1 because we already fetched a page */ if (maxPages > 0 && params.get("page") === null && Array.isArray(json)) { // Fetch more pages let next; do { next = response.headers.get("Link")?.match(/<(.+?)>; rel="next"/)?.[1]; if (next) { response = await this.request(next, {ref:this.branch}, "GET", req); if (response.ok) { let pageJSON = await response.json(); if (Array.isArray(pageJSON)) { json.push(...pageJSON); } else { break; } } else { break; } } else { break; } } while (--maxPages > 0); } return json; } else { if (info.repo && response.content) { // Fetching file contents return _.atob(response.content); } else { return response; } } } else { // Unauthenticated, use simple GET request to avoid rate limit url = new URL(`https://raw.githubusercontent.com/${this.username}/${this.repo}/${this.branch || "main"}/${this.path}`); url.searchParams.set("timestamp", Date.now()); // ensure fresh copy let response = await fetch(url.href); if (response.ok) { this.branch = this.branch || "main"; return response.text(); } else { if (response.status === 404 && !this.branch) { // Possibly using older default branch "master", try again and store branch name url.pathname = `/${this.username}/${this.repo}/master/${this.path}`; response = await fetch(url.href); if (response.ok) { this.branch = "master"; return response.text(); } } } return null; } } upload (file, path = this.path) { return Mavo.readFile(file).then(dataURL => { let base64 = dataURL.slice(5); // remove data: let media = base64.match(/^\w+\/[\w+]+/)[0]; media = media.replace("+", "\\+"); // Fix for #608 base64 = 
base64.replace(RegExp(`^${media}(;base64)?,`), ""); path = this.path.replace(/[^/]+$/, "") + path; // make upload path relative to existing path return this.put(base64, path, {isEncoded: true}); }) .then(fileInfo => this.getURL(path, fileInfo.commit.sha)); } /** * Saves a file to the backend. * @param {String} serialized - Serialized data * @param {String} path - Optional file path * @return {Promise} A promise that resolves when the file is saved. */ put (serialized, path = this.path, o = {}) { if (!path) { // Raw API calls are read-only for now return; } let repoCall = `repos/${this.username}/${this.repo}`; let fileCall = `${repoCall}/contents/${path}`; let commitPrefix = this.mavo.element.getAttribute("mv-github-commit-prefix") || ""; // Create repo if it doesn’t exist let repoInfo = this.repoInfo? Promise.resolve(this.repoInfo) : this.request("user/repos", {name: this.repo}, "POST").then(repoInfo => this.repoInfo = repoInfo); serialized = o.isEncoded? serialized : _.btoa(serialized); return repoInfo.then(repoInfo => { if (!this.canPush()) { // Does not have permission to commit, create a fork return this.request(`${repoCall}/forks`, {name: this.repo}, "POST") .then(forkInfo => { fileCall = `repos/${forkInfo.full_name}/contents/${path}`; return this.forkInfo = forkInfo; }) .then(forkInfo => { // Ensure that fork is created (they take a while) let timeout; let test = (resolve, reject) => { clearTimeout(timeout); this.request(`repos/${forkInfo.full_name}/commits`, {until: "1970-01-01T00:00:00Z"}, "HEAD") .then(x => { resolve(forkInfo); }) .catch(x => { // Try again after 1 second timeout = setTimeout(test, 1000); }); }; return new Promise(test); }); } return repoInfo; }) .then(repoInfo => { return this.request(fileCall, { ref: this.branch }).then(fileInfo => this.request(fileCall, { message: commitPrefix + this.mavo._("gh-updated-file", {name: fileInfo.name || "file"}), content: serialized, branch: this.branch, sha: fileInfo.sha }, "PUT"), xhr => { if (xhr.status 
== 404) { // File does not exist, create it return this.request(fileCall, { message: commitPrefix + "Created file", content: serialized, branch: this.branch }, "PUT"); } return xhr; }); }) .then(fileInfo => { const env = {context: this, fileInfo}; Mavo.hooks.run("gh-after-commit", env); return env.fileInfo; }); } login (passive) { return this.oAuthenticate(passive) .then(() => this.getUser()) .catch(xhr => { if (xhr.status == 401) { // Unauthorized. Access token we have is invalid, discard it this.logout(); } }) .then(u => { if (this.user) { this.permissions.on("logout"); if (this.info.path) { this.permissions.on(["edit", "save"]); } if (this.repo) { return this.request(`repos/${this.username}/${this.repo}`) .then(repoInfo => { if (this.branch === undefined) { this.branch = repoInfo.default_branch; } this.repoInfo = repoInfo; if (!this.mavo.source) { // if url doesn't have source, check for forks if (!this.canPush()) { // Check if current user has a fork of this repo, and display dialog to switch if (this.user.info.public_repos < repoInfo.forks) { // graphql search of current user's forks let query = `query { viewer { name repositories(last: 100, isFork: true) { nodes { url parent { nameWithOwner } } } } }`; return this.request("https://api.github.com/graphql", {query: query}, "POST") .then(data => { let repos = data.data.viewer.repositories.nodes; for (let i in repos) { if (repos[i].parent.nameWithOwner === repoInfo.full_name) { this.switchToMyForkDialog(repos[i].url); return repoInfo; } } return repoInfo; }); } else { // search forks of this repo return this.request(repoInfo.forks_url) .then(forks => { for (let i in forks) { if (forks[i].owner.login === this.user.username) { this.switchToMyForkDialog(forks[i].html_url); return repoInfo; } } return repoInfo; }); } } } return repoInfo; }).then(repoInfo => { const env = { context: this, repoInfo }; Mavo.hooks.run("gh-after-login", env); return env.repoInfo; }); } } }); } canPush () { if (this.repoInfo) { return 
this.repoInfo.permissions.push; } // Repo does not exist so we can't check permissions // Just check if authenticated user is the same as our URL username return this.user?.username?.toLowerCase() == this.username.toLowerCase(); } oAuthParams = () => "&scope=repo" logout () { return this.oAuthLogout().then(() => { this.user = null; }); } getUser () { if (this.user) { return Promise.resolve(this.user); } return this.request("user").then(info => { this.user = { username: info.login, name: info.name || info.login, avatar: info.avatar_url, url: "https://github.com/" + info.login, info }; $.fire(this, "mv-login"); }); } getURL (path = this.path, sha) { let repoInfo = this.forkInfo || this.repoInfo; let repo = repoInfo.full_name; path = path.replace(/ /g, "%20"); repoInfo.pagesInfo = repoInfo.pagesInfo || this.request(`repos/${repo}/pages`, {}, "GET", { headers: { "Accept": "application/vnd.github.mister-fantastic-preview+json" } }); return repoInfo.pagesInfo.then(pagesInfo => pagesInfo.html_url + path) .catch(xhr => { // No Github Pages, return jsdelivr URLs return `https://cdn.jsdelivr.net/gh/${repo}@${sha || this.branch || "latest"}/${path}`; }); } switchToMyForkDialog (forkURL) { let params = (new URL(location)).searchParams; params.append(`${this.mavo.id}-storage`, forkURL + "/" + this.path); this.notice = this.mavo.message(` ${this.mavo._("gh-login-fork-options")} <form onsubmit="return false"> <a href="${location.pathname}?${params}"><button>${this.mavo._("gh-use-my-fork")}</button></a> </form>`, { classes: "mv-inline", dismiss: ["button", "submit"] }); this.notice.closed.then(form => { if (!form) { return; } history.pushState({}, "", `${location.pathname}?${params}`); location.replace(`${location.pathname}?${params}`); }); return; } static apiDomain = "https://api.github.com/" static oAuth = "https://github.com/login/oauth/authorize" static key = "7e08e016048000bc594e" static test (url) { url = new URL(url, Mavo.base); return 
/^((api\.)?github\.com|raw\.githubusercontent\.com)/.test(url.host); } /** * Parse Github URLs, return username, repo, branch, path */ static parseURL (source, defaults = {}) { const ret = {}; // Define computed properties as writable accessors Object.defineProperties(ret, { "apiCall": { get() { let call = `repos/${this.username}/${this.repo}/${this.resources ?? "contents"}`; const path = this.path; if (path) { call += `/${path}`; } // Don't lose search params for raw API calls return call + (this.apiParams ?? ""); }, set (v) { delete this.apiCall; this.apiCall = v; }, configurable: true, enumerable: true }, "path": { get() { if (this.filename) { return (this.filepath? this.filepath + "/" : "") + this.filename; } else { return this.filepath; } }, set (v) { delete this.path; this.path = v; }, configurable: true, enumerable: true } }); const url = new URL(source, Mavo.base); let path = url.pathname.slice(1).split("/"); ret.username = path.shift(); ret.repo = path.shift() || defaults.repo; if (/raw.githubusercontent.com$/.test(url.host)) { ret.branch = path.shift(); } else if (/api.github.com$/.test(url.host)) { // Raw API call delete ret.username; delete ret.repo; ret.apiParams = url.search; ret.apiData = Mavo.Functions.from(source, "#"); // url.* drops line breaks const apiCall = url.pathname.slice(1) + ret.apiParams; if (apiCall == "graphql") { ret.apiCall = apiCall; ret.apiData = { query: ret.apiData }; return ret; } path = url.pathname.slice(1).split("/"); const firstSegment = path.shift(); if (firstSegment != "repos") { ret.apiCall = apiCall; return ret; } ret.username = path.shift(); ret.repo = path.shift(); ret.resources = path.shift(); } else if (path[0] == "blob") { path.shift(); ret.branch = path.shift(); } const lastSegment = path[path.length - 1]; if (/\.\w+$/.test(lastSegment)) { ret.filename = lastSegment; path.splice(path.length - 1, 1); } else { // If we work with a raw API call and couldn't find the filename in the path, // leave the filename blank 
ret.filename = ret.hasOwnProperty("apiParams")? "" : defaults.filename; } ret.filepath = path.join("/") || defaults.filepath || ""; return ret; } // Fix atob() and btoa() so they can handle Unicode static btoa = str => btoa(unescape(encodeURIComponent(str))) static atob = str => decodeURIComponent(escape(window.atob(str))) }); })(Bliss, Bliss.$);
mit
javanigus/zabuun
dialogue/0020-school-life-help-from-a-classmate.php
1530
<!doctype html> <html class="no-js" lang=""> <head> <title>Zabuun - Learn Arabic for English Speakers</title> <meta name="description" content=""> <?php include $_SERVER['DOCUMENT_ROOT'].'/layout/head.php';?> </head> <body> <?php include $_SERVER['DOCUMENT_ROOT'].'/layout/ie8.php';?> <?php include $_SERVER['DOCUMENT_ROOT'].'/layout/header.php';?> <div class="content"> <?php include $_SERVER['DOCUMENT_ROOT'].'/layout/side.php';?> <div class="main"> <div class="location"> <p class="expandcollapse"> <a href="">Modern Standard Arabic (MSA)</a> | <a href="">Egyptian Colloquial Arabic (ECA)</a> | <a href="">Both</a> </p> </div> <!-- begin dialogue --> <h1>School Life</h1> <h2>Help from a Classmate</h2> <p> <b>A:</b> Excuse me, aren't you in my math class?<br> <b>B:</b> Yes, I think I am.<br> <b>A:</b> Could I please borrow your notes?<br> <b>B:</b> Sure, but please hurry with them.<br> <b>A:</b> I will. I only want to look over them.<br> <b>B:</b> We went over some new material yesterday.<br> <b>A:</b> Was it very difficult?<br> <b>B:</b> It was not too different from what we've learned.<br> <b>A:</b> Did the teacher mention the test?<br> <b>B:</b> The test will be next week.<br> <b>A:</b> Thank you for your help!<br> <b>B:</b> Sure, good luck studying. </p> <!-- end dialogue --> </div> </div> <?php include $_SERVER['DOCUMENT_ROOT'].'/layout/footer.php';?> <?php include $_SERVER['DOCUMENT_ROOT'].'/layout/scripts.php';?> </body> </html>
mit
alberthw/ruby
models/models.go
942
package models import ( "github.com/astaxie/beego" "github.com/astaxie/beego/orm" _ "github.com/lib/pq" _ "github.com/mattn/go-sqlite3" ) func init() { initSqlite() //initPostgresql() orm.RegisterModel(new(Rubyconfig), new(Command), new(Reposetting), new(Filerepo), new(Devicesystemconfig), new(Devicehardwareconfig), new(Devicesoftwareconfig), new(DeviceLog)) createTables() } func initSqlite() { beego.Info("sqlite") orm.Debug = false orm.RegisterDriver("sqlite3", orm.DRSqlite) orm.RegisterDataBase("default", "sqlite3", "data.db", 30) } func initPostgresql() { beego.Info("Postgresql") orm.Debug = false orm.RegisterDriver("postgres", orm.DRPostgres) connstr := "user=postgres password=123456 dbname=ruby sslmode=disable" orm.RegisterDataBase("default", "postgres", connstr) } func createTables() error { name := "default" force := false verbose := true err := orm.RunSyncdb(name, force, verbose) return err }
mit
mdzurenko/just-gestures
JustGestures/ControlItems/UC_TP_doubleBtn.Designer.cs
9264
namespace JustGestures.ControlItems { partial class UC_TP_doubleBtn { /// <summary> /// Required designer variable. /// </summary> private System.ComponentModel.IContainer components = null; /// <summary> /// Clean up any resources being used. /// </summary> /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param> protected override void Dispose(bool disposing) { if (disposing && (components != null)) { components.Dispose(); } base.Dispose(disposing); } #region Component Designer generated code /// <summary> /// Required method for Designer support - do not modify /// the contents of this method with the code editor. /// </summary> private void InitializeComponent() { this.rTB_notes = new System.Windows.Forms.RichTextBox(); this.lbl_executeBtn = new System.Windows.Forms.Label(); this.lbl_holdBtn = new System.Windows.Forms.Label(); this.cB_executeBtn = new System.Windows.Forms.ComboBox(); this.cB_holdBtn = new System.Windows.Forms.ComboBox(); this.panel1 = new System.Windows.Forms.Panel(); this.rTB_clickExecuteBtn = new System.Windows.Forms.RichTextBox(); this.rTB_pushHoldBtn = new System.Windows.Forms.RichTextBox(); this.lV_buttonMatchedGestures = new System.Windows.Forms.ListView(); this.cH_associatedActions = new System.Windows.Forms.ColumnHeader(); this.cH_group = new System.Windows.Forms.ColumnHeader(); this.panel1.SuspendLayout(); this.SuspendLayout(); // // rTB_notes // this.rTB_notes.BackColor = System.Drawing.Color.White; this.rTB_notes.BorderStyle = System.Windows.Forms.BorderStyle.None; this.rTB_notes.Location = new System.Drawing.Point(15, 154); this.rTB_notes.Name = "rTB_notes"; this.rTB_notes.ReadOnly = true; this.rTB_notes.Size = new System.Drawing.Size(168, 78); this.rTB_notes.TabIndex = 7; this.rTB_notes.Text = "Note:\n- Hold down the Modifier to show notification \n- Release the Trigger first " + "to cancel the action\n"; // // lbl_executeBtn // this.lbl_executeBtn.AutoSize = true; 
this.lbl_executeBtn.Location = new System.Drawing.Point(12, 82); this.lbl_executeBtn.Name = "lbl_executeBtn"; this.lbl_executeBtn.Size = new System.Drawing.Size(78, 13); this.lbl_executeBtn.TabIndex = 3; this.lbl_executeBtn.Text = "Modifier Button"; // // lbl_holdBtn // this.lbl_holdBtn.AutoSize = true; this.lbl_holdBtn.Location = new System.Drawing.Point(12, 7); this.lbl_holdBtn.Name = "lbl_holdBtn"; this.lbl_holdBtn.Size = new System.Drawing.Size(74, 13); this.lbl_holdBtn.TabIndex = 2; this.lbl_holdBtn.Text = "Trigger Button"; // // cB_executeBtn // this.cB_executeBtn.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList; this.cB_executeBtn.FormattingEnabled = true; this.cB_executeBtn.Location = new System.Drawing.Point(15, 98); this.cB_executeBtn.Margin = new System.Windows.Forms.Padding(0); this.cB_executeBtn.Name = "cB_executeBtn"; this.cB_executeBtn.Size = new System.Drawing.Size(154, 21); this.cB_executeBtn.TabIndex = 1; this.cB_executeBtn.SelectedIndexChanged += new System.EventHandler(this.comboBox_SelectedIndexChanged); // // cB_holdBtn // this.cB_holdBtn.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList; this.cB_holdBtn.FormattingEnabled = true; this.cB_holdBtn.Location = new System.Drawing.Point(15, 23); this.cB_holdBtn.Name = "cB_holdBtn"; this.cB_holdBtn.Size = new System.Drawing.Size(154, 21); this.cB_holdBtn.TabIndex = 0; this.cB_holdBtn.SelectedIndexChanged += new System.EventHandler(this.comboBox_SelectedIndexChanged); // // panel1 // this.panel1.BackColor = System.Drawing.Color.White; this.panel1.Controls.Add(this.rTB_clickExecuteBtn); this.panel1.Controls.Add(this.rTB_pushHoldBtn); this.panel1.Controls.Add(this.lbl_holdBtn); this.panel1.Controls.Add(this.cB_holdBtn); this.panel1.Controls.Add(this.cB_executeBtn); this.panel1.Controls.Add(this.rTB_notes); this.panel1.Controls.Add(this.lbl_executeBtn); this.panel1.Dock = System.Windows.Forms.DockStyle.Top; this.panel1.Location = new System.Drawing.Point(0, 0); 
this.panel1.Name = "panel1"; this.panel1.Size = new System.Drawing.Size(195, 238); this.panel1.TabIndex = 9; // // rTB_clickExecuteBtn // this.rTB_clickExecuteBtn.BackColor = System.Drawing.Color.White; this.rTB_clickExecuteBtn.BorderStyle = System.Windows.Forms.BorderStyle.None; this.rTB_clickExecuteBtn.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.rTB_clickExecuteBtn.Location = new System.Drawing.Point(15, 122); this.rTB_clickExecuteBtn.Name = "rTB_clickExecuteBtn"; this.rTB_clickExecuteBtn.ReadOnly = true; this.rTB_clickExecuteBtn.Size = new System.Drawing.Size(168, 30); this.rTB_clickExecuteBtn.TabIndex = 14; this.rTB_clickExecuteBtn.Text = "2.) Click to invoke the action"; // // rTB_pushHoldBtn // this.rTB_pushHoldBtn.BackColor = System.Drawing.Color.White; this.rTB_pushHoldBtn.BorderStyle = System.Windows.Forms.BorderStyle.None; this.rTB_pushHoldBtn.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.rTB_pushHoldBtn.Location = new System.Drawing.Point(15, 48); this.rTB_pushHoldBtn.Name = "rTB_pushHoldBtn"; this.rTB_pushHoldBtn.ReadOnly = true; this.rTB_pushHoldBtn.Size = new System.Drawing.Size(168, 30); this.rTB_pushHoldBtn.TabIndex = 13; this.rTB_pushHoldBtn.Text = "1.) 
Push and hold this button"; // // lV_buttonMatchedGestures // this.lV_buttonMatchedGestures.Columns.AddRange(new System.Windows.Forms.ColumnHeader[] { this.cH_associatedActions, this.cH_group}); this.lV_buttonMatchedGestures.Dock = System.Windows.Forms.DockStyle.Fill; this.lV_buttonMatchedGestures.Location = new System.Drawing.Point(0, 238); this.lV_buttonMatchedGestures.Name = "lV_buttonMatchedGestures"; this.lV_buttonMatchedGestures.Size = new System.Drawing.Size(195, 155); this.lV_buttonMatchedGestures.TabIndex = 10; this.lV_buttonMatchedGestures.UseCompatibleStateImageBehavior = false; this.lV_buttonMatchedGestures.View = System.Windows.Forms.View.Details; // // cH_associatedActions // this.cH_associatedActions.Text = "Associated Actions"; this.cH_associatedActions.Width = 113; // // cH_group // this.cH_group.Text = "Group"; this.cH_group.Width = 72; // // UC_TP_doubleBtn // this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F); this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; this.Controls.Add(this.lV_buttonMatchedGestures); this.Controls.Add(this.panel1); this.Name = "UC_TP_doubleBtn"; this.Size = new System.Drawing.Size(195, 393); this.panel1.ResumeLayout(false); this.panel1.PerformLayout(); this.ResumeLayout(false); } #endregion private System.Windows.Forms.RichTextBox rTB_notes; private System.Windows.Forms.Label lbl_executeBtn; private System.Windows.Forms.Label lbl_holdBtn; private System.Windows.Forms.ComboBox cB_executeBtn; private System.Windows.Forms.ComboBox cB_holdBtn; private System.Windows.Forms.Panel panel1; private System.Windows.Forms.ListView lV_buttonMatchedGestures; private System.Windows.Forms.ColumnHeader cH_associatedActions; private System.Windows.Forms.ColumnHeader cH_group; private System.Windows.Forms.RichTextBox rTB_clickExecuteBtn; private System.Windows.Forms.RichTextBox rTB_pushHoldBtn; } }
mit
rigregs/ez-rules
ez-rules-core/src/test/java/com/opnitech/rules/core/test/engine/test_workflow/exchanges/Exchange2.java
421
package com.opnitech.rules.core.test.engine.test_workflow.exchanges; /** * @author Rigre Gregorio Garciandia Sonora */ public class Exchange2 { private String value; public Exchange2(String value) { this.value = value; // Default constructor } public String getValue() { return this.value; } public void setValue(String value) { this.value = value; } }
mit
nithinvnath/PAVProject
com.ibm.wala.core/src/com/ibm/wala/util/ref/CacheReference.java
1898
/******************************************************************************* * Copyright (c) 2002 - 2006 IBM Corporation. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation *******************************************************************************/ package com.ibm.wala.util.ref; import java.lang.ref.SoftReference; import java.lang.ref.WeakReference; import com.ibm.wala.util.debug.Assertions; /** * A factory for References ... useful for debugging. */ public final class CacheReference { private final static byte SOFT = 0; private final static byte WEAK = 1; private final static byte HARD = 2; // should be SOFT except during debugging. private final static byte choice = SOFT; public final static Object make(final Object referent) { switch (choice) { case SOFT: return new SoftReference<Object>(referent); case WEAK: return new WeakReference<Object>(referent); case HARD: return referent; default: Assertions.UNREACHABLE(); return null; } } public final static Object get(final Object reference) throws IllegalArgumentException { if (reference == null) { return null; } switch (choice) { case SOFT: if (!(reference instanceof java.lang.ref.SoftReference)) { throw new IllegalArgumentException("not ( reference instanceof java.lang.ref.SoftReference ) "); } return ((SoftReference) reference).get(); case WEAK: return ((WeakReference) reference).get(); case HARD: return reference; default: Assertions.UNREACHABLE(); return null; } } }
mit
ShaneStevenLei/laravel5-file-uploader
src/config/resumable/resumable.js
37330
/* * MIT Licensed * http://www.23developer.com/opensource * http://github.com/23/resumable.js * Steffen Tiedemann Christensen, [email protected] */ (function(){ "use strict"; var Resumable = function(opts){ if ( !(this instanceof Resumable) ) { return new Resumable(opts); } this.version = 1.0; // SUPPORTED BY BROWSER? // Check if these features are support by the browser: // - File object type // - Blob object type // - FileList object type // - slicing files this.support = ( (typeof(File)!=='undefined') && (typeof(Blob)!=='undefined') && (typeof(FileList)!=='undefined') && (!!Blob.prototype.webkitSlice||!!Blob.prototype.mozSlice||!!Blob.prototype.slice||false) ); if(!this.support) return(false); // PROPERTIES var $ = this; $.files = []; $.defaults = { chunkSize:1*1024*1024, forceChunkSize:false, simultaneousUploads:3, fileParameterName:'file', throttleProgressCallbacks:0.5, query:{}, headers:{}, preprocess:null, method:'multipart', uploadMethod: 'POST', testMethod: 'GET', prioritizeFirstAndLastChunk:false, target:'/', parameterNamespace:'', testChunks:true, generateUniqueIdentifier:null, getTarget:null, maxChunkRetries:undefined, chunkRetryInterval:undefined, permanentErrors:[400, 404, 415, 500, 501], maxFiles:undefined, withCredentials:false, xhrTimeout:0, maxFilesErrorCallback:function (files, errorCount) { var maxFiles = $.getOpt('maxFiles'); alert('Please upload ' + maxFiles + ' file' + (maxFiles === 1 ? 
'' : 's') + ' at a time.'); }, minFileSize:1, minFileSizeErrorCallback:function(file, errorCount) { alert(file.fileName||file.name +' is too small, please upload files larger than ' + $h.formatSize($.getOpt('minFileSize')) + '.'); }, maxFileSize:undefined, maxFileSizeErrorCallback:function(file, errorCount) { alert(file.fileName||file.name +' is too large, please upload files less than ' + $h.formatSize($.getOpt('maxFileSize')) + '.'); }, fileType: [], fileTypeErrorCallback: function(file, errorCount) { alert(file.fileName||file.name +' has type not allowed, please upload files of type ' + $.getOpt('fileType') + '.'); } }; $.opts = opts||{}; $.getOpt = function(o) { var $opt = this; // Get multiple option if passed an array if(o instanceof Array) { var options = {}; $h.each(o, function(option){ options[option] = $opt.getOpt(option); }); return options; } // Otherwise, just return a simple option if ($opt instanceof ResumableChunk) { if (typeof $opt.opts[o] !== 'undefined') { return $opt.opts[o]; } else { $opt = $opt.fileObj; } } if ($opt instanceof ResumableFile) { if (typeof $opt.opts[o] !== 'undefined') { return $opt.opts[o]; } else { $opt = $opt.resumableObj; } } if ($opt instanceof Resumable) { if (typeof $opt.opts[o] !== 'undefined') { return $opt.opts[o]; } else { return $opt.defaults[o]; } } }; // EVENTS // catchAll(event, ...) 
// fileSuccess(file), fileProgress(file), fileAdded(file, event), fileRetry(file), fileError(file, message), // complete(), progress(), error(message, file), pause() $.events = []; $.on = function(event,callback){ $.events.push(event.toLowerCase(), callback); }; $.fire = function(){ // `arguments` is an object, not array, in FF, so: var args = []; for (var i=0; i<arguments.length; i++) args.push(arguments[i]); // Find event listeners, and support pseudo-event `catchAll` var event = args[0].toLowerCase(); for (var i=0; i<=$.events.length; i+=2) { if($.events[i]==event) $.events[i+1].apply($,args.slice(1)); if($.events[i]=='catchall') $.events[i+1].apply(null,args); } if(event=='fileerror') $.fire('error', args[2], args[1]); if(event=='fileprogress') $.fire('progress'); }; // INTERNAL HELPER METHODS (handy, but ultimately not part of uploading) var $h = { stopEvent: function(e){ e.stopPropagation(); e.preventDefault(); }, each: function(o,callback){ if(typeof(o.length)!=='undefined') { for (var i=0; i<o.length; i++) { // Array or FileList if(callback(o[i])===false) return; } } else { for (i in o) { // Object if(callback(i,o[i])===false) return; } } }, generateUniqueIdentifier:function(file){ var custom = $.getOpt('generateUniqueIdentifier'); if(typeof custom === 'function') { return custom(file); } var relativePath = file.webkitRelativePath||file.fileName||file.name; // Some confusion in different versions of Firefox var size = file.size; return(size + '-' + relativePath.replace(/[^0-9a-zA-Z_-]/img, '')); }, contains:function(array,test) { var result = false; $h.each(array, function(value) { if (value == test) { result = true; return false; } return true; }); return result; }, formatSize:function(size){ if(size<1024) { return size + ' bytes'; } else if(size<1024*1024) { return (size/1024.0).toFixed(0) + ' KB'; } else if(size<1024*1024*1024) { return (size/1024.0/1024.0).toFixed(1) + ' MB'; } else { return (size/1024.0/1024.0/1024.0).toFixed(1) + ' GB'; } }, 
getTarget:function(params){ var target = $.getOpt('target'); if(typeof target === 'function') { return target(params); } if(target.indexOf('?') < 0) { target += '?'; } else { target += '&'; } return target + params.join('&'); } }; var onDrop = function(event){ $h.stopEvent(event); //handle dropped things as items if we can (this lets us deal with folders nicer in some cases) if (event.dataTransfer && event.dataTransfer.items) { loadFiles(event.dataTransfer.items, event); } //else handle them as files else if (event.dataTransfer && event.dataTransfer.files) { loadFiles(event.dataTransfer.files, event); } }; var preventDefault = function(e) { e.preventDefault(); }; // INTERNAL METHODS (both handy and responsible for the heavy load) /** * @summary This function loops over the files passed in from a drag and drop operation and gets them ready for appendFilesFromFileList * It attempts to use FileSystem API calls to extract files and subfolders if the dropped items include folders * That capability is only currently available in Chrome, but if it isn't available it will just pass the items along to * appendFilesFromFileList (via enqueueFileAddition to help with asynchronous processing.) 
* @param files {Array} - the File or Entry objects to be processed depending on your browser support * @param event {Object} - the drop event object * @param [queue] {Object} - an object to keep track of our progress processing the dropped items * @param [path] {String} - the relative path from the originally selected folder to the current files if extracting files from subfolders */ var loadFiles = function (files, event, queue, path){ //initialize the queue object if it doesn't exist if (!queue) { queue = { total: 0, files: [], event: event }; } //update the total number of things we plan to process updateQueueTotal(files.length, queue); //loop over all the passed in objects checking if they are files or folders for (var i = 0; i < files.length; i++) { var file = files[i]; var entry, reader; if (file.isFile || file.isDirectory) { //this is an object we can handle below with no extra work needed up front entry = file; } else if (file.getAsEntry) { //get the file as an entry object if we can using the proposed HTML5 api (unlikely to get implemented by anyone) entry = file.getAsEntry(); } else if (file.webkitGetAsEntry) { //get the file as an entry object if we can using the Chrome specific webkit implementation entry = file.webkitGetAsEntry(); } else if (typeof file.getAsFile === 'function') { //if this is still a DataTransferItem object, get it as a file object enqueueFileAddition(file.getAsFile(), queue, path); //we just added this file object to the queue so we can go to the next object in the loop and skip the processing below continue; } else if (File && file instanceof File) { //this is already a file object so just queue it up and move on enqueueFileAddition(file, queue, path); //we just added this file object to the queue so we can go to the next object in the loop and skip the processing below continue; } else { //we can't do anything with this object, decrement the expected total and skip the processing below updateQueueTotal(-1, queue); continue; } if 
(!entry) { //there isn't anything we can do with this so decrement the total expected updateQueueTotal(-1, queue); } else if (entry.isFile) { //this is handling to read an entry object representing a file, parsing the file object is asynchronous which is why we need the queue //currently entry objects will only exist in this flow for Chrome entry.file(function(file) { enqueueFileAddition(file, queue, path); }, function(err) { console.warn(err); }); } else if (entry.isDirectory) { //this is handling to read an entry object representing a folder, parsing the directory object is asynchronous which is why we need the queue //currently entry objects will only exist in this flow for Chrome reader = entry.createReader(); var newEntries = []; //wrap the callback in another function so we can store the path in a closure var readDir = function(path){ reader.readEntries( //success callback: read entries out of the directory function(entries){ if (entries.length>0){ //add these results to the array of all the new stuff for (var i=0; i<entries.length; i++) { newEntries.push(entries[i]); } //call this function again as all the results may not have been sent yet readDir(entry.fullPath); } else { //we have now gotten all the results in newEntries so let's process them recursively loadFiles(newEntries, event, queue, path); //this was a directory rather than a file so decrement the expected file count updateQueueTotal(-1, queue); } }, //error callback, most often hit if there is a directory with nothing inside it function(err) { //this was a directory rather than a file so decrement the expected file count updateQueueTotal(-1, queue); console.warn(err); } ); }; readDir(entry.fullPath); } } }; /** * @summary Adjust the total number of files we are expecting to process * if decrementing and the new expected total is equal to the number processed, flush the queue * @param addition {Number} - the number of additional files we expect to process (may be negative) * @param queue {Object} - 
an object to keep track of our progress processing the dropped items */ var updateQueueTotal = function(addition, queue){ queue.total += addition; // If all the files we expect have shown up, then flush the queue. if (queue.files.length === queue.total) { appendFilesFromFileList(queue.files, queue.event); } }; /** * @summary Add a file to the queue of processed files, if it brings the total up to the expected total, flush the queue * @param file {Object} - File object to be passed along to appendFilesFromFileList eventually * @param queue {Object} - an object to keep track of our progress processing the dropped items * @param [path] {String} - the file's relative path from the originally dropped folder if we are parsing folder content (Chrome only for now) */ var enqueueFileAddition = function(file, queue, path) { //store the path to this file if it came in as part of a folder if (path) file.relativePath = path + '/' + file.name; queue.files.push(file); // If all the files we expect have shown up, then flush the queue. if (queue.files.length === queue.total) { appendFilesFromFileList(queue.files, queue.event); } }; var appendFilesFromFileList = function(fileList, event){ // check for uploading too many files var errorCount = 0; var o = $.getOpt(['maxFiles', 'minFileSize', 'maxFileSize', 'maxFilesErrorCallback', 'minFileSizeErrorCallback', 'maxFileSizeErrorCallback', 'fileType', 'fileTypeErrorCallback']); if (typeof(o.maxFiles)!=='undefined' && o.maxFiles<(fileList.length+$.files.length)) { // if single-file upload, file is already added, and trying to add 1 new file, simply replace the already-added file if (o.maxFiles===1 && $.files.length===1 && fileList.length===1) { $.removeFile($.files[0]); } else { o.maxFilesErrorCallback(fileList, errorCount++); return false; } } var files = []; $h.each(fileList, function(file){ var fileName = file.name; if(o.fileType.length > 0){ var fileTypeFound = false; for(var index in o.fileType){ var extension = '.' 
+ o.fileType[index]; if(fileName.indexOf(extension, fileName.length - extension.length) !== -1){ fileTypeFound = true; break; } } if (!fileTypeFound) { o.fileTypeErrorCallback(file, errorCount++); return false; } } if (typeof(o.minFileSize)!=='undefined' && file.size<o.minFileSize) { o.minFileSizeErrorCallback(file, errorCount++); return false; } if (typeof(o.maxFileSize)!=='undefined' && file.size>o.maxFileSize) { o.maxFileSizeErrorCallback(file, errorCount++); return false; } function addFile(uniqueIdentifier){ if (!$.getFromUniqueIdentifier(uniqueIdentifier)) {(function(){ file.uniqueIdentifier = uniqueIdentifier; var f = new ResumableFile($, file, uniqueIdentifier); $.files.push(f); files.push(f); f.container = (typeof event != 'undefined' ? event.srcElement : null); window.setTimeout(function(){ $.fire('fileAdded', f, event) },0); })()}; } // directories have size == 0 var uniqueIdentifier = $h.generateUniqueIdentifier(file) if(uniqueIdentifier && typeof uniqueIdentifier.done === 'function' && typeof uniqueIdentifier.fail === 'function'){ uniqueIdentifier .done(function(uniqueIdentifier){ addFile(uniqueIdentifier); }) .fail(function(){ addFile(); }); }else{ addFile(uniqueIdentifier); } }); window.setTimeout(function(){ $.fire('filesAdded', files) },0); }; // INTERNAL OBJECT TYPES function ResumableFile(resumableObj, file, uniqueIdentifier){ var $ = this; $.opts = {}; $.getOpt = resumableObj.getOpt; $._prevProgress = 0; $.resumableObj = resumableObj; $.file = file; $.fileName = file.fileName||file.name; // Some confusion in different versions of Firefox $.size = file.size; $.relativePath = file.webkitRelativePath || file.relativePath || $.fileName; $.uniqueIdentifier = uniqueIdentifier; $._pause = false; $.container = ''; var _error = uniqueIdentifier !== undefined; // Callback when something happens within the chunk var chunkEvent = function(event, message){ // event can be 'progress', 'success', 'error' or 'retry' switch(event){ case 'progress': 
$.resumableObj.fire('fileProgress', $); break; case 'error': $.abort(); _error = true; $.chunks = []; $.resumableObj.fire('fileError', $, message); break; case 'success': if(_error) return; $.resumableObj.fire('fileProgress', $); // it's at least progress if($.isComplete()) { $.resumableObj.fire('fileSuccess', $, message); } break; case 'retry': $.resumableObj.fire('fileRetry', $); break; } }; // Main code to set up a file object with chunks, // packaged to be able to handle retries if needed. $.chunks = []; $.abort = function(){ // Stop current uploads var abortCount = 0; $h.each($.chunks, function(c){ if(c.status()=='uploading') { c.abort(); abortCount++; } }); if(abortCount>0) $.resumableObj.fire('fileProgress', $); }; $.cancel = function(){ // Reset this file to be void var _chunks = $.chunks; $.chunks = []; // Stop current uploads $h.each(_chunks, function(c){ if(c.status()=='uploading') { c.abort(); $.resumableObj.uploadNextChunk(); } }); $.resumableObj.removeFile($); $.resumableObj.fire('fileProgress', $); }; $.retry = function(){ $.bootstrap(); var firedRetry = false; $.resumableObj.on('chunkingComplete', function(){ if(!firedRetry) $.resumableObj.upload(); firedRetry = true; }); }; $.bootstrap = function(){ $.abort(); _error = false; // Rebuild stack of chunks from file $.chunks = []; $._prevProgress = 0; var round = $.getOpt('forceChunkSize') ? 
Math.ceil : Math.floor; var maxOffset = Math.max(round($.file.size/$.getOpt('chunkSize')),1); for (var offset=0; offset<maxOffset; offset++) {(function(offset){ window.setTimeout(function(){ $.chunks.push(new ResumableChunk($.resumableObj, $, offset, chunkEvent)); $.resumableObj.fire('chunkingProgress',$,offset/maxOffset); },0); })(offset)} window.setTimeout(function(){ $.resumableObj.fire('chunkingComplete',$); },0); }; $.progress = function(){ if(_error) return(1); // Sum up progress across everything var ret = 0; var error = false; $h.each($.chunks, function(c){ if(c.status()=='error') error = true; ret += c.progress(true); // get chunk progress relative to entire file }); ret = (error ? 1 : (ret>0.99999 ? 1 : ret)); ret = Math.max($._prevProgress, ret); // We don't want to lose percentages when an upload is paused $._prevProgress = ret; return(ret); }; $.isUploading = function(){ var uploading = false; $h.each($.chunks, function(chunk){ if(chunk.status()=='uploading') { uploading = true; return(false); } }); return(uploading); }; $.isComplete = function(){ var outstanding = false; $h.each($.chunks, function(chunk){ var status = chunk.status(); if(status=='pending' || status=='uploading' || chunk.preprocessState === 1) { outstanding = true; return(false); } }); return(!outstanding); }; $.pause = function(pause){ if(typeof(pause)==='undefined'){ $._pause = ($._pause ? 
false : true); }else{ $._pause = pause; } }; $.isPaused = function() { return $._pause; }; // Bootstrap and return $.resumableObj.fire('chunkingStart', $); $.bootstrap(); return(this); } function ResumableChunk(resumableObj, fileObj, offset, callback){ var $ = this; $.opts = {}; $.getOpt = resumableObj.getOpt; $.resumableObj = resumableObj; $.fileObj = fileObj; $.fileObjSize = fileObj.size; $.fileObjType = fileObj.file.type; $.offset = offset; $.callback = callback; $.lastProgressCallback = (new Date); $.tested = false; $.retries = 0; $.pendingRetry = false; $.preprocessState = 0; // 0 = unprocessed, 1 = processing, 2 = finished // Computed properties var chunkSize = $.getOpt('chunkSize'); $.loaded = 0; $.startByte = $.offset*chunkSize; $.endByte = Math.min($.fileObjSize, ($.offset+1)*chunkSize); if ($.fileObjSize-$.endByte < chunkSize && !$.getOpt('forceChunkSize')) { // The last chunk will be bigger than the chunk size, but less than 2*chunkSize $.endByte = $.fileObjSize; } $.xhr = null; // test() makes a GET request without any data to see if the chunk has already been uploaded in a previous session $.test = function(){ // Set up request and listen for event $.xhr = new XMLHttpRequest(); var testHandler = function(e){ $.tested = true; var status = $.status(); if(status=='success') { $.callback(status, $.message()); $.resumableObj.uploadNextChunk(); } else { $.send(); } }; $.xhr.addEventListener('load', testHandler, false); $.xhr.addEventListener('error', testHandler, false); $.xhr.addEventListener('timeout', testHandler, false); // Add data from the query options var params = []; var parameterNamespace = $.getOpt('parameterNamespace'); var customQuery = $.getOpt('query'); if(typeof customQuery == 'function') customQuery = customQuery($.fileObj, $); $h.each(customQuery, function(k,v){ params.push([encodeURIComponent(parameterNamespace+k), encodeURIComponent(v)].join('=')); }); // Add extra data to identify chunk 
params.push([parameterNamespace+'resumableChunkNumber', encodeURIComponent($.offset+1)].join('=')); params.push([parameterNamespace+'resumableChunkSize', encodeURIComponent($.getOpt('chunkSize'))].join('=')); params.push([parameterNamespace+'resumableCurrentChunkSize', encodeURIComponent($.endByte - $.startByte)].join('=')); params.push([parameterNamespace+'resumableTotalSize', encodeURIComponent($.fileObjSize)].join('=')); params.push([parameterNamespace+'resumableType', encodeURIComponent($.fileObjType)].join('=')); params.push([parameterNamespace+'resumableIdentifier', encodeURIComponent($.fileObj.uniqueIdentifier)].join('=')); params.push([parameterNamespace+'resumableFilename', encodeURIComponent($.fileObj.fileName)].join('=')); params.push([parameterNamespace+'resumableRelativePath', encodeURIComponent($.fileObj.relativePath)].join('=')); params.push([parameterNamespace+'resumableTotalChunks', encodeURIComponent($.fileObj.chunks.length)].join('=')); // Append the relevant chunk and send it $.xhr.open($.getOpt('testMethod'), $h.getTarget(params)); $.xhr.timeout = $.getOpt('xhrTimeout'); $.xhr.withCredentials = $.getOpt('withCredentials'); // Add data from header options $h.each($.getOpt('headers'), function(k,v) { $.xhr.setRequestHeader(k, v); }); $.xhr.send(null); }; $.preprocessFinished = function(){ $.preprocessState = 2; $.send(); }; // send() uploads the actual data in a POST call $.send = function(){ var preprocess = $.getOpt('preprocess'); if(typeof preprocess === 'function') { switch($.preprocessState) { case 0: $.preprocessState = 1; preprocess($); return; case 1: return; case 2: break; } } if($.getOpt('testChunks') && !$.tested) { $.test(); return; } // Set up request and listen for event $.xhr = new XMLHttpRequest(); // Progress $.xhr.upload.addEventListener('progress', function(e){ if( (new Date) - $.lastProgressCallback > $.getOpt('throttleProgressCallbacks') * 1000 ) { $.callback('progress'); $.lastProgressCallback = (new Date); } 
$.loaded=e.loaded||0; }, false); $.loaded = 0; $.pendingRetry = false; $.callback('progress'); // Done (either done, failed or retry) var doneHandler = function(e){ var status = $.status(); if(status=='success'||status=='error') { $.callback(status, $.message()); $.resumableObj.uploadNextChunk(); } else { $.callback('retry', $.message()); $.abort(); $.retries++; var retryInterval = $.getOpt('chunkRetryInterval'); if(retryInterval !== undefined) { $.pendingRetry = true; setTimeout($.send, retryInterval); } else { $.send(); } } }; $.xhr.addEventListener('load', doneHandler, false); $.xhr.addEventListener('error', doneHandler, false); $.xhr.addEventListener('timeout', doneHandler, false); // Set up the basic query data from Resumable var query = { resumableChunkNumber: $.offset+1, resumableChunkSize: $.getOpt('chunkSize'), resumableCurrentChunkSize: $.endByte - $.startByte, resumableTotalSize: $.fileObjSize, resumableType: $.fileObjType, resumableIdentifier: $.fileObj.uniqueIdentifier, resumableFilename: $.fileObj.fileName, resumableRelativePath: $.fileObj.relativePath, resumableTotalChunks: $.fileObj.chunks.length }; // Mix in custom data var customQuery = $.getOpt('query'); if(typeof customQuery == 'function') customQuery = customQuery($.fileObj, $); $h.each(customQuery, function(k,v){ query[k] = v; }); var func = ($.fileObj.file.slice ? 'slice' : ($.fileObj.file.mozSlice ? 'mozSlice' : ($.fileObj.file.webkitSlice ? 
'webkitSlice' : 'slice'))), bytes = $.fileObj.file[func]($.startByte,$.endByte), data = null, target = $.getOpt('target'); var parameterNamespace = $.getOpt('parameterNamespace'); if ($.getOpt('method') === 'octet') { // Add data from the query options data = bytes; var params = []; $h.each(query, function(k,v){ params.push([encodeURIComponent(parameterNamespace+k), encodeURIComponent(v)].join('=')); }); target = $h.getTarget(params); } else { // Add data from the query options data = new FormData(); $h.each(query, function(k,v){ data.append(parameterNamespace+k,v); }); data.append(parameterNamespace+$.getOpt('fileParameterName'), bytes); } var method = $.getOpt('uploadMethod'); $.xhr.open(method, target); if ($.getOpt('method') === 'octet') { $.xhr.setRequestHeader('Content-Type', 'binary/octet-stream'); } $.xhr.timeout = $.getOpt('xhrTimeout'); $.xhr.withCredentials = $.getOpt('withCredentials'); // Add data from header options $h.each($.getOpt('headers'), function(k,v) { $.xhr.setRequestHeader(k, v); }); $.xhr.send(data); }; $.abort = function(){ // Abort and reset if($.xhr) $.xhr.abort(); $.xhr = null; }; $.status = function(){ // Returns: 'pending', 'uploading', 'success', 'error' if($.pendingRetry) { // if pending retry then that's effectively the same as actively uploading, // there might just be a slight delay before the retry starts return('uploading'); } else if(!$.xhr) { return('pending'); } else if($.xhr.readyState<4) { // Status is really 'OPENED', 'HEADERS_RECEIVED' or 'LOADING' - meaning that stuff is happening return('uploading'); } else { if($.xhr.status == 200 || $.xhr.status == 201) { // HTTP 200 or 201 (created) perfect return('success'); } else if($h.contains($.getOpt('permanentErrors'), $.xhr.status) || $.retries >= $.getOpt('maxChunkRetries')) { // HTTP 415/500/501, permanent error return('error'); } else { // this should never happen, but we'll reset and queue a retry // a likely case for this would be 503 service unavailable $.abort(); 
return('pending'); } } }; $.message = function(){ return($.xhr ? $.xhr.responseText : ''); }; $.progress = function(relative){ if(typeof(relative)==='undefined') relative = false; var factor = (relative ? ($.endByte-$.startByte)/$.fileObjSize : 1); if($.pendingRetry) return(0); var s = $.status(); switch(s){ case 'success': case 'error': return(1*factor); case 'pending': return(0*factor); default: return($.loaded/($.endByte-$.startByte)*factor); } }; return(this); } // QUEUE $.uploadNextChunk = function(){ var found = false; // In some cases (such as videos) it's really handy to upload the first // and last chunk of a file quickly; this let's the server check the file's // metadata and determine if there's even a point in continuing. if ($.getOpt('prioritizeFirstAndLastChunk')) { $h.each($.files, function(file){ if(file.chunks.length && file.chunks[0].status()=='pending' && file.chunks[0].preprocessState === 0) { file.chunks[0].send(); found = true; return(false); } if(file.chunks.length>1 && file.chunks[file.chunks.length-1].status()=='pending' && file.chunks[file.chunks.length-1].preprocessState === 0) { file.chunks[file.chunks.length-1].send(); found = true; return(false); } }); if(found) return(true); } // Now, simply look for the next, best thing to upload $h.each($.files, function(file){ if(file.isPaused()===false){ $h.each(file.chunks, function(chunk){ if(chunk.status()=='pending' && chunk.preprocessState === 0) { chunk.send(); found = true; return(false); } }); } if(found) return(false); }); if(found) return(true); // The are no more outstanding chunks to upload, check is everything is done var outstanding = false; $h.each($.files, function(file){ if(!file.isComplete()) { outstanding = true; return(false); } }); if(!outstanding) { // All chunks have been uploaded, complete $.fire('complete'); } return(false); }; // PUBLIC METHODS FOR RESUMABLE.JS $.assignBrowse = function(domNodes, isDirectory){ if(typeof(domNodes.length)=='undefined') domNodes = 
[domNodes]; $h.each(domNodes, function(domNode) { var input; if(domNode.tagName==='INPUT' && domNode.type==='file'){ input = domNode; } else { input = document.createElement('input'); input.setAttribute('type', 'file'); input.style.display = 'none'; domNode.addEventListener('click', function(){ input.style.opacity = 0; input.style.display='block'; input.focus(); input.click(); input.style.display='none'; }, false); domNode.appendChild(input); } var maxFiles = $.getOpt('maxFiles'); if (typeof(maxFiles)==='undefined'||maxFiles!=1){ input.setAttribute('multiple', 'multiple'); } else { input.removeAttribute('multiple'); } if(isDirectory){ input.setAttribute('webkitdirectory', 'webkitdirectory'); } else { input.removeAttribute('webkitdirectory'); } // When new files are added, simply append them to the overall list input.addEventListener('change', function(e){ appendFilesFromFileList(e.target.files,e); e.target.value = ''; }, false); }); }; $.assignDrop = function(domNodes){ if(typeof(domNodes.length)=='undefined') domNodes = [domNodes]; $h.each(domNodes, function(domNode) { domNode.addEventListener('dragover', preventDefault, false); domNode.addEventListener('dragenter', preventDefault, false); domNode.addEventListener('drop', onDrop, false); }); }; $.unAssignDrop = function(domNodes) { if (typeof(domNodes.length) == 'undefined') domNodes = [domNodes]; $h.each(domNodes, function(domNode) { domNode.removeEventListener('dragover', preventDefault); domNode.removeEventListener('dragenter', preventDefault); domNode.removeEventListener('drop', onDrop); }); }; $.isUploading = function(){ var uploading = false; $h.each($.files, function(file){ if (file.isUploading()) { uploading = true; return(false); } }); return(uploading); }; $.upload = function(){ // Make sure we don't start too many uploads at once if($.isUploading()) return; // Kick off the queue $.fire('uploadStart'); for (var num=1; num<=$.getOpt('simultaneousUploads'); num++) { $.uploadNextChunk(); } }; $.pause = 
function(){ // Resume all chunks currently being uploaded $h.each($.files, function(file){ file.abort(); }); $.fire('pause'); }; $.cancel = function(){ for(var i = $.files.length - 1; i >= 0; i--) { $.files[i].cancel(); } $.fire('cancel'); }; $.progress = function(){ var totalDone = 0; var totalSize = 0; // Resume all chunks currently being uploaded $h.each($.files, function(file){ totalDone += file.progress()*file.size; totalSize += file.size; }); return(totalSize>0 ? totalDone/totalSize : 0); }; $.addFile = function(file, event){ appendFilesFromFileList([file], event); }; $.removeFile = function(file){ for(var i = $.files.length - 1; i >= 0; i--) { if($.files[i] === file) { $.files.splice(i, 1); } } }; $.getFromUniqueIdentifier = function(uniqueIdentifier){ var ret = false; $h.each($.files, function(f){ if(f.uniqueIdentifier==uniqueIdentifier) ret = f; }); return(ret); }; $.getSize = function(){ var totalSize = 0; $h.each($.files, function(file){ totalSize += file.size; }); return(totalSize); }; return(this); }; // Node.js-style export for Node and Component if (typeof module != 'undefined') { module.exports = Resumable; } else if (typeof define === "function" && define.amd) { // AMD/requirejs: Define the module define(function(){ return Resumable; }); } else { // Browser: Expose to window window.Resumable = Resumable; } })();
mit
uutils/coreutils
tests/by-util/test_relpath.rs
4414
use crate::common::util::*; use std::borrow::Cow; use std::path::Path; struct TestCase<'a> { from: &'a str, to: &'a str, expected: &'a str, } const TESTS: [TestCase; 10] = [ TestCase { from: "A/B/C", to: "A", expected: "../..", }, TestCase { from: "A/B/C", to: "A/B", expected: "..", }, TestCase { from: "A/B/C", to: "A/B/C", expected: "", }, TestCase { from: "A/B/C", to: "A/B/C/D", expected: "D", }, TestCase { from: "A/B/C", to: "A/B/C/D/E", expected: "D/E", }, TestCase { from: "A/B/C", to: "A/B/D", expected: "../D", }, TestCase { from: "A/B/C", to: "A/B/D/E", expected: "../D/E", }, TestCase { from: "A/B/C", to: "A/D", expected: "../../D", }, TestCase { from: "A/B/C", to: "D/E/F", expected: "../../../D/E/F", }, TestCase { from: "A/B/C", to: "A/D/E", expected: "../../D/E", }, ]; #[allow(clippy::needless_lifetimes)] fn convert_path<'a>(path: &'a str) -> Cow<'a, str> { #[cfg(windows)] return path.replace("/", "\\").into(); #[cfg(not(windows))] return path.into(); } #[test] fn test_relpath_with_from_no_d() { let scene = TestScenario::new(util_name!()); let at = &scene.fixtures; for test in &TESTS { let from: &str = &convert_path(test.from); let to: &str = &convert_path(test.to); let expected: &str = &convert_path(test.expected); at.mkdir_all(to); at.mkdir_all(from); scene .ucmd() .arg(to) .arg(from) .succeeds() .stdout_only(&format!("{}\n", expected)); } } #[test] fn test_relpath_with_from_with_d() { let scene = TestScenario::new(util_name!()); let at = &scene.fixtures; for test in &TESTS { let from: &str = &convert_path(test.from); let to: &str = &convert_path(test.to); let pwd = at.as_string(); at.mkdir_all(to); at.mkdir_all(from); // d is part of subpath -> expect relative path let mut _result_stdout = scene .ucmd() .arg(to) .arg(from) .arg(&format!("-d{}", pwd)) .succeeds() .stdout_move_str(); // relax rules for windows test environment #[cfg(not(windows))] assert!(Path::new(&_result_stdout).is_relative()); // d is not part of subpath -> expect absolute path 
_result_stdout = scene .ucmd() .arg(to) .arg(from) .arg("-dnon_existing") // spell-checker:disable-line .succeeds() .stdout_move_str(); assert!(Path::new(&_result_stdout).is_absolute()); } } #[test] fn test_relpath_no_from_no_d() { let scene = TestScenario::new(util_name!()); let at = &scene.fixtures; for test in &TESTS { let to: &str = &convert_path(test.to); at.mkdir_all(to); let _result_stdout = scene.ucmd().arg(to).succeeds().stdout_move_str(); #[cfg(not(windows))] assert_eq!(_result_stdout, format!("{}\n", to)); // relax rules for windows test environment #[cfg(windows)] assert!(_result_stdout.ends_with(&format!("{}\n", to))); } } #[test] fn test_relpath_no_from_with_d() { let scene = TestScenario::new(util_name!()); let at = &scene.fixtures; for test in &TESTS { let to: &str = &convert_path(test.to); let pwd = at.as_string(); at.mkdir_all(to); // d is part of subpath -> expect relative path let _result_stdout = scene .ucmd() .arg(to) .arg(&format!("-d{}", pwd)) .succeeds() .stdout_move_str(); // relax rules for windows test environment #[cfg(not(windows))] assert!(Path::new(&_result_stdout).is_relative()); // d is not part of subpath -> expect absolute path let result_stdout = scene .ucmd() .arg(to) .arg("-dnon_existing") // spell-checker:disable-line .succeeds() .stdout_move_str(); assert!(Path::new(&result_stdout).is_absolute()); } }
mit
wizawu/1c
@types/jdk/org.w3c.dom.html.HTMLLabelElement.d.ts
451
declare namespace org {
    namespace w3c {
        namespace dom {
            namespace html {
                /**
                 * Ambient declaration mirroring the JDK's
                 * `org.w3c.dom.html.HTMLLabelElement` interface (a form field
                 * label). Java-style accessor pairs are kept as-is because the
                 * declarations are generated from the Java API surface.
                 */
                interface HTMLLabelElement extends org.w3c.dom.html.HTMLElement {
                    /** The form this label belongs to, or its nearest enclosing form. */
                    getForm(): org.w3c.dom.html.HTMLFormElement
                    /** Single-character access key giving the label keyboard focus. */
                    getAccessKey(): java.lang.String
                    /** Setters accept either a Java string wrapper or a plain TS string. */
                    setAccessKey(arg0: java.lang.String | string): void
                    /** The `for` attribute: id of the control this label describes. */
                    getHtmlFor(): java.lang.String
                    setHtmlFor(arg0: java.lang.String | string): void
                }
            }
        }
    }
}
mit
TechnicalBro/CraftBuildTools
craftbuildtools/utils/__init__.py
2991
import fnmatch import os from urllib.parse import urlsplit, urlparse def save_python_script(script_folder, script_url): import requests if not os.path.exists(script_folder): os.makedirs(script_folder) script_name = get_filename(script_url) script_data = requests.get(script_url).text script_loc = os.path.join(script_folder, script_name) write_file(script_loc, script_data) if not os.path.exists(script_loc): raise FileNotFoundError("Unable to locate file %s after attempting to save it" % script_loc) return script_loc def is_url(url): return urlparse(url).scheme != "" def get_filename(url_or_path): if not is_url(url_or_path): if not os.path.exists(url_or_path): return None return "%s%s" % os.path.splitext(url_or_path) else: return "%s%s" % os.path.splitext(os.path.basename(urlsplit(url_or_path).path)) def get_file_extension(path): if is_url(path): return "%s" % os.path.splitext(os.path.basename(urlsplit(path).path))[1] else: if not os.path.exists(path): return None return "%s" % os.path.splitext(path)[1] def get_files_recursive(path, match='*.py'): matches = [] for root, dirnames, filenames in os.walk(path): for filename in fnmatch.filter(filenames, match): matches.append(os.path.join(root, filename)) return matches def get_config_from_file(file, trim_newlines=True): with open(file, 'r') as config_file: data = config_file.read() if trim_newlines: data = data.replace('\n', '') return data def write_file(file, data): with open(file, 'w') as data_file: data_file.write(data) class ChangeDir: def __init__(self, newPath): self.newPath = os.path.expanduser(newPath) # Change directory with the new path def __enter__(self): self.savedPath = os.getcwd() os.chdir(self.newPath) # Return back to previous directory def __exit__(self, etype, value, traceback): os.chdir(self.savedPath) class Map(dict): """ Example: m = Map({'first_name': 'Eduardo'}, last_name='Pool', age=24, sports=['Soccer']) """ def __init__(self, *args, **kwargs): super(Map, self).__init__(*args, **kwargs) for arg 
in args: if isinstance(arg, dict): for k, v in arg.items(): self[k] = v if kwargs: for k, v in kwargs.items(): self[k] = v def __getattr__(self, attr): return self.get(attr) def __setattr__(self, key, value): self.__setitem__(key, value) def __setitem__(self, key, value): super(Map, self).__setitem__(key, value) self.__dict__.update({key: value}) def __delattr__(self, item): self.__delitem__(item) def __delitem__(self, key): super(Map, self).__delitem__(key) del self.__dict__[key]
mit
guided1/virgin-symfony-test
src/Virgin/ChannelApiBundle/DependencyInjection/Configuration.php
890
<?php namespace Virgin\ChannelApiBundle\DependencyInjection; use Symfony\Component\Config\Definition\Builder\TreeBuilder; use Symfony\Component\Config\Definition\ConfigurationInterface; /** * This is the class that validates and merges configuration from your app/config files * * To learn more see {@link http://symfony.com/doc/current/cookbook/bundles/extension.html#cookbook-bundles-extension-config-class} */ class Configuration implements ConfigurationInterface { /** * {@inheritdoc} */ public function getConfigTreeBuilder() { $treeBuilder = new TreeBuilder(); $rootNode = $treeBuilder->root('virgin_channel_api'); // Here you should define the parameters that are allowed to // configure your bundle. See the documentation linked above for // more information on that topic. return $treeBuilder; } }
mit
tzigy/TelerikAcademy
Homeworks/C#-OOP/HW_04_OOP_Principles _Part1/AnimalHierarchy/Models/Gender.cs
124
namespace AnimalHierarchy.Models { using System; public enum Gender { Male, Female }; }
mit
suitejs/suitejs
packages/icons/src/md/image/MusicNote.js
319
import React from 'react'; import IconBase from '@suitejs/icon-base'; function MdMusicNote(props) { return ( <IconBase viewBox="0 0 48 48" {...props}> <path d="M24 6v21.11c-1.18-.69-2.54-1.11-4-1.11-4.42 0-8 3.58-8 8s3.58 8 8 8 8-3.58 8-8V14h8V6H24z" /> </IconBase> ); } export default MdMusicNote;
mit
indefinitelee/Learning
complete-intro-to-react/flow-typed/npm/jest_v20.x.x.js
19985
<<<<<<< HEAD // flow-typed signature: 336a37cc59a5628d581d11f98d1d94ab // flow-typed version: ef52b40a4e/jest_v20.x.x/flow_>=v0.33.x type JestMockFn = { (...args: Array<any>): any, ======= // flow-typed signature: 5960ed076fe29ecf92f57584d68acf98 // flow-typed version: b2a49dc910/jest_v20.x.x/flow_>=v0.39.x type JestMockFn<TArguments: $ReadOnlyArray<*>, TReturn> = { (...args: TArguments): TReturn, >>>>>>> master /** * An object for introspecting mock calls */ mock: { /** * An array that represents all calls that have been made into this mock * function. Each call is represented by an array of arguments that were * passed during the call. */ <<<<<<< HEAD calls: Array<Array<any>>, ======= calls: Array<TArguments>, >>>>>>> master /** * An array that contains all the object instances that have been * instantiated from this mock function. */ <<<<<<< HEAD instances: mixed ======= instances: Array<TReturn> >>>>>>> master }, /** * Resets all information stored in the mockFn.mock.calls and * mockFn.mock.instances arrays. Often this is useful when you want to clean * up a mock's usage data between two assertions. */ <<<<<<< HEAD mockClear(): Function, ======= mockClear(): void, >>>>>>> master /** * Resets all information stored in the mock. This is useful when you want to * completely restore a mock back to its initial state. */ <<<<<<< HEAD mockReset(): Function, ======= mockReset(): void, >>>>>>> master /** * Removes the mock and restores the initial implementation. This is useful * when you want to mock functions in certain test cases and restore the * original implementation in others. Beware that mockFn.mockRestore only * works when mock was created with jest.spyOn. Thus you have to take care of * restoration yourself when manually assigning jest.fn(). */ <<<<<<< HEAD mockRestore(): Function, ======= mockRestore(): void, >>>>>>> master /** * Accepts a function that should be used as the implementation of the mock. 
* The mock itself will still record all calls that go into and instances * that come from itself -- the only difference is that the implementation * will also be executed when the mock is called. */ <<<<<<< HEAD mockImplementation(fn: Function): JestMockFn, ======= mockImplementation( fn: (...args: TArguments) => TReturn, ): JestMockFn<TArguments, TReturn>, >>>>>>> master /** * Accepts a function that will be used as an implementation of the mock for * one call to the mocked function. Can be chained so that multiple function * calls produce different results. */ <<<<<<< HEAD mockImplementationOnce(fn: Function): JestMockFn, ======= mockImplementationOnce( fn: (...args: TArguments) => TReturn, ): JestMockFn<TArguments, TReturn>, >>>>>>> master /** * Just a simple sugar function for returning `this` */ mockReturnThis(): void, /** * Deprecated: use jest.fn(() => value) instead */ <<<<<<< HEAD mockReturnValue(value: any): JestMockFn, /** * Sugar for only returning a value once inside your mock */ mockReturnValueOnce(value: any): JestMockFn ======= mockReturnValue(value: TReturn): JestMockFn<TArguments, TReturn>, /** * Sugar for only returning a value once inside your mock */ mockReturnValueOnce(value: TReturn): JestMockFn<TArguments, TReturn> >>>>>>> master }; type JestAsymmetricEqualityType = { /** * A custom Jasmine equality tester */ asymmetricMatch(value: mixed): boolean }; type JestCallsType = { allArgs(): mixed, all(): mixed, any(): boolean, count(): number, first(): mixed, mostRecent(): mixed, reset(): void }; type JestClockType = { install(): void, mockDate(date: Date): void, tick(milliseconds?: number): void, uninstall(): void }; type JestMatcherResult = { message?: string | (() => string), pass: boolean }; type JestMatcher = (actual: any, expected: any) => JestMatcherResult; type JestPromiseType = { /** * Use rejects to unwrap the reason of a rejected promise so any other * matcher can be chained. If the promise is fulfilled the assertion fails. 
*/ rejects: JestExpectType, /** * Use resolves to unwrap the value of a fulfilled promise so any other * matcher can be chained. If the promise is rejected the assertion fails. */ resolves: JestExpectType }; <<<<<<< HEAD type JestExpectType = { not: JestExpectType, ======= /** * Plugin: jest-enzyme */ type EnzymeMatchersType = { toBeChecked(): void, toBeDisabled(): void, toBeEmpty(): void, toBePresent(): void, toContainReact(element: React$Element<any>): void, toHaveClassName(className: string): void, toHaveHTML(html: string): void, toHaveProp(propKey: string, propValue?: any): void, toHaveRef(refName: string): void, toHaveState(stateKey: string, stateValue?: any): void, toHaveStyle(styleKey: string, styleValue?: any): void, toHaveTagName(tagName: string): void, toHaveText(text: string): void, toIncludeText(text: string): void, toHaveValue(value: any): void, toMatchElement(element: React$Element<any>): void, toMatchSelector(selector: string): void, }; type JestExpectType = { not: JestExpectType & EnzymeMatchersType, >>>>>>> master /** * If you have a mock function, you can use .lastCalledWith to test what * arguments it was last called with. */ lastCalledWith(...args: Array<any>): void, /** * toBe just checks that a value is what you expect. It uses === to check * strict equality. */ toBe(value: any): void, /** * Use .toHaveBeenCalled to ensure that a mock function got called. */ toBeCalled(): void, /** * Use .toBeCalledWith to ensure that a mock function was called with * specific arguments. */ toBeCalledWith(...args: Array<any>): void, /** * Using exact equality with floating point numbers is a bad idea. Rounding * means that intuitive things fail. */ toBeCloseTo(num: number, delta: any): void, /** * Use .toBeDefined to check that a variable is not undefined. */ toBeDefined(): void, /** * Use .toBeFalsy when you don't care what a value is, you just want to * ensure a value is false in a boolean context. 
*/ toBeFalsy(): void, /** * To compare floating point numbers, you can use toBeGreaterThan. */ toBeGreaterThan(number: number): void, /** * To compare floating point numbers, you can use toBeGreaterThanOrEqual. */ toBeGreaterThanOrEqual(number: number): void, /** * To compare floating point numbers, you can use toBeLessThan. */ toBeLessThan(number: number): void, /** * To compare floating point numbers, you can use toBeLessThanOrEqual. */ toBeLessThanOrEqual(number: number): void, /** * Use .toBeInstanceOf(Class) to check that an object is an instance of a * class. */ toBeInstanceOf(cls: Class<*>): void, /** * .toBeNull() is the same as .toBe(null) but the error messages are a bit * nicer. */ toBeNull(): void, /** * Use .toBeTruthy when you don't care what a value is, you just want to * ensure a value is true in a boolean context. */ toBeTruthy(): void, /** * Use .toBeUndefined to check that a variable is undefined. */ toBeUndefined(): void, /** * Use .toContain when you want to check that an item is in a list. For * testing the items in the list, this uses ===, a strict equality check. */ toContain(item: any): void, /** * Use .toContainEqual when you want to check that an item is in a list. For * testing the items in the list, this matcher recursively checks the * equality of all fields, rather than checking for object identity. */ toContainEqual(item: any): void, /** * Use .toEqual when you want to check that two objects have the same value. * This matcher recursively checks the equality of all fields, rather than * checking for object identity. */ toEqual(value: any): void, /** * Use .toHaveBeenCalled to ensure that a mock function got called. */ toHaveBeenCalled(): void, /** * Use .toHaveBeenCalledTimes to ensure that a mock function got called exact * number of times. */ toHaveBeenCalledTimes(number: number): void, /** * Use .toHaveBeenCalledWith to ensure that a mock function was called with * specific arguments. 
*/ toHaveBeenCalledWith(...args: Array<any>): void, /** <<<<<<< HEAD ======= * Use .toHaveBeenLastCalledWith to ensure that a mock function was last called * with specific arguments. */ toHaveBeenLastCalledWith(...args: Array<any>): void, /** >>>>>>> master * Check that an object has a .length property and it is set to a certain * numeric value. */ toHaveLength(number: number): void, /** * */ toHaveProperty(propPath: string, value?: any): void, /** <<<<<<< HEAD * Use .toMatch to check that a string matches a regular expression. */ toMatch(regexp: RegExp): void, ======= * Use .toMatch to check that a string matches a regular expression or string. */ toMatch(regexpOrString: RegExp | string): void, >>>>>>> master /** * Use .toMatchObject to check that a javascript object matches a subset of the properties of an object. */ toMatchObject(object: Object): void, /** * This ensures that a React component matches the most recent snapshot. */ toMatchSnapshot(name?: string): void, /** * Use .toThrow to test that a function throws when it is called. * If you want to test that a specific error gets thrown, you can provide an * argument to toThrow. The argument can be a string for the error message, * a class for the error, or a regex that should match the error. * * Alias: .toThrowError */ toThrow(message?: string | Error | RegExp): void, toThrowError(message?: string | Error | RegExp): void, /** * Use .toThrowErrorMatchingSnapshot to test that a function throws a error * matching the most recent snapshot when it is called. */ toThrowErrorMatchingSnapshot(): void }; type JestObjectType = { /** * Disables automatic mocking in the module loader. * * After this method is called, all `require()`s will return the real * versions of each module (rather than a mocked version). */ disableAutomock(): JestObjectType, /** * An un-hoisted version of disableAutomock */ autoMockOff(): JestObjectType, /** * Enables automatic mocking in the module loader. 
*/ enableAutomock(): JestObjectType, /** * An un-hoisted version of enableAutomock */ autoMockOn(): JestObjectType, /** * Clears the mock.calls and mock.instances properties of all mocks. * Equivalent to calling .mockClear() on every mocked function. */ clearAllMocks(): JestObjectType, /** * Resets the state of all mocks. Equivalent to calling .mockReset() on every * mocked function. */ resetAllMocks(): JestObjectType, /** * Removes any pending timers from the timer system. */ clearAllTimers(): void, /** * The same as `mock` but not moved to the top of the expectation by * babel-jest. */ doMock(moduleName: string, moduleFactory?: any): JestObjectType, /** * The same as `unmock` but not moved to the top of the expectation by * babel-jest. */ dontMock(moduleName: string): JestObjectType, /** * Returns a new, unused mock function. Optionally takes a mock * implementation. */ <<<<<<< HEAD fn(implementation?: Function): JestMockFn, ======= fn<TArguments: $ReadOnlyArray<*>, TReturn>( implementation?: (...args: TArguments) => TReturn, ): JestMockFn<TArguments, TReturn>, >>>>>>> master /** * Determines if the given function is a mocked function. */ isMockFunction(fn: Function): boolean, /** * Given the name of a module, use the automatic mocking system to generate a * mocked version of the module for you. */ genMockFromModule(moduleName: string): any, /** * Mocks a module with an auto-mocked version when it is being required. * * The second argument can be used to specify an explicit module factory that * is being run instead of using Jest's automocking feature. * * The third argument can be used to create virtual mocks -- mocks of modules * that don't exist anywhere in the system. */ mock( moduleName: string, moduleFactory?: any, options?: Object ): JestObjectType, /** * Resets the module registry - the cache of all required modules. This is * useful to isolate modules where local state might conflict between tests. 
*/ resetModules(): JestObjectType, /** * Exhausts the micro-task queue (usually interfaced in node via * process.nextTick). */ runAllTicks(): void, /** * Exhausts the macro-task queue (i.e., all tasks queued by setTimeout(), * setInterval(), and setImmediate()). */ runAllTimers(): void, /** * Exhausts all tasks queued by setImmediate(). */ runAllImmediates(): void, /** * Executes only the macro task queue (i.e. all tasks queued by setTimeout() * or setInterval() and setImmediate()). */ runTimersToTime(msToRun: number): void, /** * Executes only the macro-tasks that are currently pending (i.e., only the * tasks that have been queued by setTimeout() or setInterval() up to this * point) */ runOnlyPendingTimers(): void, /** * Explicitly supplies the mock object that the module system should return * for the specified module. Note: It is recommended to use jest.mock() * instead. */ setMock(moduleName: string, moduleExports: any): JestObjectType, /** * Indicates that the module system should never return a mocked version of * the specified module from require() (e.g. that it should always return the * real module). */ unmock(moduleName: string): JestObjectType, /** * Instructs Jest to use fake versions of the standard timer functions * (setTimeout, setInterval, clearTimeout, clearInterval, nextTick, * setImmediate and clearImmediate). */ useFakeTimers(): JestObjectType, /** * Instructs Jest to use the real versions of the standard timer functions. */ useRealTimers(): JestObjectType, /** * Creates a mock function similar to jest.fn but also tracks calls to * object[methodName]. 
*/ <<<<<<< HEAD spyOn(object: Object, methodName: string): JestMockFn ======= spyOn(object: Object, methodName: string): JestMockFn<any, any> >>>>>>> master }; type JestSpyType = { calls: JestCallsType }; /** Runs this function after every test inside this context */ <<<<<<< HEAD declare function afterEach(fn: Function): void; /** Runs this function before every test inside this context */ declare function beforeEach(fn: Function): void; /** Runs this function after all tests have finished inside this context */ declare function afterAll(fn: Function): void; /** Runs this function before any tests have started inside this context */ declare function beforeAll(fn: Function): void; /** A context for grouping tests together */ declare function describe(name: string, fn: Function): void; ======= declare function afterEach(fn: (done: () => void) => ?Promise<mixed>, timeout?: number): void; /** Runs this function before every test inside this context */ declare function beforeEach(fn: (done: () => void) => ?Promise<mixed>, timeout?: number): void; /** Runs this function after all tests have finished inside this context */ declare function afterAll(fn: (done: () => void) => ?Promise<mixed>, timeout?: number): void; /** Runs this function before any tests have started inside this context */ declare function beforeAll(fn: (done: () => void) => ?Promise<mixed>, timeout?: number): void; /** A context for grouping tests together */ declare var describe: { /** * Creates a block that groups together several related tests in one "test suite" */ (name: string, fn: () => void): void, /** * Only run this describe block */ only(name: string, fn: () => void): void, /** * Skip running this describe block */ skip(name: string, fn: () => void): void, }; >>>>>>> master /** An individual test unit */ declare var it: { /** * An individual test unit * * @param {string} Name of Test * @param {Function} Test <<<<<<< HEAD */ (name: string, fn?: Function): ?Promise<void>, ======= * @param 
{number} Timeout for the test, in milliseconds. */ (name: string, fn?: (done: () => void) => ?Promise<mixed>, timeout?: number): void, >>>>>>> master /** * Only run this test * * @param {string} Name of Test * @param {Function} Test <<<<<<< HEAD */ only(name: string, fn?: Function): ?Promise<void>, ======= * @param {number} Timeout for the test, in milliseconds. */ only(name: string, fn?: (done: () => void) => ?Promise<mixed>, timeout?: number): void, >>>>>>> master /** * Skip running this test * * @param {string} Name of Test * @param {Function} Test <<<<<<< HEAD */ skip(name: string, fn?: Function): ?Promise<void>, ======= * @param {number} Timeout for the test, in milliseconds. */ skip(name: string, fn?: (done: () => void) => ?Promise<mixed>, timeout?: number): void, >>>>>>> master /** * Run the test concurrently * * @param {string} Name of Test * @param {Function} Test <<<<<<< HEAD */ concurrent(name: string, fn?: Function): ?Promise<void> }; declare function fit(name: string, fn: Function): ?Promise<void>; ======= * @param {number} Timeout for the test, in milliseconds. 
*/ concurrent(name: string, fn?: (done: () => void) => ?Promise<mixed>, timeout?: number): void, }; declare function fit( name: string, fn: (done: () => void) => ?Promise<mixed>, timeout?: number, ): void; >>>>>>> master /** An individual test unit */ declare var test: typeof it; /** A disabled group of tests */ declare var xdescribe: typeof describe; /** A focused group of tests */ declare var fdescribe: typeof describe; /** A disabled individual test */ declare var xit: typeof it; /** A disabled individual test */ declare var xtest: typeof it; /** The expect function is used every time you want to test a value */ declare var expect: { /** The object that you want to make assertions against */ <<<<<<< HEAD (value: any): JestExpectType & JestPromiseType, ======= (value: any): JestExpectType & JestPromiseType & EnzymeMatchersType, >>>>>>> master /** Add additional Jasmine matchers to Jest's roster */ extend(matchers: { [name: string]: JestMatcher }): void, /** Add a module that formats application-specific data structures. */ addSnapshotSerializer(serializer: (input: Object) => string): void, assertions(expectedAssertions: number): void, hasAssertions(): void, any(value: mixed): JestAsymmetricEqualityType, anything(): void, arrayContaining(value: Array<mixed>): void, objectContaining(value: Object): void, /** Matches any received string that contains the exact expected string. */ stringContaining(value: string): void, stringMatching(value: string | RegExp): void }; // TODO handle return type // http://jasmine.github.io/2.4/introduction.html#section-Spies declare function spyOn(value: mixed, method: string): Object; /** Holds all functions related to manipulating test runner */ declare var jest: JestObjectType; /** * The global Jamine object, this is generally not exposed as the public API, * using features inside here could break in later versions of Jest. 
*/ declare var jasmine: { DEFAULT_TIMEOUT_INTERVAL: number, any(value: mixed): JestAsymmetricEqualityType, anything(): void, arrayContaining(value: Array<mixed>): void, clock(): JestClockType, createSpy(name: string): JestSpyType, createSpyObj( baseName: string, methodNames: Array<string> ): { [methodName: string]: JestSpyType }, objectContaining(value: Object): void, stringMatching(value: string): void };
mit
amalajeyan/Jenkins
src/app/modules/countryList/components/country.list.item.component.ts
366
import { Component, Input } from '@angular/core'; import { country } from '../classes/country'; @Component({ selector: 'country-item', template: ` <tr> <td [style.color] = "'red'"> {{ country.label }} </td> <td> {{ country.value }} </td> </tr>` }) export class CountryListItemComponent { @Input() country : country; }
mit
adebar/PyOE1
oe1.py
2662
import argparse import datetime import sys import pandas as pd import requests BASE_URL = 'http://oe1.orf.at/programm/konsole/tag/' DEFAULT_COLUMNS = ['time', 'title', 'info'] def get_oe1_program(date='20160423', offline=False): if offline: return _get_oe1_program_offline() try: url = BASE_URL + str(date) response = requests.get(url, stream=True) return pd.DataFrame(response.json()['list']) except KeyError: sys.exit("Can not retreive program for date %s." % date) except requests.exceptions.ConnectionError: sys.exit("Can not retreive program due to a network error.") def _get_oe1_program_offline(): return pd.DataFrame.from_csv('offline.csv', encoding='utf-8') def post_process_program(program): program = program.set_index('id') program['datetime'] = program.apply(_get_date_from_row, axis=1) return program def _get_date_from_row(row): return datetime.datetime.strptime(row['day_label'] + row['time'], '%d.%m.%Y%H:%M') def filter_and_print_program(program, args, columns=None): # Reverse according to command line flag if args.reverse: program = program.loc[list(reversed(program.index)), :] # Filter rows to be printed based on command line argument if args.filter: program = program[program['title'].str.contains(args.filter)] # Return first URL if args.url: print program['url_stream'].iloc[0] sys.exit() # Print title title = 'Program for ' + str(args.date) if args.filter: title += ' (filter: %s)' % args.filter title += ':\n' print title # Filter columns to be printed if not columns: columns = DEFAULT_COLUMNS program = program.loc[:, columns] program.columns = [c.title() for c in program.columns] print program.to_string(index=False).encode('utf-8') def parse_args(): parser = argparse.ArgumentParser() parser.add_argument("-d", "--date", help="Date for which to obtain program items for", type=int, default=0) parser.add_argument("-f", "--filter", help="String to filter program elements by", type=str) parser.add_argument("-r", "--reverse", help="Reverse results", 
action="store_true") parser.add_argument("-u", "--url", help="Print URL(s) of matching program items only", action="store_true") args = parser.parse_args() if args.date <= 0: args.date = (datetime.datetime.today() + datetime.timedelta(days=args.date)).strftime("%Y%m%d") return args def main(): args = parse_args() program = get_oe1_program(date=args.date) filter_and_print_program(program, args=args) main()
mit
bacta/swg
couchbase-connector/src/main/java/com/ocdsoft/bacta/soe/data/couchbase/CouchbaseNetworkIdGenerator.java
690
package com.ocdsoft.bacta.soe.data.couchbase; import com.google.inject.Inject; import com.google.inject.Singleton; import com.ocdsoft.bacta.swg.shared.database.GameDatabaseConnector; import com.ocdsoft.bacta.swg.shared.database.NetworkIdGenerator; /** * Created by kburkhardt on 2/23/14. */ @Singleton public class CouchbaseNetworkIdGenerator implements NetworkIdGenerator { private final GameDatabaseConnector gameDatabaseConnector; @Inject public CouchbaseNetworkIdGenerator(GameDatabaseConnector gameDatabaseConnector) { this.gameDatabaseConnector = gameDatabaseConnector; } public long next() { return gameDatabaseConnector.nextId(); } }
mit
bryanhaines/InQuick
InQuick/InQuick/Models/ManageViewModels/RemoveLoginViewModel.cs
340
using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.Linq; using System.Threading.Tasks; namespace InQuick.Models.ManageViewModels { public class RemoveLoginViewModel { public string LoginProvider { get; set; } public string ProviderKey { get; set; } } }
mit
achristodoulou/calendar
src/Today.php
5680
<?php namespace Achristodoulou\Calendar; /** * Class Today * @package Ac\Calendar */ class Today extends Day{ /** * @var \DateTime */ protected $dateTime; /** * @var ListOfDates */ protected $publicHolidays; /** * @var TimeRange */ private $workingHours; /** * @var ListOfDates */ private $annualLeaves; /** * @var TimeRange */ private $lunchTime; /** * @param \DateTime $dateTime * @param TimeRange $workingHours * @param ListOfDates $publicHolidays * @param ListOfDates $annualLeaves * @param TimeRange $lunchTime */ public function __construct(\DateTime $dateTime, TimeRange $workingHours = null, ListOfDates $publicHolidays = null, ListOfDates $annualLeaves = null, TimeRange $lunchTime = null) { $this->dateTime = $dateTime; $this->publicHolidays = $publicHolidays; $this->workingHours = $workingHours; $this->annualLeaves = $annualLeaves; $this->lunchTime = $lunchTime; } /** * Check if the day is as specified * @param int $day * @return bool */ public function is($day) { return $day === (int) $this->dateTime->format('N'); } /** * @return bool */ public function isWorkingDay() { $day = (int) $this->dateTime->format('N'); return in_array($day, [Day::MONDAY, Day::TUESDAY, Day::WEDNESDAY, Day::THURSDAY, Day::FRIDAY]); } /** * @return bool */ public function isWeekend() { $day = (int) $this->dateTime->format('N'); return in_array($day, [Day::SATURDAY, Day::SUNDAY]); } /** * @return bool * @throws \Exception */ public function isPublicHoliday() { if($this->annualLeaves === null) throw new \Exception('Public holidays are not set!'); foreach ($this->publicHolidays->all() as $current) { if ($this->dateTime->format('Y-m-d') == $current->format('Y-m-d')) return true; } return false; } /** * @return bool * @throws \Exception */ public function isAnnualLeave() { if($this->annualLeaves === null) throw new \Exception('Annual leaves are not set!'); foreach ($this->annualLeaves->all() as $current) { if ($this->dateTime->format('Y-m-d') == $current->format('Y-m-d')) return true; } return 
false; } /** * @return bool * @throws \Exception */ public function isLunchTime() { if($this->workingHours === null) throw new \Exception('Lunch time is not set!'); if($this->isWorkingDay() === false) return false; $current = $this->dateTime->format('H:i'); $currentTime = new TwentyFourHourTime($current); return $this->lunchTime->isTimeInBetween($currentTime); } /** * @return bool * @throws \Exception */ public function isWorkingHour() { if($this->workingHours === null) throw new \Exception('Working hours are not set!'); if($this->isWorkingDay() === false) return false; $current = $this->dateTime->format('H:i'); $currentTime = new TwentyFourHourTime($current); return $this->workingHours->isTimeInBetween($currentTime); } /** * @return bool * @throws \Exception */ public function isAfterWorkingHour() { if($this->workingHours === null) throw new \Exception('Working hours are not set!'); if($this->isWorkingDay() === false) return false; $current = $this->dateTime->format('H:i'); $currentTime = new TwentyFourHourTime($current); return !$this->workingHours->isTimeInBetween($currentTime); } /** * @return bool */ public function isNewYearDay() { $day = (int) $this->dateTime->format('jn'); return 11 === $day; } /** * @return bool */ public function isEpiphanyDay() { $day = (int) $this->dateTime->format('jn'); return 61 === $day; } /** * @return bool */ public function isAnnunciationDay() { $day = (int) $this->dateTime->format('jn'); return 253 === $day; } /** * @return bool */ public function isLabourDay() { $day = (int) $this->dateTime->format('jn'); return 15 === $day; } /** * @return bool */ public function isAssumptionDay() { $day = (int) $this->dateTime->format('jn'); return 158 === $day; } /** * @return bool */ public function isChristmas() { $day = (int) $this->dateTime->format('jn'); return 2512 === $day; } /** * @return bool */ public function isMidnightOfWorkingDay() { if($this->isWorkingDay() === false) return false; $current = $this->dateTime->format('H:i'); 
$currentTime = new TwentyFourHourTime($current); return $currentTime->isMidnight(); } /** * @return bool */ public function isNoonOfWorkingDay() { if($this->isWorkingDay() === false) return false; $current = $this->dateTime->format('H:i'); $currentTime = new TwentyFourHourTime($current); return $currentTime->isNoon(); } }
mit
DavidCWebs/address
includes/class-address-deactivator.php
596
<?php /** * Fired during plugin deactivation * * @link http://dev-notes.eu * @since 1.0.0 * * @package Address * @subpackage Address/includes */ /** * Fired during plugin deactivation. * * This class defines all code necessary to run during the plugin's deactivation. * * @since 1.0.0 * @package Address * @subpackage Address/includes * @author David Egan <[email protected]> */ class Address_Deactivator { /** * Short Description. (use period) * * Long Description. * * @since 1.0.0 */ public static function deactivate() { } }
mit
al-nenov/telerik-nodejs-tw
Offers App/test/browser/test.js
881
/* eslint-disable no-unused-expressions */ const { expect } = require('chai'); const { setupDriver } = require('./utils/setup-driver'); const webdriver = require('selenium-webdriver'); describe('Offers routes', () => { let driver = null; // let driver = // new webdriver.Builder() // .build(); const appUrl = 'http://localhost:3001'; beforeEach(() => { driver = setupDriver('chrome'); }); it('expect h1 with text "vsited"', (done) => { driver.get(appUrl) .then(() => { return driver.findElement( webdriver.By.css('h1') ); }) .then((el) => { return el.getText(); }) .then((text) => { expect(text).to.contain('visited'); done(); }); }); });
mit
hellofresh/janus-dashboard
src/store/reducers/request.reducer.js
496
import { REQUEST_START, REQUEST_COMPLETE, REQUEST_FAILURE } from '../constants' export const initialState = { isFetching: false } export default function reducer (state = initialState, action) { switch (action.type) { case REQUEST_START: { return { ...state, isFetching: true } } case REQUEST_COMPLETE: case REQUEST_FAILURE: { return { ...state, isFetching: false } } default: return state } }
mit
TylorS/cycle-snabbdom
test/browser/select.js
4678
'use strict' /* global describe, it */ let assert = require('assert') let Cycle = require('@cycle/core') let CycleDOM = require('../../src/index') let Observable = require('rx').Observable let {svg, div, p, span, h2, h3, h4, makeDOMDriver} = CycleDOM function createRenderTarget (id = null) { let element = document.createElement('div') element.className = 'cycletest' if (id) { element.id = id } document.body.appendChild(element) return element } describe('DOMSource.select()', function () { it('should have Observable `:root` in DOM source', function (done) { function app () { return { DOM: Observable.of( div('.top-most', [ p('Foo'), span('Bar') ]) ) } } const {sinks, sources} = Cycle.run(app, { DOM: makeDOMDriver(createRenderTarget()) }) sources.DOM.select(':root').elements.skip(1).take(1).subscribe(root => { const classNameRegex = /top\-most/ assert.strictEqual(root.tagName, 'DIV') const child = root.children[0] assert.notStrictEqual(classNameRegex.exec(child.className), null) assert.strictEqual(classNameRegex.exec(child.className)[0], 'top-most') setTimeout(() => { sinks.dispose() done() }) }) }) it('should return an object with observable and events()', function (done) { function app () { return { DOM: Observable.of(h3('.myelementclass', 'Foobar')) } } const {sinks, sources} = Cycle.run(app, { DOM: makeDOMDriver(createRenderTarget()) }) // Make assertions const selection = sources.DOM.select('.myelementclass') assert.strictEqual(typeof selection, 'object') assert.strictEqual(typeof selection.elements, 'object') assert.strictEqual(typeof selection.elements.subscribe, 'function') assert.strictEqual(typeof selection.events, 'function') sinks.dispose() done() }) it('should have an observable of DOM elements', function (done) { function app () { return { DOM: Observable.of(h3('.myelementclass', 'Foobar')) } } const {sinks, sources} = Cycle.run(app, { DOM: makeDOMDriver(createRenderTarget()) }) // Make assertions 
sources.DOM.select('.myelementclass').elements.skip(1).take(1) .subscribe(elements => { assert.notStrictEqual(elements, null) assert.notStrictEqual(typeof elements, 'undefined') // Is an Array assert.strictEqual(Array.isArray(elements), true) assert.strictEqual(elements.length, 1) // Array with the H3 element assert.strictEqual(elements[0].tagName, 'H3') assert.strictEqual(elements[0].textContent, 'Foobar') setTimeout(() => { sinks.dispose() done() }) }) }) it('should not select element outside the given scope', function (done) { function app () { return { DOM: Observable.of( h3('.top-most', [ h2('.bar', 'Wrong'), div('.foo', [ h4('.bar', 'Correct') ]) ]) ) } } const {sinks, sources} = Cycle.run(app, { DOM: makeDOMDriver(createRenderTarget()) }) // Make assertions sources.DOM.select('.foo').select('.bar').elements.skip(1).take(1) .subscribe(elements => { assert.strictEqual(elements.length, 1) const element = elements[0] assert.notStrictEqual(element, null) assert.notStrictEqual(typeof element, 'undefined') assert.strictEqual(element.tagName, 'H4') assert.strictEqual(element.textContent, 'Correct') setTimeout(() => { sinks.dispose() done() }) }) }) it('should select svg element', function (done) { function app () { return { DOM: Observable.of( svg({width: 150, height: 150}, [ svg.polygon({ attrs: { class: 'triangle', points: '20 0 20 150 150 20' } }) ]) ) } } const {sinks, sources} = Cycle.run(app, { DOM: makeDOMDriver(createRenderTarget()) }) // Make assertions sources.DOM.select('.triangle').elements.skip(1).take(1) .subscribe(elements => { assert.strictEqual(elements.length, 1) const triangleElement = elements[0] assert.notStrictEqual(triangleElement, null) assert.notStrictEqual(typeof triangleElement, 'undefined') assert.strictEqual(triangleElement.tagName, 'polygon') sinks.dispose() done() }) }) })
mit
n-educatio-pl/TestBundle
src/Neducatio/TestBundle/Tests/DataFixtures/UniqueDependencyResolverForComponentWithOneChildShould.php
601
<?php namespace Neducatio\TestBundle\Tests\DataFixtures; /** * Do sth. * * @covers Neducatio\TestBundle\DataFixtures\UniqueDependencyResolver */ class UniqueDependencyResolverForComponentWithOneChildShould extends UniqueDependencyResolverTestCase { /** * Do sth. * * @test */ public function returnArrayWithItsChildAndHimSelfAtTheEnd() { $expectedComponents = array('I', 'H'); $resolvedComponents = $this->uniqueDependencyResolver->resolve($this->dependentComponentsTree['H']); $this->assertSameDependentComponentsIds($expectedComponents, $resolvedComponents); } }
mit
OptionalLanguor/ihcNucleoPedagogico
application/controllers/Apresentar_deadline.php
3775
<?php defined('BASEPATH') or exit('No direct script access allowed'); class Apresentar_deadline extends CI_Controller { public function __construct() { parent::__construct(); if (!($this->session->userdata('esta_logado'))) { redirect('login'); } $this->load->model('Registro_Atendimento_Model'); $this->load->model('Aluno_Model'); $this->load->model('Categoria_Model'); $this->load->helper('url'); } public function Index() { #$pesquisa_res['resultado'] = $this->Registro_Atendimento_Model->pesquisa_registro_atendimento(''); $pesquisa_res['resultado'] = $this->atendimentos_deadline(); $this->load->view('apresentarDeadline', $pesquisa_res); } public function cmp_date($a, $b) { if ($a->dias_restantes == $b->dias_restantes) { return 0; } return ($a->dias_restantes < $b->dias_restantes) ? -1 : 1; } public function seleciona_categoria_id($lista_categoria, $id) { foreach($lista_categoria as $categoria) { if($categoria->id_Categoria==$id) return $categoria; } } public function atendimentos_deadline() { $registros = $this->Registro_Atendimento_Model->pesquisa_registro_atendimento(''); $categorias = $this->Categoria_Model->pesquisa_categoria(''); $registros_proximos = Array(); if(isset($registros)){ foreach($registros as $row) { $deadline = $this->seleciona_categoria_id($categorias,$row->id_Categoria)->prazo_deadline; $sqldate = new DateTime($row->data_abertura,new DateTimeZone('America/Sao_Paulo')); $now = new DateTime(null, new DateTimeZone('America/Sao_Paulo')); $sqldate->add(new DateInterval(sprintf('P%dD', $deadline))); $now->add(new DateInterval('P30D')); if($sqldate->getTimestamp() < $now->getTimestamp()){ $row->nome_aluno = $this->Aluno_Model->pesquisa_aluno_id($row->id_Pessoa)->nome; $row->nome_categoria = $this->Categoria_Model->pesquisa_categoria_id($row->id_Categoria)->nome; $now = new DateTime(null, new DateTimeZone('America/Sao_Paulo')); $row->dias_restantes = ($sqldate->getTimestamp()-$now->getTimestamp())/(60*60*24); $sqldate = new 
DateTime($row->data_abertura,new DateTimeZone('America/Sao_Paulo')); $row->data_abertura = date_format($sqldate, 'd/m/Y'); $row->hora_abertura = date_format($sqldate, 'H:i'); $dias=floor($row->dias_restantes); if($dias>0){ if($dias<7) $row->dias_restantes_texto = sprintf('Tempo restante: %d dias.', $dias); elseif($dias>=7 && $dias%7==0) $row->dias_restantes_texto = sprintf('Tempo restante:</br>%d semanas.', $dias/7); else $row->dias_restantes_texto = sprintf('Tempo restante:</br>%d semanas e %d dias.', $dias/7, $dias%7); } else if($dias==0){ $row->dias_restantes_texto = 'Entrega é para hoje.'; } else { $row->dias_restantes_texto = 'Entrega está atrasada'; } #$row->dias_restantes_texto=sprintf('%d',$dias); $registros_proximos[]=$row; } } } usort($registros_proximos, array($this, "cmp_date")); return $registros_proximos; } }
mit
ZhangFly/WTFSocket_Server_JAVA
test/controller/RegisterController.java
1788
package controller; import model.ApplicationMsg; import org.apache.commons.lang.StringUtils; import org.springframework.stereotype.Controller; import wtf.socket.controller.WTFSocketController; import wtf.socket.exception.WTFSocketException; import wtf.socket.exception.fatal.WTFSocketInvalidSourceException; import wtf.socket.protocol.WTFSocketMsg; import wtf.socket.routing.item.WTFSocketRoutingItem; import wtf.socket.routing.item.WTFSocketRoutingTmpItem; import java.util.List; /** * 注册功能 */ @Controller public class RegisterController implements WTFSocketController { @Override public boolean isResponse(WTFSocketMsg msg) { ApplicationMsg body = msg.getBody(ApplicationMsg.class); return body.getCmd() != null && body.getCmd() == 64; } @Override public boolean work(WTFSocketRoutingItem item, WTFSocketMsg msg, List<WTFSocketMsg> responses) throws WTFSocketException{ final ApplicationMsg body = msg.getBody(ApplicationMsg.class); if (!(item instanceof WTFSocketRoutingTmpItem)) { throw new WTFSocketInvalidSourceException("[" + msg.getFrom() + "] has registered" ); } item.setAddress(msg.getFrom()); item.setAccept(msg.getVersion()); if (body.hasParams()) item.setDeviceType(body.firstParam().getString("deviceType")); if (StringUtils.startsWith(msg.getFrom(), "Debug_")) { ((WTFSocketRoutingTmpItem) item).shiftToDebug(); }else { item.setCover(false); ((WTFSocketRoutingTmpItem) item).shiftToFormal(); } final WTFSocketMsg response = msg.makeResponse(); response.setBody(ApplicationMsg.success()); responses.add(response); return true; } }
mit
artisanofcode/flask-generic-views
tests/utils.py
199
import string ASCII = bytes(range(0, 127)).decode('ascii') SLUG = string.ascii_lowercase + string.digits + '_' DIGITS = string.digits def nondigit(value): return value and not value.isdigit()
mit
tahminak/tracker_web
app/js/services/jsonFilesService.js
558
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ (function () { 'use strict'; //Tracker App module var app = angular.module("trackerApp"); app.factory('jsonFilesService', function () { var jsonFiles = { "notesFile": "notes.json", "scriptFile": "scripts.json", "devicesFile": "devices.json" }; return jsonFiles; }); })();
mit
GoogleChrome/workbox
packages/workbox-precaching/src/precache.ts
1192
/* Copyright 2019 Google LLC Use of this source code is governed by an MIT-style license that can be found in the LICENSE file or at https://opensource.org/licenses/MIT. */ import {getOrCreatePrecacheController} from './utils/getOrCreatePrecacheController.js'; import {PrecacheEntry} from './_types.js'; import './_version.js'; /** * Adds items to the precache list, removing any duplicates and * stores the files in the * {@link workbox-core.cacheNames|"precache cache"} when the service * worker installs. * * This method can be called multiple times. * * Please note: This method **will not** serve any of the cached files for you. * It only precaches files. To respond to a network request you call * {@link workbox-precaching.addRoute}. * * If you have a single array of files to precache, you can just call * {@link workbox-precaching.precacheAndRoute}. * * @param {Array<Object|string>} [entries=[]] Array of entries to precache. * * @memberof workbox-precaching */ function precache(entries: Array<PrecacheEntry | string>): void { const precacheController = getOrCreatePrecacheController(); precacheController.precache(entries); } export {precache};
mit
jsdir/deployer
cmd/deployer/deployer_test.go
1358
package main import ( "fmt" "net/http" "net/http/httptest" "testing" "github.com/stretchr/testify/assert" ) func TestCliCreateRelease(t *testing.T) { ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if err := r.ParseForm(); err != nil { t.Error(err) return } assert.Equal(t, r.PostFormValue("builds"), "[]") w.Header().Set("Content-Type", "application/json") fmt.Fprintln(w, fixtures.release1Data) })) defer ts.Close() app := CreateCliApp(ts.URL) app.Run([]string{"release", "service1", "tag1", "service2", "tag2"}) //assert.Contains(recorder.Body, `{json}\nid`) } func TestCliCreateReleaseAndDeploy() { ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if err := r.ParseForm(); err != nil { t.Error(err) return } assert.Equal(r.PostFormValue("builds"), "[]") w.Header().Set("Content-Type", "application/json") fmt.Fprintln(w, fixtures.release1Data) })) defer ts.Close() // same as above, just add extra different body field app := CreateCli(config, ts.URL, recorder) app.Run([]string{"--to", "env0", "release", "service1", "tag1"}) recorder.should.contain(`{json}\nid`) } func TestCliCreateDeploy() { // check for src dest app := CreateCli(config, ts.URL, recorder) app.Run([]string{"deploy", "release0", "env1"}) }
mit
compasses/elastic-spark
scala-base/src/main/scala/programming/fp_part2/chap7_orignal/Nonblocking.scala
6698
package programming.fp_part2.chap7_orignal import java.util.concurrent.{Callable, CountDownLatch, ExecutorService} import java.util.concurrent.atomic.AtomicReference import language.implicitConversions object Nonblocking { trait Future[+A] { private[chap7_orignal] def apply(k: A => Unit): Unit } type Par[+A] = ExecutorService => Future[A] object Par { def run[A](es: ExecutorService)(p: Par[A]): A = { val ref = new java.util.concurrent.atomic.AtomicReference[A] // A mutable, threadsafe reference, to use for storing the result val latch = new CountDownLatch(1) // A latch which, when decremented, implies that `ref` has the result p(es) { a => ref.set(a); latch.countDown } // Asynchronously set the result, and decrement the latch latch.await // Block until the `latch.countDown` is invoked asynchronously ref.get // Once we've passed the latch, we know `ref` has been set, and return its value } def unit[A](a: A): Par[A] = es => new Future[A] { def apply(cb: A => Unit): Unit = cb(a) } /** A non-strict version of `unit` */ def delay[A](a: => A): Par[A] = es => new Future[A] { def apply(cb: A => Unit): Unit = cb(a) } def fork[A](a: => Par[A]): Par[A] = es => new Future[A] { def apply(cb: A => Unit): Unit = eval(es)(a(es)(cb)) } /** * Helper function for constructing `Par` values out of calls to non-blocking continuation-passing-style APIs. * This will come in handy in Chapter 13. */ def async[A](f: (A => Unit) => Unit): Par[A] = es => new Future[A] { def apply(k: A => Unit) = f(k) } /** * Helper function, for evaluating an action * asynchronously, using the given `ExecutorService`. 
*/ def eval(es: ExecutorService)(r: => Unit): Unit = es.submit(new Callable[Unit] { def call = r }) def map2[A,B,C](p: Par[A], p2: Par[B])(f: (A,B) => C): Par[C] = es => new Future[C] { def apply(cb: C => Unit): Unit = { var ar: Option[A] = None var br: Option[B] = None // this implementation is a little too liberal in forking of threads - // it forks a new logical thread for the actor and for stack-safety, // forks evaluation of the callback `cb` val combiner = Actor[Either[A,B]](es) { case Left(a) => if (br.isDefined) eval(es)(cb(f(a,br.get))) else ar = Some(a) case Right(b) => if (ar.isDefined) eval(es)(cb(f(ar.get,b))) else br = Some(b) } p(es)(a => combiner ! Left(a)) p2(es)(b => combiner ! Right(b)) } } // specialized version of `map` def map[A,B](p: Par[A])(f: A => B): Par[B] = es => new Future[B] { def apply(cb: B => Unit): Unit = p(es)(a => eval(es) { cb(f(a)) }) } def lazyUnit[A](a: => A): Par[A] = fork(unit(a)) def asyncF[A,B](f: A => B): A => Par[B] = a => lazyUnit(f(a)) def sequenceRight[A](as: List[Par[A]]): Par[List[A]] = as match { case Nil => unit(Nil) case h :: t => map2(h, fork(sequence(t)))(_ :: _) } def sequenceBalanced[A](as: IndexedSeq[Par[A]]): Par[IndexedSeq[A]] = fork { if (as.isEmpty) unit(Vector()) else if (as.length == 1) map(as.head)(a => Vector(a)) else { val (l,r) = as.splitAt(as.length/2) map2(sequenceBalanced(l), sequenceBalanced(r))(_ ++ _) } } def sequence[A](as: List[Par[A]]): Par[List[A]] = map(sequenceBalanced(as.toIndexedSeq))(_.toList) def parMap[A,B](as: List[A])(f: A => B): Par[List[B]] = sequence(as.map(asyncF(f))) def parMap[A,B](as: IndexedSeq[A])(f: A => B): Par[IndexedSeq[B]] = sequenceBalanced(as.map(asyncF(f))) // exercise answers /* * We can implement `choice` as a new primitive. * * `p(es)(result => ...)` for some `ExecutorService`, `es`, and * some `Par`, `p`, is the idiom for running `p`, and registering * a callback to be invoked when its result is available. 
The * result will be bound to `result` in the function passed to * `p(es)`. * * If you find this code difficult to follow, you may want to * write down the type of each subexpression and follow the types * through the implementation. What is the type of `p(es)`? What * about `t(es)`? What about `t(es)(cb)`? */ def choice[A](p: Par[Boolean])(t: Par[A], f: Par[A]): Par[A] = es => new Future[A] { def apply(cb: A => Unit): Unit = p(es) { b => if (b) eval(es) { t(es)(cb) } else eval(es) { f(es)(cb) } } } /* The code here is very similar. */ def choiceN[A](p: Par[Int])(ps: List[Par[A]]): Par[A] = es => new Future[A] { def apply(cb: A => Unit): Unit = p(es) { ind => eval(es) { ps(ind)(es)(cb) }} } def choiceViaChoiceN[A](a: Par[Boolean])(ifTrue: Par[A], ifFalse: Par[A]): Par[A] = choiceN(map(a)(b => if (b) 0 else 1))(List(ifTrue, ifFalse)) def choiceMap[K,V](p: Par[K])(ps: Map[K,Par[V]]): Par[V] = es => new Future[V] { def apply(cb: V => Unit): Unit = p(es)(k => ps(k)(es)(cb)) } /* `chooser` is usually called `flatMap` or `bind`. */ def chooser[A,B](p: Par[A])(f: A => Par[B]): Par[B] = flatMap(p)(f) def flatMap[A,B](p: Par[A])(f: A => Par[B]): Par[B] = es => new Future[B] { def apply(cb: B => Unit): Unit = p(es)(a => f(a)(es)(cb)) } def choiceViaFlatMap[A](p: Par[Boolean])(f: Par[A], t: Par[A]): Par[A] = flatMap(p)(b => if (b) t else f) def choiceNViaFlatMap[A](p: Par[Int])(choices: List[Par[A]]): Par[A] = flatMap(p)(i => choices(i)) def join[A](p: Par[Par[A]]): Par[A] = es => new Future[A] { def apply(cb: A => Unit): Unit = p(es)(p2 => eval(es) { p2(es)(cb) }) } def joinViaFlatMap[A](a: Par[Par[A]]): Par[A] = flatMap(a)(x => x) def flatMapViaJoin[A,B](p: Par[A])(f: A => Par[B]): Par[B] = join(map(p)(f)) /* Gives us infix syntax for `Par`. 
*/ implicit def toParOps[A](p: Par[A]): ParOps[A] = new ParOps(p) // infix versions of `map`, `map2` and `flatMap` class ParOps[A](p: Par[A]) { def map[B](f: A => B): Par[B] = Par.map(p)(f) def map2[B,C](b: Par[B])(f: (A,B) => C): Par[C] = Par.map2(p,b)(f) def flatMap[B](f: A => Par[B]): Par[B] = Par.flatMap(p)(f) def zip[B](b: Par[B]): Par[(A,B)] = p.map2(b)((_,_)) } } }
mit
ericpony/scala-examples
testcases/repair/Heap/Heap5.scala
2972
/* Copyright 2009-2013 EPFL, Lausanne * * Author: Ravi * Date: 20.11.2013 **/ import leon.lang._ import leon.collection._ object Heaps { sealed abstract class Heap { val rank : BigInt = this match { case Leaf() => 0 case Node(_, l, r) => 1 + max(l.rank, r.rank) } def content : Set[BigInt] = this match { case Leaf() => Set[BigInt]() case Node(v,l,r) => l.content ++ Set(v) ++ r.content } } case class Leaf() extends Heap case class Node(value:BigInt, left: Heap, right: Heap) extends Heap def max(i1 : BigInt, i2 : BigInt) = if (i1 >= i2) i1 else i2 def hasHeapProperty(h : Heap) : Boolean = h match { case Leaf() => true case Node(v, l, r) => ( l match { case Leaf() => true case n@Node(v2,_,_) => v >= v2 && hasHeapProperty(n) }) && ( r match { case Leaf() => true case n@Node(v2,_,_) => v >= v2 && hasHeapProperty(n) }) } def hasLeftistProperty(h: Heap) : Boolean = h match { case Leaf() => true case Node(_,l,r) => hasLeftistProperty(l) && hasLeftistProperty(r) && l.rank >= r.rank } def heapSize(t: Heap): BigInt = { t match { case Leaf() => BigInt(0) case Node(v, l, r) => heapSize(l) + 1 + heapSize(r) }} ensuring(_ >= 0) private def merge(h1: Heap, h2: Heap) : Heap = { require( hasLeftistProperty(h1) && hasLeftistProperty(h2) && hasHeapProperty(h1) && hasHeapProperty(h2) ) (h1,h2) match { case (Leaf(), _) => h2 case (_, Leaf()) => h1 case (Node(v1, l1, r1), Node(v2, l2, r2)) => if(v1 <= v2) // FIXME should be >= makeN(v1, l1, merge(r1, h2)) else makeN(v2, l2, merge(h1, r2)) } } ensuring { res => hasLeftistProperty(res) && hasHeapProperty(res) && heapSize(h1) + heapSize(h2) == heapSize(res) && h1.content ++ h2.content == res.content } private def makeN(value: BigInt, left: Heap, right: Heap) : Heap = { require( hasLeftistProperty(left) && hasLeftistProperty(right) ) if(left.rank >= right.rank) Node(value, left, right) else Node(value, right, left) } ensuring { res => hasLeftistProperty(res) } def insert(element: BigInt, heap: Heap) : Heap = { require(hasLeftistProperty(heap) 
&& hasHeapProperty(heap)) merge(Node(element, Leaf(), Leaf()), heap) } ensuring { res => hasLeftistProperty(res) && hasHeapProperty(res) && heapSize(res) == heapSize(heap) + 1 && res.content == heap.content ++ Set(element) } def findMax(h: Heap) : Option[BigInt] = { h match { case Node(m,_,_) => Some(m) case Leaf() => None() } } def removeMax(h: Heap) : Heap = { require(hasLeftistProperty(h) && hasHeapProperty(h)) h match { case Node(_,l,r) => merge(l, r) case l => l } } ensuring { res => hasLeftistProperty(res) && hasHeapProperty(res) } }
mit
mafiya69/corefx
src/System.Reflection.Metadata/src/System/Reflection/PortableExecutable/PEReader.cs
23579
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Immutable; using System.Diagnostics; using System.IO; using System.Reflection.Internal; using System.Reflection.Metadata; using System.Threading; namespace System.Reflection.PortableExecutable { /// <summary> /// Portable Executable format reader. /// </summary> /// <remarks> /// The implementation is thread-safe, that is multiple threads can read data from the reader in parallel. /// Disposal of the reader is not thread-safe (see <see cref="Dispose"/>). /// </remarks> public sealed class PEReader : IDisposable { // May be null in the event that the entire image is not // deemed necessary and we have been instructed to read // the image contents without being lazy. private MemoryBlockProvider _peImage; // If we read the data from the image lazily (peImage != null) we defer reading the PE headers. private PEHeaders _lazyPEHeaders; private AbstractMemoryBlock _lazyMetadataBlock; private AbstractMemoryBlock _lazyImageBlock; private AbstractMemoryBlock[] _lazyPESectionBlocks; /// <summary> /// Creates a Portable Executable reader over a PE image stored in memory. /// </summary> /// <param name="peImage">Pointer to the start of the PE image.</param> /// <param name="size">The size of the PE image.</param> /// <exception cref="ArgumentNullException"><paramref name="peImage"/> is <see cref="IntPtr.Zero"/>.</exception> /// <exception cref="ArgumentOutOfRangeException"><paramref name="size"/> is negative.</exception> /// <remarks> /// The memory is owned by the caller and not released on disposal of the <see cref="PEReader"/>. /// The caller is responsible for keeping the memory alive and unmodified throughout the lifetime of the <see cref="PEReader"/>. 
/// The content of the image is not read during the construction of the <see cref="PEReader"/> /// </remarks> public unsafe PEReader(byte* peImage, int size) { if (peImage == null) { throw new ArgumentNullException("peImage"); } if (size < 0) { throw new ArgumentOutOfRangeException("size"); } _peImage = new ExternalMemoryBlockProvider(peImage, size); } /// <summary> /// Creates a Portable Executable reader over a PE image stored in a stream. /// </summary> /// <param name="peStream">PE image stream.</param> /// <exception cref="ArgumentNullException"><paramref name="peStream"/> is null.</exception> /// <exception cref="BadImageFormatException"> /// <see cref="PEStreamOptions.PrefetchMetadata"/> is specified and the PE headers of the image are invalid. /// </exception> /// <remarks> /// Ownership of the stream is transferred to the <see cref="PEReader"/> upon successful validation of constructor arguments. It will be /// disposed by the <see cref="PEReader"/> and the caller must not manipulate it. /// </remarks> public PEReader(Stream peStream) : this(peStream, PEStreamOptions.Default) { } /// <summary> /// Creates a Portable Executable reader over a PE image stored in a stream beginning at its current position and ending at the end of the stream. /// </summary> /// <param name="peStream">PE image stream.</param> /// <param name="options"> /// Options specifying how sections of the PE image are read from the stream. /// /// Unless <see cref="PEStreamOptions.LeaveOpen"/> is specified, ownership of the stream is transferred to the <see cref="PEReader"/> /// upon successful argument validation. It will be disposed by the <see cref="PEReader"/> and the caller must not manipulate it. /// /// Unless <see cref="PEStreamOptions.PrefetchMetadata"/> or <see cref="PEStreamOptions.PrefetchEntireImage"/> is specified no data /// is read from the stream during the construction of the <see cref="PEReader"/>. 
Furthermore, the stream must not be manipulated /// by caller while the <see cref="PEReader"/> is alive and undisposed. /// /// If <see cref="PEStreamOptions.PrefetchMetadata"/> or <see cref="PEStreamOptions.PrefetchEntireImage"/>, the <see cref="PEReader"/> /// will have read all of the data requested during construction. As such, if <see cref="PEStreamOptions.LeaveOpen"/> is also /// specified, the caller retains full ownership of the stream and is assured that it will not be manipulated by the <see cref="PEReader"/> /// after construction. /// </param> /// <exception cref="ArgumentNullException"><paramref name="peStream"/> is null.</exception> /// <exception cref="ArgumentOutOfRangeException"><paramref name="options"/> has an invalid value.</exception> /// <exception cref="BadImageFormatException"> /// <see cref="PEStreamOptions.PrefetchMetadata"/> is specified and the PE headers of the image are invalid. /// </exception> public PEReader(Stream peStream, PEStreamOptions options) : this(peStream, options, (int?)null) { } /// <summary> /// Creates a Portable Executable reader over a PE image of the given size beginning at the stream's current position. /// </summary> /// <param name="peStream">PE image stream.</param> /// <param name="size">PE image size.</param> /// <param name="options"> /// Options specifying how sections of the PE image are read from the stream. /// /// Unless <see cref="PEStreamOptions.LeaveOpen"/> is specified, ownership of the stream is transferred to the <see cref="PEReader"/> /// upon successful argument validation. It will be disposed by the <see cref="PEReader"/> and the caller must not manipulate it. /// /// Unless <see cref="PEStreamOptions.PrefetchMetadata"/> or <see cref="PEStreamOptions.PrefetchEntireImage"/> is specified no data /// is read from the stream during the construction of the <see cref="PEReader"/>. Furthermore, the stream must not be manipulated /// by caller while the <see cref="PEReader"/> is alive and undisposed. 
/// /// If <see cref="PEStreamOptions.PrefetchMetadata"/> or <see cref="PEStreamOptions.PrefetchEntireImage"/>, the <see cref="PEReader"/> /// will have read all of the data requested during construction. As such, if <see cref="PEStreamOptions.LeaveOpen"/> is also /// specified, the caller retains full ownership of the stream and is assured that it will not be manipulated by the <see cref="PEReader"/> /// after construction. /// </param> /// <exception cref="ArgumentOutOfRangeException">Size is negative or extends past the end of the stream.</exception> public PEReader(Stream peStream, PEStreamOptions options, int size) : this(peStream, options, (int?)size) { } private unsafe PEReader(Stream peStream, PEStreamOptions options, int? sizeOpt) { if (peStream == null) { throw new ArgumentNullException("peStream"); } if (!peStream.CanRead || !peStream.CanSeek) { throw new ArgumentException(SR.StreamMustSupportReadAndSeek, "peStream"); } if (!options.IsValid()) { throw new ArgumentOutOfRangeException("options"); } long start = peStream.Position; int size = PEBinaryReader.GetAndValidateSize(peStream, sizeOpt); bool closeStream = true; try { bool isFileStream = FileStreamReadLightUp.IsFileStream(peStream); if ((options & (PEStreamOptions.PrefetchMetadata | PEStreamOptions.PrefetchEntireImage)) == 0) { _peImage = new StreamMemoryBlockProvider(peStream, start, size, isFileStream, (options & PEStreamOptions.LeaveOpen) != 0); closeStream = false; } else { // Read in the entire image or metadata blob: if ((options & PEStreamOptions.PrefetchEntireImage) != 0) { var imageBlock = StreamMemoryBlockProvider.ReadMemoryBlockNoLock(peStream, isFileStream, 0, (int)Math.Min(peStream.Length, int.MaxValue)); _lazyImageBlock = imageBlock; _peImage = new ExternalMemoryBlockProvider(imageBlock.Pointer, imageBlock.Size); // if the caller asked for metadata initialize the PE headers (calculates metadata offset): if ((options & PEStreamOptions.PrefetchMetadata) != 0) { InitializePEHeaders(); } } 
else { // The peImage is left null, but the lazyMetadataBlock is initialized up front. _lazyPEHeaders = new PEHeaders(peStream); _lazyMetadataBlock = StreamMemoryBlockProvider.ReadMemoryBlockNoLock(peStream, isFileStream, _lazyPEHeaders.MetadataStartOffset, _lazyPEHeaders.MetadataSize); } // We read all we need, the stream is going to be closed. } } finally { if (closeStream && (options & PEStreamOptions.LeaveOpen) == 0) { peStream.Dispose(); } } } /// <summary> /// Creates a Portable Executable reader over a PE image stored in a byte array. /// </summary> /// <param name="peImage">PE image.</param> /// <remarks> /// The content of the image is not read during the construction of the <see cref="PEReader"/> /// </remarks> /// <exception cref="ArgumentNullException"><paramref name="peImage"/> is null.</exception> public PEReader(ImmutableArray<byte> peImage) { if (peImage.IsDefault) { throw new ArgumentNullException("peImage"); } _peImage = new ByteArrayMemoryProvider(peImage); } /// <summary> /// Disposes all memory allocated by the reader. /// </summary> /// <remarks> /// <see cref="Dispose"/> can be called multiple times (but not in parallel). /// It is not safe to call <see cref="Dispose"/> in parallel with any other operation on the <see cref="PEReader"/> /// or reading from <see cref="PEMemoryBlock"/>s retrieved from the reader. /// </remarks> public void Dispose() { var image = _peImage; if (image != null) { image.Dispose(); _peImage = null; } var imageBlock = _lazyImageBlock; if (imageBlock != null) { imageBlock.Dispose(); _lazyImageBlock = null; } var metadataBlock = _lazyMetadataBlock; if (metadataBlock != null) { metadataBlock.Dispose(); _lazyMetadataBlock = null; } var peSectionBlocks = _lazyPESectionBlocks; if (peSectionBlocks != null) { foreach (var block in peSectionBlocks) { if (block != null) { block.Dispose(); } } _lazyPESectionBlocks = null; } } /// <summary> /// Gets the PE headers. 
/// </summary> /// <exception cref="BadImageFormatException">The headers contain invalid data.</exception> public PEHeaders PEHeaders { get { if (_lazyPEHeaders == null) { InitializePEHeaders(); } return _lazyPEHeaders; } } private void InitializePEHeaders() { Debug.Assert(_peImage != null); StreamConstraints constraints; Stream stream = _peImage.GetStream(out constraints); PEHeaders headers; if (constraints.GuardOpt != null) { lock (constraints.GuardOpt) { headers = ReadPEHeadersNoLock(stream, constraints.ImageStart, constraints.ImageSize); } } else { headers = ReadPEHeadersNoLock(stream, constraints.ImageStart, constraints.ImageSize); } Interlocked.CompareExchange(ref _lazyPEHeaders, headers, null); } private static PEHeaders ReadPEHeadersNoLock(Stream stream, long imageStartPosition, int imageSize) { Debug.Assert(imageStartPosition >= 0 && imageStartPosition <= stream.Length); stream.Seek(imageStartPosition, SeekOrigin.Begin); return new PEHeaders(stream, imageSize); } /// <summary> /// Returns a view of the entire image as a pointer and length. 
/// </summary> /// <exception cref="InvalidOperationException">PE image not available.</exception> private AbstractMemoryBlock GetEntireImageBlock() { if (_lazyImageBlock == null) { if (_peImage == null) { throw new InvalidOperationException(SR.PEImageNotAvailable); } var newBlock = _peImage.GetMemoryBlock(); if (Interlocked.CompareExchange(ref _lazyImageBlock, newBlock, null) != null) { // another thread created the block already, we need to dispose ours: newBlock.Dispose(); } } return _lazyImageBlock; } private AbstractMemoryBlock GetMetadataBlock() { if (!HasMetadata) { throw new InvalidOperationException(SR.PEImageDoesNotHaveMetadata); } if (_lazyMetadataBlock == null) { Debug.Assert(_peImage != null, "We always have metadata if peImage is not available."); var newBlock = _peImage.GetMemoryBlock(PEHeaders.MetadataStartOffset, PEHeaders.MetadataSize); if (Interlocked.CompareExchange(ref _lazyMetadataBlock, newBlock, null) != null) { // another thread created the block already, we need to dispose ours: newBlock.Dispose(); } } return _lazyMetadataBlock; } private AbstractMemoryBlock GetPESectionBlock(int index) { Debug.Assert(index >= 0 && index < PEHeaders.SectionHeaders.Length); Debug.Assert(_peImage != null); if (_lazyPESectionBlocks == null) { Interlocked.CompareExchange(ref _lazyPESectionBlocks, new AbstractMemoryBlock[PEHeaders.SectionHeaders.Length], null); } var newBlock = _peImage.GetMemoryBlock( PEHeaders.SectionHeaders[index].PointerToRawData, PEHeaders.SectionHeaders[index].SizeOfRawData); if (Interlocked.CompareExchange(ref _lazyPESectionBlocks[index], newBlock, null) != null) { // another thread created the block already, we need to dispose ours: newBlock.Dispose(); } return _lazyPESectionBlocks[index]; } /// <summary> /// Return true if the reader can access the entire PE image. /// </summary> /// <remarks> /// Returns false if the <see cref="PEReader"/> is constructed from a stream and only part of it is prefetched into memory. 
/// </remarks> public bool IsEntireImageAvailable { get { return _lazyImageBlock != null || _peImage != null; } } /// <summary> /// Gets a pointer to and size of the PE image if available (<see cref="IsEntireImageAvailable"/>). /// </summary> /// <exception cref="InvalidOperationException">The entire PE image is not available.</exception> public PEMemoryBlock GetEntireImage() { return new PEMemoryBlock(GetEntireImageBlock()); } /// <summary> /// Returns true if the PE image contains CLI metadata. /// </summary> /// <exception cref="BadImageFormatException">The PE headers contain invalid data.</exception> public bool HasMetadata { get { return PEHeaders.MetadataSize > 0; } } /// <summary> /// Loads PE section that contains CLI metadata. /// </summary> /// <exception cref="InvalidOperationException">The PE image doesn't contain metadata (<see cref="HasMetadata"/> returns false).</exception> /// <exception cref="BadImageFormatException">The PE headers contain invalid data.</exception> public PEMemoryBlock GetMetadata() { return new PEMemoryBlock(GetMetadataBlock()); } /// <summary> /// Loads PE section that contains the specified <paramref name="relativeVirtualAddress"/> into memory /// and returns a memory block that starts at <paramref name="relativeVirtualAddress"/> and ends at the end of the containing section. /// </summary> /// <param name="relativeVirtualAddress">Relative Virtual Address of the data to read.</param> /// <returns> /// An empty block if <paramref name="relativeVirtualAddress"/> doesn't represent a location in any of the PE sections of this PE image. 
/// </returns> /// <exception cref="BadImageFormatException">The PE headers contain invalid data.</exception> public PEMemoryBlock GetSectionData(int relativeVirtualAddress) { var sectionIndex = PEHeaders.GetContainingSectionIndex(relativeVirtualAddress); if (sectionIndex < 0) { return default(PEMemoryBlock); } int relativeOffset = relativeVirtualAddress - PEHeaders.SectionHeaders[sectionIndex].VirtualAddress; int size = PEHeaders.SectionHeaders[sectionIndex].VirtualSize - relativeOffset; AbstractMemoryBlock block; if (_peImage != null) { block = GetPESectionBlock(sectionIndex); } else { block = GetEntireImageBlock(); relativeOffset += PEHeaders.SectionHeaders[sectionIndex].PointerToRawData; } return new PEMemoryBlock(block, relativeOffset); } /// <summary> /// Reads all Debug Directory table entries. /// </summary> /// <exception cref="BadImageFormatException">Bad format of the entry.</exception> public unsafe ImmutableArray<DebugDirectoryEntry> ReadDebugDirectory() { var debugDirectory = PEHeaders.PEHeader.DebugTableDirectory; if (debugDirectory.Size == 0) { return ImmutableArray<DebugDirectoryEntry>.Empty; } int position; if (!PEHeaders.TryGetDirectoryOffset(debugDirectory, out position)) { throw new BadImageFormatException(SR.InvalidDirectoryRVA); } const int entrySize = 0x1c; if (debugDirectory.Size % entrySize != 0) { throw new BadImageFormatException(SR.InvalidDirectorySize); } using (AbstractMemoryBlock block = _peImage.GetMemoryBlock(position, debugDirectory.Size)) { var reader = new BlobReader(block.Pointer, block.Size); int entryCount = debugDirectory.Size / entrySize; var builder = ImmutableArray.CreateBuilder<DebugDirectoryEntry>(entryCount); for (int i = 0; i < entryCount; i++) { // Reserved, must be zero. 
int characteristics = reader.ReadInt32(); if (characteristics != 0) { throw new BadImageFormatException(SR.InvalidDebugDirectoryEntryCharacteristics); } uint stamp = reader.ReadUInt32(); ushort majorVersion = reader.ReadUInt16(); ushort minorVersion = reader.ReadUInt16(); var type = (DebugDirectoryEntryType)reader.ReadInt32(); int dataSize = reader.ReadInt32(); int dataRva = reader.ReadInt32(); int dataPointer = reader.ReadInt32(); builder.Add(new DebugDirectoryEntry(stamp, majorVersion, minorVersion, type, dataSize, dataRva, dataPointer)); } return builder.MoveToImmutable(); } } /// <summary> /// Reads the data pointed to by the specifed Debug Directory entry and interprets them as CodeView. /// </summary> /// <exception cref="ArgumentException"><paramref name="entry"/> is not a CodeView entry.</exception> /// <exception cref="BadImageFormatException">Bad format of the data.</exception> public unsafe CodeViewDebugDirectoryData ReadCodeViewDebugDirectoryData(DebugDirectoryEntry entry) { if (entry.Type != DebugDirectoryEntryType.CodeView) { throw new ArgumentException("entry"); } using (AbstractMemoryBlock block = _peImage.GetMemoryBlock(entry.DataPointer, entry.DataSize)) { var reader = new BlobReader(block.Pointer, block.Size); if (reader.ReadByte() != (byte)'R' || reader.ReadByte() != (byte)'S' || reader.ReadByte() != (byte)'D' || reader.ReadByte() != (byte)'S') { throw new BadImageFormatException(SR.UnexpectedCodeViewDataSignature); } Guid guid = reader.ReadGuid(); int age = reader.ReadInt32(); string path = reader.ReadUtf8NullTerminated(); // path may be padded with NULs while (reader.RemainingBytes > 0) { if (reader.ReadByte() != 0) { throw new BadImageFormatException(SR.InvalidPathPadding); } } return new CodeViewDebugDirectoryData(guid, age, path); } } } }
mit
Power-LAB/gls-unibox-delivery
src/Api/Resource/Parcel.php
4152
<?php
/**
 * This file is part of the gls-unibox-delivery.
 * (c) 2016 Pierre Tomasina
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */

namespace Plab\GlsUniboxDelivery\Api\Resource;

use Plab\GlsUniboxDelivery\Api\Api;
use Plab\GlsUniboxDelivery\Generator\Pdf\Html2Pdf\Html2Pdf;
use Plab\GlsUniboxDelivery\Gls\Parameter\ParameterException;

/**
 * REST resource that turns a JSON list of recipients into GLS parcel labels.
 *
 * Only POST is supported; every other HTTP verb answers 405.
 *
 * Class Parcel
 * @package Plab\GlsUniboxDelivery\Api\Resource
 */
class Parcel extends Resource
{
    /**
     * GET is not supported for this resource.
     *
     * @throws \Exception
     */
    public function get()
    {
        Api::httpError(405);
    }

    /**
     * Creates one GLS label per recipient in the JSON request body and
     * renders them all into a single PDF.
     *
     * Per recipient: build a Basic GLS request, run it through the provider,
     * and add the resulting label page to the PDF. Recipients whose data is
     * rejected by the GLS parameter validation are collected into an error
     * list instead of aborting the whole batch.
     */
    public function post()
    {
        $recipients = $this->bodyAsJson();
        $parcelsError = [];
        // No (or unparseable) body: nothing to do, respond with nothing.
        if (null === $recipients) {
            return;
        }
        $pdf = new Html2Pdf();
        foreach ($recipients as $recipient) {
            $address2 = '';
            $comment = '';
            $request = new \Plab\GlsUniboxDelivery\Gls\Request\Basic();
            if (!empty($recipient->address2)) {
                $address2 = $recipient->address2;
            }
            if (!empty($recipient->comment)) {
                $comment = $recipient->comment;
            }
            // GLS fields are limited to 35 characters: overflow from address2
            // is pushed onto the front of the comment, and the comment itself
            // is then truncated to 35 characters as well.
            if (35 < strlen($address2)) {
                $comment = substr($address2, 35) . $comment;
                $address2 = substr($address2, 0, 35);;
                if (35 < strlen($comment)) {
                    $comment = substr($comment, 0, 35);
                }
            }
            try {
                $request
                    ->setRecipientAddress1($recipient->address1)
                    ->setRecipientZipCode($recipient->zipCode)
                    ->setRecipientCity($recipient->city)
                    ->setRecipientCountry($recipient->country)
                    ->setRecipientName($recipient->name)
                    ->setRecipientMobile($recipient->mobile)
                    ->setRecipientReference($recipient->reference)
                    ->setPackageNumber($recipient->packageNumber)
                    ->setWeight($recipient->weight)
                ;
            } catch (ParameterException $e) {
                // Invalid recipient data: record the failure and keep going
                // with the remaining recipients.
                $parcelsError[] = (object)[
                    'reference' => $recipient->reference,
                    'error' => $e->getMessage(),
                ];
                continue;
            }
            // Optional fields are only set when non-empty (setters presumably
            // reject empty values -- TODO confirm).
            if (!empty($address2)) {
                $request->setRecipientAddress2($address2);
            }
            if (!empty($comment)) {
                $request->setRecipientComment($comment);
            }
            $this->provider->prepare($request);
            try {
                $response = $this->provider->run();
                $Parcel = new \Plab\GlsUniboxDelivery\Generator\Pdf\Html2Pdf\Parcel($response, $request);
            } catch (\Plab\GlsUniboxDelivery\Gls\Provider\ProviderWsTimeoutException $e) {
                // Web-service timeout: fall back to the offline "Uniship"
                // label template built from the request alone.
                $Parcel = new \Plab\GlsUniboxDelivery\Generator\Pdf\Html2Pdf\Parcel(null, $request);
                $Parcel->setTemplate(\Plab\GlsUniboxDelivery\Generator\Pdf\Html2Pdf\Parcel::TEMPLATE_UNISHIP);
            }
            $pdf->addParcel($Parcel);
        }
        $result = $pdf->render();
        // NOTE(review): `+=` is the array *union* operator -- entries of
        // $parcelsError whose numeric keys collide with existing keys in
        // $result->parcels are silently dropped. If errors should always be
        // appended, array_merge() is the usual choice; verify intent.
        $result->parcels += $parcelsError;
        if (defined('__DEV_MODE__')) {
            // Dev convenience: dump the JSON summary and decoded PDF to disk.
            $path = dirname(__DIR__, 2) . '/public';
            file_put_contents("$path/gls.json", json_encode($result->parcels));
            file_put_contents("$path/gls.pdf", base64_decode($result->file));
        }
        $this->result('json', $result);
    }

    /**
     * PUT is not supported for this resource.
     *
     * @throws \Exception
     */
    public function put()
    {
        Api::httpError(405);
    }

    /**
     * UPDATE is not supported for this resource.
     *
     * @throws \Exception
     */
    public function update()
    {
        Api::httpError(405);
    }

    /**
     * DELETE is not supported for this resource.
     *
     * @throws \Exception
     */
    public function delete()
    {
        Api::httpError(405);
    }

    /**
     * HEAD is not supported for this resource.
     *
     * @throws \Exception
     */
    public function head()
    {
        Api::httpError(405);
    }
}
mit
jccaicedo/localization-agent
learn/cnn/convertProtobinToNumpy.py
447
from caffe import io as c import numpy as np import os,sys if len(sys.argv) < 3: print 'Use: convertProtobinToNumpy protobinFile numpyOutput' sys.exit() protoData = c.caffe_pb2.BlobProto() f = open(sys.argv[1],'rb') protoData.ParseFromString(f.read()) f.close() array = c.blobproto_to_array(protoData) np.save(sys.argv[2],array[0].swapaxes(1, 0).swapaxes(2,1)[:, :, ::-1]) A = np.load(sys.argv[2]+'.npy') print 'Final matrix shape:',A.shape
mit
fjz13/Medusa
Tool/GameKit/GameKit/Analyzer/RandomNameConfigAnalyzer.cs
4329
// Copyright (c) 2015 fjz13. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
using System;
using System.Collections.Generic;
using System.Data;
using System.IO;
using System.Linq;
using System.Text;
using GameKit.Log;
using GameKit.Publish;
using GameKit.Resource;
using Medusa.CoreProto;
using Microsoft.Win32;
using ProtoBuf;

namespace GameKit.Analyzer
{
    /// <summary>
    /// Reads "RandomName" sheets from the string-table Excel workbook and
    /// serializes each language's entries into a protobuf .bin config file.
    /// </summary>
    public class RandomNameConfigAnalyzer : IAnalyzer
    {
        // Sheet-name prefix that identifies random-name tables in the workbook.
        public const string TableName = "RandomName";

        public void PrevProcess()
        {
        }

        public void PostCheck()
        {
        }

        /// <summary>
        /// Scans the workbook for RandomName sheets, converts each valid row
        /// into a RandomNameConfigItem, and writes one serialized config per
        /// published language.
        /// </summary>
        public void Analyze()
        {
            Logger.LogAllLine("Analyze RandomName================>");
            // Disable Jet's row-sampling type guessing so mixed-type columns
            // are read consistently.
            // NOTE(review): OpenSubKey returns null when the key is absent
            // (e.g. no Jet engine installed) -- this would NullReference; confirm
            // the tool only runs on machines with the key present.
            var reg = Registry.LocalMachine.OpenSubKey(@"SOFTWARE\Microsoft\Jet\4.0\Engines\Excel", true);
            reg.SetValue("TypeGuessRows", 0);

            var validNames = Enum.GetNames(typeof(PublishLanguages));
            var tabelNames = ExcelHelper.GetExcelTableNames(PathManager.InputConfigStringTablePath.FullName);
            foreach (string tableName in tabelNames)
            {
                // Strip Excel decorations ("$", quotes) and the RandomName
                // prefix, leaving only the language suffix.
                string pureTableName = tableName.Replace("$", String.Empty).Replace("'", String.Empty).Replace(TableName, String.Empty);
                // Keep only sheets named RandomName<KnownLanguage>.
                bool isValidTable = validNames.Any(pureTableName.Contains);
                if (!isValidTable || !tableName.Contains(TableName))
                {
                    continue;
                }

                var table = ExcelHelper.LoadDataFromExcel(PathManager.InputConfigStringTablePath.FullName, tableName);
                string resourceName = tableName.Replace("$", String.Empty);
                var packageInfo = PublishInfo.GetPublishInfo(resourceName);

                RandomNameConfig config = new RandomNameConfig();
                foreach (DataRow row in table.Rows)
                {
                    // A row is only converted when every cell is non-null;
                    // fully empty rows are silently skipped, partially empty
                    // rows are reported as errors.
                    bool isValid = true;
                    bool isAllNull = true;
                    for (int i = 0; i < row.ItemArray.Length; i++)
                    {
                        if (row.IsNull(i))
                        {
                            isValid = false;
                        }
                        else
                        {
                            isAllNull = false;
                        }
                    }

                    if (isValid)
                    {
                        try
                        {
                            RandomNameConfigItem item = new RandomNameConfigItem();
                            item.Position = Convert.ToUInt32(row["Position"]);
                            item.Value = Convert.ToString(row["Value"]);
                            item.Value = ExcelHelper.ConvertToUTF8(item.Value);
                            config.Items.Add(item);
                        }
                        catch (Exception ex)
                        {
                            // Conversion failure on a cell: log and continue
                            // with the remaining rows.
                            Logger.LogErrorLine(ex.Message);
                            ExcelHelper.PrintRow(row);
                        }
                    }
                    else if (!isAllNull)
                    {
                        Logger.LogErrorLine("Invalid string table line:");
                        ExcelHelper.PrintRow(row);
                    }
                }

                // Serialize the config only for languages included in the
                // current publish target.
                if ((PublishTarget.Current.PublishInfo.Language & packageInfo.Language) == packageInfo.Language)
                {
                    string tempPath = PathManager.OutputConfigPath + "/" + TableName + pureTableName + ".bin";
                    using (var file = File.Open(tempPath, FileMode.Create, FileAccess.ReadWrite))
                    {
                        Serializer.Serialize(file, config);
                        Logger.LogAllLine("Generate:{0}", tempPath);
                    }

                    var resourceFile = new FileListFile(new FileInfo(tempPath), true, true);
                    FileSystemGenerator.AddFileAndTag(resourceFile);
                }
                else
                {
                    Logger.LogAllLine("Ignore:\t{0}", pureTableName);
                }
            }
        }
    }
}
mit
codeflo/cubehack
source/CubeHack.Core/Game/WorldGenerator.cs
2990
// Copyright (c) the CubeHack authors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt in the project root.

using CubeHack.Geometry;
using CubeHack.Randomization;
using CubeHack.State;
using System.Collections.Generic;
using System.Threading.Tasks;

namespace CubeHack.Game
{
    /// <summary>
    /// Procedurally generates terrain chunks on demand, on background tasks.
    /// Terrain is deterministic: the noise sources use fixed seeds.
    /// </summary>
    public class WorldGenerator
    {
        // Fixed seeds so the same world is generated on every run.
        private const uint _dirtSeed = 0xba26789cU;
        private const uint _mountainSeed = 0x512ba950U;
        private const uint _mountDetailSeed = 0xe12a62c9U;

        // 2D noise fields sampled per (x, z) column; constructor arguments are
        // presumably (seed, amplitude, scale) -- TODO confirm against Noise2D.
        private static readonly Noise2D _dirtNoise = new Noise2D(_dirtSeed, 8, 16);
        private static readonly Noise2D _mountainNoise = new Noise2D(_mountainSeed, 20, 100);
        private static readonly Noise2D _mountainDetailNoise = new Noise2D(_mountDetailSeed, 7, 20);

        // Guards _inProgress so each chunk is generated at most once at a time.
        private readonly object _mutex = new object();
        private readonly HashSet<ChunkPos> _inProgress = new HashSet<ChunkPos>();

        /// <summary>
        /// Schedules background generation for a requested chunk, unless it is
        /// already created or a generation task for it is already running.
        /// </summary>
        public void OnChunkRequested(Chunk chunk)
        {
            lock (_mutex)
            {
                var chunkPos = chunk.Pos;
                if (chunk.IsCreated || _inProgress.Contains(chunk.Pos)) return;

                _inProgress.Add(chunkPos);
                Task.Run(
                    () =>
                    {
                        try
                        {
                            CreateChunk(chunk);
                        }
                        finally
                        {
                            // Always clear the in-progress marker, even when
                            // generation throws, so the chunk can be retried.
                            lock (_mutex)
                            {
                                _inProgress.Remove(chunkPos);
                            }
                        }
                    });
            }
        }

        /// <summary>
        /// Fills a chunk with terrain: per (x, z) column, cells below the
        /// mountain height become rock and cells below the dirt height become
        /// dirt. Data is built in a detached temp chunk, then pasted into the
        /// live chunk in one step.
        /// </summary>
        private void CreateChunk(Chunk chunk)
        {
            var chunkPos = chunk.Pos;
            var tempChunk = new Chunk(null, chunkPos);

            var cornerPos = (BlockPos)chunkPos;
            for (int x = 0; x < GeometryConstants.ChunkSize; ++x)
            {
                long wx = cornerPos.X + x;
                for (int z = 0; z < GeometryConstants.ChunkSize; ++z)
                {
                    long wz = cornerPos.Z + z;

                    // Column height thresholds in world coordinates; constants
                    // shift/scale the noise into the intended height ranges.
                    var dirtHeight = (long)(_dirtNoise[wx, wz] * 2.5 - 8);
                    var mountainHeight = (long)(_mountainNoise[wx, wz] * 8 + _mountainDetailNoise[wx, wz] * 3 - 10);

                    for (int y = 0; y < GeometryConstants.ChunkSize; ++y)
                    {
                        long wy = cornerPos.Y + y;

                        if (wy < mountainHeight)
                        {
                            tempChunk[x, y, z] = 2; // Rock
                        }
                        else if (wy < dirtHeight)
                        {
                            tempChunk[x, y, z] = 1; // Dirt
                        }
                    }
                }
            }

            tempChunk.IsCreated = true;
            chunk.PasteChunkData(tempChunk.GetChunkData());
        }
    }
}
mit
kelunik/acme
src/Protocol/Account.php
2618
<?php
/**
 * This file is part of the ACME package.
 *
 * @copyright Copyright (c) 2015-2017, Niklas Keller
 * @license MIT
 */

namespace Kelunik\Acme\Protocol;

use Kelunik\Acme\AcmeException;
use League\Uri\Http;
use Psr\Http\Message\UriInterface;

/**
 * Immutable value object for an ACME account resource.
 */
final class Account
{
    /**
     * Builds an Account from an ACME server response.
     *
     * @param string|null $url Account URL (from the Location header presumably; confirm at call site).
     * @param string $payload JSON response body.
     *
     * @throws AcmeException If the account URL is missing.
     */
    public static function fromResponse(?string $url, string $payload): Account
    {
        if ($url === null) {
            throw new AcmeException('Missing account URL');
        }

        // parseResponse validates and extracts the listed fields; its results
        // are spread into the constructor in declaration order.
        return new self(Http::createFromString($url), ...parseResponse($payload, [
            'status' => enum(AccountStatus::getAll()),
            'contact' => multiple(contact()),
            'orders' => optional(url()),
        ]));
    }

    /**
     * @var UriInterface URI of the account object.
     */
    private UriInterface $url;

    /**
     * @var string The status of this account.
     */
    private string $status;

    /**
     * @var array All contacts registered with the server.
     */
    private array $contacts;

    /**
     * @var null|UriInterface An url to fetch orders for this registration from
     */
    private ?UriInterface $ordersUrl;

    /**
     * Account constructor.
     *
     * @param UriInterface      $url       URI of the registration object.
     * @param string            $status    The status of this account.
     * @param array             $contact   All contacts registered with the server.
     * @param UriInterface|null $ordersUrl An url to fetch orders for this registration from
     */
    public function __construct(UriInterface $url, string $status, array $contact = [], ?UriInterface $ordersUrl = null)
    {
        $this->url = $url;
        $this->status = $status;
        $this->contacts = $contact;
        $this->ordersUrl = $ordersUrl;
    }

    /**
     * Gets the account URL.
     *
     * @return UriInterface URL to retrieve this registration object
     */
    public function getUrl(): UriInterface
    {
        return $this->url;
    }

    /**
     * Gets the account status.
     *
     * @return string Status of this account.
     */
    public function getStatus(): string
    {
        return $this->status;
    }

    /**
     * Gets the contact addresses.
     *
     * @return array Contacts registered with the server.
     */
    public function getContacts(): array
    {
        return $this->contacts;
    }

    /**
     * Gets the order URI from which the orders of this account can be fetched.
     *
     * @return null|UriInterface URI to fetch orders from
     */
    public function getOrdersUrl(): ?UriInterface
    {
        return $this->ordersUrl;
    }
}
mit
uupaa/URI.js
test/browser/template/worker.js
748
// URI test onmessage = function(event) { self.unitTest = event.data; // { message, setting: { secondary, baseDir } } if (!self.console) { // polyfill WebWorkerConsole self.console = function() {}; self.console.dir = function() {}; self.console.log = function() {}; self.console.warn = function() {}; self.console.error = function() {}; self.console.table = function() {}; } importScripts("../../lib/WebModule.js"); WebModule.VERIFY = __WEBMODULE_VERIFY__; WebModule.VERBOSE = __WEBMODULE_VERBOSE__; WebModule.PUBLISH = __WEBMODULE_PUBLISH__; __MODULES__ __WMTOOLS__ __SOURCES__ __OUTPUT__ __TEST_CASE__ self.postMessage(self.unitTest); };
mit
GuMiner/TemperFine
src/MapManager.cpp
4300
#include <map>
#include <string>
#include <sstream>
#include "ConversionUtils.h"
#include "Logger.h"
#include "StringUtils.h"
#include "MapManager.h"

MapManager::MapManager()
{
}

// Parses zSize layers of ySize lines, each holding xSize space-separated
// integers, from `lines` into `dataStorage` (flattened via MapInfo::GetIndex).
// Consumes lines via the `currentLine` cursor (a class member presumably set
// by ReadMap before each call -- confirm in MapManager.h). Returns false and
// logs on the first malformed line or value.
bool MapManager::LoadMapBlockData(std::vector<std::string>& lines, unsigned char* dataStorage, unsigned int xSize, unsigned int ySize, unsigned int zSize)
{
    std::stringstream errorStream;
    for (unsigned int k = 0; k < zSize; k++)
    {
        for (unsigned int j = 0; j < ySize; j++)
        {
            // Break apart our data line.
            std::vector<std::string> stringParts;
            StringUtils::Split(lines[currentLine++], StringUtils::Space, true, stringParts);

            if (stringParts.size() != xSize)
            {
                // NOTE(review): message is missing a space after xSize
                // ("...Expecting 10elements..."); string left as-is here.
                errorStream << "Incorrect element count in layer " << k << ", line " << j << ". Expecting " << xSize << "elements, not " << stringParts.size() << ".";
                Logger::LogError(errorStream.str().c_str());
                return false;
            }

            for (unsigned int i = 0; i < xSize; i++)
            {
                int value;
                if (!StringUtils::ParseIntFromString(stringParts[i], value))
                {
                    errorStream << "Couldn't decode a value in layer " << k << ", line " << j << ", element " << i << ".";
                    Logger::LogError(errorStream.str().c_str());
                    return false;
                }

                dataStorage[MapInfo::GetIndex(i, j, k, xSize, ySize)] = (unsigned char)value;
            }
        }
    }

    return true;
}

// Loads a map file: header (config version, name, x/y/z sizes) followed by
// three stacked block-data sections (type, orientation, property). Allocates
// the three arrays on outputMap; the caller owns them and frees via ClearMap.
// NOTE(review): on a failure after allocation, the function returns false
// without freeing -- the caller is presumably expected to call ClearMap
// anyway; confirm.
bool MapManager::ReadMap(const char* filename, MapInfo& outputMap)
{
    std::vector<std::string> lines;
    std::map<int, std::string> commentLines;
    if (!StringUtils::LoadConfigurationFile(filename, lines, commentLines))
    {
        Logger::Log("Unable to properly-parse the config-style file!");
        return false;
    }

    // First line is the config, followed by the map name.
    currentLine = 0;
    int mapVersion;
    if (!ConversionUtils::LoadInt(lines[currentLine++], mapVersion))
    {
        Logger::Log("Unable to properly load the map config version!");
        return false;
    }

    outputMap.mapConfigVersion = (unsigned int)mapVersion;
    outputMap.name = lines[currentLine++];

    std::stringstream mapName;
    mapName << "Loaded map \"" << outputMap.name << "\", config version " << outputMap.mapConfigVersion;
    Logger::Log(mapName.str().c_str());

    // Load the map size.
    int xSize, ySize, zSize;
    if (!ConversionUtils::LoadInt(lines[currentLine++], xSize))
    {
        Logger::Log("Unable to load the map X size!");
        return false;
    }

    if (!ConversionUtils::LoadInt(lines[currentLine++], ySize))
    {
        Logger::Log("Unable to load the map Y size!");
        return false;
    }

    if (!ConversionUtils::LoadInt(lines[currentLine++], zSize))
    {
        Logger::Log("Unable to load the map Z size!");
        return false;
    }

    outputMap.xSize = (unsigned int)xSize;
    outputMap.ySize = (unsigned int)ySize;
    outputMap.zSize = (unsigned int)zSize;

    // One byte per voxel for each of the three per-block attributes.
    int mapDataSize = outputMap.xSize * outputMap.ySize * outputMap.zSize;
    outputMap.blockType = new unsigned char[mapDataSize];
    outputMap.blockOrientation = new unsigned char[mapDataSize];
    outputMap.blockProperty = new unsigned char[mapDataSize];

    if (!LoadMapBlockData(lines, outputMap.blockType, outputMap.xSize, outputMap.ySize, outputMap.zSize))
    {
        Logger::Log("Unable to load the list of block types!");
        return false;
    }

    if (!LoadMapBlockData(lines, outputMap.blockOrientation, outputMap.xSize, outputMap.ySize, outputMap.zSize))
    {
        Logger::Log("Unable to load the list of block orientations!");
        return false;
    }

    if (!LoadMapBlockData(lines, outputMap.blockProperty, outputMap.xSize, outputMap.ySize, outputMap.zSize))
    {
        Logger::Log("Unable to load the list of block properties!");
        return false;
    }

    return true;
}

// Frees the per-block arrays allocated by ReadMap.
void MapManager::ClearMap(MapInfo& map)
{
    delete[] map.blockType;
    delete[] map.blockOrientation;
    delete[] map.blockProperty;
}
mit
NetOfficeFw/NetOffice
Source/Excel/Enums/XlPhoneticAlignment.cs
1263
using System;
using NetOffice;
using NetOffice.Attributes;

namespace NetOffice.ExcelApi.Enums
{
    /// <summary>
    /// Specifies the alignment of phonetic text (furigana) in a cell.
    /// SupportByVersion Excel 9, 10, 11, 12, 14, 15, 16
    /// </summary>
    ///<remarks> MSDN Online Documentation: <see href="https://docs.microsoft.com/en-us/office/vba/api/Excel.XlPhoneticAlignment"/> </remarks>
    [SupportByVersion("Excel", 9,10,11,12,14,15,16)]
    [EntityType(EntityType.IsEnum)]
    public enum XlPhoneticAlignment
    {
        /// <summary>
        /// Excel controls the alignment of the phonetic text.
        /// SupportByVersion Excel 9, 10, 11, 12, 14, 15, 16
        /// </summary>
        /// <remarks>0</remarks>
        [SupportByVersion("Excel", 9,10,11,12,14,15,16)]
        xlPhoneticAlignNoControl = 0,

        /// <summary>
        /// Left-aligned phonetic text.
        /// SupportByVersion Excel 9, 10, 11, 12, 14, 15, 16
        /// </summary>
        /// <remarks>1</remarks>
        [SupportByVersion("Excel", 9,10,11,12,14,15,16)]
        xlPhoneticAlignLeft = 1,

        /// <summary>
        /// Center-aligned phonetic text.
        /// SupportByVersion Excel 9, 10, 11, 12, 14, 15, 16
        /// </summary>
        /// <remarks>2</remarks>
        [SupportByVersion("Excel", 9,10,11,12,14,15,16)]
        xlPhoneticAlignCenter = 2,

        /// <summary>
        /// Distributed phonetic text.
        /// SupportByVersion Excel 9, 10, 11, 12, 14, 15, 16
        /// </summary>
        /// <remarks>3</remarks>
        [SupportByVersion("Excel", 9,10,11,12,14,15,16)]
        xlPhoneticAlignDistributed = 3
    }
}
mit
LugosFingite/LudOS
kern/fs/devfs/kbdev.hpp
1807
/*
kbdev.hpp

Copyright (c) 13 Yann BOUCHER (yann)

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
#ifndef KBDEV_HPP
#define KBDEV_HPP

#include "fs/interface.hpp"

#include <sys/interface_list.h>

#include "utils/kmsgbus.hpp"
#include "utils/user_ptr.hpp"

// devfs node exposing a keyboard device through the ikbdev interface.
struct kbdev_node : public vfs::interface_node<kbdev_node, vfs::ientry<ikbdev, IKBDEV_ID>>
{
    kbdev_node(node* parent);

    // Default: no interface members to register; the ikbdev specialization
    // below wires up the actual callbacks.
    template<typename Interface>
    void fill_interface(Interface*) const {}

    // Copies the current key state into the caller-supplied struct.
    // TODO : map pointers to user_ptr
    int get_kbd_state(kbd_state*) const;

    // Identifier of the backing keyboard this node represents.
    size_t keyboard_id;
    // Message-bus subscription handle; unsubscribes automatically on destruction.
    MessageBus::RAIIHandle msg_hdl;

    // Pressed/released state per key code.
    bool key_state[KeyCount];
};

// ikbdev specialization: registers the node's handlers on the interface table.
template <>
inline void kbdev_node::fill_interface<ikbdev>(ikbdev* interface) const
{
    register_callback(&kbdev_node::get_kbd_state, interface->get_kbd_state);
}

#endif // KBDEV_HPP
mit
tijme/not-your-average-web-crawler
nyawc/CrawlerThread.py
4243
# -*- coding: utf-8 -*-

# MIT License
#
# Copyright (c) 2017 Tijme Gommers
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import threading

from nyawc.helpers.DebugHelper import DebugHelper
from nyawc.http.Handler import Handler
from nyawc.QueueItem import QueueItem

class CrawlerThread(threading.Thread):
    """The crawler thread executes the HTTP request using the HTTP handler.

    Attributes:
        __callback (obj): The method to call when finished
        __callback_lock (bool): The callback lock that prevents race conditions.
        __options (:class:`nyawc.Options`): The settins/options object.
        __queue_item (:class:`nyawc.QueueItem`): The queue item containing a request to execute.

    """

    def __init__(self, callback, callback_lock, options, queue_item):
        """Constructs a crawler thread instance

        Args:
            callback (obj): The method to call when finished
            callback_lock (bool): The callback lock that prevents race conditions.
            options (:class:`nyawc.Options`): The settins/options object.
            queue_item (:class:`nyawc.QueueItem`): The queue item containing a request to execute.

        """

        threading.Thread.__init__(self)

        # Double-underscore names are name-mangled; intentionally private state.
        self.__callback = callback
        self.__callback_lock = callback_lock
        self.__options = options
        self.__queue_item = queue_item

    def run(self):
        """Executes the HTTP call.

        Note:
            If this and the parent handler raised an error, the queue item status
            will be set to errored instead of finished. This is to prevent e.g.
            404 recursion.

        """

        # User callbacks must never crash the crawler thread, so each call is
        # wrapped and failures are merely printed.
        try:
            self.__options.callbacks.request_in_thread_before_start(self.__queue_item)
        except Exception as e:
            print(e)

        new_requests = []
        failed = False

        try:
            handler = Handler(self.__options, self.__queue_item)
            new_requests = handler.get_new_requests()

            try:
                # Raises on HTTP error status codes (4xx/5xx).
                self.__queue_item.response.raise_for_status()
            except Exception:
                # Only fail when the parent request errored too; otherwise
                # propagate the error flag to the child requests (prevents
                # endless recursion into error pages).
                if self.__queue_item.request.parent_raised_error:
                    failed = True
                else:
                    for new_request in new_requests:
                        new_request.parent_raised_error = True

        except Exception as e:
            failed = True
            error_message = "Setting status of '{}' to '{}' because of an HTTP error.".format(
                self.__queue_item.request.url,
                QueueItem.STATUS_ERRORED
            )

            DebugHelper.output(self.__options, error_message)
            DebugHelper.output(self.__options, e)

            try:
                self.__options.callbacks.request_on_error(self.__queue_item, str(e))
            except Exception as e:
                print(e)

        # Record provenance on every discovered request.
        for new_request in new_requests:
            new_request.parent_url = self.__queue_item.request.url

        try:
            self.__options.callbacks.request_in_thread_after_finish(self.__queue_item)
        except Exception as e:
            print(e)

        # The lock serializes completion callbacks across crawler threads.
        with self.__callback_lock:
            self.__callback(self.__queue_item, new_requests, failed)
mit
Maikell84/gitter-bot
gitter_bot.rb
4209
require 'eventmachine'
require 'em-http'
require 'json'
require 'net/http'
require 'open-uri'
require 'giphy'
require 'imgflip'
require_relative 'time_service'
require_relative 'leet_service'

# Gitter chat bot: streams messages from a room and reacts to a handful of
# text commands (jokes, gifs, service toggles, meme generation).
class GitterBot
  Giphy::Configuration.configure do |config|
    config.api_key = ENV['GIPHY_API_KEY']
  end

  # Spawns one listener thread per configured room and blocks until they exit.
  def initialize
    @debug = false
    @token = ENV['GITTER_TOKEN']
    room_ids = [ENV['GITTER_ROOM_ID']]
    threads = []
    @time_service = TimeService.new(self)
    @leet_service = LeetService.new(self)

    room_ids.each_with_index do |room, i|
      threads.push(
        Thread.new { start_listener(room) }
      )
    end

    threads.each do |thread|
      thread.join
    end
  end

  # Opens the Gitter streaming API for a room and dispatches each JSON chunk.
  def start_listener(room)
    puts "Start listening for new messages in room #{room}"
    stream_url = "https://stream.gitter.im/v1/rooms/#{room}/chatMessages"
    # Timeouts of 0 keep the long-lived streaming connection open indefinitely.
    http = EM::HttpRequest.new(stream_url, keepalive: true, connect_timeout: 0, inactivity_timeout: 0)
    EventMachine.run do
      req = http.get(head: {'Authorization' => "Bearer #{@token}", 'accept' => 'application/json'})
      req.stream do |chunk|
        # The stream sends keep-alive whitespace; ignore empty chunks.
        unless chunk.strip.empty?
          begin
            @message = JSON.parse(chunk)
            handle_message(room)
          rescue JSON::ParserError
            puts "Rescue JSON parser error: JSON: #{chunk}"
          end
        end
      end
    end
  end

  # Routes an incoming message to the matching command handler.
  def handle_message(target_room)
    p [:message, @message] if @debug
    p "Target room = #{target_room}" if @debug
    if @message['text'].downcase.include? 'tell a joke'
      tell_a_joke(target_room)
    elsif @message['text'].downcase.start_with? 'gif'
      show_gif(target_room, @message['text'])
    elsif @message['text'].downcase.start_with? 'deactivate time service'
      toggle_service(@time_service, target_room, false)
    elsif @message['text'].downcase.start_with? 'activate time service'
      toggle_service(@time_service, target_room, true)
    elsif @message['text'].downcase.start_with? 'deactivate leet service'
      toggle_service(@leet_service, target_room, false)
    elsif @message['text'].downcase.start_with? 'activate leet service'
      toggle_service(@leet_service, target_room, true)
    elsif @message['text'].downcase.include? 'but why'
      but_why(target_room)
    end
    # Skip our own imgflip replies to avoid an infinite meme loop.
    spongebob(target_room, @message['text']) unless @message['text'].include? 'http://i.imgflip.com'
  end

  # Renders the message as a "mocking Spongebob" meme via imgflip and posts it.
  def spongebob(target_room, text)
    text.slice!('spongebob')
    # AlTeRnAtInG cApS over every word character.
    text.gsub!(/\w/).with_index{|s, i| i.even? ? s.upcase : s.downcase}
    post = Net::HTTP.post_form(URI.parse('https://api.imgflip.com/caption_image'),template_id: 102156234, text0: '', text1: '', 'boxes[0][text]': '', 'boxes[1][text]': text,username: "imgflip_hubot", password: "imgflip_hubot")
    send_message(target_room, JSON.parse(post.body)["data"]["url"])
  end

  # Fetches a random dad joke and posts it to the room.
  def tell_a_joke(target_room)
    response = open('https://icanhazdadjoke.com/', 'Accept' => 'application/json').read
    joke = JSON.parse(response)['joke']
    send_message(target_room, joke)
  end

  # Posts a random gif matching the text; falls back to a '404' gif, then to
  # a plain-text notice.
  #
  # FIX: the original used two sibling bare `rescue` clauses on one `begin`;
  # sibling rescue clauses only guard the begin *body*, so an exception raised
  # inside the first rescue propagated and the '*no gif found*' fallback was
  # unreachable. The fallback now nests a second begin/rescue.
  def show_gif(target_room, text)
    text.slice!('gif')
    begin
      g = Giphy.random(text)
      send_message(target_room, g.image_url.to_s)
    rescue
      begin
        g = Giphy.random('404')
        send_message(target_room, g.image_url.to_s)
      rescue
        send_message(target_room, '*no gif found*')
      end
    end
  end

  # Posts the "but why" reaction gif.
  def but_why(target_room)
    send_message(target_room, 'https://media.giphy.com/media/1M9fmo1WAFVK0/giphy.gif')
  end

  # Posts a text message to a room through the Gitter REST API.
  def send_message(target_room, text)
    send_url = "https://api.gitter.im/v1/rooms/#{target_room}/chatMessages"
    uri = URI(send_url)
    req = Net::HTTP::Post.new(uri)
    req['Authorization'] = "Bearer #{@token}"
    req['Accept'] = 'application/json'
    req['Content-Type'] = 'application/json'
    req.set_form_data('text' => text)
    res = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) do |http|
      http.request(req)
    end
    case res
    when Net::HTTPSuccess, Net::HTTPRedirection
      # Message successfully sent
    else
      puts 'An HTTP error occured while trying to send a message'
      puts res.value
    end
  end

  # Enables/disables a helper service and announces the new state.
  def toggle_service(service, room, active)
    service.activate(room, active)
    send_message(room, "#{service.class.name} is #{active ? 'on' : 'off'}")
  end
end

GitterBot.new
mit
zrrrzzt/pagelt
index.js
568
'use strict' const validUrl = require('valid-url') const getPage = require('./lib/get-page') module.exports = (uri, callback) => { if (!uri) { return callback(new Error('Missing required param'), null) } if(!validUrl.isWebUri(uri)){ return callback(new Error('Invalid uri'), null) } const start = new Date().getTime() getPage(uri) .then(result => { const end = new Date().getTime() return callback(null, {start: start, end: end, ms: end - start, status: result.statusCode}) }) .catch(error => callback(error, null)) }
mit
cmattson/threetrack
lib/threetrack/version.rb
45
module Threetrack VERSION = "0.0.1pre" end
mit
dotKom/onlineweb4
apps/sso/migrations/0001_initial.py
2053
# -*- coding: utf-8 -*- # Generated by Django 1.9.12 on 2017-03-30 17:52 from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion import oauth2_provider.generators import oauth2_provider.validators class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Client', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('client_id', models.CharField(db_index=True, default=oauth2_provider.generators.generate_client_id, max_length=100, unique=True)), ('redirect_uris', models.TextField(blank=True, help_text='Allowed URIs list, space separated', validators=[oauth2_provider.validators.validate_uris])), ('client_type', models.CharField(choices=[('confidential', 'Confidential'), ('public', 'Public')], max_length=32)), ('authorization_grant_type', models.CharField(choices=[('authorization-code', 'Authorization code'), ('implicit', 'Implicit'), ('password', 'Resource owner password-based'), ('client-credentials', 'Client credentials')], max_length=32)), ('client_secret', models.CharField(blank=True, db_index=True, default=oauth2_provider.generators.generate_client_secret, max_length=255)), ('name', models.CharField(blank=True, max_length=255)), ('skip_authorization', models.BooleanField(default=False)), ('scopes', models.TextField(blank=True, verbose_name='Tilganger')), ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='sso_client', to=settings.AUTH_USER_MODEL)), ], options={ 'permissions': (('view_client', 'View Client'),), }, ), ]
mit
eventualbuddha/add-variable-declarations
test/fixtures/variable-reassignment/input.js
36
var a = 0; let b = 1; a = 2; b = 3;
mit
ShuaiPointChen/def
Code/ShareLib/Photon-Unity3D_v3-2-0-1_SDK/demo-litelobby-chatroom/Assets/Photon/PhotonClient.cs
10736
// -------------------------------------------------------------------------------------------------------------------- // <copyright company="Exit Games GmbH"> // Protocol & Photon Client Lib - Copyright (C) 2010 Exit Games GmbH // </copyright> // <summary> // Part of the "Demo Lobby" for Photon in Unity. // </summary> // <author>[email protected]</author> // -------------------------------------------------------------------------------------------------------------------- using System; using System.Collections; using System.Collections.Generic; using System.Text; using ExitGames.Client.Photon; using ExitGames.Client.Photon.Lite; using UnityEngine; /// <summary> /// This class encapsulates the Lite Application's workflow to keep a connection and /// dispatch operation-results and events into the game logic. /// This (more or less) resembles a game-loop and is a simple base for this project. /// The PhotonClient also keeps track of it's state. In this layer, we (currently) only care /// about the states: Disconnected, Connected, InRoom. More detail could be aquired by directly /// accessing the LitePeer.PeerState. /// </summary> /// <remarks> /// Having this autonomous "game-loop" for each client makes it easier to simulate multiple /// clients in the Editor or in the players. It's usually a good way to test out features /// that require multiple connections. /// </remarks> public class PhotonClient : MonoBehaviour, IPhotonPeerListener { /// <summary> /// The PhotonPeer or LitePeer (extends PhotonPeer) is the central class to communicate with the Photon Server. /// In this case, we use a LiteLobbyPeer and make it protected, so not everyone cann directly access it. /// It's an essential point that a PhotonPeer (or derived class) is not thread safe - it should only be used from a single thread context! /// </summary> protected LiteLobbyPeer Peer; /// <summary>All clients will connect to this server address. Set as: <hostname>:<ip>. 
Edit before building for iPhone!</summary> public string ServerAddress = "localhost:5055"; /// <summary>The name of the server application this client uses. These are defined in photonsocketserver.config.</summary> protected string ServerApplication = "LiteLobby"; /// <summary>used as send and dispatch interval. simple but ok for a chat.</summary> public int SendIntervalMs = 100; /// <summary>The next timestamp when a Service() call is due.</summary> private int NextSendTickCount = Environment.TickCount; // Gets the peer's "low level" state. This client also has it's own connection states. public PeerStateValue LitePeerState { get { return this.Peer.PeerState; } } /// <summary>The current state of this client.</summary> public ClientState State = ClientState.Disconnected; /// <summary>This client's actor number in a room. It's only valid per room.</summary> public int ActorNumber; // All debug output is collected and buffered, line by line. This could be removed later on. private StringBuilder DebugBuffer = new StringBuilder(); // Converts the DebugBuffer to a single string public string DebugOutput { get { return DebugBuffer.ToString(); } } /// <summary>If false, Debug.Log() won't be used but debug callbacks will still be cached.</summary> public bool DebugOutputToConsole = true; /// <summary>User-friendly description of the various conditions that lead to a disconnect. Shown while disconnected.</summary> public string OfflineReason = String.Empty; // A simpler set of states is enough for this level. The ChatPhotonClient has it's own set. public enum ClientState : byte { Disconnected, Connected, InRoom } /// <summary> /// Called by Unity on start. We create and connect a new LiteLobbyPeer. /// This makes the PhotonClient relatively autonomous. /// </summary> public virtual void Start() { this.Peer = new LiteLobbyPeer(this); this.Connect(); } /// <summary> /// Update is called once per frame by Unity. 
Every SendIntervalMs, we send a /// UDP package (if anything must be sent) and dispatch incoming events and operation-results. /// </summary> public virtual void Update() { if (Environment.TickCount > this.NextSendTickCount) { this.Peer.Service(); this.NextSendTickCount = Environment.TickCount + this.SendIntervalMs; } } /// <summary> /// Called by Unity when when the application closes. We use that to disconnect. /// Disconnect will immediately send a package to the server telling it that /// the connection is being closed. This way, the server is informed and this peer /// is not considered connected until a timeout happens. /// </summary> public virtual void OnApplicationQuit() { this.Peer.Disconnect(); } /// <summary> /// Aside from calling Peer.Connect() the OfflineReason is also reset. /// If Unity's policy request fails, we set the corresponding "friendly" message. /// </summary> /// <remarks> /// If Unity's policy request fails (for webplayer builds), OnStatusChanged() is called /// nearly immediately after Connect(). Check for StatusCode.SecurityExceptionOnConnect there. /// </remarks> internal virtual void Connect() { this.OfflineReason = String.Empty; // PhotonPeer.Connect() is described in the client reference doc: Photon-DotNet-Client-Documentation_v6-1-0.pdf this.Peer.Connect(this.ServerAddress, this.ServerApplication); } /// <summary> /// This method is from the IPhotonPeerListener interface and called by the library with /// information during development. /// </summary> /// <remarks>Described in the client reference doc: Photon-DotNet-Client-Documentation_v6-1-0.pdf.</remarks> /// <param name="level"></param> /// <param name="message"></param> public void DebugReturn(DebugLevel level, string message) { this.DebugReturn(message); } /// <summary> /// This will append the debug out to a buffer (for display on screen, if needed) and /// can log it out to the console. /// This method is also used by all classes of the Demo, so all debug out is available. 
/// </summary> /// <param name="message"></param> public void DebugReturn(string message) { this.DebugBuffer.AppendLine(message); if (this.DebugOutputToConsole) { Debug.Log(message); } } /// <summary> /// On this level, only Join and Leave are handled and set the corresponding state. /// This whole class is more or less just automating the Peer / connection and leaves anything /// else to classes that extend it. /// </summary> /// <remarks>Described in the client reference doc: Photon-DotNet-Client-Documentation_v6-1-0.pdf.</remarks> public virtual void OnOperationResponse(OperationResponse operationResponse) { this.DebugReturn(String.Format("OnOperationResponse: {0}", operationResponse.ToStringFull())); switch (operationResponse.OperationCode) { case (byte)LiteOpCode.Join: this.State = ClientState.InRoom; this.ActorNumber = (int)operationResponse[(byte)LiteOpKey.ActorNr]; break; case (byte)LiteOpCode.Leave: this.State = ClientState.Connected; break; } } /// <summary> /// This method is from the IPhotonPeerListener interface and on this level primarily handles error-states. /// </summary> /// <remarks> /// Error conditions that lead to a disconnect get two callbacks from the Photon client library: /// a) The error itself /// b) The following disconnect - due to the reason of a) /// /// This allows a client to just check ClientState.Disconnected and still reliably detect a disconnect state. /// </remarks> /// <param name="statusCode"></param> public virtual void OnStatusChanged(StatusCode statusCode) { this.DebugReturn(String.Format("OnStatusChanged: {0}", statusCode)); switch (statusCode) { case StatusCode.Connect: this.State = ClientState.Connected; break; case StatusCode.Disconnect: this.State = ClientState.Disconnected; this.ActorNumber = 0; break; case StatusCode.ExceptionOnConnect: this.OfflineReason = "Connection failed.\nIs the server online? 
Firewall open?"; break; case StatusCode.SecurityExceptionOnConnect: this.OfflineReason = "Security Exception on connect.\nMost likely, the policy request failed.\nIs Photon and the Policy App running?"; break; case StatusCode.Exception: this.OfflineReason = "Communication terminated by Exception.\nProbably the server shutdown locally.\nOr the network connection terminated."; break; case StatusCode.TimeoutDisconnect: this.OfflineReason = "Disconnect due to timeout.\nProbably the server shutdown locally.\nOr the network connection terminated."; break; case StatusCode.DisconnectByServer: this.OfflineReason = "Timeout Disconnect by server.\nThe server did not get responses in time."; break; case StatusCode.DisconnectByServerLogic: this.OfflineReason = "Disconnect by server.\nThe servers logic (application) disconnected this client for some reason."; break; case StatusCode.DisconnectByServerUserLimit: this.OfflineReason = "Server reached it's user limit.\nThe server is currently not accepting connections.\nThe license does not allow it."; break; default: this.DebugReturn("StatusCode not handled: " + statusCode); break; } } /// <summary> /// This "generic" PhotonClient does not handle any events but simply prints them out. /// The Game extends and overrides this method with something meaningful. /// </summary> /// <remarks>Described in the client reference doc: Photon-DotNet-Client-Documentation_v6-1-0.pdf.</remarks> /// <param name="photonEvent">The dispatched event.</param> public virtual void OnEvent(EventData photonEvent) { this.DebugReturn(String.Format("OnEvent: {0}", photonEvent.ToStringFull())); } }
mit
radjivC/graph-connected
proba.rb
1285
#!/usr/bin/env ruby # calculate the empirical probabilities.rb # # # Created by radjiv on 10/24/13. # #calculate all distance require 'logger' $logger = Logger.new('proba.log') def bubble_sort(list) return list if list.size <= 1 # already sorted swapped = true while swapped do swapped = false 0.upto(list.size-2) do |i| if list[i] > list[i+1] list[i], list[i+1] = list[i+1], list[i] # swap values swapped = true end end end list end tab = [2, 4, 5, 0, 2, 5, 4, 0, 4, 6, 7, 8, 0, 5, 3, 2, 3, 1, 1, 2] tab = bubble_sort(tab); currentNumber = 0 tabNumber = Array.new() tabFreq = Array.new() countFreq=0 tabPosition=0 currentNumber = tab[0] tabNumber[0]= tab[0] tab.each_with_index do |tab1,i| if(currentNumber == tab[i]) countFreq += 1 tabNumber[tabPosition]= currentNumber else tabFreq[tabPosition] = countFreq tabPosition += 1 countFreq = 1 currentNumber = tab[i] tabNumber[tabPosition]= currentNumber tabFreq[tabPosition] = countFreq end end tabFreq.each_with_index do |tab2,i| tabFreq[i] = tabFreq[i].to_f/tab.size.to_f end tabFreq.each_with_index do |tab2,i| $logger.debug("number = "+tabNumber[i].to_s+" freq = " + tabFreq[i].to_s) end $logger.close
mit
lettenj61/vuescale
vuescale-core/src/main/scala/vuescale/tags/VFragment.scala
527
package vuescale package tags import scala.scalajs.js import vuescale.facade.CreateElement import vuescale.facade.VNode /** DSL to write Vue.js VNode descriptors. The API design is heavily * inspired by Scalatags ones. */ sealed trait VFragment { /** Serialize this fragment into VNode descriptors */ def asDescriptor: js.Object // TODO: give descriptor a type /** Generate VNode by applying this fragment with * given [[CreateElement]] function. */ def renderWith(createElment: CreateElement): VNode }
mit
s-ueno/uENLab
SimpleApp/ExceptionPolicy.cs
804
using System; using System.Collections.Generic; using System.ComponentModel.Composition; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Windows; using uEN; using uEN.UI.DataBinding; namespace SimpleApp { [PartCreationPolicy(CreationPolicy.Shared)] [ExportMetadata(Repository.Priority, int.MaxValue)] [Export(typeof(IExceptionPolicy))] public class ExceptionPolicy : IExceptionPolicy { public void Do(object sender, Exception ex) { var appException = ex as BizApplicationException; if (appException != null) { MessageBox.Show(ex.Message, "サンプル", MessageBoxButton.OK, MessageBoxImage.Warning); return; } throw ex; } } }
mit
sievins/sarahs-footsteps
src/components/routes/Book/Book.js
290
import React from 'react' import cover from 'images/book-cover.png' import ImageAndTextTemplate from '../ImageAndTextTemplate' import Overview from './Overview' const Book = () => ( <ImageAndTextTemplate img={{ src: cover, alt: 'book-cover' }} Text={Overview} /> ) export default Book
mit
innogames/gitlabhq
spec/features/merge_request/user_assigns_themselves_spec.rb
1834
# frozen_string_literal: true require 'spec_helper' RSpec.describe 'Merge request > User assigns themselves' do let(:project) { create(:project, :public, :repository) } let(:user) { project.creator } let(:issue1) { create(:issue, project: project) } let(:issue2) { create(:issue, project: project) } let(:merge_request) { create(:merge_request, :simple, source_project: project, author: user, description: "fixes #{issue1.to_reference} and #{issue2.to_reference}") } context 'logged in as a member of the project' do before do sign_in(user) visit project_merge_request_path(project, merge_request) end it 'updates related issues', :js do click_link 'Assign yourself to these issues' expect(page).to have_content '2 issues have been assigned to you' end it 'updates updated_by', :js do expect do click_button 'assign yourself' expect(find('.assignee')).to have_content(user.name) wait_for_all_requests end.to change { merge_request.reload.updated_at } end it 'returns user to the merge request', :js do click_link 'Assign yourself to these issues' expect(page).to have_content merge_request.description end context 'when related issues are already assigned' do before do [issue1, issue2].each { |issue| issue.update!(assignees: [user]) } end it 'does not display if related issues are already assigned' do expect(page).not_to have_content 'Assign yourself' end end end context 'logged in as a non-member of the project' do before do sign_in(create(:user)) visit project_merge_request_path(project, merge_request) end it 'does not show assignment link' do expect(page).not_to have_content 'Assign yourself' end end end
mit
jeidee/goutils
aws/dynamo.go
3240
package aws import ( "errors" "strconv" "strings" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/credentials" "github.com/aws/aws-sdk-go/aws/session" "github.com/aws/aws-sdk-go/service/dynamodb" ) // DynamoDB 다이나모 DB 관련 기능 포함 type DynamoDB struct { Session *session.Session DB *dynamodb.DynamoDB } var dynamoDB *DynamoDB func GetDynamoDB(params ...string) (*DynamoDB, error) { if len(params) == 0 { if dynamoDB == nil { return nil, errors.New("DB is not initialized") } return dynamoDB, nil } region := "" credentialsFile := "" credentialsProfile := "" if len(params) >= 1 { region = params[0] } if len(params) >= 2 { credTokens := strings.Split(params[1], ":") if len(credTokens) == 2 { credentialsFile = credTokens[0] credentialsProfile = credTokens[1] } else { credentialsFile = credTokens[0] } } dynamoDB = &DynamoDB{} // Create a new session sess, err := func() (*session.Session, error) { if credentialsProfile != "" { return session.NewSession(&aws.Config{ Region: aws.String(region), Credentials: credentials.NewSharedCredentials(credentialsFile, credentialsProfile), }) } return session.NewSession(&aws.Config{ Region: aws.String(region), }) }() if err != nil { return nil, err } dynamoDB.Session = sess dynamoDB.DB = dynamodb.New(dynamoDB.Session) return dynamoDB, nil } // ExistsTable 함수는 테이블 존재 여부를 확인한다. 
func (o *DynamoDB) ExistsTable(table string) (bool, error) { descParams := &dynamodb.DescribeTableInput{ TableName: aws.String(table), } _, err := o.DB.DescribeTable(descParams) if err != nil { return false, err } return true, nil } func GetStringFromAttributeValue(o *dynamodb.AttributeValue) string { if o == nil || o.S == nil { return "" } return *o.S } func GetIntFromAttributeValue(o *dynamodb.AttributeValue) int { if o == nil || o.N == nil { return -1 } n, err := strconv.Atoi(*o.N) if err != nil { return -1 } return n } func GetInt64FromAttributeValue(o *dynamodb.AttributeValue) int64 { if o == nil || o.N == nil { return -1 } n, err := strconv.ParseInt(*o.N, 10, 0) if err != nil { return -1 } return n } func GetBoolFromAttributeValue(o *dynamodb.AttributeValue) bool { if o == nil || o.BOOL == nil { // 기본값은 false return false } return *o.BOOL } func GetAttributeValueFromString(value string) *dynamodb.AttributeValue { return &dynamodb.AttributeValue{S: aws.String(value)} } func GetAttributeValueUpdateFromString(value string) *dynamodb.AttributeValueUpdate { return &dynamodb.AttributeValueUpdate{ Action: aws.String("PUT"), Value: GetAttributeValueFromString(value), } } func GetAttributeValueUpdateFromInt(value int) *dynamodb.AttributeValueUpdate { return &dynamodb.AttributeValueUpdate{ Action: aws.String("PUT"), Value: GetAttributeValueFromInt(value), } } func GetAttributeValueFromInt(value int) *dynamodb.AttributeValue { return &dynamodb.AttributeValue{N: aws.String(strconv.Itoa(value))} } func GetAttributeValueFromInt64(value int64) *dynamodb.AttributeValue { return &dynamodb.AttributeValue{N: aws.String(strconv.FormatInt(value, 10))} }
mit
skybrud/Skybrud.Umbraco.SelfService
src/Skybrud.Umbraco.SelfService/Install/SelfServiceProperty.cs
714
using System; namespace Skybrud.Umbraco.SelfService.Install { internal class SelfServiceProperty { public Guid Guid { get; set; } public string Alias { get; set; } public string Name { get; set; } public string Description { get; set; } public SelfServiceProperty(string guid, string alias, string name) { Guid = Guid.Parse(guid); Alias = alias; Name = name; } public SelfServiceProperty(string guid, string alias, string name, string description) { Guid = Guid.Parse(guid); Alias = alias; Name = name; Description = description; } } }
mit
tvnweb/grandprix
assets/components/foundation-sites/js/foundation.sticky.js
14940
'use strict'; !function($) { /** * Sticky module. * @module foundation.sticky * @requires foundation.util.triggers * @requires foundation.util.mediaQuery */ class Sticky { /** * Creates a new instance of a sticky thing. * @class * @param {jQuery} element - jQuery object to make sticky. * @param {Object} options - options object passed when creating the element programmatically. */ constructor(element, options) { this.$element = element; this.options = $.extend({}, Sticky.defaults, this.$element.data(), options); this._init(); Foundation.registerPlugin(this, 'Sticky'); } /** * Initializes the sticky element by adding classes, getting/setting dimensions, breakpoints and attributes * @function * @private */ _init() { var $parent = this.$element.parent('[data-sticky-container]'), id = this.$element[0].id || Foundation.GetYoDigits(6, 'sticky'), _this = this; if (!$parent.length) { this.wasWrapped = true; } this.$container = $parent.length ? $parent : $(this.options.container).wrapInner(this.$element); this.$container.addClass(this.options.containerClass); this.$element.addClass(this.options.stickyClass) .attr({'data-resize': id}); this.scrollCount = this.options.checkEvery; this.isStuck = false; $(window).one('load.zf.sticky', function(){ //We calculate the container height to have correct values for anchor points offset calculation. _this.containerHeight = _this.$element.css("display") == "none" ? 0 : _this.$element[0].getBoundingClientRect().height; _this.$container.css('height', _this.containerHeight); _this.elemHeight = _this.containerHeight; if(_this.options.anchor !== ''){ _this.$anchor = $('#' + _this.options.anchor); }else{ _this._parsePoints(); } _this._setSizes(function(){ var scroll = window.pageYOffset; _this._calc(false, scroll); //Unstick the element will ensure that proper classes are set. if (!_this.isStuck) { _this._removeSticky((scroll >= _this.topPoint) ? 
false : true); } }); _this._events(id.split('-').reverse().join('-')); }); } /** * If using multiple elements as anchors, calculates the top and bottom pixel values the sticky thing should stick and unstick on. * @function * @private */ _parsePoints() { var top = this.options.topAnchor == "" ? 1 : this.options.topAnchor, btm = this.options.btmAnchor== "" ? document.documentElement.scrollHeight : this.options.btmAnchor, pts = [top, btm], breaks = {}; for (var i = 0, len = pts.length; i < len && pts[i]; i++) { var pt; if (typeof pts[i] === 'number') { pt = pts[i]; } else { var place = pts[i].split(':'), anchor = $(`#${place[0]}`); pt = anchor.offset().top; if (place[1] && place[1].toLowerCase() === 'bottom') { pt += anchor[0].getBoundingClientRect().height; } } breaks[i] = pt; } this.points = breaks; return; } /** * Adds event handlers for the scrolling element. * @private * @param {String} id - psuedo-random id for unique scroll event listener. */ _events(id) { var _this = this, scrollListener = this.scrollListener = `scroll.zf.${id}`; if (this.isOn) { return; } if (this.canStick) { this.isOn = true; $(window).off(scrollListener) .on(scrollListener, function(e) { if (_this.scrollCount === 0) { _this.scrollCount = _this.options.checkEvery; _this._setSizes(function() { _this._calc(false, window.pageYOffset); }); } else { _this.scrollCount--; _this._calc(false, window.pageYOffset); } }); } this.$element.off('resizeme.zf.trigger') .on('resizeme.zf.trigger', function(e, el) { _this._setSizes(function() { _this._calc(false); if (_this.canStick) { if (!_this.isOn) { _this._events(id); } } else if (_this.isOn) { _this._pauseListeners(scrollListener); } }); }); } /** * Removes event handlers for scroll and change events on anchor. 
* @fires Sticky#pause * @param {String} scrollListener - unique, namespaced scroll listener attached to `window` */ _pauseListeners(scrollListener) { this.isOn = false; $(window).off(scrollListener); /** * Fires when the plugin is paused due to resize event shrinking the view. * @event Sticky#pause * @private */ this.$element.trigger('pause.zf.sticky'); } /** * Called on every `scroll` event and on `_init` * fires functions based on booleans and cached values * @param {Boolean} checkSizes - true if plugin should recalculate sizes and breakpoints. * @param {Number} scroll - current scroll position passed from scroll event cb function. If not passed, defaults to `window.pageYOffset`. */ _calc(checkSizes, scroll) { if (checkSizes) { this._setSizes(); } if (!this.canStick) { if (this.isStuck) { this._removeSticky(true); } return false; } if (!scroll) { scroll = window.pageYOffset; } if (scroll >= this.topPoint) { if (scroll <= this.bottomPoint) { if (!this.isStuck) { this._setSticky(); } } else { if (this.isStuck) { this._removeSticky(false); } } } else { if (this.isStuck) { this._removeSticky(true); } } } /** * Causes the $element to become stuck. * Adds `position: fixed;`, and helper classes. * @fires Sticky#stuckto * @function * @private */ _setSticky() { var _this = this, stickTo = this.options.stickTo, mrgn = stickTo === 'top' ? 'marginTop' : 'marginBottom', notStuckTo = stickTo === 'top' ? 'bottom' : 'top', css = {}; css[mrgn] = `${this.options[mrgn]}em`; css[stickTo] = 0; css[notStuckTo] = 'auto'; this.isStuck = true; this.$element.removeClass(`is-anchored is-at-${notStuckTo}`) .addClass(`is-stuck is-at-${stickTo}`) .css(css) /** * Fires when the $element has become `position: fixed;` * Namespaced to `top` or `bottom`, e.g. 
`sticky.zf.stuckto:top` * @event Sticky#stuckto */ .trigger(`sticky.zf.stuckto:${stickTo}`); this.$element.on("transitionend webkitTransitionEnd oTransitionEnd otransitionend MSTransitionEnd", function() { _this._setSizes(); }); } /** * Causes the $element to become unstuck. * Removes `position: fixed;`, and helper classes. * Adds other helper classes. * @param {Boolean} isTop - tells the function if the $element should anchor to the top or bottom of its $anchor element. * @fires Sticky#unstuckfrom * @private */ _removeSticky(isTop) { var stickTo = this.options.stickTo, stickToTop = stickTo === 'top', css = {}, anchorPt = (this.points ? this.points[1] - this.points[0] : this.anchorHeight) - this.elemHeight, mrgn = stickToTop ? 'marginTop' : 'marginBottom', notStuckTo = stickToTop ? 'bottom' : 'top', topOrBottom = isTop ? 'top' : 'bottom'; css[mrgn] = 0; css['bottom'] = 'auto'; if(isTop) { css['top'] = 0; } else { css['top'] = anchorPt; } this.isStuck = false; this.$element.removeClass(`is-stuck is-at-${stickTo}`) .addClass(`is-anchored is-at-${topOrBottom}`) .css(css) /** * Fires when the $element has become anchored. * Namespaced to `top` or `bottom`, e.g. `sticky.zf.unstuckfrom:bottom` * @event Sticky#unstuckfrom */ .trigger(`sticky.zf.unstuckfrom:${topOrBottom}`); } /** * Sets the $element and $container sizes for plugin. * Calls `_setBreakPoints`. * @param {Function} cb - optional callback function to fire on completion of `_setBreakPoints`. 
* @private */ _setSizes(cb) { this.canStick = Foundation.MediaQuery.is(this.options.stickyOn); if (!this.canStick) { if (cb && typeof cb === 'function') { cb(); } } var _this = this, newElemWidth = this.$container[0].getBoundingClientRect().width, comp = window.getComputedStyle(this.$container[0]), pdngl = parseInt(comp['padding-left'], 10), pdngr = parseInt(comp['padding-right'], 10); if (this.$anchor && this.$anchor.length) { this.anchorHeight = this.$anchor[0].getBoundingClientRect().height; } else { this._parsePoints(); } this.$element.css({ 'max-width': `${newElemWidth - pdngl - pdngr}px` }); var newContainerHeight = this.$element[0].getBoundingClientRect().height || this.containerHeight; if (this.$element.css("display") == "none") { newContainerHeight = 0; } this.containerHeight = newContainerHeight; this.$container.css({ height: newContainerHeight }); this.elemHeight = newContainerHeight; if (!this.isStuck) { if (this.$element.hasClass('is-at-bottom')) { var anchorPt = (this.points ? this.points[1] - this.$container.offset().top : this.anchorHeight) - this.elemHeight; this.$element.css('top', anchorPt); } } this._setBreakPoints(newContainerHeight, function() { if (cb && typeof cb === 'function') { cb(); } }); } /** * Sets the upper and lower breakpoints for the element to become sticky/unsticky. * @param {Number} elemHeight - px value for sticky.$element height, calculated by `_setSizes`. * @param {Function} cb - optional callback function to be called on completion. * @private */ _setBreakPoints(elemHeight, cb) { if (!this.canStick) { if (cb && typeof cb === 'function') { cb(); } else { return false; } } var mTop = emCalc(this.options.marginTop), mBtm = emCalc(this.options.marginBottom), topPoint = this.points ? this.points[0] : this.$anchor.offset().top, bottomPoint = this.points ? 
this.points[1] : topPoint + this.anchorHeight, // topPoint = this.$anchor.offset().top || this.points[0], // bottomPoint = topPoint + this.anchorHeight || this.points[1], winHeight = window.innerHeight; if (this.options.stickTo === 'top') { topPoint -= mTop; bottomPoint -= (elemHeight + mTop); } else if (this.options.stickTo === 'bottom') { topPoint -= (winHeight - (elemHeight + mBtm)); bottomPoint -= (winHeight - mBtm); } else { //this would be the stickTo: both option... tricky } this.topPoint = topPoint; this.bottomPoint = bottomPoint; if (cb && typeof cb === 'function') { cb(); } } /** * Destroys the current sticky element. * Resets the element to the top position first. * Removes event listeners, JS-added css properties and classes, and unwraps the $element if the JS added the $container. * @function */ destroy() { this._removeSticky(true); this.$element.removeClass(`${this.options.stickyClass} is-anchored is-at-top`) .css({ height: '', top: '', bottom: '', 'max-width': '' }) .off('resizeme.zf.trigger'); if (this.$anchor && this.$anchor.length) { this.$anchor.off('change.zf.sticky'); } $(window).off(this.scrollListener); if (this.wasWrapped) { this.$element.unwrap(); } else { this.$container.removeClass(this.options.containerClass) .css({ height: '' }); } Foundation.unregisterPlugin(this); } } Sticky.defaults = { /** * Customizable container template. Add your own classes for styling and sizing. * @option * @example '&lt;div data-sticky-container class="small-6 columns"&gt;&lt;/div&gt;' */ container: '<div data-sticky-container></div>', /** * Location in the view the element sticks to. * @option * @example 'top' */ stickTo: 'top', /** * If anchored to a single element, the id of that element. * @option * @example 'exampleId' */ anchor: '', /** * If using more than one element as anchor points, the id of the top anchor. * @option * @example 'exampleId:top' */ topAnchor: '', /** * If using more than one element as anchor points, the id of the bottom anchor. 
* @option * @example 'exampleId:bottom' */ btmAnchor: '', /** * Margin, in `em`'s to apply to the top of the element when it becomes sticky. * @option * @example 1 */ marginTop: 1, /** * Margin, in `em`'s to apply to the bottom of the element when it becomes sticky. * @option * @example 1 */ marginBottom: 1, /** * Breakpoint string that is the minimum screen size an element should become sticky. * @option * @example 'medium' */ stickyOn: 'medium', /** * Class applied to sticky element, and removed on destruction. Foundation defaults to `sticky`. * @option * @example 'sticky' */ stickyClass: 'sticky', /** * Class applied to sticky container. Foundation defaults to `sticky-container`. * @option * @example 'sticky-container' */ containerClass: 'sticky-container', /** * Number of scroll events between the plugin's recalculating sticky points. Setting it to `0` will cause it to recalc every scroll event, setting it to `-1` will prevent recalc on scroll. * @option * @example 50 */ checkEvery: -1 }; /** * Helper function to calculate em values * @param Number {em} - number of em's to calculate into pixels */ function emCalc(em) { return parseInt(window.getComputedStyle(document.body, null).fontSize, 10) * em; } // Window exports Foundation.plugin(Sticky, 'Sticky'); }(jQuery);
mit
spatie/laravel-uptime-monitor
tests/Integration/Events/CertificateCheckFailedTest.php
902
<?php namespace Spatie\UptimeMonitor\Test\Integration\Events; use Illuminate\Support\Facades\Event; use Spatie\UptimeMonitor\Events\CertificateCheckFailed; use Spatie\UptimeMonitor\Models\Monitor; use Spatie\UptimeMonitor\Test\TestCase; class CertificateCheckFailedTest extends TestCase { /** @var \Spatie\UptimeMonitor\Models\Monitor */ protected $monitor; public function setUp(): void { parent::setUp(); Event::fake(); $this->monitor = Monitor::factory()->create(['certificate_check_enabled' => true]); } /** @test */ public function the_invalid_certificate_found_event_will_be_fired_when_an_invalid_certificate_is_found() { $this->monitor->checkCertificate(); Event::assertDispatched(CertificateCheckFailed::class, function ($event) { return $event->monitor->id === $this->monitor->id; }); } }
mit
eventualbuddha/decaffeinate-parser
script/check-file.ts
1233
#!/usr/bin/env node -r ts-node/register -r babel-register import { readdirSync, readFileSync, statSync } from 'fs'; import { basename, extname, join } from 'path'; import { parse } from '../src/parser'; for (let i = 2; i < process.argv.length; i++) { processPath(process.argv[i]); } function processPath(path: string): void { const stat = statSync(path); if (stat.isDirectory()) { processDirectory(path); } else if (stat.isFile() && isCoffeeScriptFile(path)) { processFile(path); } } function processFile(path: string): void { const content = readFileSync(path, { encoding: 'utf8' }); try { parse(content); console.log(`OK ${path}`); } catch (ex) { console.log(`NOT OK ${path}`); console.log(` ${ex.message}`); console.log( ex.stack .split('\n') .map((line: string) => ` ${line}`) .join('\n') ); } } function processDirectory(path: string): void { readdirSync(path).forEach(child => { if (child[0] === '.' || child === 'node_modules') { return; } processPath(join(path, child)); }); } function isCoffeeScriptFile(path: string): boolean { return extname(path) === '.coffee' && basename(path, '.coffee').length > 0; }
mit
NikolaLyutsov/TelerikAcademy
Homeworks/C#1/01. Introduction-to-Programming/15. Age/Properties/AssemblyInfo.cs
1382
using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyTitle("Age")] [assembly: AssemblyDescription("")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("Age")] [assembly: AssemblyCopyright("Copyright © 2016")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] // Setting ComVisible to false makes the types in this assembly not visible // to COM components. If you need to access a type in this assembly from // COM, set the ComVisible attribute to true on that type. [assembly: ComVisible(false)] // The following GUID is for the ID of the typelib if this project is exposed to COM [assembly: Guid("3151818a-9ced-4e0e-9782-5278c7be8907")] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyVersion("1.0.0.0")] [assembly: AssemblyFileVersion("1.0.0.0")]
mit
yogeshsaroya/new-cdnjs
ajax/libs/rxjs/2.3.25/rx.all.compat.min.js
131
version https://git-lfs.github.com/spec/v1 oid sha256:e843b3ea9100d7ba69449785fa689884ec2b29f3aefcc531677ca2b95110fefb size 104789
mit
fc-io/react-tape-redux
app/containers/root.dev.js
340
import React from 'react' import {Router} from 'react-router/es6' import {Provider} from 'react-redux' import routes from '../routes' import DevTools from '../dev_tools' export default ({store, history}) => <Provider store={store}> <div> <Router history={history} routes={routes} /> <DevTools /> </div> </Provider>
mit
tourze/html
src/Feature/StandardEventAttributes.php
453
<?php namespace tourze\Html\Feature; /** * 标准 Event 属性 * * @package tourze\Html\Feature */ trait StandardEventAttributes { /** * 读取指定属性值 * * @param $name * * @return string|array */ protected function getAttribute($name) { } /** * 设置属性值 * * @param $name * @param $value */ protected function setAttribute($name, $value) { } }
mit
MMF-FE/vue-svgicon
demo/taro-demo/src/app.config.ts
241
export default { pages: ['pages/index/index'], window: { backgroundTextStyle: 'light', navigationBarBackgroundColor: '#fff', navigationBarTitleText: 'WeChat', navigationBarTextStyle: 'black', }, }
mit
DrRaider/Unishare
node_modules/couch-db/test/exec.js
398
var Executor = require('../lib/exec'), assert = require('chai').assert; describe('executor', function() { it('add properties', function() { var exec = new Executor('a_B', 'bcd_aff'); assert(typeof exec.aB == 'function' && typeof exec.bcdAff == 'function'); exec = new Executor(['a__b', 'b_c_d']); assert(typeof exec.aB == 'function' && typeof exec.bCD == 'function'); }); });
mit
justphil/angular-es6-components-seed
gulp/tasks/inject-reload.js
255
var gulp = require('gulp'), gulpSequence = require('gulp-sequence'), browserSync = require('browser-sync'); gulp.task('inject:reload', function(cb) { gulpSequence('inject', function() { browserSync.reload(); cb(); }); });
mit
ALEXIUMCOIN/alexium
src/txdb-leveldb.cpp
20632
// Copyright (c) 2009-2010 Satoshi Nakamoto // Copyright (c) 2009-2012 The Bitcoin developers // Distributed under the MIT/X11 software license, see the accompanying // file license.txt or http://www.opensource.org/licenses/mit-license.php. #include <map> #include <boost/version.hpp> #include <boost/filesystem.hpp> #include <boost/filesystem/fstream.hpp> #include <leveldb/env.h> #include <leveldb/cache.h> #include <leveldb/filter_policy.h> #include <memenv/memenv.h> #include "kernel.h" #include "checkpoints.h" #include "txdb.h" #include "util.h" #include "main.h" #include "chainparams.h" using namespace std; using namespace boost; leveldb::DB *txdb; // global pointer for LevelDB object instance static leveldb::Options GetOptions() { leveldb::Options options; int nCacheSizeMB = GetArg("-dbcache", 25); options.block_cache = leveldb::NewLRUCache(nCacheSizeMB * 1048576); options.filter_policy = leveldb::NewBloomFilterPolicy(10); return options; } static void init_blockindex(leveldb::Options& options, bool fRemoveOld = false, bool fCreateBootstrap = false) { // First time init. 
filesystem::path directory = GetDataDir() / "txleveldb"; if (fRemoveOld) { filesystem::remove_all(directory); // remove directory unsigned int nFile = 1; filesystem::path bootstrap = GetDataDir() / "bootstrap.dat"; while (true) { filesystem::path strBlockFile = GetDataDir() / strprintf("AUM%04u.dat", nFile); // Break if no such file if( !filesystem::exists( strBlockFile ) ) break; if (fCreateBootstrap && nFile == 1 && !filesystem::exists(bootstrap)) { filesystem::rename(strBlockFile, bootstrap); } else { filesystem::remove(strBlockFile); } nFile++; } } filesystem::create_directory(directory); LogPrintf("Opening LevelDB in %s\n", directory.string()); leveldb::Status status = leveldb::DB::Open(options, directory.string(), &txdb); if (!status.ok()) { throw runtime_error(strprintf("init_blockindex(): error opening database environment %s", status.ToString())); } } // CDB subclasses are created and destroyed VERY OFTEN. That's why // we shouldn't treat this as a free operations. CTxDB::CTxDB(const char* pszMode) { assert(pszMode); activeBatch = NULL; fReadOnly = (!strchr(pszMode, '+') && !strchr(pszMode, 'w')); if (txdb) { pdb = txdb; return; } bool fCreate = strchr(pszMode, 'c'); options = GetOptions(); options.create_if_missing = fCreate; options.filter_policy = leveldb::NewBloomFilterPolicy(10); init_blockindex(options); // Init directory pdb = txdb; if (Exists(string("version"))) { ReadVersion(nVersion); LogPrintf("Transaction index version is %d\n", nVersion); if (nVersion < DATABASE_VERSION) { LogPrintf("Required index version is %d, removing old database\n", DATABASE_VERSION); // Leveldb instance destruction delete txdb; txdb = pdb = NULL; delete activeBatch; activeBatch = NULL; init_blockindex(options, true, true); // Remove directory and create new database pdb = txdb; bool fTmp = fReadOnly; fReadOnly = false; WriteVersion(DATABASE_VERSION); // Save transaction index version fReadOnly = fTmp; } } else if (fCreate) { bool fTmp = fReadOnly; fReadOnly = false; 
WriteVersion(DATABASE_VERSION); fReadOnly = fTmp; } LogPrintf("Opened LevelDB successfully\n"); } void CTxDB::Close() { delete txdb; txdb = pdb = NULL; delete options.filter_policy; options.filter_policy = NULL; delete options.block_cache; options.block_cache = NULL; delete activeBatch; activeBatch = NULL; } bool CTxDB::TxnBegin() { assert(!activeBatch); activeBatch = new leveldb::WriteBatch(); return true; } bool CTxDB::TxnCommit() { assert(activeBatch); leveldb::Status status = pdb->Write(leveldb::WriteOptions(), activeBatch); delete activeBatch; activeBatch = NULL; if (!status.ok()) { LogPrintf("LevelDB batch commit failure: %s\n", status.ToString()); return false; } return true; } class CBatchScanner : public leveldb::WriteBatch::Handler { public: std::string needle; bool *deleted; std::string *foundValue; bool foundEntry; CBatchScanner() : foundEntry(false) {} virtual void Put(const leveldb::Slice& key, const leveldb::Slice& value) { if (key.ToString() == needle) { foundEntry = true; *deleted = false; *foundValue = value.ToString(); } } virtual void Delete(const leveldb::Slice& key) { if (key.ToString() == needle) { foundEntry = true; *deleted = true; } } }; // When performing a read, if we have an active batch we need to check it first // before reading from the database, as the rest of the code assumes that once // a database transaction begins reads are consistent with it. It would be good // to change that assumption in future and avoid the performance hit, though in // practice it does not appear to be large. 
bool CTxDB::ScanBatch(const CDataStream &key, string *value, bool *deleted) const { assert(activeBatch); *deleted = false; CBatchScanner scanner; scanner.needle = key.str(); scanner.deleted = deleted; scanner.foundValue = value; leveldb::Status status = activeBatch->Iterate(&scanner); if (!status.ok()) { throw runtime_error(status.ToString()); } return scanner.foundEntry; } bool CTxDB::ReadTxIndex(uint256 hash, CTxIndex& txindex) { txindex.SetNull(); return Read(make_pair(string("tx"), hash), txindex); } bool CTxDB::UpdateTxIndex(uint256 hash, const CTxIndex& txindex) { return Write(make_pair(string("tx"), hash), txindex); } bool CTxDB::AddTxIndex(const CTransaction& tx, const CDiskTxPos& pos, int nHeight) { // Add to tx index uint256 hash = tx.GetHash(); CTxIndex txindex(pos, tx.vout.size()); return Write(make_pair(string("tx"), hash), txindex); } bool CTxDB::EraseTxIndex(const CTransaction& tx) { uint256 hash = tx.GetHash(); return Erase(make_pair(string("tx"), hash)); } bool CTxDB::ContainsTx(uint256 hash) { return Exists(make_pair(string("tx"), hash)); } bool CTxDB::ReadDiskTx(uint256 hash, CTransaction& tx, CTxIndex& txindex) { tx.SetNull(); if (!ReadTxIndex(hash, txindex)) return false; return (tx.ReadFromDisk(txindex.pos)); } bool CTxDB::ReadDiskTx(uint256 hash, CTransaction& tx) { CTxIndex txindex; return ReadDiskTx(hash, tx, txindex); } bool CTxDB::ReadDiskTx(COutPoint outpoint, CTransaction& tx, CTxIndex& txindex) { return ReadDiskTx(outpoint.hash, tx, txindex); } bool CTxDB::ReadDiskTx(COutPoint outpoint, CTransaction& tx) { CTxIndex txindex; return ReadDiskTx(outpoint.hash, tx, txindex); } bool CTxDB::WriteBlockIndex(const CDiskBlockIndex& blockindex) { return Write(make_pair(string("blockindex"), blockindex.GetBlockHash()), blockindex); } bool CTxDB::ReadHashBestChain(uint256& hashBestChain) { return Read(string("hashBestChain"), hashBestChain); } bool CTxDB::WriteHashBestChain(uint256 hashBestChain) { return Write(string("hashBestChain"), 
hashBestChain); } bool CTxDB::ReadBestInvalidTrust(CBigNum& bnBestInvalidTrust) { return Read(string("bnBestInvalidTrust"), bnBestInvalidTrust); } bool CTxDB::WriteBestInvalidTrust(CBigNum bnBestInvalidTrust) { return Write(string("bnBestInvalidTrust"), bnBestInvalidTrust); } bool CTxDB::ReadSyncCheckpoint(uint256& hashCheckpoint) { return Read(string("hashSyncCheckpoint"), hashCheckpoint); } bool CTxDB::WriteSyncCheckpoint(uint256 hashCheckpoint) { return Write(string("hashSyncCheckpoint"), hashCheckpoint); } bool CTxDB::ReadCheckpointPubKey(string& strPubKey) { return Read(string("strCheckpointPubKey"), strPubKey); } bool CTxDB::WriteCheckpointPubKey(const string& strPubKey) { return Write(string("strCheckpointPubKey"), strPubKey); } static CBlockIndex *InsertBlockIndex(uint256 hash) { if (hash == 0) return NULL; // Return existing map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hash); if (mi != mapBlockIndex.end()) return (*mi).second; // Create new CBlockIndex* pindexNew = new CBlockIndex(); if (!pindexNew) throw runtime_error("LoadBlockIndex() : new CBlockIndex failed"); mi = mapBlockIndex.insert(make_pair(hash, pindexNew)).first; pindexNew->phashBlock = &((*mi).first); return pindexNew; } bool CTxDB::LoadBlockIndex() { if (mapBlockIndex.size() > 0) { // Already loaded once in this session. It can happen during migration // from BDB. return true; } // The block index is an in-memory structure that maps hashes to on-disk // locations where the contents of the block can be found. Here, we scan it // out of the DB and into mapBlockIndex. leveldb::Iterator *iterator = pdb->NewIterator(leveldb::ReadOptions()); // Seek to start key. CDataStream ssStartKey(SER_DISK, CLIENT_VERSION); ssStartKey << make_pair(string("blockindex"), uint256(0)); iterator->Seek(ssStartKey.str()); // Now read each entry. while (iterator->Valid()) { boost::this_thread::interruption_point(); // Unpack keys and values. 
CDataStream ssKey(SER_DISK, CLIENT_VERSION); ssKey.write(iterator->key().data(), iterator->key().size()); CDataStream ssValue(SER_DISK, CLIENT_VERSION); ssValue.write(iterator->value().data(), iterator->value().size()); string strType; ssKey >> strType; // Did we reach the end of the data to read? if (strType != "blockindex") break; CDiskBlockIndex diskindex; ssValue >> diskindex; uint256 blockHash = diskindex.GetBlockHash(); // Construct block index object CBlockIndex* pindexNew = InsertBlockIndex(blockHash); pindexNew->pprev = InsertBlockIndex(diskindex.hashPrev); pindexNew->pnext = InsertBlockIndex(diskindex.hashNext); pindexNew->nFile = diskindex.nFile; pindexNew->nBlockPos = diskindex.nBlockPos; pindexNew->nHeight = diskindex.nHeight; pindexNew->nMint = diskindex.nMint; pindexNew->nMoneySupply = diskindex.nMoneySupply; pindexNew->nFlags = diskindex.nFlags; pindexNew->nStakeModifier = diskindex.nStakeModifier; pindexNew->prevoutStake = diskindex.prevoutStake; pindexNew->nStakeTime = diskindex.nStakeTime; pindexNew->hashProof = diskindex.hashProof; pindexNew->nVersion = diskindex.nVersion; pindexNew->hashMerkleRoot = diskindex.hashMerkleRoot; pindexNew->nTime = diskindex.nTime; pindexNew->nBits = diskindex.nBits; pindexNew->nNonce = diskindex.nNonce; // Watch for genesis block if (pindexGenesisBlock == NULL && blockHash == Params().HashGenesisBlock()) pindexGenesisBlock = pindexNew; if (!pindexNew->CheckIndex()) { delete iterator; return error("LoadBlockIndex() : CheckIndex failed at %d", pindexNew->nHeight); } // NovaCoin: build setStakeSeen if (pindexNew->IsProofOfStake()) setStakeSeen.insert(make_pair(pindexNew->prevoutStake, pindexNew->nStakeTime)); iterator->Next(); } delete iterator; boost::this_thread::interruption_point(); // Calculate nChainTrust vector<pair<int, CBlockIndex*> > vSortedByHeight; vSortedByHeight.reserve(mapBlockIndex.size()); BOOST_FOREACH(const PAIRTYPE(uint256, CBlockIndex*)& item, mapBlockIndex) { CBlockIndex* pindex = item.second; 
vSortedByHeight.push_back(make_pair(pindex->nHeight, pindex)); } sort(vSortedByHeight.begin(), vSortedByHeight.end()); BOOST_FOREACH(const PAIRTYPE(int, CBlockIndex*)& item, vSortedByHeight) { CBlockIndex* pindex = item.second; pindex->nChainTrust = (pindex->pprev ? pindex->pprev->nChainTrust : 0) + pindex->GetBlockTrust(); } // Load hashBestChain pointer to end of best chain if (!ReadHashBestChain(hashBestChain)) { if (pindexGenesisBlock == NULL) return true; return error("CTxDB::LoadBlockIndex() : hashBestChain not loaded"); } if (!mapBlockIndex.count(hashBestChain)) return error("CTxDB::LoadBlockIndex() : hashBestChain not found in the block index"); pindexBest = mapBlockIndex[hashBestChain]; nBestHeight = pindexBest->nHeight; nBestChainTrust = pindexBest->nChainTrust; LogPrintf("LoadBlockIndex(): hashBestChain=%s height=%d trust=%s date=%s\n", hashBestChain.ToString(), nBestHeight, CBigNum(nBestChainTrust).ToString(), DateTimeStrFormat("%x %H:%M:%S", pindexBest->GetBlockTime())); // NovaCoin: load hashSyncCheckpoint if (!ReadSyncCheckpoint(Checkpoints::hashSyncCheckpoint)) return error("CTxDB::LoadBlockIndex() : hashSyncCheckpoint not loaded"); LogPrintf("LoadBlockIndex(): synchronized checkpoint %s\n", Checkpoints::hashSyncCheckpoint.ToString()); // Load bnBestInvalidTrust, OK if it doesn't exist CBigNum bnBestInvalidTrust; ReadBestInvalidTrust(bnBestInvalidTrust); nBestInvalidTrust = bnBestInvalidTrust.getuint256(); // Verify blocks in the best chain int nCheckLevel = GetArg("-checklevel", 1); int nCheckDepth = GetArg( "-checkblocks", 500); if (nCheckDepth == 0) nCheckDepth = 1000000000; // suffices until the year 19000 if (nCheckDepth > nBestHeight) nCheckDepth = nBestHeight; LogPrintf("Verifying last %i blocks at level %i\n", nCheckDepth, nCheckLevel); CBlockIndex* pindexFork = NULL; map<pair<unsigned int, unsigned int>, CBlockIndex*> mapBlockPos; for (CBlockIndex* pindex = pindexBest; pindex && pindex->pprev; pindex = pindex->pprev) { 
boost::this_thread::interruption_point(); if (pindex->nHeight < nBestHeight-nCheckDepth) break; CBlock block; if (!block.ReadFromDisk(pindex)) return error("LoadBlockIndex() : block.ReadFromDisk failed"); // check level 1: verify block validity // check level 7: verify block signature too if (nCheckLevel>0 && !block.CheckBlock(true, true, (nCheckLevel>6))) { LogPrintf("LoadBlockIndex() : *** found bad block at %d, hash=%s\n", pindex->nHeight, pindex->GetBlockHash().ToString()); pindexFork = pindex->pprev; } // check level 2: verify transaction index validity if (nCheckLevel>1) { pair<unsigned int, unsigned int> pos = make_pair(pindex->nFile, pindex->nBlockPos); mapBlockPos[pos] = pindex; BOOST_FOREACH(const CTransaction &tx, block.vtx) { uint256 hashTx = tx.GetHash(); CTxIndex txindex; if (ReadTxIndex(hashTx, txindex)) { // check level 3: checker transaction hashes if (nCheckLevel>2 || pindex->nFile != txindex.pos.nFile || pindex->nBlockPos != txindex.pos.nBlockPos) { // either an error or a duplicate transaction CTransaction txFound; if (!txFound.ReadFromDisk(txindex.pos)) { LogPrintf("LoadBlockIndex() : *** cannot read mislocated transaction %s\n", hashTx.ToString()); pindexFork = pindex->pprev; } else if (txFound.GetHash() != hashTx) // not a duplicate tx { LogPrintf("LoadBlockIndex(): *** invalid tx position for %s\n", hashTx.ToString()); pindexFork = pindex->pprev; } } // check level 4: check whether spent txouts were spent within the main chain unsigned int nOutput = 0; if (nCheckLevel>3) { BOOST_FOREACH(const CDiskTxPos &txpos, txindex.vSpent) { if (!txpos.IsNull()) { pair<unsigned int, unsigned int> posFind = make_pair(txpos.nFile, txpos.nBlockPos); if (!mapBlockPos.count(posFind)) { LogPrintf("LoadBlockIndex(): *** found bad spend at %d, hashBlock=%s, hashTx=%s\n", pindex->nHeight, pindex->GetBlockHash().ToString(), hashTx.ToString()); pindexFork = pindex->pprev; } // check level 6: check whether spent txouts were spent by a valid transaction that consume 
them if (nCheckLevel>5) { CTransaction txSpend; if (!txSpend.ReadFromDisk(txpos)) { LogPrintf("LoadBlockIndex(): *** cannot read spending transaction of %s:%i from disk\n", hashTx.ToString(), nOutput); pindexFork = pindex->pprev; } else if (!txSpend.CheckTransaction()) { LogPrintf("LoadBlockIndex(): *** spending transaction of %s:%i is invalid\n", hashTx.ToString(), nOutput); pindexFork = pindex->pprev; } else { bool fFound = false; BOOST_FOREACH(const CTxIn &txin, txSpend.vin) if (txin.prevout.hash == hashTx && txin.prevout.n == nOutput) fFound = true; if (!fFound) { LogPrintf("LoadBlockIndex(): *** spending transaction of %s:%i does not spend it\n", hashTx.ToString(), nOutput); pindexFork = pindex->pprev; } } } } nOutput++; } } } // check level 5: check whether all prevouts are marked spent if (nCheckLevel>4) { BOOST_FOREACH(const CTxIn &txin, tx.vin) { CTxIndex txindex; if (ReadTxIndex(txin.prevout.hash, txindex)) if (txindex.vSpent.size()-1 < txin.prevout.n || txindex.vSpent[txin.prevout.n].IsNull()) { LogPrintf("LoadBlockIndex(): *** found unspent prevout %s:%i in %s\n", txin.prevout.hash.ToString(), txin.prevout.n, hashTx.ToString()); pindexFork = pindex->pprev; } } } } } } if (pindexFork) { boost::this_thread::interruption_point(); // Reorg back to the fork LogPrintf("LoadBlockIndex() : *** moving best chain pointer back to block %d\n", pindexFork->nHeight); CBlock block; if (!block.ReadFromDisk(pindexFork)) return error("LoadBlockIndex() : block.ReadFromDisk failed"); CTxDB txdb; block.SetBestChain(txdb, pindexFork); } return true; }
mit
raulmedinacampos/sicipac
application/models/Leyendas_md.php
1892
<?php class Leyendas_md extends CI_Model { const tabla="LEYENDAS"; function __construct() { // Call the Model constructor parent::__construct(); } function GetAll() { $query = $this->db->get(self::tabla); return $query->result(); } function GetAllAdmin() { $this->db->where('ACTIVO != ', 'E'); $query = $this->db->get(self::tabla); return $query->result(); } function GetById($id) { $this->db->where(array('IDLEYENDA'=>$id)); $query = $this->db->get(self::tabla); return $query->result_array(); } function InsertRecord($data) { $this->db->set('CLAVE', $data[0]); $this->db->set('SECCION', $data[1]); $this->db->set('LEYENDA', $data[2]); $this->db->set('FECHAINSERCION', "to_date('$data[3]', 'RRRR-MM-DD')",FALSE); $this->db->set('USUARIOINSERTO', $data[4]); $this->db->set('ACTIVO', $data[5]); $this->db->insert(self::tabla,$this); //return $this->db->insert_id(); return true; } function UpdateRecord($data,$id) { $this->db->set('CLAVE', $data[0]); $this->db->set('SECCION', $data[1]); $this->db->set('LEYENDA', $data[2]); $this->db->set('FECHAMODIFICACION', "to_date('$data[3]', 'RRRR-MM-DD')",FALSE); $this->db->set('USUARIOMODIFICO', $data[4]); $this->db->set('ACTIVO', $data[5]); $this->db->update(self::tabla, $this, array('IDLEYENDA' => $id)); return $id; } function Disable($id) { $this->ACTIVO = 'N'; $this->db->update(self::tabla, $this, array('IDLEYENDA' => $id)); } function Enable($id) { $this->ACTIVO = 'S'; $this->db->update(self::tabla, $this, array('IDLEYENDA' => $id)); } function Delete($id) { $this->ACTIVO = 'E'; $this->db->update(self::tabla, $this, array('IDLEYENDA' => $id)); } }
mit
Luukschoen/ngx-frozen
src/common/shape.helper.ts
904
/** * Generates a rounded rectanglar path * * @export * @param {*} x, y, w, h, r, tl, tr, bl, br * @returns {string} */ export function roundedRect(x, y, w, h, r, [tl, tr, bl, br]: boolean[]) { let retval = ''; w = Math.floor(w); h = Math.floor(h); w = w === 0 ? 1 : w; h = h === 0 ? 1 : h; retval = `M${[x + r, y]}`; retval += `h${w - 2 * r}`; if (tr) { retval += `a${[r, r]} 0 0 1 ${[r, r]}`; } else { retval += `h${r}v${r}`; } retval += `v${h - 2 * r}`; if (br) { retval += `a${[r, r]} 0 0 1 ${[-r, r]}`; } else { retval += `v${r}h${-r}`; } retval += `h${2 * r - w}`; if (bl) { retval += `a${[r, r]} 0 0 1 ${[-r, -r]}`; } else { retval += `h${-r}v${-r}`; } retval += `v${2 * r - h}`; if (tl) { retval += `a${[r, r]} 0 0 1 ${[r, -r]}`; } else { retval += `v${-r}h${r}`; } retval += `z`; return retval; }
mit
bkahlert/seqan-research
raw/pmsb13/pmsb13-data-20130530/sources/fjt74l9mlcqisdus/2013-04-17T22-58-12.907+0200/sandbox/my_sandbox/apps/tutorial_23/tutorial_23.cpp
257
#include <seqan/sequence.h> using namespace seqan; using namespace std; int main(){ String<String<int>> x; String<int>y; append(y,1); append(y,2); append(y,3); append(x,y); append(y,4); append(x,y); //cout<<x<<endl; return 0; }
mit
poifox/marionette-require-starter
js/fixtures/menu.js
1085
define(["app"], function(App) { App.module("Fixtures", function(Fixtures, App, Backbone, Marionette, $, _) { Fixtures.MenuItem = Backbone.Model.extend({ defaults: { name: "UNDEFINED", url: "/", trigger: "boilerplates:index", icon: "", } }); Fixtures.MenuCollection = Backbone.Collection.extend({ model: Fixtures.MenuItem }); var API = { getLeftMenu: function() { return new Fixtures.MenuCollection([ // {name: "Home", url: "/", trigger: "boilerplates:home"}, {name: "Boilerplates", url: "/boilerplates", trigger: "boilerplates:index"}, {name: "About", url: "/about", trigger: "static:about"} ]); }, getRightMenu: function() { return new Fixtures.MenuCollection([ // {name: "Home", url: "/", trigger: "boilerplates:home"}, {name: "Right", url: "/", trigger: "boilerplates:home"}]); } }; App.reqres.setHandler("fixtures:menu:left", function () { return API.getLeftMenu(); }); App.reqres.setHandler("fixtures:menu:right", function () { return API.getRightMenu(); }); }); });
mit
jimeh/zynapse
config/environment.php
1449
<?php /* Zynapse Environment - configure server environments and display modes */ # default environment - overridden by $host_config # ( development | test | production ) $environment = 'development'; # default server display mode - overridden by $host_config $mode = 'web'; # if you don't need any of the advanced host-specific # configuration features, you can disable it as it becomes # excess code which you don't need. $enable_advanced_host_config = true; # host configuration # - set environment, display mode, and root path for # specific hosts. available options are "environment", # "mode", and "root". $host_config = array( // 'zynapse' => array( // // ), // 'wap.zynapse' => array( // 'mode' => 'wap', // ), // 'admin.zynapse' => array( // 'root' => 'admin', // ), // 'zynapse.org' => array( // 'environment' => 'production', // ), // 'admin.zynapse.org' => array( // 'environment' => 'production', // 'root' => 'admin', // ), ); # set custom path to zynapse libs $zynapse_libs = ''; # Timer enabled in production environment? # - its always enabled in development and test environments $timer_enabled = false; # enable php error logging? - recommended $enable_logging = true; # enable internal error logging? - recommended $internal_logging = true; # if zynapse's root is not the root of the server, define # the prefix path (without leading or trailing slashes). $url_prefix = ''; ?>
mit
goodwinxp/Yorozuya
library/ATF/LPGOPHER_VIEW_ATTRIBUTE_TYPE.hpp
302
// This file auto generated by plugin for ida pro. Generated code only for x64. Please, dont change manually #pragma once #include <common/common.h> #include <GOPHER_VIEW_ATTRIBUTE_TYPE.hpp> START_ATF_NAMESPACE typedef GOPHER_VIEW_ATTRIBUTE_TYPE *LPGOPHER_VIEW_ATTRIBUTE_TYPE; END_ATF_NAMESPACE
mit
intuinno/vistalk
vistalk3/settings/base.py
7984
# Django settings for vistalk3 project # Django settings for Django celery import djcelery djcelery.setup_loader() DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', '[email protected]'), ) MANAGERS = ADMINS DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. 'NAME': 'django_testdb', # Or path to database file if using sqlite3. # The following settings are not used with sqlite3: 'USER': 'testadmin', 'PASSWORD': 'testing', 'HOST': '127.0.0.1', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP. 'PORT': '', # Set to empty string for default. } } # Parse database configuration from $DATABASE_URL import dj_database_url DATABASES['default'] = dj_database_url.config() # Honor the 'X-Forwarded-Proto' header for request.is_secure() SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') # Allow all host headers ALLOWED_HOSTS = ['*'] # Static asset configuration # Hosts/domain names that are valid for this site; required if DEBUG is False # See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts ALLOWED_HOSTS = [] # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available on all operating systems. # In a Windows environment this must be set to your system time zone. TIME_ZONE = 'America/Indianapolis' # Language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = 'en-us' SITE_ID = 1 # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = True # If you set this to False, Django will not format dates, numbers and # calendars according to the current locale. USE_L10N = True # If you set this to False, Django will not use timezone-aware datetimes. 
USE_TZ = True # Absolute filesystem path to the directory that will hold user-uploaded files. # Example: "/var/www/example.com/media/" MEDIA_ROOT = '' # URL that handles the media served from MEDIA_ROOT. Make sure to use a # trailing slash. # Examples: "http://example.com/media/", "http://media.example.com/" MEDIA_URL = '' # Absolute path to the directory static files should be collected to. # Don't put anything in this directory yourself; store your static files # in apps' "static/" subdirectories and in STATICFILES_DIRS. # Example: "/var/www/example.com/static/" STATIC_ROOT = '/var/www/sociavis.com/static' # URL prefix for static files. # Example: "http://example.com/static/", "http://static.example.com/" STATIC_URL = '/static/' # Additional locations of static files import os BASE_DIR = os.path.dirname(os.path.abspath(__file__)) STATICFILES_DIRS = ( # Put strings here, like "/home/html/static" or "C:/www/django/static". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. os.path.join(BASE_DIR, 'static'), ) # List of finder classes that know how to find static files in # various locations. STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', # 'django.contrib.staticfiles.finders.DefaultStorageFinder', ) # Make this unique, and don't share it with anybody. SECRET_KEY = '9b5&^x)driak8pg10q7bl=)*62y_ynca1w@0^kimm1#zt7yyec' # List of callables that know how to import templates from various sources. 
TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', # 'django.template.loaders.eggs.Loader', ) MIDDLEWARE_CLASSES = ( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', # Uncomment the next line for simple clickjacking protection: # 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'vistalk3.urls' # Python dotted path to the WSGI application used by Django's runserver. WSGI_APPLICATION = 'vistalk3.wsgi.application' TEMPLATE_DIRS = ( # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. os.path.join(BASE_DIR, 'templates'), ) INSTALLED_APPS = ( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.messages', 'django.contrib.staticfiles', # Uncomment the next line to enable the admin: 'django.contrib.admin', # Uncomment the next line to enable admin documentation: # 'django.contrib.admindocs', 'polls', 'blog', 'wordconfuse', 'django.contrib.comments', 'movievis', 'yelpvis', 'jsonify', 'newsvis', 'newsvis2', 'social_auth', 'ribbit', 'todo', 'activitysync', 'djcelery', 'celerytest', 'carson', ) ACTIVITYSYNC_PROVIDERS = ( 'activitysync.providers.twitterprovider.TwitterUserProvider', 'activitysync.providers.twitterprovider.TwitterSearchProvider', ) ACTIVITYSYNC_SETTINGS = { 'TWITTER_USERNAME' : 'intuinno', 'TWITTER_SEARCHTERM': '#THICKE', } # A sample logging configuration. The only tangible logging # performed by this configuration is to send an email to # the site admins on every HTTP 500 error when DEBUG=False. 
# See http://docs.djangoproject.com/en/dev/topics/logging for # more details on how to customize your logging configuration. LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'filters': { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse' } }, 'handlers': { 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], 'class': 'django.utils.log.AdminEmailHandler' } }, 'loggers': { 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True, }, } } AUTHENTICATION_BACKENDS = ( 'social_auth.backends.twitter.TwitterBackend', 'social_auth.backends.facebook.FacebookBackend', 'social_auth.backends.google.GoogleOAuthBackend', 'social_auth.backends.google.GoogleOAuth2Backend', 'social_auth.backends.google.GoogleBackend', 'social_auth.backends.yahoo.YahooBackend', 'social_auth.backends.browserid.BrowserIDBackend', 'social_auth.backends.contrib.linkedin.LinkedinBackend', 'social_auth.backends.contrib.disqus.DisqusBackend', 'social_auth.backends.contrib.livejournal.LiveJournalBackend', 'social_auth.backends.contrib.orkut.OrkutBackend', 'social_auth.backends.contrib.foursquare.FoursquareBackend', 'social_auth.backends.contrib.github.GithubBackend', 'social_auth.backends.contrib.vk.VKOAuth2Backend', 'social_auth.backends.contrib.live.LiveBackend', 'social_auth.backends.contrib.skyrock.SkyrockBackend', 'social_auth.backends.contrib.yahoo.YahooOAuthBackend', 'social_auth.backends.contrib.readability.ReadabilityBackend', 'social_auth.backends.contrib.fedora.FedoraBackend', 'social_auth.backends.OpenIDBackend', 'django.contrib.auth.backends.ModelBackend', ) # Settings for the django-carson CONSUMER_KEY='AtDvumv2XT7iAbzsPEInQ' CONSUMER_SECRET='z5atyQUwygHxSuSlBGTUMgflzudWzYBkSalPLx5FF4U' TOKEN_KEY='38975687-rBGM5Wj23VNdYz8n8Gg8zMqgOBKfZ4uHaZ5uCH8MD' TOKEN_SECRET='qpPMWwzMgWt2kYocyLM6nI3NUEcyWK2FHw8MlT8j90'
mit
zeno15/Ascendancy
include/Infrastructure/FontManager.hpp
505
#ifndef INCLUDED_ASCENDANCY_INFRASTRUCTURE_FONT_MANAGER_HPP_ #define INCLUDED_ASCENDANCY_INFRASTRUCTURE_FONT_MANAGER_HPP_ #include <map> #include <SFML/Graphics.hpp> namespace asc { class FontManager { public: FontManager(); ~FontManager(); bool loadFont(const std::string& _filepath, const std::string& _fontName); sf::Font& getFont(const std::string& _fontName); private: std::map<std::string, sf::Font *> m_Fonts; }; } #endif // INCLUDED_ASCENDANCY_INFRASTRUCTURE_FONT_MANAGER_HPP_
mit
FanHuaRan/interview.algorithm
java/concurrentdemo/src/com/fhr/concurrentdemo/threadcon/characterprinters/TwoLockPrinter.java
2317
package com.fhr.concurrentdemo.threadcon.characterprinters; import org.junit.Test; /** * 基于两个lock实现连续打印abcabc.... * * @author fhr * @since 2017/09/04 */ public class TwoLockPrinter { @Test public void test() throws InterruptedException { // 打印A线程的锁 Object lockA = new Object(); // 打印B线程的锁 Object lockB = new Object(); // 打印C线程的锁 Object lockC = new Object(); ThreadGroup group = new ThreadGroup("xx"); // 打印a的线程 Thread threadA = new Thread(group, new Printer(lockC, lockA, 'A')); // 打印b的线程 Thread threadB = new Thread(group, new Printer(lockA, lockB, 'B')); // 打印c的线程 Thread threadC = new Thread(group, new Printer(lockB, lockC, 'C')); // 依次启动a b c线程 threadA.start(); Thread.sleep(100); threadB.start(); Thread.sleep(100); threadC.start(); // 主线程循环让出cpu使用权 while (group.activeCount() > 0) { Thread.yield(); } } // 打印线程 private class Printer implements Runnable { // 打印次数 private static final int PRINT_COUNT = 6; // 前一个线程的打印锁 private final Object fontLock; // 本线程的打印锁 private final Object thisLock; // 打印字符 private final char printChar; public Printer(Object fontLock, Object thisLock, char printChar) { super(); this.fontLock = fontLock; this.thisLock = thisLock; this.printChar = printChar; } @Override public void run() { // 连续打印PRINT_COUNT次 for (int i = 0; i < PRINT_COUNT; i++) { // 获取前一个线程的打印锁 synchronized (fontLock) { // 获取本线程的打印锁 synchronized (thisLock) { // 打印字符 System.out.print(printChar); // 通过本线程的打印锁唤醒后面的线程 // notify和notifyall均可,因为同一时刻只有一个线程在等待 thisLock.notify(); // 不是最后一次则通过fontLock等待被唤醒 // 必须要加判断,不然能够打印6次 但6次后就会直接死锁 if (i < PRINT_COUNT - 1) { try { // 通过fontLock等待被唤醒 fontLock.wait(); } catch (InterruptedException e) { e.printStackTrace(); return; } } } } } } } }
mit
cybernude/hipth
test-env.php
663
<?php function GetClientMac(){ $macAddr=false; $arp=`arp -n`; $lines=explode("\n", $arp); foreach($lines as $line){ $cols=preg_split('/\s+/', trim($line)); if ($cols[0]==$_SERVER['REMOTE_ADDR']){ $macAddr=$cols[2]; } } return $macAddr; } $client_mac = GetClientMac(); echo $client_mac; echo "<br>"; echo $_SERVER['REMOTE_ADDR']; echo $yy; echo "<br>"; echo $_SERVER['PHP_SELF']; echo "<br>"; echo $_SERVER['SERVER_NAME']; echo "<br>"; echo $_SERVER['HTTP_HOST']; echo "<br>"; echo $_SERVER['HTTP_REFERER']; echo "<br>"; echo $_SERVER['HTTP_USER_AGENT']; echo "<br>"; echo $_SERVER['SCRIPT_NAME']; ?>
mit
AldorEla/albinuta
src/Alb/Bundle/AppBundle/Controller/StoryController.php
4852
<?php namespace Alb\Bundle\AppBundle\Controller; use Alb\Bundle\AppBundle\Entity\Story; use Alb\Bundle\AppBundle\Repository\StoryRepository; use Symfony\Bundle\FrameworkBundle\Controller\Controller; use Symfony\Component\HttpFoundation\Request; /** * Story controller. * */ class StoryController extends Controller { const STORIES_LISTING_LIMIT = 9; // 9 /** * Lists all story entities. * */ public function indexAction($page, Request $request) { $doctrine = $this->getDoctrine(); $em = $doctrine->getManager(); $limit = self::STORIES_LISTING_LIMIT; $stories = StoryRepository::findAllStories($em, $page, $limit); $totalStories = StoryRepository::getTotalStoryItems($em); $currentPage = $request->get('page'); return $this->render('AlbAppBundle:story:index.html.twig', array( 'stories' => $stories, 'totalStories' => $totalStories, 'currentPage' => $currentPage, )); } /** * Creates a new story entity. * */ public function newAction(Request $request) { $story = new Story(); $form = $this->createForm('Alb\Bundle\AppBundle\Form\StoryType', $story); $form->handleRequest($request); if ($form->isSubmitted() && $form->isValid()) { $em = $this->getDoctrine()->getManager(); $em->persist($story); $em->flush(); return $this->redirectToRoute('story_show', array('id' => $story->getId())); } return $this->render('AlbAppBundle:story:new.html.twig', array( 'story' => $story, 'form' => $form->createView(), )); } /** * Finds and displays a story entity. * */ public function showAction(Story $story) { $deleteForm = $this->createDeleteForm($story); return $this->render('AlbAppBundle:story:show.html.twig', array( 'story' => $story, 'delete_form' => $deleteForm->createView(), )); } /** * Displays a form to edit an existing story entity. 
* */ public function editAction(Request $request, Story $story) { $deleteForm = $this->createDeleteForm($story); $editForm = $this->createForm('Alb\Bundle\AppBundle\Form\StoryType', $story); $editForm->handleRequest($request); if ($editForm->isSubmitted() && $editForm->isValid()) { $this->getDoctrine()->getManager()->flush(); return $this->redirectToRoute('story_edit', array('id' => $story->getId())); } return $this->render('AlbAppBundle:story:edit.html.twig', array( 'story' => $story, 'edit_form' => $editForm->createView(), 'delete_form' => $deleteForm->createView(), )); } /** * Deletes a story entity. * */ public function deleteAction(Request $request, Story $story) { $form = $this->createDeleteForm($story); $form->handleRequest($request); if ($form->isSubmitted() && $form->isValid()) { $em = $this->getDoctrine()->getManager(); $em->remove($story); $em->flush(); } return $this->redirectToRoute('story_index'); } /** * Creates a form to delete a story entity. * * @param Story $story The story entity * * @return \Symfony\Component\Form\Form The form */ private function createDeleteForm(Story $story) { return $this->createFormBuilder() ->setAction($this->generateUrl('story_delete', array('id' => $story->getId()))) ->setMethod('DELETE') ->getForm() ; } public function paginationAction($totalStories, $currentPage) { // Get first page $first = 1; // Get last page $last = ''; // Get previous page $previous = ''; // Get next page $next = ''; // Default empty pagination and add the built elements to the pagination array $pagination = []; // Get amount of pages $limit = SELF::STORIES_LISTING_LIMIT; $pages = 1; if($totalStories > $limit) { $pages = $totalStories / $limit; $pages = ceil($pages); $pagination['first'] = $first; $pagination['last'] = $last; $pagination['pages'] = $pages; $pagination['previous'] = $previous; $pagination['next'] = $next; } return $this->render('AlbAppBundle:story:pagination.html.twig', array( 'pagination' => $pagination, 'currentPage' => 
$currentPage )); } }
mit
skeiter9/javascript-para-todo_demo
webapp/node_modules/webpack/benchmark/fixtures/926.async.js
172
require.ensure([], function(require) { require("./115.async.js"); require("./231.async.js"); require("./463.async.js"); require("./925.async.js"); }); module.exports = 926;
mit
devfreak/skim
src/Vimtag/DevBundle/Entity/Url.php
5328
<?php namespace Vimtag\DevBundle\Entity; use Doctrine\Common\Collections\ArrayCollection; use Doctrine\ORM\Mapping as ORM; /** * Url * * @ORM\Table() * @ORM\Entity(repositoryClass="Vimtag\DevBundle\Entity\UrlRepository") */ class Url { /** * @var integer * * @ORM\Column(name="id", type="integer") * @ORM\Id * @ORM\GeneratedValue(strategy="AUTO") */ private $id; /** * @var string * * @ORM\Column(name="url", type="string", length=255) */ private $url; /** * @var integer * * @ORM\Column(name="interests", type="integer") */ private $interests; /** * @var integer * * @ORM\Column(name="views", type="integer") */ private $views; /** * @var integer * * @ORM\Column(name="not_interest", type="integer") */ private $not_interest; /** * @ORM\ManyToOne(targetEntity="Vimtag\DevBundle\Entity\User", inversedBy="urls") * @ORM\JoinColumn(name="user_id", referencedColumnName="id") */ private $user; /** * @ORM\OneToMany(targetEntity="Vimtag\DevBundle\Entity\UrlScore", mappedBy="url") */ protected $scores; /** * @ORM\ManyToOne(targetEntity="Vimtag\DevBundle\Entity\Category", inversedBy="categories") * @ORM\JoinColumn(name="category_id", referencedColumnName="id") */ protected $category; /** * @ORM\OneToMany(targetEntity="Vimtag\DevBundle\Entity\History", mappedBy="url") */ private $history; /** * Constructor */ public function __construct() { $this->scores = new \Doctrine\Common\Collections\ArrayCollection(); } /** * Get id * * @return integer */ public function getId() { return $this->id; } /** * Set url * * @param string $url * @return Url */ public function setUrl($url) { $this->url = $url; return $this; } /** * Get url * * @return string */ public function getUrl() { return $this->url; } /** * Set user * * @param \Vimtag\DevBundle\Entity\User $user * @return Url */ public function setUser(\Vimtag\DevBundle\Entity\User $user = null) { $this->user = $user; return $this; } /** * Get user * * @return \Vimtag\DevBundle\Entity\User */ public function getUser() { return $this->user; } /** * 
Add scores * * @param \Vimtag\DevBundle\Entity\UrlScore $scores * @return Url */ public function addScore(\Vimtag\DevBundle\Entity\UrlScore $scores) { $this->scores[] = $scores; return $this; } /** * Remove scores * * @param \Vimtag\DevBundle\Entity\UrlScore $scores */ public function removeScore(\Vimtag\DevBundle\Entity\UrlScore $scores) { $this->scores->removeElement($scores); } /** * Get scores * * @return \Doctrine\Common\Collections\Collection */ public function getScores() { return $this->scores; } /** * Set category * * @param \Vimtag\DevBundle\Entity\Category $category * @return Url */ public function setCategory(\Vimtag\DevBundle\Entity\Category $category = null) { $this->category = $category; return $this; } /** * Get category * * @return \Vimtag\DevBundle\Entity\Category */ public function getCategory() { return $this->category; } /** * Set history * * @param \Vimtag\DevBundle\Entity\History $history * @return Url */ public function setHistory(\Vimtag\DevBundle\Entity\History $history = null) { $this->history = $history; return $this; } /** * Get history * * @return \Vimtag\DevBundle\Entity\History */ public function getHistory() { return $this->history; } /** * Set interests * * @param integer $interests * @return Url */ public function setInterests($interests) { $this->interests = $interests; return $this; } /** * Get interests * * @return integer */ public function getInterests() { return $this->interests; } /** * Set views * * @param integer $views * @return Url */ public function setViews($views) { $this->views = $views; return $this; } /** * Get views * * @return integer */ public function getViews() { return $this->views; } /** * Add history * * @param \Vimtag\DevBundle\Entity\UserScore $history * @return Url */ public function addHistory(\Vimtag\DevBundle\Entity\UserScore $history) { $this->history[] = $history; return $this; } /** * Remove history * * @param \Vimtag\DevBundle\Entity\UserScore $history */ public function 
removeHistory(\Vimtag\DevBundle\Entity\UserScore $history) { $this->history->removeElement($history); } }
mit
mrpapercut/wscript
testfiles/COMobjects/JSclasses/DXImageTransform.Microsoft.Barn.1.js
597
class dximagetransform_microsoft_barn_1 { constructor() { // int Capabilities () {get} this.Capabilities = undefined; // float Duration () {get} {set} this.Duration = undefined; // string Motion () {get} {set} this.Motion = undefined; // string Orientation () {get} {set} this.Orientation = undefined; // float Progress () {get} {set} this.Progress = undefined; // float StepResolution () {get} this.StepResolution = undefined; } } module.exports = dximagetransform_microsoft_barn_1;
mit
soulchainer/qtile
test/test_bar.py
12819
# Copyright (c) 2011 Florian Mounier # Copyright (c) 2012-2013 Craig Barnes # Copyright (c) 2012 roger # Copyright (c) 2012, 2014-2015 Tycho Andersen # Copyright (c) 2014 Sean Vig # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
import os import pytest import tempfile import libqtile.layout import libqtile.bar import libqtile.widget import libqtile.config import libqtile.confreader class GBConfig: auto_fullscreen = True keys = [] mouse = [] groups = [ libqtile.config.Group("a"), libqtile.config.Group("bb"), libqtile.config.Group("ccc"), libqtile.config.Group("dddd"), libqtile.config.Group("Pppy") ] layouts = [libqtile.layout.stack.Stack(num_stacks=1)] floating_layout = libqtile.layout.floating.Floating() screens = [ libqtile.config.Screen( top=libqtile.bar.Bar( [ libqtile.widget.CPUGraph( width=libqtile.bar.STRETCH, type="linefill", border_width=20, margin_x=1, margin_y=1 ), libqtile.widget.MemoryGraph(type="line"), libqtile.widget.SwapGraph(type="box"), libqtile.widget.TextBox(name="text", background="333333"), ], 50, ), bottom=libqtile.bar.Bar( [ libqtile.widget.GroupBox(), libqtile.widget.AGroupBox(), libqtile.widget.Prompt(), libqtile.widget.WindowName(), libqtile.widget.Sep(), libqtile.widget.Clock(), ], 50 ), # TODO: Add vertical bars and test widgets that support them ) ] main = None gb_config = pytest.mark.parametrize("qtile", [GBConfig], indirect=True) def test_completion(): c = libqtile.widget.prompt.CommandCompleter(None, True) c.reset() c.lookup = [ ("a", "x/a"), ("aa", "x/aa"), ] assert c.complete("a") == "a" assert c.actual() == "x/a" assert c.complete("a") == "aa" assert c.complete("a") == "a" c = libqtile.widget.prompt.CommandCompleter(None) r = c.complete("l") assert c.actual().endswith(r) c.reset() assert c.complete("/bi") == "/bin/" c.reset() assert c.complete("/bin") != "/bin/" c.reset() home_dir = os.path.expanduser("~") with tempfile.TemporaryDirectory(prefix="qtile_test_", dir=home_dir) as absolute_tmp_path: tmp_dirname = absolute_tmp_path[len(home_dir + os.sep):] user_input = os.path.join("~", tmp_dirname) assert c.complete(user_input) == user_input c.reset() test_bin_dir = os.path.join(absolute_tmp_path, "qtile-test-bin") os.mkdir(test_bin_dir) assert 
c.complete(user_input) == os.path.join(user_input, "qtile-test-bin") + os.sep c.reset() s = "thisisatotallynonexistantpathforsure" assert c.complete(s) == s assert c.actual() == s c.reset() @gb_config def test_draw(qtile): qtile.test_window("one") b = qtile.c.bar["bottom"].info() assert b["widgets"][0]["name"] == "groupbox" @gb_config def test_prompt(qtile): assert qtile.c.widget["prompt"].info()["width"] == 0 qtile.c.spawncmd(":") qtile.c.widget["prompt"].fake_keypress("a") qtile.c.widget["prompt"].fake_keypress("Tab") qtile.c.spawncmd(":") qtile.c.widget["prompt"].fake_keypress("slash") qtile.c.widget["prompt"].fake_keypress("Tab") @gb_config def test_event(qtile): qtile.c.group["bb"].toscreen() @gb_config def test_textbox(qtile): assert "text" in qtile.c.list_widgets() s = "some text" qtile.c.widget["text"].update(s) assert qtile.c.widget["text"].get() == s s = "Aye, much longer string than the initial one" qtile.c.widget["text"].update(s) assert qtile.c.widget["text"].get() == s qtile.c.group["Pppy"].toscreen() qtile.c.widget["text"].set_font(fontsize=12) @gb_config def test_textbox_errors(qtile): qtile.c.widget["text"].update(None) qtile.c.widget["text"].update("".join(chr(i) for i in range(255))) qtile.c.widget["text"].update("V\xE2r\xE2na\xE7\xEE") qtile.c.widget["text"].update("\ua000") @gb_config def test_groupbox_button_press(qtile): qtile.c.group["ccc"].toscreen() assert qtile.c.groups()["a"]["screen"] is None qtile.c.bar["bottom"].fake_button_press(0, "bottom", 10, 10, 1) assert qtile.c.groups()["a"]["screen"] == 0 class GeomConf: auto_fullscreen = False main = None keys = [] mouse = [] groups = [ libqtile.config.Group("a"), libqtile.config.Group("b"), libqtile.config.Group("c"), libqtile.config.Group("d") ] layouts = [libqtile.layout.stack.Stack(num_stacks=1)] floating_layout = libqtile.layout.floating.Floating() screens = [ libqtile.config.Screen( top=libqtile.bar.Bar([], 10), bottom=libqtile.bar.Bar([], 10), left=libqtile.bar.Bar([], 10), 
right=libqtile.bar.Bar([], 10), ) ] geom_config = pytest.mark.parametrize("qtile", [GeomConf], indirect=True) class DBarH(libqtile.bar.Bar): def __init__(self, widgets, size): libqtile.bar.Bar.__init__(self, widgets, size) self.horizontal = True class DBarV(libqtile.bar.Bar): def __init__(self, widgets, size): libqtile.bar.Bar.__init__(self, widgets, size) self.horizontal = False class DWidget: def __init__(self, length, length_type): self.length, self.length_type = length, length_type @geom_config def test_geometry(qtile): qtile.test_xeyes() g = qtile.c.screens()[0]["gaps"] assert g["top"] == (0, 0, 800, 10) assert g["bottom"] == (0, 590, 800, 10) assert g["left"] == (0, 10, 10, 580) assert g["right"] == (790, 10, 10, 580) assert len(qtile.c.windows()) == 1 geom = qtile.c.windows()[0] assert geom["x"] == 10 assert geom["y"] == 10 assert geom["width"] == 778 assert geom["height"] == 578 internal = qtile.c.internal_windows() assert len(internal) == 4 wid = qtile.c.bar["bottom"].info()["window"] assert qtile.c.window[wid].inspect() @geom_config def test_resize(qtile): def wd(l): return [i.length for i in l] def offx(l): return [i.offsetx for i in l] def offy(l): return [i.offsety for i in l] for DBar, off in ((DBarH, offx), (DBarV, offy)): # noqa: N806 b = DBar([], 100) dwidget_list = [ DWidget(10, libqtile.bar.CALCULATED), DWidget(None, libqtile.bar.STRETCH), DWidget(None, libqtile.bar.STRETCH), DWidget(10, libqtile.bar.CALCULATED), ] b._resize(100, dwidget_list) assert wd(dwidget_list) == [10, 40, 40, 10] assert off(dwidget_list) == [0, 10, 50, 90] b._resize(101, dwidget_list) assert wd(dwidget_list) == [10, 40, 41, 10] assert off(dwidget_list) == [0, 10, 50, 91] dwidget_list = [ DWidget(10, libqtile.bar.CALCULATED) ] b._resize(100, dwidget_list) assert wd(dwidget_list) == [10] assert off(dwidget_list) == [0] dwidget_list = [ DWidget(10, libqtile.bar.CALCULATED), DWidget(None, libqtile.bar.STRETCH) ] b._resize(100, dwidget_list) assert wd(dwidget_list) == [10, 90] 
assert off(dwidget_list) == [0, 10] dwidget_list = [ DWidget(None, libqtile.bar.STRETCH), DWidget(10, libqtile.bar.CALCULATED), ] b._resize(100, dwidget_list) assert wd(dwidget_list) == [90, 10] assert off(dwidget_list) == [0, 90] dwidget_list = [ DWidget(10, libqtile.bar.CALCULATED), DWidget(None, libqtile.bar.STRETCH), DWidget(10, libqtile.bar.CALCULATED), ] b._resize(100, dwidget_list) assert wd(dwidget_list) == [10, 80, 10] assert off(dwidget_list) == [0, 10, 90] class ExampleWidget(libqtile.widget.base._Widget): orientations = libqtile.widget.base.ORIENTATION_HORIZONTAL def __init__(self): libqtile.widget.base._Widget.__init__(self, 10) def draw(self): pass class IncompatibleWidgetConf: main = None keys = [] mouse = [] groups = [libqtile.config.Group("a")] layouts = [libqtile.layout.stack.Stack(num_stacks=1)] floating_layout = libqtile.layout.floating.Floating() screens = [ libqtile.config.Screen( left=libqtile.bar.Bar( [ # This widget doesn't support vertical orientation ExampleWidget(), ], 10 ), ) ] def test_incompatible_widget(qtile_nospawn): config = IncompatibleWidgetConf # Ensure that adding a widget that doesn't support the orientation of the # bar raises ConfigError with pytest.raises(libqtile.confreader.ConfigError): qtile_nospawn.create_manager(config) class MultiStretchConf: main = None keys = [] mouse = [] groups = [libqtile.config.Group("a")] layouts = [libqtile.layout.stack.Stack(num_stacks=1)] floating_layout = libqtile.layout.floating.Floating() screens = [ libqtile.config.Screen( top=libqtile.bar.Bar( [ libqtile.widget.Spacer(libqtile.bar.STRETCH), libqtile.widget.Spacer(libqtile.bar.STRETCH), ], 10 ), ) ] def test_multiple_stretches(qtile_nospawn): config = MultiStretchConf # Ensure that adding two STRETCH widgets to the same bar raises ConfigError with pytest.raises(libqtile.confreader.ConfigError): qtile_nospawn.create_manager(config) def test_basic(qtile_nospawn): config = GeomConf config.screens = [ libqtile.config.Screen( 
bottom=libqtile.bar.Bar( [ ExampleWidget(), libqtile.widget.Spacer(libqtile.bar.STRETCH), ExampleWidget() ], 10 ) ) ] qtile_nospawn.start(config) i = qtile_nospawn.c.bar["bottom"].info() assert i["widgets"][0]["offset"] == 0 assert i["widgets"][1]["offset"] == 10 assert i["widgets"][1]["width"] == 780 assert i["widgets"][2]["offset"] == 790 libqtile.hook.clear() def test_singlespacer(qtile_nospawn): config = GeomConf config.screens = [ libqtile.config.Screen( bottom=libqtile.bar.Bar( [ libqtile.widget.Spacer(libqtile.bar.STRETCH), ], 10 ) ) ] qtile_nospawn.start(config) i = qtile_nospawn.c.bar["bottom"].info() assert i["widgets"][0]["offset"] == 0 assert i["widgets"][0]["width"] == 800 libqtile.hook.clear() def test_nospacer(qtile_nospawn): config = GeomConf config.screens = [ libqtile.config.Screen( bottom=libqtile.bar.Bar( [ ExampleWidget(), ExampleWidget() ], 10 ) ) ] qtile_nospawn.start(config) i = qtile_nospawn.c.bar["bottom"].info() assert i["widgets"][0]["offset"] == 0 assert i["widgets"][1]["offset"] == 10 libqtile.hook.clear()
mit
aaronSig/jBetfair
src/main/java/com/jbetfair/api/params/MarketSort.java
253
package com.jbetfair.api.params; public enum MarketSort implements APIRequestParam { MINIMUM_TRADED, MAXIMUM_TRADED, MINIMUM_AVAILABLE, MAXIMUM_AVAILABLE, FIRST_TO_START, LAST_TO_START; public String getParameterName() { return "sort"; }; }
mit
GeoSmartCity-CIP/adminCS
src/featureDetail.js
5622
var cs = cs || {}; cs.featureDetail = function(feature) { cs.featureDetail.feature_ = feature; //cs.feature.zoom2feature(feature); cs.featureDetail.renderFeature(); cs.featureDetail.showDetail(); $('.sidebar-header-text').html(feature.get('label')); return this; }; cs.featureDetail.featureElement_ = ''; cs.featureDetail.width = 350; cs.featureDetail.feature_ = {}; cs.featureDetail.renderFeature = function(){ cs.featureDetail.featureElement_ = $('<div>',{class:''}); cs.featureDetail.renderToolButtons(); cs.featureDetail.wrapper = $('<div>',{class: 'cs-featureDetail-wrapper'}) .appendTo(cs.featureDetail.featureElement_); cs.featureDetail.renderProperties(); }; cs.featureDetail.renderProperties = function(){ var properties = cs.featureDetail.feature_.getProperties(); var wrapper = cs.featureDetail.wrapper; for (var attr in cs.fdAttrs){ var key = cs.fdAttrs[attr]; var item = cs.datatype.constructor(cs.featureDetail.feature_, key, properties[key] ); var itemWrapper = $('<div>',{class: 'cs-featureDetail-item-wrapper'}) .appendTo(wrapper); $('<span>',{class: 'cs-featureDetail-item-name'}) .html(key) .appendTo(wrapper); $('<span>',{class: 'cs-featureDetail-item-value'}) .html(item.getFdValue()) .appendTo(wrapper); } }; cs.featureDetail.renderToolButtons = function() { var toolBar = cs.featureDetail.toolButtons = $('<div>', {class : 'gs-featureDetail-toolButtons btn-toolbar', role:'toolbar', 'aria-label': 'neco'}) .appendTo(cs.featureDetail.featureElement_); var btnGroup = $('<div>', {class : 'btn-group', role:'group', 'aria-label': 'neco'}) .appendTo(toolBar); $('<button>', {class : 'btn btn-default'}) .appendTo(btnGroup) .html('<i class="fa fa-map-marker"></i>') .on('click', function(){cs.feature.zoom2feature(cs.featureDetail.feature_, 22)}); $('<button>', {class : 'btn btn-default'}) .appendTo(btnGroup) .html('<i class="fa fa-comments-o"></i>') .on('click', function(){ cs.sideBar_.find('.sidebar-content').animate({ scrollTop: $('#featureDetailContent').height() }, 1000); 
this.blur(); }); cs.featureDetail.btnGroupSecure = $('<div>', {class : 'btn-group ', role:'group', 'aria-label': 'neco'}) .appendTo(toolBar); if (!cs.user.isAuthorized){ cs.featureDetail.btnGroupSecure.hide(); } $('<button>', {class : 'btn btn-default'}) .appendTo(cs.featureDetail.btnGroupSecure) .html('<i class="fa fa-edit"></i>') .on('click', cs.featureDetail.switchToEdit); $('<button>', {class : 'btn btn-default'}) .appendTo(cs.featureDetail.btnGroupSecure) .html('<i class="fa fa-trash"></i>') .on('click', function(){ console.log('Event removal is not yet implemented!') }); }; cs.featureDetail.renderEditableForm = function(){ var properties = cs.featureDetail.feature_.getProperties(); cs.featureDetail.form_ = $('<form>') .appendTo(cs.featureDetail.wrapper); for (var attr in cs.fdAttrs){ var key = cs.fdAttrs[attr]; var item = cs.datatype.constructor(cs.featureDetail.feature_, key, properties[key] ); var editValue = item.getEditValue(); if (editValue) { var formGroup = $('<fieldset>',{ class:'form-group'}) .appendTo(cs.featureDetail.form_); $('<label>',{for: key, class: 'cs-featureDetail-form-label'}) .html(key) .appendTo(formGroup); editValue.addClass('cs-featureDetail-form-item') .addClass('form-control') .appendTo(formGroup); } } cs.featureDetail.renderButtons(); }; cs.featureDetail.showDetail = function() { cs.sideBar_.find('#featureDetailContent') .html(cs.featureDetail.featureElement_); cs.sideBar_.open('featureDetail'); }; cs.featureDetail.renderButtons = function() { var wrapper = $('<div>',{id:'formButtons', class:'cs-featureDetail-formButtons pull-right form-group'}) .appendTo(cs.featureDetail.form_); $('<button>',{class: 'btn btn-default'}) .html('Cancel') .on('click', function(evt){evt.preventDefault();cs.featureDetail.switchToProperties();}) .appendTo(wrapper); $('<button>',{type: 'submit', class: 'btn btn-default btn-primary'}) .html('Submit') .on('click',cs.featureDetail.onSubmitButtonClick_) .appendTo(wrapper); }; 
cs.featureDetail.onSubmitButtonClick_ = function(evt) { evt.preventDefault(); cs.featureDetail.switchToProperties(); var data = cs.featureDetail.form_.serializeArray().reduce(function(obj, item) { obj[item.name] = item.value; return obj; }, {}); data.id = cs.featureDetail.feature_.get('id'); data.user = {id: cs.user.name, password: cs.user.password}; gsc.cs.eventUpdate(data) .done( function (evt) { cs.feature.updateProperties(cs.featureDetail.feature_, data); }).fail( function (evt) { cs.feature.updateProperties(cs.featureDetail.feature_, data); } ) }; cs.featureDetail.switchToEdit = function() { cs.featureDetail.wrapper.empty(); cs.featureDetail.renderEditableForm(); }; cs.featureDetail.switchToProperties = function() { cs.featureDetail.wrapper.empty(); cs.featureDetail.renderProperties(); }; cs.featureDetail.rerenderFd = function(){ cs.featureDetail.wrapper.empty(); cs.featureDetail.renderProperties(); };
mit
nours/RestAdminBundle
Util/Inflector.php
1013
<?php /* * This file is part of RestAdminBundle. * * (c) David Coudrier <[email protected]> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ namespace Nours\RestAdminBundle\Util; use Doctrine\Inflector\InflectorFactory; /** * Class Inflector * * @author David Coudrier <[email protected]> */ final class Inflector { private static function getInflector() { static $inflector; if (!$inflector) { $inflector = InflectorFactory::create()->build(); } return $inflector; } public static function classify(string $word): string { return self::getInflector()->classify($word); } public static function tableize(string $word): string { return self::getInflector()->tableize($word); } public static function pluralize(string $word): string { return self::getInflector()->pluralize($word); } }
mit
CodeCatz/litterbox
Lena/ex1.py
400
print "Is this real life?" print "Why am I here?" print "How about some pizza?" print "another line" print "exercise two already?" #this is for using hash print "chickens time!" print "poulets", 1 + 1 print "birds", 65 / 4 print "eggs:" print 27*76/5 print "Is it greater?", 5 > 2 print "Is it greater", 5 < 2 # print this will run print "This won't run" print "fuck me am i ever gonna", 13 + 37
mit