prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
---|---|
<|file_name|>foldable.rs<|end_file_name|><|fim▁begin|>use lift::Foldable;
use std::hash::Hash;
use std::collections::linked_list::LinkedList;
use std::collections::vec_deque::VecDeque;
use std::collections::{BinaryHeap, BTreeSet, HashSet};
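// NOTE: `foldable!` is assumed to be a macro from the `lift` crate that expands to a
// `Foldable` impl delegating to `self.iter().fold(...)`, mirroring the manual impls below.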
//Implementation of Foldable for Vec
foldable!(Vec);
//Implementation of Foldable for LinkedList
foldable!(LinkedList);
//Implementation of Foldable for VecDeque
foldable!(VecDeque);
//Implementation of Foldable for BinaryHeap
impl<T: Ord> Foldable for BinaryHeap<T> {
type A = T;
fn foldr<F>(&self, accum: Self::A, f: F) -> Self::A
where F: FnMut(Self::A, &Self::A) -> Self::A
{
self.iter().fold(accum, f)
}
}
//Implementation of Foldable for BTreeSet
impl<T: Ord> Foldable for BTreeSet<T> {
type A = T;
fn foldr<F>(&self, accum: Self::A, f: F) -> Self::A
where F: FnMut(Self::A, &Self::A) -> Self::A
{<|fim▁hole|>}
//Implementation of Foldable for HashSet
impl<T: Hash + Eq> Foldable for HashSet<T> {
type A = T;
fn foldr<F>(&self, accum: Self::A, f: F) -> Self::A
where F: FnMut(Self::A, &Self::A) -> Self::A
{
self.iter().fold(accum, f)
}
}<|fim▁end|> | self.iter().fold(accum, f)
} |
<|file_name|>assetloader.js<|end_file_name|><|fim▁begin|>'use strict';
import EventMap from 'eventmap';
import Log from './log';
var audioTypes = {
'mp3': 'audio/mpeg',
'wav': 'audio/wav',
'ogg': 'audio/ogg'
};
var imageTypes = {
'png': 'image/png',
'jpg': 'image/jpeg',
'gif': 'image/gif'
};
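// Extension -> MIME type tables, presumably used to pick the appropriate
// loader for each asset based on its file extension.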
class AssetLoader extends EventMap {
constructor(assets) {
super();
this.assets = assets || {};
this.files = {};
this.maxAssets = 0;
this.assetsLoaded = 0;
this.percentLoaded = 0;<|fim▁hole|> }
start() {
// TODO: The previous implementation was broken, so it has been removed for now.
}
}
export default AssetLoader;<|fim▁end|> | this.cache = {}; |
<|file_name|>test-perf2.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import time
import unittest
import dpkt
class TestPerf(unittest.TestCase):
rounds = 10000
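# setUp records a start time and tearDown prints the measured throughput in rounds/s.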
def setUp(self):
self.start = time.time()
def tearDown(self):
print self.rounds / (time.time() - self.start), 'rounds/s'
def test_pack(self):<|fim▁hole|> str(dpkt.ip.IP())
print 'pack:',
def test_unpack(self):
buf = str(dpkt.ip.IP())
for i in xrange(self.rounds):
dpkt.ip.IP(buf)
print 'unpack:',
if __name__ == '__main__':
unittest.main()<|fim▁end|> | for i in xrange(self.rounds): |
<|file_name|>TestConfigView.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tamaya.events;
import org.apache.tamaya.ConfigException;
import org.apache.tamaya.ConfigOperator;
import org.apache.tamaya.ConfigQuery;
import org.apache.tamaya.Configuration;
import org.apache.tamaya.ConfigurationProvider;
import org.apache.tamaya.TypeLiteral;
import org.apache.tamaya.spi.PropertyConverter;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
/**
* Created by Anatole on 24.03.2015.
*/
public class TestConfigView implements ConfigOperator{
private static final TestConfigView INSTANCE = new TestConfigView();
private TestConfigView(){}
public static ConfigOperator of(){
return INSTANCE;
}
@Override
public Configuration operate(final Configuration config) {
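// Returns a read-only view of the wrapped configuration that exposes only
// properties whose keys start with "test".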
return new Configuration() {
@Override
public Map<String, String> getProperties() {
Map<String, String> result = new HashMap<>();
for (Map.Entry<String, String> en : config.getProperties().entrySet()) {
if (en.getKey().startsWith("test")) {
result.put(en.getKey(), en.getValue());
}
}
return result;
// return config.getProperties().entrySet().stream().filter(e -> e.getKey().startsWith("test")).collect(
// Collectors.toMap(en -> en.getKey(), en -> en.getValue()));
}
@Override
public Configuration with(ConfigOperator operator) {
return null;
}
@Override
public <T> T query(ConfigQuery<T> query) {
return null;
}
@Override
public String get(String key) {
return getProperties().get(key);
}
@Override
public <T> T get(String key, Class<T> type) {
return (T) get(key, TypeLiteral.of(type));
}
/**
* Accesses the current String value for the given key and tries to convert it
* using the {@link org.apache.tamaya.spi.PropertyConverter} instances provided by the current
* {@link org.apache.tamaya.spi.ConfigurationContext}.
*
* @param key the property's absolute, or relative path, e.g. {@code
* a/b/c/d.myProperty}.
* @param type The target type required, not null.
* @param <T> the value type
* @return the converted value, never null.
*/
@Override
public <T> T get(String key, TypeLiteral<T> type) {
String value = get(key);
if (value != null) {
List<PropertyConverter<T>> converters = ConfigurationProvider.getConfigurationContext()
.getPropertyConverters(type);
for (PropertyConverter<T> converter : converters) {
try {
T t = converter.convert(value);
if (t != null) {
return t;
}
} catch (Exception e) {
Logger.getLogger(getClass().getName())
.log(Level.FINEST, "PropertyConverter: " + converter + " failed to convert value: "
+ value, e);<|fim▁hole|> }
}
throw new ConfigException("Unparseable config value for type: " + type.getRawType().getName() + ": " + key);
}
return null;
}
};
}
}<|fim▁end|> | |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# pysaml2 documentation build configuration file, created by
# sphinx-quickstart on Mon Aug 24 08:13:41 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.coverage']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'pysaml2'
copyright = u'2010-2011, Roland Hedberg'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.2'
# The full version, including alpha/beta/rc tags.
release = '1.2.0beta'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'<|fim▁hole|># A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'pysaml2doc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'pysaml2.tex', u'pysaml2 Documentation',
u'Roland Hedberg', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True<|fim▁end|> | |
<|file_name|>stats.py<|end_file_name|><|fim▁begin|>import glob, csv, re, shutil, mustache, time
import numpy as np
oddsfile = list(sorted(glob.glob('raw/odds*.csv')))[-1]
timestamp = re.search('s(.*?)\.', oddsfile).group(1)
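# Pull the timestamp out of the newest odds file name, e.g. "raw/odds<timestamp>.csv".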
with open(oddsfile) as infile:
reader = csv.reader(infile)
header = reader.next()
teams = [row for row in reader]
fixed = []
for team in teams:
t = team[0:2]
for odd in team[2:]:
if odd:
o = float(odd)
# betdaq lists some impossible odds. WTF?
if o < 1: o = 1.
t.append(o)
fixed.append(t)
teams = fixed
summary = []
for team in teams:
odds = team[2:]
try:
max_ = max(odds)
except ValueError:
#nobody is offering odds on this team, they're eliminated, skip them
continue
min_ = min(odds)
mean = np.mean(odds)
median = np.median(odds)
summary.append(team[:2] + [max_, min_, mean, median])
summaryfile = "raw/summary%s.csv" % timestamp
with file(summaryfile, 'w') as outfile:
w = csv.writer(outfile)
w.writerow(['name', 'group', 'max', 'min', 'mean', 'median'])
for row in summary:
w.writerow(row)
shutil.copy2(summaryfile, "summary.csv")
last_updated = time.strftime("%b %d %Y %H:%M")<|fim▁hole|>file("index.html", 'w').write(out)<|fim▁end|> |
context = {"last_updated": last_updated}
out = mustache.render(file("index.mustache.html").read(), context) |
<|file_name|>IndexProcessor.java<|end_file_name|><|fim▁begin|>/*
* Symphony - A modern community (forum/SNS/blog) platform written in Java.
* Copyright (C) 2012-2017, b3log.org & hacpai.com
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.b3log.symphony.processor;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.b3log.latke.Keys;
import org.b3log.latke.Latkes;
import org.b3log.latke.ioc.inject.Inject;
import org.b3log.latke.logging.Level;
import org.b3log.latke.logging.Logger;
import org.b3log.latke.model.Pagination;
import org.b3log.latke.service.LangPropsService;
import org.b3log.latke.servlet.HTTPRequestContext;
import org.b3log.latke.servlet.HTTPRequestMethod;
import org.b3log.latke.servlet.annotation.After;
import org.b3log.latke.servlet.annotation.Before;
import org.b3log.latke.servlet.annotation.RequestProcessing;
import org.b3log.latke.servlet.annotation.RequestProcessor;
import org.b3log.latke.servlet.renderer.freemarker.AbstractFreeMarkerRenderer;
import org.b3log.latke.util.Locales;
import org.b3log.latke.util.Stopwatchs;
import org.b3log.latke.util.Strings;
import org.b3log.symphony.model.Article;
import org.b3log.symphony.model.Common;
import org.b3log.symphony.model.UserExt;
import org.b3log.symphony.processor.advice.AnonymousViewCheck;
import org.b3log.symphony.processor.advice.PermissionGrant;
import org.b3log.symphony.processor.advice.stopwatch.StopwatchEndAdvice;
import org.b3log.symphony.processor.advice.stopwatch.StopwatchStartAdvice;
import org.b3log.symphony.service.*;
import org.b3log.symphony.util.Emotions;
import org.b3log.symphony.util.Markdowns;
import org.b3log.symphony.util.Symphonys;
import org.json.JSONObject;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.InputStream;
import java.util.*;
/**
* Index processor.
* <p>
* <ul>
* <li>Shows index (/), GET</li>
* <li>Shows recent articles (/recent), GET</li>
* <li>Shows hot articles (/hot), GET</li>
* <li>Shows perfect articles (/perfect), GET</li>
* <li>Shows about (/about), GET</li>
* <li>Shows b3log (/b3log), GET</li>
* <li>Shows SymHub (/symhub), GET</li>
* <li>Shows kill browser (/kill-browser), GET</li>
* </ul>
* </p>
*
* @author <a href="http://88250.b3log.org">Liang Ding</a>
* @author <a href="http://vanessa.b3log.org">Liyuan Li</a>
* @version 1.12.3.24, Dec 26, 2016
* @since 0.2.0
*/
@RequestProcessor
public class IndexProcessor {
/**
* Logger.
*/
private static final Logger LOGGER = Logger.getLogger(IndexProcessor.class.getName());
/**
* Article query service.
*/
@Inject
private ArticleQueryService articleQueryService;
/**
* User query service.
*/
@Inject
private UserQueryService userQueryService;
/**
* User management service.
*/
@Inject
private UserMgmtService userMgmtService;
/**
* Data model service.
*/
@Inject
private DataModelService dataModelService;
/**
* Language service.
*/
@Inject
private LangPropsService langPropsService;
/**
* Timeline management service.
*/
@Inject
private TimelineMgmtService timelineMgmtService;
/**
* Shows md guide.
*
* @param response the specified response
* @throws Exception exception
*/
@RequestProcessing(value = "/guide/markdown", method = HTTPRequestMethod.GET)
@Before(adviceClass = {StopwatchStartAdvice.class})
@After(adviceClass = {PermissionGrant.class, StopwatchEndAdvice.class})
public void showMDGuide(final HTTPRequestContext context, final HttpServletRequest request, final HttpServletResponse response)
throws Exception {
final AbstractFreeMarkerRenderer renderer = new SkinRenderer(request);
context.setRenderer(renderer);
renderer.setTemplateName("other/md-guide.ftl");
final Map<String, Object> dataModel = renderer.getDataModel();
InputStream inputStream = null;
try {
inputStream = IndexProcessor.class.getResourceAsStream("/md_guide.md");
final String md = IOUtils.toString(inputStream, "UTF-8");
String html = Emotions.convert(md);
html = Markdowns.toHTML(html);
dataModel.put("md", md);
dataModel.put("html", html);
} catch (final Exception e) {
LOGGER.log(Level.ERROR, "Loads markdown guide failed", e);
} finally {
IOUtils.closeQuietly(inputStream);
}
dataModelService.fillHeaderAndFooter(request, response, dataModel);
}
/**
* Shows index.
*
* @param context the specified context
* @param request the specified request
* @param response the specified response
* @throws Exception exception
*/
@RequestProcessing(value = {"", "/"}, method = HTTPRequestMethod.GET)
@Before(adviceClass = {StopwatchStartAdvice.class})
@After(adviceClass = {PermissionGrant.class, StopwatchEndAdvice.class})
public void showIndex(final HTTPRequestContext context, final HttpServletRequest request, final HttpServletResponse response)
throws Exception {
final AbstractFreeMarkerRenderer renderer = new SkinRenderer(request);
context.setRenderer(renderer);
renderer.setTemplateName("index.ftl");
final Map<String, Object> dataModel = renderer.getDataModel();
final int avatarViewMode = (int) request.getAttribute(UserExt.USER_AVATAR_VIEW_MODE);
final List<JSONObject> recentArticles = articleQueryService.getIndexRecentArticles(avatarViewMode);
dataModel.put(Common.RECENT_ARTICLES, recentArticles);
JSONObject currentUser = userQueryService.getCurrentUser(request);
if (null == currentUser) {
userMgmtService.tryLogInWithCookie(request, context.getResponse());
}
currentUser = userQueryService.getCurrentUser(request);
if (null != currentUser) {
if (!UserExt.finshedGuide(currentUser)) {
response.sendRedirect(Latkes.getServePath() + "/guide");
return;
}
final String userId = currentUser.optString(Keys.OBJECT_ID);
final int pageSize = Symphonys.getInt("indexArticlesCnt");
final List<JSONObject> followingTagArticles = articleQueryService.getFollowingTagArticles(
avatarViewMode, userId, 1, pageSize);
dataModel.put(Common.FOLLOWING_TAG_ARTICLES, followingTagArticles);
final List<JSONObject> followingUserArticles = articleQueryService.getFollowingUserArticles(
avatarViewMode, userId, 1, pageSize);
dataModel.put(Common.FOLLOWING_USER_ARTICLES, followingUserArticles);
} else {
dataModel.put(Common.FOLLOWING_TAG_ARTICLES, Collections.emptyList());
dataModel.put(Common.FOLLOWING_USER_ARTICLES, Collections.emptyList());
}<|fim▁hole|> final List<JSONObject> timelines = timelineMgmtService.getTimelines();
dataModel.put(Common.TIMELINES, timelines);
dataModel.put(Common.SELECTED, Common.INDEX);
dataModelService.fillHeaderAndFooter(request, response, dataModel);
dataModelService.fillIndexTags(dataModel);
}
/**
* Shows recent articles.
*
* @param context the specified context
* @param request the specified request
* @param response the specified response
* @throws Exception exception
*/
@RequestProcessing(value = {"/recent", "/recent/hot", "/recent/good", "/recent/reply"}, method = HTTPRequestMethod.GET)
@Before(adviceClass = {StopwatchStartAdvice.class, AnonymousViewCheck.class})
@After(adviceClass = {PermissionGrant.class, StopwatchEndAdvice.class})
public void showRecent(final HTTPRequestContext context, final HttpServletRequest request, final HttpServletResponse response)
throws Exception {
final AbstractFreeMarkerRenderer renderer = new SkinRenderer(request);
context.setRenderer(renderer);
renderer.setTemplateName("recent.ftl");
final Map<String, Object> dataModel = renderer.getDataModel();
String pageNumStr = request.getParameter("p");
if (Strings.isEmptyOrNull(pageNumStr) || !Strings.isNumeric(pageNumStr)) {
pageNumStr = "1";
}
final int pageNum = Integer.valueOf(pageNumStr);
int pageSize = Symphonys.getInt("indexArticlesCnt");
final JSONObject user = userQueryService.getCurrentUser(request);
if (null != user) {
pageSize = user.optInt(UserExt.USER_LIST_PAGE_SIZE);
if (!UserExt.finshedGuide(user)) {
response.sendRedirect(Latkes.getServePath() + "/guide");
return;
}
}
final int avatarViewMode = (int) request.getAttribute(UserExt.USER_AVATAR_VIEW_MODE);
String sortModeStr = StringUtils.substringAfter(request.getRequestURI(), "/recent");
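// The URI suffix after "/recent" selects the sort mode: "" = latest, "/hot", "/good", "/reply".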
int sortMode;
switch (sortModeStr) {
case "":
sortMode = 0;
break;
case "/hot":
sortMode = 1;
break;
case "/good":
sortMode = 2;
break;
case "/reply":
sortMode = 3;
break;
default:
sortMode = 0;
}
final JSONObject result = articleQueryService.getRecentArticles(avatarViewMode, sortMode, pageNum, pageSize);
final List<JSONObject> allArticles = (List<JSONObject>) result.get(Article.ARTICLES);
dataModel.put(Common.SELECTED, Common.RECENT);
final List<JSONObject> stickArticles = new ArrayList<>();
final Iterator<JSONObject> iterator = allArticles.iterator();
while (iterator.hasNext()) {
final JSONObject article = iterator.next();
final boolean stick = article.optInt(Article.ARTICLE_T_STICK_REMAINS) > 0;
article.put(Article.ARTICLE_T_IS_STICK, stick);
if (stick) {
stickArticles.add(article);
iterator.remove();
}
}
dataModel.put(Common.STICK_ARTICLES, stickArticles);
dataModel.put(Common.LATEST_ARTICLES, allArticles);
final JSONObject pagination = result.getJSONObject(Pagination.PAGINATION);
final int pageCount = pagination.optInt(Pagination.PAGINATION_PAGE_COUNT);
final List<Integer> pageNums = (List<Integer>) pagination.get(Pagination.PAGINATION_PAGE_NUMS);
if (!pageNums.isEmpty()) {
dataModel.put(Pagination.PAGINATION_FIRST_PAGE_NUM, pageNums.get(0));
dataModel.put(Pagination.PAGINATION_LAST_PAGE_NUM, pageNums.get(pageNums.size() - 1));
}
dataModel.put(Pagination.PAGINATION_CURRENT_PAGE_NUM, pageNum);
dataModel.put(Pagination.PAGINATION_PAGE_COUNT, pageCount);
dataModel.put(Pagination.PAGINATION_PAGE_NUMS, pageNums);
dataModelService.fillHeaderAndFooter(request, response, dataModel);
dataModelService.fillRandomArticles(avatarViewMode, dataModel);
dataModelService.fillSideHotArticles(avatarViewMode, dataModel);
dataModelService.fillSideTags(dataModel);
dataModelService.fillLatestCmts(dataModel);
dataModel.put(Common.CURRENT, StringUtils.substringAfter(request.getRequestURI(), "/recent"));
}
/**
* Shows hot articles.
*
* @param context the specified context
* @param request the specified request
* @param response the specified response
* @throws Exception exception
*/
@RequestProcessing(value = "/hot", method = HTTPRequestMethod.GET)
@Before(adviceClass = {StopwatchStartAdvice.class, AnonymousViewCheck.class})
@After(adviceClass = {PermissionGrant.class, StopwatchEndAdvice.class})
public void showHotArticles(final HTTPRequestContext context,
final HttpServletRequest request, final HttpServletResponse response) throws Exception {
final AbstractFreeMarkerRenderer renderer = new SkinRenderer(request);
context.setRenderer(renderer);
renderer.setTemplateName("hot.ftl");
final Map<String, Object> dataModel = renderer.getDataModel();
int pageSize = Symphonys.getInt("indexArticlesCnt");
final JSONObject user = userQueryService.getCurrentUser(request);
if (null != user) {
pageSize = user.optInt(UserExt.USER_LIST_PAGE_SIZE);
}
final int avatarViewMode = (int) request.getAttribute(UserExt.USER_AVATAR_VIEW_MODE);
final List<JSONObject> indexArticles = articleQueryService.getHotArticles(avatarViewMode, pageSize);
dataModel.put(Common.INDEX_ARTICLES, indexArticles);
dataModel.put(Common.SELECTED, Common.HOT);
Stopwatchs.start("Fills");
try {
dataModelService.fillHeaderAndFooter(request, response, dataModel);
if (!(Boolean) dataModel.get(Common.IS_MOBILE)) {
dataModelService.fillRandomArticles(avatarViewMode, dataModel);
}
dataModelService.fillSideHotArticles(avatarViewMode, dataModel);
dataModelService.fillSideTags(dataModel);
dataModelService.fillLatestCmts(dataModel);
} finally {
Stopwatchs.end();
}
}
/**
* Shows SymHub page.
*
* @param context the specified context
* @param request the specified request
* @param response the specified response
* @throws Exception exception
*/
@RequestProcessing(value = "/symhub", method = HTTPRequestMethod.GET)
@Before(adviceClass = {StopwatchStartAdvice.class, AnonymousViewCheck.class})
@After(adviceClass = {PermissionGrant.class, StopwatchEndAdvice.class})
public void showSymHub(final HTTPRequestContext context,
final HttpServletRequest request, final HttpServletResponse response) throws Exception {
final AbstractFreeMarkerRenderer renderer = new SkinRenderer(request);
context.setRenderer(renderer);
renderer.setTemplateName("other/symhub.ftl");
final Map<String, Object> dataModel = renderer.getDataModel();
final List<JSONObject> syms = Symphonys.getSyms();
dataModel.put("syms", (Object) syms);
Stopwatchs.start("Fills");
try {
final int avatarViewMode = (int) request.getAttribute(UserExt.USER_AVATAR_VIEW_MODE);
dataModelService.fillHeaderAndFooter(request, response, dataModel);
if (!(Boolean) dataModel.get(Common.IS_MOBILE)) {
dataModelService.fillRandomArticles(avatarViewMode, dataModel);
}
dataModelService.fillSideHotArticles(avatarViewMode, dataModel);
dataModelService.fillSideTags(dataModel);
dataModelService.fillLatestCmts(dataModel);
} finally {
Stopwatchs.end();
}
}
/**
* Shows perfect articles.
*
* @param context the specified context
* @param request the specified request
* @param response the specified response
* @throws Exception exception
*/
@RequestProcessing(value = "/perfect", method = HTTPRequestMethod.GET)
@Before(adviceClass = {StopwatchStartAdvice.class, AnonymousViewCheck.class})
@After(adviceClass = {PermissionGrant.class, StopwatchEndAdvice.class})
public void showPerfectArticles(final HTTPRequestContext context,
final HttpServletRequest request, final HttpServletResponse response) throws Exception {
final AbstractFreeMarkerRenderer renderer = new SkinRenderer(request);
context.setRenderer(renderer);
renderer.setTemplateName("perfect.ftl");
final Map<String, Object> dataModel = renderer.getDataModel();
String pageNumStr = request.getParameter("p");
if (Strings.isEmptyOrNull(pageNumStr) || !Strings.isNumeric(pageNumStr)) {
pageNumStr = "1";
}
final int pageNum = Integer.valueOf(pageNumStr);
int pageSize = Symphonys.getInt("indexArticlesCnt");
final JSONObject user = userQueryService.getCurrentUser(request);
if (null != user) {
pageSize = user.optInt(UserExt.USER_LIST_PAGE_SIZE);
if (!UserExt.finshedGuide(user)) {
response.sendRedirect(Latkes.getServePath() + "/guide");
return;
}
}
final int avatarViewMode = (int) request.getAttribute(UserExt.USER_AVATAR_VIEW_MODE);
final JSONObject result = articleQueryService.getPerfectArticles(avatarViewMode, pageNum, pageSize);
final List<JSONObject> perfectArticles = (List<JSONObject>) result.get(Article.ARTICLES);
dataModel.put(Common.PERFECT_ARTICLES, perfectArticles);
dataModel.put(Common.SELECTED, Common.PERFECT);
final JSONObject pagination = result.getJSONObject(Pagination.PAGINATION);
final int pageCount = pagination.optInt(Pagination.PAGINATION_PAGE_COUNT);
final List<Integer> pageNums = (List<Integer>) pagination.get(Pagination.PAGINATION_PAGE_NUMS);
if (!pageNums.isEmpty()) {
dataModel.put(Pagination.PAGINATION_FIRST_PAGE_NUM, pageNums.get(0));
dataModel.put(Pagination.PAGINATION_LAST_PAGE_NUM, pageNums.get(pageNums.size() - 1));
}
dataModel.put(Pagination.PAGINATION_CURRENT_PAGE_NUM, pageNum);
dataModel.put(Pagination.PAGINATION_PAGE_COUNT, pageCount);
dataModel.put(Pagination.PAGINATION_PAGE_NUMS, pageNums);
dataModelService.fillHeaderAndFooter(request, response, dataModel);
dataModelService.fillRandomArticles(avatarViewMode, dataModel);
dataModelService.fillSideHotArticles(avatarViewMode, dataModel);
dataModelService.fillSideTags(dataModel);
dataModelService.fillLatestCmts(dataModel);
}
/**
* Shows about.
*
* @param response the specified response
* @throws Exception exception
*/
@RequestProcessing(value = "/about", method = HTTPRequestMethod.GET)
@Before(adviceClass = StopwatchStartAdvice.class)
@After(adviceClass = StopwatchEndAdvice.class)
public void showAbout(final HttpServletResponse response) throws Exception {
response.setStatus(HttpServletResponse.SC_MOVED_PERMANENTLY);
response.setHeader("Location", "https://hacpai.com/article/1440573175609");
response.flushBuffer();
}
/**
* Shows b3log.
*
* @param context the specified context
* @param request the specified request
* @param response the specified response
* @throws Exception exception
*/
@RequestProcessing(value = "/b3log", method = HTTPRequestMethod.GET)
@Before(adviceClass = StopwatchStartAdvice.class)
@After(adviceClass = {PermissionGrant.class, StopwatchEndAdvice.class})
public void showB3log(final HTTPRequestContext context,
final HttpServletRequest request, final HttpServletResponse response) throws Exception {
final AbstractFreeMarkerRenderer renderer = new SkinRenderer(request);
context.setRenderer(renderer);
renderer.setTemplateName("other/b3log.ftl");
final Map<String, Object> dataModel = renderer.getDataModel();
dataModelService.fillHeaderAndFooter(request, response, dataModel);
final int avatarViewMode = (int) request.getAttribute(UserExt.USER_AVATAR_VIEW_MODE);
dataModelService.fillRandomArticles(avatarViewMode, dataModel);
dataModelService.fillSideHotArticles(avatarViewMode, dataModel);
dataModelService.fillSideTags(dataModel);
dataModelService.fillLatestCmts(dataModel);
}
/**
* Shows kill browser page with the specified context.
*
* @param context the specified context
* @param request the specified HTTP servlet request
* @param response the specified HTTP servlet response
*/
@RequestProcessing(value = "/kill-browser", method = HTTPRequestMethod.GET)
@Before(adviceClass = StopwatchStartAdvice.class)
@After(adviceClass = StopwatchEndAdvice.class)
public void showKillBrowser(final HTTPRequestContext context, final HttpServletRequest request, final HttpServletResponse response) {
final AbstractFreeMarkerRenderer renderer = new SkinRenderer(request);
renderer.setTemplateName("other/kill-browser.ftl");
context.setRenderer(renderer);
final Map<String, Object> dataModel = renderer.getDataModel();
final Map<String, String> langs = langPropsService.getAll(Locales.getLocale());
dataModel.putAll(langs);
Keys.fillRuntime(dataModel);
dataModelService.fillMinified(dataModel);
}
}<|fim▁end|> |
final List<JSONObject> perfectArticles = articleQueryService.getIndexPerfectArticles(avatarViewMode);
dataModel.put(Common.PERFECT_ARTICLES, perfectArticles);
|
<|file_name|>cli-client.js<|end_file_name|><|fim▁begin|>const fetch = require("isomorphic-fetch");
function getUrl(query, variables) {
const urlRoot = `http://localhost:3000/graphql?query=${query}`;
if (variables == null) {
return urlRoot;
} else {
return urlRoot + `&variables=${JSON.stringify(variables)}`;
}
}
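// Send the query to the local GraphQL endpoint via HTTP GET and pretty-print the JSON response.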
function run(query, variables) {
const url = getUrl(query, variables)
return fetch(encodeURI(url))
.then(response => response.json())
.then(json => (console.log(JSON.stringify(json, null, 2)), json))
.catch(e => console.error(e));
}
// Single operation query
run(`{
hero {
name
}
}`)
// Query operations can have names, but that's only
// required when a query has several operations.
// This one has a name, but only one op.
// The response looks the same as an un-named operation.
run(`
query HeroNameQuery {
hero {
name
}
}`);
// Requests more fields, including some deeply-nested members
// of the `friends` property.
// The syntax is like a relaxed version of JSON without values
run(`{
hero {
id
name
friends {
id, name
}
}
}`);
// GraphQL is designed for deeply-nested queries, hence "Graph".
run(`{
hero {
name
friends {
name
appearsIn
friends {
name
}
}
}
}`);
// Pass hard-coded parameters...
run(`{
human(id: "1000") {
name
}
}`)
// ... or typed runtime query parameters.
// parameter names start with $
// the sigil isn't included in the passed variable name
run(`
query HumanById($id: String!) {
human(id: $id) {
name
}
}`, { id: "1001" })
// fields can be aliased
// The field names in the response will be called `luke` and `leia`, not `human`.
// Aliases are required to fetch the same field several times - using
/// "human" twice is invalid.
run(`{
luke: human(id: "1003") {
name
},
leia: human(id: "1000") {
name
}
}`);
// Common parts of queries can be extracted into fragments
// and spread into queries with a syntax similar to JavaScript's object spread.
run(`
fragment HumanFragment on Human {
name, homePlanet
}
<|fim▁hole|> ...HumanFragment
},
leia: human(id: "1003") {
...HumanFragment
}
}
`)
// The special field __typename identifies the result object's type.
// This is especially useful for interface fields.
run(`
fragment HeroFragment on Character {
name
__typename
}
{
hope: hero(episode: NEWHOPE) {
...HeroFragment
},
empire: hero(episode: EMPIRE) {
...HeroFragment
},
jedi: hero(episode: JEDI) {
...HeroFragment
},
hero {
...HeroFragment
}
}
`)<|fim▁end|> | {
luke: human(id: "1000") { |
<|file_name|>CsvToMySql.java<|end_file_name|><|fim▁begin|>package com.camillepradel.movierecommender.utils;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.sql.Connection;
import java.sql.Date;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Arrays;
public class CsvToMySql {
static final String pathToCsvFiles = "D:\\MovieRecommender\\src\\main\\java\\com\\camillepradel\\movierecommender\\utils\\";
static final String usersCsvFile = pathToCsvFiles + "users.csv";
static final String moviesCsvFile = pathToCsvFiles + "movies.csv";
static final String genresCsvFile = pathToCsvFiles + "genres.csv";
static final String movGenreCsvFile = pathToCsvFiles + "mov_genre.csv";
static final String ratingsCsvFile = pathToCsvFiles + "ratings.csv";
static final String friendsCsvFile = pathToCsvFiles + "friends.csv";
static final String cvsSplitBy = ",";
private static void commitUsers(Connection connection) throws SQLException {
System.out.println(usersCsvFile);
// create table
Statement statement = connection.createStatement();
statement.executeUpdate("CREATE TABLE IF NOT EXISTS users (\n"
+ " id int(11) NOT NULL AUTO_INCREMENT,\n"
+ " age int(11) NOT NULL,\n"
+ " sex varchar(1) NOT NULL,\n"
+ " occupation varchar(60) NOT NULL,\n"
+ " zip varchar(6) NOT NULL,\n"
+ " PRIMARY KEY (`id`)\n"
+ ");");
// populate table
try (BufferedReader br = new BufferedReader(new FileReader(usersCsvFile))) {
String insertQuery = "INSERT INTO users (id, age, sex, occupation, zip) VALUES (?, ?, ?, ?, ?)";
PreparedStatement insertUsers = null;
try {
connection.setAutoCommit(false);
insertUsers = connection.prepareStatement(insertQuery);
String line;
br.readLine(); // skip first line
while ((line = br.readLine()) != null) {
String[] values = line.split(cvsSplitBy);
insertUsers.setInt(1, Integer.parseInt(values[0]));
insertUsers.setInt(2, Integer.parseInt(values[1]));
insertUsers.setString(3, values[2]);
insertUsers.setString(4, values[3]);
insertUsers.setString(5, values[4]);
insertUsers.executeUpdate();
}
connection.commit();
} catch (SQLException e) {
e.printStackTrace();
if (connection != null) {
try {
System.err.print("Transaction is being rolled back");
connection.rollback();
} catch (SQLException e2) {
e2.printStackTrace();
}
}
} finally {
if (insertUsers != null) {
insertUsers.close();
}
connection.setAutoCommit(true);
}
} catch (IOException e) {
e.printStackTrace();
}
}
private static void commitMovies(Connection connection) throws SQLException {
// movies.csv
System.out.println(moviesCsvFile);
// create table
Statement statement = connection.createStatement();
statement.executeUpdate("CREATE TABLE IF NOT EXISTS movies (\n"
+ " id int(11) NOT NULL AUTO_INCREMENT,\n"
+ " title varchar(200) NOT NULL,\n"
+ " date date NOT NULL,\n"
+ " PRIMARY KEY (`id`)\n"
+ ");");
// populate table
try (BufferedReader br = new BufferedReader(new FileReader(moviesCsvFile))) {
String insertQuery = "INSERT INTO movies (id, title, date) VALUES (?, ?, ?)";
PreparedStatement insertMovies = null;
try {
connection.setAutoCommit(false);
insertMovies = connection.prepareStatement(insertQuery);
String line;
br.readLine(); // skip first line
while ((line = br.readLine()) != null) {
String[] values = line.split(cvsSplitBy);
int movieId = Integer.parseInt(values[0]);
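// Titles may themselves contain commas, so re-join the middle columns;
// the last column is the release date as a Unix timestamp in seconds.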
String title = String.join(",", Arrays.copyOfRange(values, 1, values.length - 1));
Date date = new Date(Long.parseLong(values[values.length - 1]) * 1000);
insertMovies.setInt(1, movieId);
insertMovies.setString(2, title);
insertMovies.setDate(3, date);
insertMovies.executeUpdate();
}
connection.commit();
} catch (SQLException e) {
e.printStackTrace();
if (connection != null) {
try {
System.err.print("Transaction is being rolled back");
connection.rollback();
} catch (SQLException e2) {
e2.printStackTrace();
}
}
} finally {
if (insertMovies != null) {
insertMovies.close();
}
connection.setAutoCommit(true);
}
} catch (IOException e) {
e.printStackTrace();
}
}
private static void commitGenres(Connection connection) throws SQLException {
// genres.csv
System.out.println(genresCsvFile);
// create table
Statement statement = connection.createStatement();
statement.executeUpdate("CREATE TABLE IF NOT EXISTS genres (\n"
+ " name varchar(60) NOT NULL,\n"
+ " id int(11) NOT NULL AUTO_INCREMENT,\n"
+ " PRIMARY KEY (`id`)\n"
+ ");");
// necessary to make mySQL accept 0 as a valid id value for genre unknown
statement.execute("SET SESSION sql_mode='NO_AUTO_VALUE_ON_ZERO';");
// populate table
try (BufferedReader br = new BufferedReader(new FileReader(genresCsvFile))) {
String insertQuery = "INSERT INTO genres (name, id) VALUES (?, ?)";
PreparedStatement insertGenres = null;
try {
connection.setAutoCommit(false);
insertGenres = connection.prepareStatement(insertQuery);
String line;
br.readLine(); // skip first line
while ((line = br.readLine()) != null) {
String[] values = line.split(cvsSplitBy);
String name = values[0];
int genreId = Integer.parseInt(values[1]);
insertGenres.setString(1, name);
insertGenres.setInt(2, genreId);
insertGenres.executeUpdate();
}
connection.commit();
} catch (SQLException e) {
e.printStackTrace();
if (connection != null) {
try {
System.err.print("Transaction is being rolled back");
connection.rollback();
} catch (SQLException e2) {
e2.printStackTrace();
}
}
} finally {
if (insertGenres != null) {
insertGenres.close();
}
connection.setAutoCommit(true);
}
} catch (IOException e) {
e.printStackTrace();
}
}
private static void commitMovieGenre(Connection connection) throws SQLException {
// mov_genre.csv
System.out.println(movGenreCsvFile);
// create table
Statement statement = connection.createStatement();
statement.executeUpdate("CREATE TABLE IF NOT EXISTS movie_genre (\n"
+ " movie_id int(11) NOT NULL,\n"
+ " genre_id int(11) NOT NULL,\n"
+ " KEY movie_id (movie_id),\n"
+ " KEY genre_id (genre_id)\n"
+ ");");
statement.executeUpdate("ALTER TABLE movie_genre\n"
+ " ADD CONSTRAINT movie_genre_to_movie FOREIGN KEY (movie_id) REFERENCES movies(id) ON DELETE CASCADE ON UPDATE CASCADE;\n");
statement.executeUpdate("ALTER TABLE movie_genre\n"
+ " ADD CONSTRAINT movie_genre_to_genre FOREIGN KEY (genre_id) REFERENCES genres(id) ON DELETE CASCADE ON UPDATE CASCADE;\n");
// populate table
try (BufferedReader br = new BufferedReader(new FileReader(movGenreCsvFile))) {
String insertQuery = "INSERT INTO movie_genre (movie_id, genre_id) VALUES (?, ?)";
PreparedStatement insertMovieGenre = null;
try {
connection.setAutoCommit(false);
insertMovieGenre = connection.prepareStatement(insertQuery);
String line;
br.readLine(); // skip first line
while ((line = br.readLine()) != null) {
String[] values = line.split(cvsSplitBy);
int movieId = Integer.parseInt(values[0]);
int genreId = Integer.parseInt(values[1]);
insertMovieGenre.setInt(1, movieId);
insertMovieGenre.setInt(2, genreId);
insertMovieGenre.executeUpdate();
}
connection.commit();
} catch (SQLException e) {
e.printStackTrace();
if (connection != null) {
try {
System.err.print("Transaction is being rolled back");
connection.rollback();
} catch (SQLException e2) {
e2.printStackTrace();
}
}
} finally {
if (insertMovieGenre != null) {
insertMovieGenre.close();
}
connection.setAutoCommit(true);
}
} catch (IOException e) {
e.printStackTrace();
}
}
private static void commitRatings(Connection connection) throws SQLException {
// ratings.csv
System.out.println(ratingsCsvFile);
// create table
Statement statement = connection.createStatement();
statement.executeUpdate("CREATE TABLE IF NOT EXISTS ratings (\n"
+ " user_id int(11) NOT NULL,\n"
+ " movie_id int(11) NOT NULL,\n"
+ " rating int(11) NOT NULL,\n"
+ " date date NOT NULL,\n"
+ " KEY user_id (user_id),\n"
+ " KEY movie_id (movie_id)\n"
+ ");");
statement.executeUpdate("ALTER TABLE ratings\n"
+ " ADD CONSTRAINT ratings_to_user FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE ON UPDATE CASCADE;\n");
statement.executeUpdate("ALTER TABLE ratings\n"
+ " ADD CONSTRAINT ratings_to_movie FOREIGN KEY (movie_id) REFERENCES movies(id) ON DELETE CASCADE ON UPDATE CASCADE;\n");
statement.executeUpdate("ALTER TABLE ratings\n"
+ " ADD UNIQUE unique_index(user_id, movie_id);\n");
// populate table
try (BufferedReader br = new BufferedReader(new FileReader(ratingsCsvFile))) {
String insertQuery = "INSERT INTO ratings (user_id, movie_id, rating, date) VALUES (?, ?, ?, ?)";
PreparedStatement insertRatings = null;
try {
connection.setAutoCommit(false);
insertRatings = connection.prepareStatement(insertQuery);
String line;
br.readLine(); // skip first line
while ((line = br.readLine()) != null) {
String[] values = line.split(cvsSplitBy);
int userId = Integer.parseInt(values[0]);
int movieId = Integer.parseInt(values[1]);
int ratingValue = Integer.parseInt(values[2]);
Date date = new Date(Long.parseLong(values[3]) * 1000);
insertRatings.setInt(1, userId);
insertRatings.setInt(2, movieId);
insertRatings.setInt(3, ratingValue);
insertRatings.setDate(4, date);
insertRatings.executeUpdate();
}
connection.commit();
} catch (SQLException e) {
e.printStackTrace();
if (connection != null) {
try {
System.err.print("Transaction is being rolled back");
connection.rollback();
} catch (SQLException e2) {
e2.printStackTrace();
}
}
} finally {
if (insertRatings != null) {
insertRatings.close();
}
connection.setAutoCommit(true);
}
} catch (IOException e) {
e.printStackTrace();
}
}
private static void commitFriends(Connection connection) throws SQLException {
// friends.csv
System.out.println(friendsCsvFile);
// create table
Statement statement = connection.createStatement();
statement.executeUpdate("CREATE TABLE IF NOT EXISTS friends (\n"
+ " user1_id int(11) NOT NULL,\n"
+ " user2_id int(11) NOT NULL,\n"
+ " KEY user1_id (user1_id),\n"
+ " KEY user2_id (user2_id)\n"
+ ");");
statement.executeUpdate("ALTER TABLE friends\n"
+ " ADD CONSTRAINT friends_to_user1 FOREIGN KEY (user1_id) REFERENCES users(id) ON DELETE CASCADE ON UPDATE CASCADE;\n");
statement.executeUpdate("ALTER TABLE friends\n"
+ " ADD CONSTRAINT friends_to_user2 FOREIGN KEY (user2_id) REFERENCES users(id) ON DELETE CASCADE ON UPDATE CASCADE;\n");
// populate table
try (BufferedReader br = new BufferedReader(new FileReader(friendsCsvFile))) {
String insertQuery = "INSERT INTO friends (user1_id, user2_id) VALUES (?, ?)";
PreparedStatement insertFriends = null;
try {
connection.setAutoCommit(false);
insertFriends = connection.prepareStatement(insertQuery);
String line;
br.readLine(); // skip first line
while ((line = br.readLine()) != null) {
String[] values = line.split(cvsSplitBy);
int user1Id = Integer.parseInt(values[0]);
int user2Id = Integer.parseInt(values[1]);
insertFriends.setInt(1, user1Id);
insertFriends.setInt(2, user2Id);
insertFriends.executeUpdate();
}
connection.commit();
} catch (SQLException e) {
e.printStackTrace();
if (connection != null) {
try {<|fim▁hole|> System.err.print("Transaction is being rolled back");
connection.rollback();
} catch (SQLException e2) {
e2.printStackTrace();
}
}
} finally {
if (insertFriends != null) {
insertFriends.close();
}
connection.setAutoCommit(true);
}
} catch (IOException e) {
e.printStackTrace();
}
}
public static void main(String[] args) {
// load JDBC driver
try {
Class.forName("com.mysql.cj.jdbc.Driver");
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
// db connection info
String url = "jdbc:mysql://localhost:3306"
+ "?zeroDateTimeBehavior=convertToNull&useUnicode=true&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC";
String login = "admin";
String password = "Q86PhnJRiEa7";
Connection connection = null;
try {
connection = DriverManager.getConnection(url, login, password);
Statement statement = connection.createStatement();
// create database
statement.executeUpdate("DROP DATABASE IF EXISTS movie_recommender;");
statement.executeUpdate("CREATE DATABASE movie_recommender;");
statement.executeUpdate("USE movie_recommender;");
commitUsers(connection);
commitMovies(connection);
commitGenres(connection);
commitMovieGenre(connection);
commitRatings(connection);
commitFriends(connection);
} catch (SQLException e) {
e.printStackTrace();
} finally {
if (connection != null) {
try {
connection.close();
} catch (SQLException ignore) {
// exception occurred while closing the connection -> nothing else can be done
}
}
}
System.out.println("done");
}
}<|fim▁end|> | |
<|file_name|>sync_broadcast.rs<|end_file_name|><|fim▁begin|>#![allow(clippy::cognitive_complexity)]
#![warn(rust_2018_idioms)]
#![cfg(feature = "sync")]
#[cfg(target_arch = "wasm32")]
use wasm_bindgen_test::wasm_bindgen_test as test;
use tokio::sync::broadcast;
use tokio_test::task;
use tokio_test::{
assert_err, assert_ok, assert_pending, assert_ready, assert_ready_err, assert_ready_ok,
};
use std::sync::Arc;
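// Helper macros asserting the outcome of `try_recv()`: a received value, an empty
// channel, a receiver that lagged by `n` messages, or a closed channel.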
macro_rules! assert_recv {
($e:expr) => {
match $e.try_recv() {
Ok(value) => value,
Err(e) => panic!("expected recv; got = {:?}", e),
}
};
}
macro_rules! assert_empty {
($e:expr) => {
match $e.try_recv() {
Ok(value) => panic!("expected empty; got = {:?}", value),
Err(broadcast::error::TryRecvError::Empty) => {}
Err(e) => panic!("expected empty; got = {:?}", e),
}
};
}
macro_rules! assert_lagged {
($e:expr, $n:expr) => {
match assert_err!($e) {
broadcast::error::TryRecvError::Lagged(n) => {
assert_eq!(n, $n);
}
_ => panic!("did not lag"),
}
};
}
macro_rules! assert_closed {
($e:expr) => {
match assert_err!($e) {
broadcast::error::TryRecvError::Closed => {}
_ => panic!("did not lag"),
}
};
}
trait AssertSend: Send + Sync {}
impl AssertSend for broadcast::Sender<i32> {}
impl AssertSend for broadcast::Receiver<i32> {}
#[test]
fn send_try_recv_bounded() {
let (tx, mut rx) = broadcast::channel(16);
assert_empty!(rx);
let n = assert_ok!(tx.send("hello"));
assert_eq!(n, 1);
let val = assert_recv!(rx);
assert_eq!(val, "hello");
assert_empty!(rx);
}
#[test]
fn send_two_recv() {
let (tx, mut rx1) = broadcast::channel(16);
let mut rx2 = tx.subscribe();
assert_empty!(rx1);
assert_empty!(rx2);
let n = assert_ok!(tx.send("hello"));
assert_eq!(n, 2);
let val = assert_recv!(rx1);
assert_eq!(val, "hello");
let val = assert_recv!(rx2);
assert_eq!(val, "hello");
assert_empty!(rx1);
assert_empty!(rx2);
}
#[test]
fn send_recv_bounded() {
let (tx, mut rx) = broadcast::channel(16);
let mut recv = task::spawn(rx.recv());
assert_pending!(recv.poll());
assert_ok!(tx.send("hello"));
assert!(recv.is_woken());
let val = assert_ready_ok!(recv.poll());
assert_eq!(val, "hello");
}
#[test]
fn send_two_recv_bounded() {
let (tx, mut rx1) = broadcast::channel(16);
let mut rx2 = tx.subscribe();
let mut recv1 = task::spawn(rx1.recv());
let mut recv2 = task::spawn(rx2.recv());
assert_pending!(recv1.poll());
assert_pending!(recv2.poll());
assert_ok!(tx.send("hello"));
assert!(recv1.is_woken());
assert!(recv2.is_woken());
let val1 = assert_ready_ok!(recv1.poll());
let val2 = assert_ready_ok!(recv2.poll());
assert_eq!(val1, "hello");
assert_eq!(val2, "hello");
drop((recv1, recv2));
let mut recv1 = task::spawn(rx1.recv());
let mut recv2 = task::spawn(rx2.recv());
assert_pending!(recv1.poll());
assert_ok!(tx.send("world"));
assert!(recv1.is_woken());
assert!(!recv2.is_woken());
let val1 = assert_ready_ok!(recv1.poll());
let val2 = assert_ready_ok!(recv2.poll());
assert_eq!(val1, "world");
assert_eq!(val2, "world");
}
#[test]
fn change_tasks() {
let (tx, mut rx) = broadcast::channel(1);
let mut recv = Box::pin(rx.recv());
let mut task1 = task::spawn(&mut recv);
assert_pending!(task1.poll());
let mut task2 = task::spawn(&mut recv);
assert_pending!(task2.poll());
tx.send("hello").unwrap();
assert!(task2.is_woken());
}
#[test]
fn send_slow_rx() {
let (tx, mut rx1) = broadcast::channel(16);
let mut rx2 = tx.subscribe();
{
let mut recv2 = task::spawn(rx2.recv());
{
let mut recv1 = task::spawn(rx1.recv());
assert_pending!(recv1.poll());
assert_pending!(recv2.poll());
assert_ok!(tx.send("one"));
assert!(recv1.is_woken());
assert!(recv2.is_woken());
assert_ok!(tx.send("two"));
let val = assert_ready_ok!(recv1.poll());
assert_eq!(val, "one");
}
let val = assert_ready_ok!(task::spawn(rx1.recv()).poll());
assert_eq!(val, "two");
let mut recv1 = task::spawn(rx1.recv());
assert_pending!(recv1.poll());
assert_ok!(tx.send("three"));
assert!(recv1.is_woken());
let val = assert_ready_ok!(recv1.poll());
assert_eq!(val, "three");
let val = assert_ready_ok!(recv2.poll());
assert_eq!(val, "one");
}
let val = assert_recv!(rx2);
assert_eq!(val, "two");
let val = assert_recv!(rx2);
assert_eq!(val, "three");
}
#[test]
fn drop_rx_while_values_remain() {
let (tx, mut rx1) = broadcast::channel(16);
let mut rx2 = tx.subscribe();
assert_ok!(tx.send("one"));
assert_ok!(tx.send("two"));
assert_recv!(rx1);
assert_recv!(rx2);
drop(rx2);
drop(rx1);
}
#[test]
fn lagging_rx() {
let (tx, mut rx1) = broadcast::channel(2);
let mut rx2 = tx.subscribe();
assert_ok!(tx.send("one"));
assert_ok!(tx.send("two"));
assert_eq!("one", assert_recv!(rx1));
assert_ok!(tx.send("three"));
// Lagged too far
let x = dbg!(rx2.try_recv());
assert_lagged!(x, 1);
// Calling again gets the next value
assert_eq!("two", assert_recv!(rx2));
assert_eq!("two", assert_recv!(rx1));
assert_eq!("three", assert_recv!(rx1));
assert_ok!(tx.send("four"));
assert_ok!(tx.send("five"));
assert_lagged!(rx2.try_recv(), 1);
assert_ok!(tx.send("six"));
assert_lagged!(rx2.try_recv(), 1);
}
#[test]
fn send_no_rx() {
let (tx, _) = broadcast::channel(16);
assert_err!(tx.send("hello"));
let mut rx = tx.subscribe();
assert_ok!(tx.send("world"));
let val = assert_recv!(rx);
assert_eq!("world", val);
}
#[test]
#[should_panic]
#[cfg(not(target_arch = "wasm32"))] // wasm currently doesn't support unwinding
fn zero_capacity() {
broadcast::channel::<()>(0);
}
#[test]
#[should_panic]
#[cfg(not(target_arch = "wasm32"))] // wasm currently doesn't support unwinding
fn capacity_too_big() {
use std::usize;
broadcast::channel::<()>(1 + (usize::MAX >> 1));
}
#[test]
#[cfg(not(target_arch = "wasm32"))] // wasm currently doesn't support unwinding
fn panic_in_clone() {
use std::panic::{self, AssertUnwindSafe};
#[derive(Eq, PartialEq, Debug)]
struct MyVal(usize);
impl Clone for MyVal {
fn clone(&self) -> MyVal {
assert_ne!(0, self.0);
MyVal(self.0)
}
}
let (tx, mut rx) = broadcast::channel(16);
assert_ok!(tx.send(MyVal(0)));
assert_ok!(tx.send(MyVal(1)));
let res = panic::catch_unwind(AssertUnwindSafe(|| {
let _ = rx.try_recv();
}));
assert_err!(res);
let val = assert_recv!(rx);
assert_eq!(val, MyVal(1));
}
#[test]
fn dropping_tx_notifies_rx() {
let (tx, mut rx1) = broadcast::channel::<()>(16);
let mut rx2 = tx.subscribe();<|fim▁hole|> let mut recv1 = task::spawn(rx1.recv());
let mut recv2 = task::spawn(rx2.recv());
assert_pending!(recv1.poll());
assert_pending!(recv2.poll());
drop(tx);
assert_pending!(recv1.poll());
assert_pending!(recv2.poll());
drop(tx2);
assert!(recv1.is_woken());
assert!(recv2.is_woken());
let err = assert_ready_err!(recv1.poll());
assert!(is_closed(err));
let err = assert_ready_err!(recv2.poll());
assert!(is_closed(err));
}
#[test]
fn unconsumed_messages_are_dropped() {
let (tx, rx) = broadcast::channel(16);
let msg = Arc::new(());
assert_ok!(tx.send(msg.clone()));
assert_eq!(2, Arc::strong_count(&msg));
drop(rx);
assert_eq!(1, Arc::strong_count(&msg));
}
#[test]
fn single_capacity_recvs() {
let (tx, mut rx) = broadcast::channel(1);
assert_ok!(tx.send(1));
assert_eq!(assert_recv!(rx), 1);
assert_empty!(rx);
}
#[test]
fn single_capacity_recvs_after_drop_1() {
let (tx, mut rx) = broadcast::channel(1);
assert_ok!(tx.send(1));
drop(tx);
assert_eq!(assert_recv!(rx), 1);
assert_closed!(rx.try_recv());
}
#[test]
fn single_capacity_recvs_after_drop_2() {
let (tx, mut rx) = broadcast::channel(1);
assert_ok!(tx.send(1));
assert_ok!(tx.send(2));
drop(tx);
assert_lagged!(rx.try_recv(), 1);
assert_eq!(assert_recv!(rx), 2);
assert_closed!(rx.try_recv());
}
#[test]
fn dropping_sender_does_not_overwrite() {
let (tx, mut rx) = broadcast::channel(2);
assert_ok!(tx.send(1));
assert_ok!(tx.send(2));
drop(tx);
assert_eq!(assert_recv!(rx), 1);
assert_eq!(assert_recv!(rx), 2);
assert_closed!(rx.try_recv());
}
#[test]
fn lagging_receiver_recovers_after_wrap_closed_1() {
let (tx, mut rx) = broadcast::channel(2);
assert_ok!(tx.send(1));
assert_ok!(tx.send(2));
assert_ok!(tx.send(3));
drop(tx);
assert_lagged!(rx.try_recv(), 1);
assert_eq!(assert_recv!(rx), 2);
assert_eq!(assert_recv!(rx), 3);
assert_closed!(rx.try_recv());
}
#[test]
fn lagging_receiver_recovers_after_wrap_closed_2() {
let (tx, mut rx) = broadcast::channel(2);
assert_ok!(tx.send(1));
assert_ok!(tx.send(2));
assert_ok!(tx.send(3));
assert_ok!(tx.send(4));
drop(tx);
assert_lagged!(rx.try_recv(), 2);
assert_eq!(assert_recv!(rx), 3);
assert_eq!(assert_recv!(rx), 4);
assert_closed!(rx.try_recv());
}
#[test]
fn lagging_receiver_recovers_after_wrap_open() {
let (tx, mut rx) = broadcast::channel(2);
assert_ok!(tx.send(1));
assert_ok!(tx.send(2));
assert_ok!(tx.send(3));
assert_lagged!(rx.try_recv(), 1);
assert_eq!(assert_recv!(rx), 2);
assert_eq!(assert_recv!(rx), 3);
assert_empty!(rx);
}
#[test]
fn receiver_len_with_lagged() {
let (tx, mut rx) = broadcast::channel(3);
tx.send(10).unwrap();
tx.send(20).unwrap();
tx.send(30).unwrap();
tx.send(40).unwrap();
assert_eq!(rx.len(), 4);
assert_eq!(assert_recv!(rx), 10);
tx.send(50).unwrap();
tx.send(60).unwrap();
assert_eq!(rx.len(), 5);
assert_lagged!(rx.try_recv(), 1);
}
fn is_closed(err: broadcast::error::RecvError) -> bool {
matches!(err, broadcast::error::RecvError::Closed)
}<|fim▁end|> |
let tx2 = tx.clone();
|
<|file_name|>generate-certs.py<|end_file_name|><|fim▁begin|><|fim▁hole|># Copyright (c) 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
A chain with four possible intermediates with different notBefore and notAfter
dates, for testing path building prioritization.
"""
import sys
sys.path += ['../..']
import gencerts
DATE_A = '150101120000Z'
DATE_B = '150102120000Z'
DATE_C = '180101120000Z'
DATE_D = '180102120000Z'
root = gencerts.create_self_signed_root_certificate('Root')
root.set_validity_range(DATE_A, DATE_D)
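# The four intermediates below share one key pair but have different validity
# windows (A..C, A..D, B..C, B..D), so path building must choose among them by date.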
int_ac = gencerts.create_intermediate_certificate('Intermediate', root)
int_ac.set_validity_range(DATE_A, DATE_C)
int_ad = gencerts.create_intermediate_certificate('Intermediate', root)
int_ad.set_validity_range(DATE_A, DATE_D)
int_ad.set_key(int_ac.get_key())
int_bc = gencerts.create_intermediate_certificate('Intermediate', root)
int_bc.set_validity_range(DATE_B, DATE_C)
int_bc.set_key(int_ac.get_key())
int_bd = gencerts.create_intermediate_certificate('Intermediate', root)
int_bd.set_validity_range(DATE_B, DATE_D)
int_bd.set_key(int_ac.get_key())
target = gencerts.create_end_entity_certificate('Target', int_ac)
target.set_validity_range(DATE_A, DATE_D)
gencerts.write_chain('The root', [root], out_pem='root.pem')
gencerts.write_chain('Intermediate with validity range A..C',
[int_ac], out_pem='int_ac.pem')
gencerts.write_chain('Intermediate with validity range A..D',
[int_ad], out_pem='int_ad.pem')
gencerts.write_chain('Intermediate with validity range B..C',
[int_bc], out_pem='int_bc.pem')
gencerts.write_chain('Intermediate with validity range B..D',
[int_bd], out_pem='int_bd.pem')
gencerts.write_chain('The target', [target], out_pem='target.pem')<|fim▁end|> | #!/usr/bin/python |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: UTF-8 -*-<|fim▁hole|>from distutils.core import setup
import switchscreen
shutil.copyfile("switchscreen.py", "switchscreen")
setup(
name = "switchscreen",
version = switchscreen.__version__,
description = "",
author = u"Régis FLORET",
    author_email = "[email protected]",
url = "http://code.google.com/p/switchscreen/",
scripts = [
'switchscreen',
]
)
os.remove("switchscreen")<|fim▁end|> |
import shutil
import os
|
<|file_name|>autowrapped_static_text.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import wx
from copy import copy
sWhitespace = ' \t\n'
def SplitAndKeep(string, splitchars = " \t\n"):
substrs = []<|fim▁hole|> if string[i] in splitchars:
substrs.append(string[:i])
substrs.append(string[i])
string = string[i+1:]
i = 0
else:
i += 1
if i >= len(string):
substrs.append(string)
break
return substrs
class AutowrappedStaticText(wx.StaticText):
"""A StaticText-like widget which implements word wrapping."""
def __init__(self, *args, **kwargs):
wx.StaticText.__init__(self, *args, **kwargs)
self.label = super(AutowrappedStaticText, self).GetLabel()
self.pieces = SplitAndKeep(self.label, sWhitespace)
self.Bind(wx.EVT_SIZE, self.OnSize)
self.lastWrap = None
self.Wrap()
def SetLabel(self, newLabel):
"""Store the new label and recalculate the wrapped version."""
self.label = newLabel
self.pieces = SplitAndKeep(self.label, sWhitespace)
self.Wrap()
def GetLabel(self):
"""Returns the label (unwrapped)."""
return self.label
def Wrap(self):
"""Wraps the words in label."""
maxWidth = self.GetParent().GetVirtualSizeTuple()[0] - 10
#TODO: Fix this so that we're not wasting cycles, but so that it actually works
#if self.lastWrap and self.lastWrap == maxWidth:
# return
self.lastWrap = maxWidth
pieces = copy(self.pieces)
lines = []
currentLine = []
currentString = ""
while len(pieces) > 0:
nextPiece = pieces.pop(0)
newString = currentString + nextPiece
newWidth = self.GetTextExtent(newString)[0]
currentPieceCount = len(currentLine)
if (currentPieceCount > 0 and newWidth > maxWidth) or nextPiece == '\n':
if currentPieceCount > 0 and currentLine[-1] in sWhitespace:
currentLine = currentLine[:-1]
if nextPiece in sWhitespace:
pieces = pieces[1:]
currentLine.append('\n')
lines.extend(currentLine)
currentLine = [nextPiece]
currentString = nextPiece
else:
currentString += nextPiece
currentLine.append(nextPiece)
lines.extend(currentLine)
line = "".join(lines)
super(AutowrappedStaticText, self).SetLabel(line)
self.Refresh()
def OnSize(self, event):
self.Wrap()<|fim▁end|> |
i = 0
while len(string) > 0: |
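# --- Editor's illustrative usage sketch (not part of the original file; the
# --- panel/sizer wiring below is an assumption) ---
# The class docstring above describes a StaticText replacement that re-wraps its
# label when the parent resizes; typical wiring might look like:
#
#   panel = wx.Panel(frame)
#   sizer = wx.BoxSizer(wx.VERTICAL)
#   text = AutowrappedStaticText(panel, label="A long sentence that should re-wrap "
#                                              "whenever the parent panel is resized.")
#   sizer.Add(text, 0, wx.EXPAND)
#   panel.SetSizer(sizer)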
<|file_name|>index.js<|end_file_name|><|fim▁begin|><|fim▁hole|>'use strict';
/**
* Load all public assets, e.g js, css, images
*/
exports.register = function (server, options, next) {
server.route({
method: 'GET',
path: '/public/{params*}',
config: {
description: 'load assets',
auth: false,
handler: {
directory: {
path: 'public'
}
}
}
});
return next();
};
exports.register.attributes = {
name: 'Assets'
};<|fim▁end|> | |
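// --- Editor's illustrative registration sketch (not part of the original plugin;
// --- the bootstrap below is an assumption for an older callback-style hapi server) ---
// server.register(require('./index'), (err) => {
//   if (err) { throw err; }
//   // A request such as GET /public/css/app.css would then be served from ./public
// });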
<|file_name|>default_log.cpp<|end_file_name|><|fim▁begin|>#include "pch.hpp"
#include "default_log.hpp"
#include "service_log.hpp"
#include "service_helpers.hpp"
namespace be {<|fim▁hole|>///////////////////////////////////////////////////////////////////////////////
bool default_log_available() {
return check_service<Log>();
}
///////////////////////////////////////////////////////////////////////////////
Log& default_log() {
return service<Log>();
}
} // be<|fim▁end|> | |
<|file_name|>20120430B.py<|end_file_name|><|fim▁begin|>"""
Consider this game: Write 8 blanks on a sheet of paper. Randomly pick a digit 0-9. After seeing the digit, choose one
of the 8 blanks to place that digit in. Randomly choose another digit (with replacement) and then choose one of the 7
remaining blanks to place it in. Repeat until you've filled all 8 blanks. You win if the 8 digits written down are in
order from smallest to largest.
Write a program that plays this game by itself and determines whether it won or not. Run it 1 million times and post
your probability of winning.
Assigning digits to blanks randomly lets you win about 0.02% of the time. Here's a python script that wins about 10.3%<|fim▁hole|>import random
def trial():
indices = range(8) # remaining unassigned indices
s = [None] * 8 # the digits in their assigned places
while indices:
d = random.randint(0,9) # choose a random digit
index = indices[int(d*len(indices)/10)] # assign it an index
s[index] = str(d)
indices.remove(index)
return s == sorted(s)
print sum(trial() for _ in range(1000000))
thanks to cosmologicon for the challenge at /r/dailyprogrammer_ideas ..
link [http://www.reddit.com/r/dailyprogrammer_ideas/comments/s30be/intermediate_digitassigning_game/]
"""
import random
import itertools
def que_sort(data):
# print(data)
return all(b >= a for a, b in zip(data, itertools.islice(data, 1, None)))
TRIALS = 1
win = 0
for a in range(TRIALS):
l = [None] * 8
p = list(range(8))
while p:
d = random.randint(0,9)
# i = random.choice(p)
i = int(d * (len(p)) / 10)
print(p[i])
l[p[i]] = d
p.pop(i)
print(l)
if que_sort(l):
win += 1
print('{}/{} - {}%'.format(win, TRIALS, win/TRIALS*100))<|fim▁end|> | of the time. Can you do better?
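# --- Editor's illustrative sketch (not part of the original submission; the function
# --- name and trial count below are assumptions) ---
# The loop above is wired for a single traced game (TRIALS = 1 with debug prints).
# A run matching the challenge statement -- one million silent games using the same
# "place digit d at position int(d * remaining / 10)" strategy -- could look like:
#
#   def full_run(trials=1000000):
#       wins = 0
#       for _ in range(trials):
#           slots = [None] * 8
#           open_positions = list(range(8))
#           while open_positions:
#               d = random.randint(0, 9)
#               pick = open_positions[int(d * len(open_positions) / 10)]
#               slots[pick] = d
#               open_positions.remove(pick)
#           wins += slots == sorted(slots)
#       return wins / trials
#
#   print(full_run())  # expected to land near the ~10% quoted in the docstring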
|
<|file_name|>test_statistics.py<|end_file_name|><|fim▁begin|>import numpy as np
import pytest
from numpy.testing import assert_allclose
try:
import scipy
except ImportError:
HAS_SCIPY = False
else:
HAS_SCIPY = True
import astropy.units as u
from astropy.timeseries.periodograms.lombscargle import LombScargle
from astropy.timeseries.periodograms.lombscargle._statistics import (fap_single, inv_fap_single,
METHODS)
from astropy.timeseries.periodograms.lombscargle.utils import convert_normalization, compute_chi2_ref
METHOD_KWDS = dict(bootstrap={'n_bootstraps': 20, 'random_seed': 42})
NORMALIZATIONS = ['standard', 'psd', 'log', 'model']
def make_data(N=100, period=1, theta=[10, 2, 3], dy=1, rseed=0, units=False):
"""Generate some data for testing"""
rng = np.random.RandomState(rseed)
t = 5 * period * rng.rand(N)
omega = 2 * np.pi / period
y = theta[0] + theta[1] * np.sin(omega * t) + theta[2] * np.cos(omega * t)
dy = dy * (0.5 + rng.rand(N))
y += dy * rng.randn(N)
fmax = 5
if units:
return t * u.day, y * u.mag, dy * u.mag, fmax / u.day
else:
return t, y, dy, fmax
def null_data(N=1000, dy=1, rseed=0, units=False):
"""Generate null hypothesis data"""
rng = np.random.RandomState(rseed)
t = 100 * rng.rand(N)
dy = 0.5 * dy * (1 + rng.rand(N))
y = dy * rng.randn(N)
fmax = 40
if units:
return t * u.day, y * u.mag, dy * u.mag, fmax / u.day
else:
return t, y, dy, fmax
@pytest.mark.parametrize('normalization', NORMALIZATIONS)
@pytest.mark.parametrize('with_errors', [True, False])
@pytest.mark.parametrize('units', [False, True])
def test_distribution(normalization, with_errors, units):
t, y, dy, fmax = null_data(units=units)
if not with_errors:
dy = None
ls = LombScargle(t, y, dy, normalization=normalization)
freq, power = ls.autopower(maximum_frequency=fmax)
z = np.linspace(0, power.max(), 1000)
# Test that pdf and cdf are consistent
dz = z[1] - z[0]
z_mid = z[:-1] + 0.5 * dz
pdf = ls.distribution(z_mid)
cdf = ls.distribution(z, cumulative=True)
if isinstance(dz, u.Quantity):
dz = dz.value
assert_allclose(pdf, np.diff(cdf) / dz, rtol=1E-5, atol=1E-8)
# psd normalization without specified errors produces bad results
if not (normalization == 'psd' and not with_errors):
# Test that observed power is distributed according to the theoretical pdf
hist, bins = np.histogram(power, 30, density=True)
midpoints = 0.5 * (bins[1:] + bins[:-1])
pdf = ls.distribution(midpoints)
assert_allclose(hist, pdf, rtol=0.05, atol=0.05 * pdf[0])
@pytest.mark.parametrize('N', [10, 100, 1000])
@pytest.mark.parametrize('normalization', NORMALIZATIONS)
def test_inverse_single(N, normalization):
fap = np.linspace(0, 1, 11)
z = inv_fap_single(fap, N, normalization)
fap_out = fap_single(z, N, normalization)
assert_allclose(fap, fap_out)
@pytest.mark.parametrize('normalization', NORMALIZATIONS)
@pytest.mark.parametrize('use_errs', [True, False])
@pytest.mark.parametrize('units', [False, True])
def test_inverse_bootstrap(normalization, use_errs, units):
t, y, dy, fmax = null_data(units=units)
if not use_errs:
dy = None
fap = np.linspace(0, 1, 11)
method = 'bootstrap'
method_kwds = METHOD_KWDS['bootstrap']
ls = LombScargle(t, y, dy, normalization=normalization)
z = ls.false_alarm_level(fap, maximum_frequency=fmax,
method=method, method_kwds=method_kwds)
fap_out = ls.false_alarm_probability(z, maximum_frequency=fmax,
method=method,
method_kwds=method_kwds)
# atol = 1 / n_bootstraps
assert_allclose(fap, fap_out, atol=0.05)<|fim▁hole|>@pytest.mark.parametrize('normalization', NORMALIZATIONS)
@pytest.mark.parametrize('use_errs', [True, False])
@pytest.mark.parametrize('N', [10, 100, 1000])
@pytest.mark.parametrize('units', [False, True])
def test_inverses(method, normalization, use_errs, N, units, T=5):
if not HAS_SCIPY and method in ['baluev', 'davies']:
pytest.skip("SciPy required")
t, y, dy, fmax = make_data(N, rseed=543, units=units)
if not use_errs:
dy = None
method_kwds = METHOD_KWDS.get(method, None)
fap = np.logspace(-10, 0, 11)
ls = LombScargle(t, y, dy, normalization=normalization)
z = ls.false_alarm_level(fap, maximum_frequency=fmax,
method=method,
method_kwds=method_kwds)
fap_out = ls.false_alarm_probability(z, maximum_frequency=fmax,
method=method,
method_kwds=method_kwds)
assert_allclose(fap, fap_out)
@pytest.mark.parametrize('method', sorted(METHODS))
@pytest.mark.parametrize('normalization', NORMALIZATIONS)
@pytest.mark.parametrize('units', [False, True])
def test_false_alarm_smoketest(method, normalization, units):
if not HAS_SCIPY and method in ['baluev', 'davies']:
pytest.skip("SciPy required")
kwds = METHOD_KWDS.get(method, None)
t, y, dy, fmax = make_data(units=units)
ls = LombScargle(t, y, dy, normalization=normalization)
freq, power = ls.autopower(maximum_frequency=fmax)
Z = np.linspace(power.min(), power.max(), 30)
fap = ls.false_alarm_probability(Z, maximum_frequency=fmax,
method=method, method_kwds=kwds)
assert len(fap) == len(Z)
if method != 'davies':
assert np.all(fap <= 1)
assert np.all(fap[:-1] >= fap[1:]) # monotonically decreasing
@pytest.mark.parametrize('method', sorted(METHODS))
@pytest.mark.parametrize('use_errs', [True, False])
@pytest.mark.parametrize('normalization', sorted(set(NORMALIZATIONS) - {'psd'}))
@pytest.mark.parametrize('units', [False, True])
def test_false_alarm_equivalence(method, normalization, use_errs, units):
# Note: the PSD normalization is not equivalent to the others, in that it
# depends on the absolute errors rather than relative errors. Because the
# scaling contributes to the distribution, it cannot be converted directly
# from any of the three normalized versions.
if not HAS_SCIPY and method in ['baluev', 'davies']:
pytest.skip("SciPy required")
kwds = METHOD_KWDS.get(method, None)
t, y, dy, fmax = make_data(units=units)
if not use_errs:
dy = None
ls = LombScargle(t, y, dy, normalization=normalization)
freq, power = ls.autopower(maximum_frequency=fmax)
Z = np.linspace(power.min(), power.max(), 30)
fap = ls.false_alarm_probability(Z, maximum_frequency=fmax,
method=method, method_kwds=kwds)
# Compute the equivalent Z values in the standard normalization
# and check that the FAP is consistent
Z_std = convert_normalization(Z, len(t),
from_normalization=normalization,
to_normalization='standard',
chi2_ref=compute_chi2_ref(y, dy))
ls = LombScargle(t, y, dy, normalization='standard')
fap_std = ls.false_alarm_probability(Z_std, maximum_frequency=fmax,
method=method, method_kwds=kwds)
assert_allclose(fap, fap_std, rtol=0.1)<|fim▁end|> |
@pytest.mark.parametrize('method', sorted(set(METHODS) - {'bootstrap'})) |
<|file_name|>users.rs<|end_file_name|><|fim▁begin|>#![crate_name = "users"]
/*
* This file is part of the uutils coreutils package.
*
* (c) KokaKiwi <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/<|fim▁hole|>// Allow dead code here in order to keep all fields, constants here, for consistency.
#![allow(dead_code)]
extern crate getopts;
extern crate libc;
use getopts::Options;
use std::ffi::{CStr, CString};
use std::mem;
use std::ptr;
use utmpx::*;
#[path = "../common/util.rs"]
#[macro_use]
mod util;
#[path = "../common/utmpx.rs"]
mod utmpx;
extern {
fn getutxent() -> *const c_utmp;
fn getutxid(ut: *const c_utmp) -> *const c_utmp;
fn getutxline(ut: *const c_utmp) -> *const c_utmp;
fn pututxline(ut: *const c_utmp) -> *const c_utmp;
fn setutxent();
fn endutxent();
#[cfg(any(target_os = "macos", target_os = "linux"))]
fn utmpxname(file: *const libc::c_char) -> libc::c_int;
}
#[cfg(target_os = "freebsd")]
unsafe extern fn utmpxname(_file: *const libc::c_char) -> libc::c_int {
0
}
static NAME: &'static str = "users";
static VERSION: &'static str = "1.0.0";
pub fn uumain(args: Vec<String>) -> i32 {
let mut opts = Options::new();
opts.optflag("h", "help", "display this help and exit");
opts.optflag("V", "version", "output version information and exit");
let matches = match opts.parse(&args[1..]) {
Ok(m) => m,
Err(f) => panic!("{}", f),
};
if matches.opt_present("help") {
println!("{} {}", NAME, VERSION);
println!("");
println!("Usage:");
println!(" {} [OPTION]... [FILE]", NAME);
println!("");
println!("{}", opts.usage("Output who is currently logged in according to FILE."));
return 0;
}
if matches.opt_present("version") {
println!("{} {}", NAME, VERSION);
return 0;
}
let filename = if matches.free.len() > 0 {
matches.free[0].as_ref()
} else {
DEFAULT_FILE
};
exec(filename);
0
}
fn exec(filename: &str) {
unsafe {
utmpxname(CString::new(filename).unwrap().as_ptr());
}
let mut users = vec!();
unsafe {
setutxent();
loop {
let line = getutxent();
if line == ptr::null() {
break;
}
if (*line).ut_type == USER_PROCESS {
let user = String::from_utf8_lossy(CStr::from_ptr(mem::transmute(&(*line).ut_user)).to_bytes()).to_string();
users.push(user);
}
}
endutxent();
}
if users.len() > 0 {
users.sort();
println!("{}", users.connect(" "));
}
}<|fim▁end|> |
/* last synced with: whoami (GNU coreutils) 8.22 */
|
<|file_name|>Strings.java<|end_file_name|><|fim▁begin|>package cc.mallet.util;
/**
* Static utility methods for Strings
*/
final public class Strings {
public static int commonPrefixIndex (String[] strings)
{
int prefixLen = strings[0].length();
for (int i = 1; i < strings.length; i++) {
if (strings[i].length() < prefixLen)
prefixLen = strings[i].length();
int j = 0;
if (prefixLen == 0)
return 0;
while (j < prefixLen) {
if (strings[i-1].charAt(j) != strings[i].charAt(j)) {
prefixLen = j;
<|fim▁hole|> }
j++;
}
}
return prefixLen;
}
public static String commonPrefix (String[] strings)
{
return strings[0].substring (0, commonPrefixIndex(strings));
}
public static int count (String string, char ch)
{
int idx = -1;
int count = 0;
while ((idx = string.indexOf (ch, idx+1)) >= 0) { count++; };
return count;
}
public static double levenshteinDistance (String s, String t) {
int n = s.length();
int m = t.length();
int d[][]; // matrix
int i; // iterates through s
int j; // iterates through t
char s_i; // ith character of s
char t_j; // jth character of t
int cost; // cost
if (n == 0)
return 1.0;
if (m == 0)
return 1.0;
d = new int[n+1][m+1];
for (i = 0; i <= n; i++)
d[i][0] = i;
for (j = 0; j <= m; j++)
d[0][j] = j;
for (i = 1; i <= n; i++) {
s_i = s.charAt (i - 1);
for (j = 1; j <= m; j++) {
t_j = t.charAt (j - 1);
cost = (s_i == t_j) ? 0 : 1;
d[i][j] = minimum (d[i-1][j]+1, d[i][j-1]+1, d[i-1][j-1] + cost);
}
}
int longer = (n > m) ? n : m;
return (double)d[n][m] / longer; // Normalize to 0-1.
}
private static int minimum (int a, int b, int c) {
int mi = a;
if (b < mi) {
mi = b;
}
if (c < mi) {
mi = c;
}
return mi;
}
}<|fim▁end|> | break;
|
<|file_name|>LoginEventHandler.java<|end_file_name|><|fim▁begin|>package zoara.sfs2x.extension;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Arrays;
import zoara.sfs2x.extension.simulation.World;
import zoara.sfs2x.extension.utils.RoomHelper;
import com.smartfoxserver.bitswarm.sessions.ISession;
import com.smartfoxserver.v2.core.ISFSEvent;
import com.smartfoxserver.v2.core.SFSEventParam;
import com.smartfoxserver.v2.db.IDBManager;
import com.smartfoxserver.v2.exceptions.SFSErrorCode;
import com.smartfoxserver.v2.exceptions.SFSErrorData;
import com.smartfoxserver.v2.exceptions.SFSException;
import com.smartfoxserver.v2.exceptions.SFSLoginException;
import com.smartfoxserver.v2.extensions.BaseServerEventHandler;
import com.smartfoxserver.v2.security.DefaultPermissionProfile;
public class LoginEventHandler extends BaseServerEventHandler
{
@Override
public void handleServerEvent(ISFSEvent event) throws SFSException
{
// Grab parameters from client request
String userName = (String) event.getParameter(SFSEventParam.LOGIN_NAME);
String cryptedPass = (String) event.getParameter(SFSEventParam.LOGIN_PASSWORD);
ISession session = (ISession) event.getParameter(SFSEventParam.SESSION);
// Get password from DB
IDBManager dbManager = getParentExtension().getParentZone().getDBManager();
Connection connection;
try
{
// Grab a connection from the DBManager connection pool
connection = dbManager.getConnection();
// Build a prepared statement
PreparedStatement stmt = connection.prepareStatement(
"SELECT Password, ID, ClanID, Zone FROM player_info WHERE Username = ?"
);
stmt.setString(1, userName);
// Execute query
ResultSet res = stmt.executeQuery();
// Verify that one record was found
if (!res.first())
{
// This is the part that goes to the client
SFSErrorData errData = new SFSErrorData(SFSErrorCode.LOGIN_BAD_USERNAME);
errData.addParameter(userName);
// This is logged on the server side
throw new SFSLoginException("Bad username: " + userName, errData);
}
String dbpassword = res.getString("Password");
int dbId = res.getInt("ID");
//String zone = res.getString("Zone");
int clanId = res.getInt("ClanID");
String zone = res.getString("Zone");
// Return connection to the DBManager connection pool
connection.close();
String thisZone = getParentExtension().getParentZone().getName();
if ((zone.equals("Adult") && !zone.equals(thisZone)) ||
(!zone.equals("Adult") && thisZone.equals("Adult")))
{
SFSErrorData data = new SFSErrorData(SFSErrorCode.JOIN_GAME_ACCESS_DENIED);
data.addParameter(thisZone);
throw new SFSLoginException("Login failed. User " + userName +
" is not a member of Server " + thisZone, data);
}
World world = RoomHelper.getWorld(this);
if (world.hasPlayer(userName))
{
SFSErrorData data = new SFSErrorData(SFSErrorCode.LOGIN_ALREADY_LOGGED);
String[] params = { userName, thisZone };
data.setParams(Arrays.asList(params));
throw new SFSLoginException("Login failed: " + userName +
" is already logged in!", data);
}
// Verify the secure password
if (!getApi().checkSecurePassword(session, dbpassword, cryptedPass))
{
if (dbId < 10) <|fim▁hole|> trace("Passwords did not match, but logging in anyway.");
}
else
{
SFSErrorData data = new SFSErrorData(SFSErrorCode.LOGIN_BAD_PASSWORD);
data.addParameter(userName);
throw new SFSLoginException("Login failed for user: " + userName, data);
}
}
// Store the client dbId in the session
session.setProperty(ZoaraExtension.DATABASE_ID, dbId);
if (clanId != 0) {
session.setProperty(ZoaraExtension.CLAN_ID, clanId);
}
session.setProperty("$permission", DefaultPermissionProfile.STANDARD);
}
catch (SQLException e) // User name was not found
{
SFSErrorData errData = new SFSErrorData(SFSErrorCode.GENERIC_ERROR);
errData.addParameter("SQL Error: " + e.getMessage());
throw new SFSLoginException("A SQL Error occurred: " + e.getMessage(), errData);
}
}
}<|fim▁end|> | { |
<|file_name|>worker.py<|end_file_name|><|fim▁begin|>import os
import socket
import venusian
from botocore.exceptions import ClientError
from flowy.swf.client import SWFClient, IDENTITY_SIZE
from flowy.swf.decision import SWFActivityDecision
from flowy.swf.decision import SWFWorkflowDecision
from flowy.swf.history import SWFExecutionHistory
from flowy.utils import logger
from flowy.utils import setup_default_logger
from flowy.worker import Worker
__all__ = ['SWFWorkflowWorker', 'SWFActivityWorker']
class SWFWorker(Worker):
def __init__(self):
super(SWFWorker, self).__init__()
self.remote_reg_callbacks = []
def __call__(self, name, version, input_data, decision, *extra_args):
return super(SWFWorker, self).__call__(
(str(name), str(version)), input_data, decision, *extra_args)
def register_remote(self, swf_client, domain):
"""Register or check compatibility of all configs in Amazon SWF."""<|fim▁hole|> for remote_reg_callback in self.remote_reg_callbacks:
# Raises if there are registration problems
remote_reg_callback(swf_client, domain)
def register(self, config, func, version, name=None):
super(SWFWorker, self).register(config, func, (name, version))
def add_remote_reg_callback(self, callback):
self.remote_reg_callbacks.append(callback)
def make_scanner(self):
return venusian.Scanner(
register_task=self.register_task,
add_remote_reg_callback=self.add_remote_reg_callback)
class SWFWorkflowWorker(SWFWorker):
categories = ['swf_workflow']
# Be explicit about what arguments are expected
def __call__(self, name, version, input_data, decision, execution_history):
super(SWFWorkflowWorker, self).__call__(
name, version, input_data, decision, # needed for worker logic
decision, execution_history) # extra_args passed to proxies
def break_loop(self):
"""Used to exit the loop in tests. Return True to break."""
return False
def run_forever(self, domain, task_list,
swf_client=None,
setup_log=True,
register_remote=True,
identity=None):
"""Starts an endless single threaded/single process worker loop.
The worker polls endlessly for new decisions from the specified domain
and task list and runs them.
        If register_remote is set, all registered workflows are registered remotely.
An identity can be set to track this worker in the SWF console,
otherwise a default identity is generated from this machine domain and
process pid.
If setup_log is set, a default configuration for the logger is loaded.
A custom SWF client can be passed in swf_client, otherwise a default
client is used.
"""
if setup_log:
setup_default_logger()
identity = default_identity() if identity is None else identity
swf_client = SWFClient() if swf_client is None else swf_client
if register_remote:
self.register_remote(swf_client, domain)
try:
while 1:
if self.break_loop():
break
name, version, input_data, exec_history, decision = poll_decision(
swf_client, domain, task_list, identity)
self(name, version, input_data, decision, exec_history)
except KeyboardInterrupt:
pass
class SWFActivityWorker(SWFWorker):
categories = ['swf_activity']
# Be explicit about what arguments are expected
def __call__(self, name, version, input_data, decision):
# No extra arguments are used
super(SWFActivityWorker, self).__call__(
name, version, input_data, decision, # needed for worker logic
decision.heartbeat) # extra_args
def break_loop(self):
"""Used to exit the loop in tests. Return True to break."""
return False
def run_forever(self, domain, task_list,
swf_client=None,
setup_log=True,
register_remote=True,
identity=None):
"""Same as SWFWorkflowWorker.run_forever but for activities."""
if setup_log:
setup_default_logger()
identity = default_identity() if identity is None else identity
swf_client = SWFClient() if swf_client is None else swf_client
if register_remote:
self.register_remote(swf_client, domain)
try:
while 1:
if self.break_loop():
break
swf_response = {}
while not swf_response.get('taskToken'):
try:
swf_response = swf_client.poll_for_activity_task(
domain, task_list, identity=identity)
except ClientError:
# add a delay before retrying?
logger.exception('Error while polling for activities:')
at = swf_response['activityType']
decision = SWFActivityDecision(swf_client, swf_response['taskToken'])
self(at['name'], at['version'], swf_response['input'], decision)
except KeyboardInterrupt:
pass
def default_identity():
"""Generate a local identity string for this process."""
identity = "%s-%s" % (socket.getfqdn(), os.getpid())
return identity[-IDENTITY_SIZE:] # keep the most important part
def poll_decision(swf_client, domain, task_list, identity=None):
"""Poll a decision and create a SWFWorkflowContext structure.
:type swf_client: :class:`SWFClient`
:param swf_client: an implementation or duck typing of :class:`SWFClient`
:param domain: the domain containing the task list to poll
:param task_list: the task list from which to poll decision
:param identity: an identity str of the request maker
:rtype: tuple
:returns: a tuple consisting of (name, version, input_data,
:class:'SWFExecutionHistory', :class:`SWFWorkflowDecision`)
"""
first_page = poll_first_page(swf_client, domain, task_list, identity)
token = first_page['taskToken']
all_events = events(swf_client, domain, task_list, first_page, identity)
# Sometimes the first event is on the second page,
# and the first page is empty
first_event = next(all_events)
assert first_event['eventType'] == 'WorkflowExecutionStarted'
wesea = 'workflowExecutionStartedEventAttributes'
assert first_event[wesea]['taskList']['name'] == task_list
task_duration = first_event[wesea]['taskStartToCloseTimeout']
workflow_duration = first_event[wesea]['executionStartToCloseTimeout']
tags = first_event[wesea].get('tagList', None)
child_policy = first_event[wesea]['childPolicy']
name = first_event[wesea]['workflowType']['name']
version = first_event[wesea]['workflowType']['version']
input_data = first_event[wesea]['input']
try:
running, timedout, results, errors, order = load_events(all_events)
except _PaginationError:
# There's nothing better to do than to retry
return poll_decision(swf_client, domain, task_list, identity)
execution_history = SWFExecutionHistory(running, timedout, results, errors, order)
decision = SWFWorkflowDecision(swf_client, token, name, version, task_list,
task_duration, workflow_duration, tags,
child_policy)
return name, version, input_data, execution_history, decision
def poll_first_page(swf_client, domain, task_list, identity=None):
"""Return the response from loading the first page. In case of errors,
empty responses or whatnot retry until a valid response.
:type swf_client: :class:`SWFClient`
:param swf_client: an implementation or duck typing of :class:`SWFClient`
:param domain: the domain containing the task list to poll
:param task_list: the task list from which to poll for events
:param identity: an identity str of the request maker
:rtype: dict[str, str|int|list|dict]
:returns: a dict containing workflow information and list of events
"""
swf_response = {}
while not swf_response.get('taskToken'):
try:
swf_response = swf_client.poll_for_decision_task(domain, task_list,
identity=identity)
except ClientError:
logger.exception('Error while polling for decisions:')
return swf_response
def poll_page(swf_client, domain, task_list, token, identity=None):
"""Return a specific page. In case of errors retry a number of times.
:type swf_client: :class:`SWFClient`
:param swf_client: an implementation or duck typing of :class:`SWFClient`
:param domain: the domain containing the task list to poll
:param task_list: the task list from which to poll for events
:param token: the token string for the requested page
:param identity: an identity str of the request maker
:rtype: dict[str, str|int|list|dict]
:returns: a dict containing workflow information and list of events
"""
for _ in range(7): # give up after a limited number of retries
try:
swf_response = swf_client.poll_for_decision_task(
domain, task_list, identity=identity, next_page_token=token)
break
except ClientError:
logger.exception('Error while polling for decision page:')
else:
raise _PaginationError()
return swf_response
def events(swf_client, domain, task_list, first_page, identity=None):
"""Load pages one by one and generate all events found.
:type swf_client: :class:`SWFClient`
:param swf_client: an implementation or duck typing of :class:`SWFClient`
:param domain: the domain containing the task list to poll
:param task_list: the task list from which to poll for events
:param first_page: the page dict structure from which to start generating
the events, usually the response from :func:`poll_first_page`
:param identity: an identity str of the request maker
:rtype: collections.Iterator[dict[str, int|str|dict[str, int|str|dict]]
:returns: iterator over all of the events
"""
page = first_page
while 1:
for event in page['events']:
yield event
if not page.get('nextPageToken'):
break
page = poll_page(swf_client, domain, task_list, page['nextPageToken'],
identity=identity)
def load_events(event_iter):
"""Combine all events in their order.
This returns a tuple of the following things:
running - a set of the ids of running tasks
timedout - a set of the ids of tasks that have timedout
results - a dictionary of id -> result for each finished task
errors - a dictionary of id -> error message for each failed task
order - an list of task ids in the order they finished
"""
running, timedout = set(), set()
results, errors = {}, {}
order = []
event2call = {}
for event in event_iter:
e_type = event.get('eventType')
if e_type == 'ActivityTaskScheduled':
eid = event['activityTaskScheduledEventAttributes']['activityId']
event2call[event['eventId']] = eid
running.add(eid)
elif e_type == 'ActivityTaskCompleted':
atcea = 'activityTaskCompletedEventAttributes'
eid = event2call[event[atcea]['scheduledEventId']]
result = event[atcea]['result']
running.remove(eid)
results[eid] = result
order.append(eid)
elif e_type == 'ActivityTaskFailed':
atfea = 'activityTaskFailedEventAttributes'
eid = event2call[event[atfea]['scheduledEventId']]
reason = event[atfea]['reason']
running.remove(eid)
errors[eid] = reason
order.append(eid)
elif e_type == 'ActivityTaskTimedOut':
attoea = 'activityTaskTimedOutEventAttributes'
eid = event2call[event[attoea]['scheduledEventId']]
running.remove(eid)
timedout.add(eid)
order.append(eid)
elif e_type == 'ScheduleActivityTaskFailed':
satfea = 'scheduleActivityTaskFailedEventAttributes'
eid = event[satfea]['activityId']
reason = event[satfea]['cause']
# when a job is not found it's not even started
errors[eid] = reason
order.append(eid)
elif e_type == 'StartChildWorkflowExecutionInitiated':
scweiea = 'startChildWorkflowExecutionInitiatedEventAttributes'
eid = _subworkflow_call_key(event[scweiea]['workflowId'])
running.add(eid)
elif e_type == 'ChildWorkflowExecutionCompleted':
cwecea = 'childWorkflowExecutionCompletedEventAttributes'
eid = _subworkflow_call_key(
event[cwecea]['workflowExecution']['workflowId'])
result = event[cwecea]['result']
running.remove(eid)
results[eid] = result
order.append(eid)
elif e_type == 'ChildWorkflowExecutionFailed':
cwefea = 'childWorkflowExecutionFailedEventAttributes'
eid = _subworkflow_call_key(
event[cwefea]['workflowExecution']['workflowId'])
reason = event[cwefea]['reason']
running.remove(eid)
errors[eid] = reason
order.append(eid)
elif e_type == 'ChildWorkflowExecutionTimedOut':
cwetoea = 'childWorkflowExecutionTimedOutEventAttributes'
eid = _subworkflow_call_key(
event[cwetoea]['workflowExecution']['workflowId'])
running.remove(eid)
timedout.add(eid)
order.append(eid)
elif e_type == 'StartChildWorkflowExecutionFailed':
scwefea = 'startChildWorkflowExecutionFailedEventAttributes'
eid = _subworkflow_call_key(event[scwefea]['workflowId'])
reason = event[scwefea]['cause']
errors[eid] = reason
order.append(eid)
elif e_type == 'TimerStarted':
eid = event['timerStartedEventAttributes']['timerId']
running.add(eid)
elif e_type == 'TimerFired':
eid = event['timerFiredEventAttributes']['timerId']
running.remove(eid)
results[eid] = None
return running, timedout, results, errors, order
class _PaginationError(Exception):
"""Can't retrieve the next page after X retries."""
def _subworkflow_call_key(w_id):
return w_id.split(':')[-1]<|fim▁end|> | |
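# --- Editor's illustrative usage sketch (not part of the original module; the domain,
# --- task list and scanned package names below are assumptions) ---
# The run_forever docstrings above describe the polling loop; wiring a workflow worker
# up might look roughly like this:
#
#   worker = SWFWorkflowWorker()
#   scanner = worker.make_scanner()
#   scanner.scan(my_workflows_package, categories=worker.categories)
#   worker.run_forever('my-domain', 'my-task-list',
#                      register_remote=True,
#                      identity=default_identity())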
<|file_name|>unit-test.js<|end_file_name|><|fim▁begin|>var page = require('webpage').create();
var url;
if (phantom.args) {
url = phantom.args[0];
} else {
url = require('system').args[1];
}
page.onConsoleMessage = function (message) {
console.log(message);<|fim▁hole|>
function exit(code) {
setTimeout(function(){ phantom.exit(code); }, 0);
phantom.onError = function(){};
}
console.log("Loading URL: " + url);
page.open(url, function (status) {
if (status != "success") {
console.log('Failed to open ' + url);
phantom.exit(1);
}
console.log("Running test.");
var result = page.evaluate(function() {
return chess_game.test_runner.runner();
});
if (result != 0) {
console.log("*** Test failed! ***");
exit(1);
}
else {
console.log("Test succeeded.");
exit(0);
}
});<|fim▁end|> | }; |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>VERSION = (0, 11)
__version__ = '.'.join(map(str, VERSION))<|fim▁hole|>DATE = "2015-02-06"<|fim▁end|> | |
<|file_name|>location_mock.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Location, LocationStrategy, PlatformLocation} from '@angular/common';
import {EventEmitter, Injectable} from '@angular/core';
import {SubscriptionLike} from 'rxjs';
import {normalizeQueryParams} from '../../src/location/util';
/**
* A spy for {@link Location} that allows tests to fire simulated location events.
*
* @publicApi
*/
@Injectable()
export class SpyLocation implements Location {
urlChanges: string[] = [];
private _history: LocationState[] = [new LocationState('', '', null)];
private _historyIndex: number = 0;
/** @internal */
_subject: EventEmitter<any> = new EventEmitter();
/** @internal */
_baseHref: string = '';
/** @internal */
_platformStrategy: LocationStrategy = null!;
/** @internal */
_platformLocation: PlatformLocation = null!;
/** @internal */
_urlChangeListeners: ((url: string, state: unknown) => void)[] = [];
/** @internal */
_urlChangeSubscription: SubscriptionLike|null = null;
ngOnDestroy(): void {
this._urlChangeSubscription?.unsubscribe();
this._urlChangeListeners = [];
}
setInitialPath(url: string) {
this._history[this._historyIndex].path = url;
}
setBaseHref(url: string) {
this._baseHref = url;
}
path(): string {
return this._history[this._historyIndex].path;
}
getState(): unknown {
return this._history[this._historyIndex].state;
}
isCurrentPathEqualTo(path: string, query: string = ''): boolean {
const givenPath = path.endsWith('/') ? path.substring(0, path.length - 1) : path;
const currPath =
this.path().endsWith('/') ? this.path().substring(0, this.path().length - 1) : this.path();
return currPath == givenPath + (query.length > 0 ? ('?' + query) : '');
}
simulateUrlPop(pathname: string) {
this._subject.emit({'url': pathname, 'pop': true, 'type': 'popstate'});
}
simulateHashChange(pathname: string) {
const path = this.prepareExternalUrl(pathname);
this.pushHistory(path, '', null);
this.urlChanges.push('hash: ' + pathname);
// the browser will automatically fire popstate event before each `hashchange` event, so we need
// to simulate it.
this._subject.emit({'url': pathname, 'pop': true, 'type': 'popstate'});
this._subject.emit({'url': pathname, 'pop': true, 'type': 'hashchange'});
}
prepareExternalUrl(url: string): string {
if (url.length > 0 && !url.startsWith('/')) {
url = '/' + url;
}
return this._baseHref + url;
}
go(path: string, query: string = '', state: any = null) {
path = this.prepareExternalUrl(path);
this.pushHistory(path, query, state);
const locationState = this._history[this._historyIndex - 1];
if (locationState.path == path && locationState.query == query) {
return;
}
const url = path + (query.length > 0 ? ('?' + query) : '');
this.urlChanges.push(url);
this._notifyUrlChangeListeners(path + normalizeQueryParams(query), state);
}
replaceState(path: string, query: string = '', state: any = null) {
path = this.prepareExternalUrl(path);
const history = this._history[this._historyIndex];
if (history.path == path && history.query == query) {
return;
}
history.path = path;
history.query = query;
history.state = state;
const url = path + (query.length > 0 ? ('?' + query) : '');
this.urlChanges.push('replace: ' + url);
this._notifyUrlChangeListeners(path + normalizeQueryParams(query), state);
}
forward() {
if (this._historyIndex < (this._history.length - 1)) {
this._historyIndex++;
this._subject.emit(
{'url': this.path(), 'state': this.getState(), 'pop': true, 'type': 'popstate'});
}
}
back() {
if (this._historyIndex > 0) {
this._historyIndex--;
this._subject.emit(
{'url': this.path(), 'state': this.getState(), 'pop': true, 'type': 'popstate'});
}<|fim▁hole|> }
historyGo(relativePosition: number = 0): void {
const nextPageIndex = this._historyIndex + relativePosition;
if (nextPageIndex >= 0 && nextPageIndex < this._history.length) {
this._historyIndex = nextPageIndex;
this._subject.emit(
{'url': this.path(), 'state': this.getState(), 'pop': true, 'type': 'popstate'});
}
}
onUrlChange(fn: (url: string, state: unknown) => void): VoidFunction {
this._urlChangeListeners.push(fn);
if (!this._urlChangeSubscription) {
this._urlChangeSubscription = this.subscribe(v => {
this._notifyUrlChangeListeners(v.url, v.state);
});
}
return () => {
const fnIndex = this._urlChangeListeners.indexOf(fn);
this._urlChangeListeners.splice(fnIndex, 1);
if (this._urlChangeListeners.length === 0) {
this._urlChangeSubscription?.unsubscribe();
this._urlChangeSubscription = null;
}
};
}
/** @internal */
_notifyUrlChangeListeners(url: string = '', state: unknown) {
this._urlChangeListeners.forEach(fn => fn(url, state));
}
subscribe(
onNext: (value: any) => void, onThrow?: ((error: any) => void)|null,
onReturn?: (() => void)|null): SubscriptionLike {
return this._subject.subscribe({next: onNext, error: onThrow, complete: onReturn});
}
normalize(url: string): string {
return null!;
}
private pushHistory(path: string, query: string, state: any) {
if (this._historyIndex > 0) {
this._history.splice(this._historyIndex + 1);
}
this._history.push(new LocationState(path, query, state));
this._historyIndex = this._history.length - 1;
}
}
class LocationState {
constructor(public path: string, public query: string, public state: any) {}
}<|fim▁end|> | |
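// --- Editor's illustrative test sketch (not part of the original file; the expected
// --- values below are assumptions derived from the methods above) ---
// const spy = new SpyLocation();
// spy.setInitialPath('/dashboard');
// spy.go('/users', 'page=2');
// // spy.path() === '/users' and spy.urlChanges[0] === '/users?page=2'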
<|file_name|>buttons.js<|end_file_name|><|fim▁begin|>var buttons = function(req, res, next) {
var request = require('request');
var cheerio = require('cheerio');
var Case = require('case');
// var url = "http://clas.asu.edu";
var url = req.body.page;
var parsedResults = [];
//testing url argument site buttons casing
request(url, function (error, response, html) {
if (!error && response.statusCode == 200) {
var $ = cheerio.load(html);
$('.btn').each(function(i, element){
var text = $(this).text().trim();
var casing = Case.of($(this).text().trim());
if ( (casing == "sentence") || (casing == "header") ){
var passfail = "PASS";
} else {
var passfail = "FAIL";
}
var testResults = {
text: text,
casing: casing,
passfail: passfail
};<|fim▁hole|> parsedResults.push(testResults);
});
req.pf = parsedResults;
next();
};
});
};
module.exports = buttons;<|fim▁end|> | |
<|file_name|>model-selector-test.js<|end_file_name|><|fim▁begin|><|fim▁hole|>
moduleForComponent('model-selector', 'Unit | Component | model selector', {
// Specify the other units that are required for this test
// needs: ['component:foo', 'helper:bar'],
unit: true
});
test('it renders', function(assert) {
assert.expect(2);
// Creates the component instance
var component = this.subject();
assert.equal(component._state, 'preRender');
// Renders the component to the page
this.render();
assert.equal(component._state, 'inDOM');
});<|fim▁end|> | import { moduleForComponent, test } from 'ember-qunit'; |
<|file_name|>InsertAnchor.js<|end_file_name|><|fim▁begin|>//>>built
define(
"dojox/editor/plugins/nls/cs/InsertAnchor", //begin v1.x content
({
insertAnchor: "Vložit kotvu",
title: "Vlastnosti kotvy",
anchor: "Název:",
text: "Popis:",<|fim▁hole|> cancel: "Storno"
})
//end v1.x content
);<|fim▁end|> | set: "Nastavit", |
<|file_name|>wk02_twitter_test.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-#
"""
Basic Twitter Authentication
requirements: Python 2.5+ tweepy (easy_install tweepy | pip install tweepy)
"""
__author__ = 'Bernie Hogan'
__version__= '1.0'
import string
import codecs
import os
import pickle
import copy
import sys
import json
import webbrowser
import tweepy
from tweepy import Cursor
import twitterhelpers as th
def getFollowerCount(api, screen_name="BarackObama"):
user = api.get_user(screen_name)
return user.followers_count
def getFollowingCount(api, screen_name="BarackObama"):
user = api.get_user(screen_name)
print user
print dir(user)
return user.friends_count
if __name__=='__main__':
CONSUMER_KEY = th.CONSUMER_KEY<|fim▁hole|>
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
ACCESS_TOKEN_SECRET = th.ACCESS_TOKEN_SECRET
ACCESS_TOKEN = th.ACCESS_TOKEN
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
print "Now you have received an access token."
print "Or rather, your account has authorized this application to use the twitter api."
print "You have this many hits to the API left this hour: "
# print json.dumps(api.rate_limit_status(), indent = 1) #['remaining_hits']
print getFollowerCount(api, "blurky")
print getFollowingCount(api, "blurky")<|fim▁end|> | CONSUMER_SECRET = th.CONSUMER_SECRET |
<|file_name|>game.js<|end_file_name|><|fim▁begin|>import {
store
} from '../store.js';
import {
selectGameCurrentState,
selectGameChest,
selectGameName
} from '../selectors.js';
import {
deepCopy,
getProperty,
setPropertyInClone
} from '../util.js';
export const UPDATE_GAME_ROUTE = 'UPDATE_GAME_ROUTE';
export const UPDATE_GAME_STATIC_INFO = "UPDATE_GAME_STATIC_INFO";
export const UPDATE_GAME_CURRENT_STATE = "UPDATE_GAME_CURRENT_STATE";
export const updateGameRoute = (pageExtra) => {
const pieces = pageExtra.split("/");
//remove the trailing slash
if (!pieces[pieces.length - 1]) pieces.pop();
if (pieces.length != 2) {
console.warn("URL for game didn't have expected number of pieces");
return null;
}
return {
type: UPDATE_GAME_ROUTE,
name: pieces[0],
id: pieces[1],
}
}
export const updateGameStaticInfo = (chest, playersInfo, hasEmptySlots, open, visible, isOwner) => {
return {
type: UPDATE_GAME_STATIC_INFO,
chest,
playersInfo,
hasEmptySlots,
open,
visible,
isOwner
}
}
//currentState should be the unexpanded state (as passed in from server). Timer
//infos should be game.ActiveTimers. originalWallClockTime should be the time
//the state was received from the server (so that we can compute how much time
//has elapsed from what the server reported). This will install the currentState
//in, but also set up callbacks to update timer.TimeLeft for any timers in the
//state automatically.
export const installGameState = (currentState, timerInfos, originalWallClockTime) => (dispatch, getState) => {
const state = getState();
const chest = selectGameChest(state);
const gameName = selectGameName(state);
let [expandedState, pathsToTick] = expandState(currentState, timerInfos, chest, gameName);
dispatch(updateGameState(expandedState, pathsToTick, originalWallClockTime));
if (pathsToTick.length) window.requestAnimationFrame(doTick);
}
const updateGameState = (expandedCurrentState, pathsToTick, originalWallClockTime) => {
return {
type: UPDATE_GAME_CURRENT_STATE,
currentState: expandedCurrentState,
pathsToTick,
originalWallClockTime
}
}
//return [expandedState, pathsToTick]
const expandState = (currentState, timerInfos, chest, gameName) => {
//Takes the currentState and returns an object where all of the Stacks are replaced by actual references to the component they reference.
var pathsToTick = [];
let newState = deepCopy(currentState);
expandLeafState(newState, newState.Game, ["Game"], pathsToTick, timerInfos, chest, gameName)
for (var i = 0; i < newState.Players.length; i++) {
expandLeafState(newState, newState.Players[i], ["Players", i], pathsToTick, timerInfos, chest, gameName)
}
<|fim▁hole|> return [newState, pathsToTick];
}
const expandLeafState = (wholeState, leafState, pathToLeaf, pathsToTick, timerInfos, chest, gameName) => {
//Returns an expanded version of leafState. leafState should have keys that are either bools, floats, strings, or Stacks.
var entries = Object.entries(leafState);
for (var i = 0; i < entries.length; i++) {
let item = entries[i];
let key = item[0];
let val = item[1];
//Note: null is typeof "object"
if (val && typeof val == "object") {
if (val.Deck) {
expandStack(val, wholeState, chest, gameName);
} else if (val.IsTimer) {
expandTimer(val, pathToLeaf.concat([key]), pathsToTick, timerInfos);
}
}
}
//Copy in Player computed state if it exists, for convenience. Do it after expanding properties
if (pathToLeaf && pathToLeaf.length == 2 && pathToLeaf[0] == "Players") {
if (wholeState.Computed && wholeState.Computed.Players && wholeState.Computed.Players.length) {
leafState.Computed = wholeState.Computed.Players[pathToLeaf[1]];
}
}
}
const expandStack = (stack, wholeState, chest, gameName) => {
if (!stack.Deck) {
//Meh, I guess it's not a stack
return;
}
let components = Array(stack.Indexes.length).fill(null);
for (var i = 0; i < stack.Indexes.length; i++) {
let index = stack.Indexes[i];
if (index == -1) {
components[i] = null;
continue;
}
//TODO: this should be a constant
if(index == -2) {
//TODO: to handle this appropriately we'd need to know how to
//produce a GenericComponent for each Deck clientside.
components[i] = {};
} else {
components[i] = componentForDeckAndIndex(stack.Deck, index, wholeState, chest);
}
if (stack.IDs) {
components[i].ID = stack.IDs[i];
}
components[i].Deck = stack.Deck;
components[i].GameName = gameName;
}
stack.GameName = gameName;
stack.Components = components;
}
const expandTimer = (timer, pathToLeaf, pathsToTick, timerInfo) => {
//Always make sure these default to a number so databinding can use them.
timer.TimeLeft = 0;
timer.originalTimeLeft = 0;
if (!timerInfo) return;
let info = timerInfo[timer.ID];
if (!info) return;
timer.TimeLeft = info.TimeLeft;
timer.originalTimeLeft = timer.TimeLeft;
pathsToTick.push(pathToLeaf);
}
const componentForDeckAndIndex = (deckName, index, wholeState, chest) => {
let deck = chest.Decks[deckName];
if (!deck) return null;
let result = {...deck[index]};
if (wholeState && wholeState.Components) {
if (wholeState.Components[deckName]) {
result.DynamicValues = wholeState.Components[deckName][index];
}
}
return result
}
const doTick = () => {
tick();
const state = store.getState();
const pathsToTick = state.game ? state.game.pathsToTick : [];
if (pathsToTick.length > 0) {
window.requestAnimationFrame(doTick);
}
}
const tick = () => {
const state = store.getState();
const currentState = selectGameCurrentState(state);
if (!currentState) return;
const pathsToTick = state.game ? state.game.pathsToTick : [];
const originalWallClockStartTime = state.game ? state.game.originalWallClockTime : 0;
if (pathsToTick.length == 0) return;
let newPaths = [];
//We'll use util.setPropertyInClone, so the newState will diverge from
//currentState as we write to it, but can start out the same.
let newState = currentState;
for (let i = 0; i < pathsToTick.length; i++) {
let currentPath = pathsToTick[i];
let timer = getProperty(newState, currentPath);
let now = Date.now();
let difference = now - originalWallClockStartTime;
let result = Math.max(0, timer.originalTimeLeft - difference);
newState = setPropertyInClone(newState, currentPath.concat(["TimeLeft"]), result);
//If we still have time to tick on this, then make sure it's still
//in the list of things to tick.
if (timer.TimeLeft > 0) {
newPaths.push(currentPath);
}
}
if (newPaths.length == pathsToTick.length) {
//If the length of pathsToTick didn't change, don't change it, so that
//strict equality matches in the new state will work.
newPaths = pathsToTick;
}
store.dispatch(updateGameState(newState, newPaths, originalWallClockStartTime));
}<|fim▁end|> | |
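// --- Editor's illustrative dispatch sketch (not part of the original module; the
// --- response field names below are assumptions) ---
// installGameState expects the unexpanded state from the server, the game's
// ActiveTimers, and the wall-clock time the response arrived, e.g.:
//
//   const receivedAt = Date.now();
//   store.dispatch(installGameState(serverResponse.CurrentState,
//                                   serverResponse.ActiveTimers,
//                                   receivedAt));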
<|file_name|>gen_test2123.py<|end_file_name|><|fim▁begin|>import numpy as np
import pandas as pd
import scipy as sp
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import os, sys
try:
import cPickle as pickle
except:
import pickle
#connect echoRD Tools
pathdir='../echoRD' #path to echoRD
lib_path = os.path.abspath(pathdir)
#sys.path.append(lib_path)
sys.path.append('/home/ka/ka_iwg/ka_oj4748/echoRD/echoRD')
import vG_conv as vG
from hydro_tools import plotparticles_t,hydroprofile,plotparticles_specht
# Prepare echoRD
#connect to echoRD
import run_echoRD as rE
#connect and load project
[dr,mc,mcp,pdyn,cinf,vG]=rE.loadconnect(pathdir='../',mcinif='mcini_gen2',experimental=True)
mc = mcp.mcpick_out(mc,'gen_test2.pickle')
runname='gen_test2123'
mc.inimf='07moist.dat'
mc.advectref='Shipitalo'
mc.soilmatrix=pd.read_csv(mc.matrixbf, sep=' ')
mc.soilmatrix['m'] = np.fmax(1-1/mc.soilmatrix.n,0.1)
mc.md_macdepth=mc.md_depth[np.fmax(2,np.sum(np.ceil(mc.md_contact),axis=1).astype(int))]
mc.md_macdepth[mc.md_macdepth<=0.]=0.065
precTS=pd.read_csv(mc.precf, sep=',',skiprows=3)
precTS.tstart=60
precTS.tend=60+1800
precTS.total=0.06
precTS.intense=precTS.total/(precTS.tend-precTS.tstart)
<|fim▁hole|>mc.gridcellA=mc.mgrid.vertfac*mc.mgrid.latfac
mc.particleA=abs(mc.gridcellA.values)/(2*mc.part_sizefac) #assume average ks at about 0.5 as reference of particle size
mc.particleD=2.*np.sqrt(mc.particleA/np.pi)
mc.particleV=3./4.*np.pi*(mc.particleD/2.)**3.
mc.particleV/=np.sqrt(abs(mc.gridcellA.values)) #assume grid size as 3rd dimension
mc.particleD/=np.sqrt(abs(mc.gridcellA.values))
mc.particlemass=dr.waterdensity(np.array(20),np.array(-9999))*mc.particleV #assume 20C as reference for particle mass
#DEBUG: a) we assume 2D=3D; b) change 20C to annual mean T?
mc=dr.ini_bins(mc)
mc=dr.mc_diffs(mc,np.max(np.max(mc.mxbin)))
[mc,particles,npart]=dr.particle_setup(mc)
#define bin assignment mode for infiltration particles
mc.LTEdef='instant'#'ks' #'instant' #'random'
mc.LTEmemory=mc.soilgrid.ravel()*0.
#new reference
mc.maccon=np.where(mc.macconnect.ravel()>0)[0] #index of all connected cells
mc.md_macdepth=np.abs(mc.md_macdepth)
mc.prects=False
#theta=mc.zgrid[:,1]*0.+0.273
#[mc,particles,npart]=rE.particle_setup_obs(theta,mc,vG,dr,pdyn)
[thS,npart]=pdyn.gridupdate_thS(particles.lat,particles.z,mc)
#[A,B]=plotparticles_t(particles,thS/100.,mc,vG,store=True)
# Run Model
mc.LTEpercentile=70 #new parameter
t_end=24.*3600.
saveDT=True
#1: MDA
#2: MED
#3: rand
infiltmeth='MDA'
#3: RWdiff
#4: Ediss
#exfiltmeth='RWdiff'
exfiltmeth='Ediss'
#5: film_uconst
#6: dynamic u
film=True
#7: maccoat1
#8: maccoat10
#9: maccoat100
macscale=1. #scale the macropore coating
clogswitch=False
infiltscale=False
#mc.dt=0.11
#mc.splitfac=5
#pdyn.part_diffusion_binned_pd(particles,npart,thS,mc)
#import profile
#%prun -D diff_pd_prof.prof pdyn.part_diffusion_binned_pd(particles,npart,thS,mc)
wdir='/beegfs/work/ka_oj4748/gen_tests'
drained=pd.DataFrame(np.array([]))
leftover=0
output=60. #mind to set also in TXstore.index definition
dummy=np.floor(t_end/output)
t=0.
ix=0
TSstore=np.zeros((int(dummy),mc.mgrid.cells[0],2))
try:
#unpickle:
with open(''.join([wdir,'/results/Z',runname,'_Mstat.pick']),'rb') as handle:
pickle_l = pickle.load(handle)
dummyx = pickle.loads(pickle_l)
particles = pickle.loads(dummyx[0])
[leftover,drained,t,TSstore,ix] = pickle.loads(dummyx[1])
ix+=1
print('resuming into stored run at t='+str(t)+'...')
except:
print('starting new run...')
#loop through plot cycles
for i in np.arange(dummy.astype(int))[ix:]:
plotparticles_specht(particles,mc,pdyn,vG,runname,t,i,saving=True,relative=False,wdir=wdir)
[particles,npart,thS,leftover,drained,t]=rE.CAOSpy_rundx1(i*output,(i+1)*output,mc,pdyn,cinf,precTS,particles,leftover,drained,6.,splitfac=4,prec_2D=False,maccoat=macscale,saveDT=saveDT,clogswitch=clogswitch,infilt_method=infiltmeth,exfilt_method=exfiltmeth,film=film,infiltscale=infiltscale)
TSstore[i,:,:]=rE.part_store(particles,mc)
#if i/5.==np.round(i/5.):
with open(''.join([wdir,'/results/Z',runname,'_Mstat.pick']),'wb') as handle:
pickle.dump(pickle.dumps([pickle.dumps(particles),pickle.dumps([leftover,drained,t,TSstore,i])]), handle, protocol=2)<|fim▁end|> | #use modified routines for binned retention definitions
mc.part_sizefac=500 |
<|file_name|>Helpers.ts<|end_file_name|><|fim▁begin|>import builder = require('botbuilder');
export module Helpers {
export class API {
public static async DownloadJson(url:string, post:boolean=false, options:any=undefined): Promise<string>{
return new Promise<string>(resolve => {
var XMLHttpRequest = require("xmlhttprequest").XMLHttpRequest;
var xhr = new XMLHttpRequest();
xhr.onload = function (){
try {
resolve(xhr.responseText);
}
catch(e){
console.log("Error while calling api: " + e.message);
}
};
xhr.open(options ? "POST" : "GET", url, true);
xhr.setRequestHeader('Content-Type', 'application/json')
xhr.send(JSON.stringify(options));<|fim▁hole|> }
}
export enum SearchType { "code", "documentation" };
}<|fim▁end|> | }); |
<|file_name|>ir_filters.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import ast
from odoo import api, fields, models, _
from odoo.exceptions import UserError
class IrFilters(models.Model):
_name = 'ir.filters'
_description = 'Filters'
_order = 'model_id, name, id desc'
name = fields.Char(string='Filter Name', translate=True, required=True)
user_id = fields.Many2one('res.users', string='User', ondelete='cascade', default=lambda self: self._uid,
help="The user this filter is private to. When left empty the filter is public "
"and available to all users.")
domain = fields.Text(default='[]', required=True)
context = fields.Text(default='{}', required=True)
sort = fields.Text(default='[]', required=True)
model_id = fields.Selection(selection='_list_all_models', string='Model', required=True)
is_default = fields.Boolean(string='Default filter')
action_id = fields.Many2one('ir.actions.actions', string='Action', ondelete='cascade',
help="The menu action this filter applies to. "
"When left empty the filter applies to all menus "
"for this model.")
active = fields.Boolean(default=True)
@api.model
def _list_all_models(self):
self._cr.execute("SELECT model, name FROM ir_model ORDER BY name")
return self._cr.fetchall()
@api.multi
def copy(self, default=None):
self.ensure_one()
default = dict(default or {}, name=_('%s (copy)') % self.name)
return super(IrFilters, self).copy(default)
@api.multi
def _get_eval_domain(self):
self.ensure_one()
return ast.literal_eval(self.domain)
@api.model
def _get_action_domain(self, action_id=None):
"""Return a domain component for matching filters that are visible in the
same context (menu/view) as the given action."""
if action_id:
# filters specific to this menu + global ones
return [('action_id', 'in', [action_id, False])]
# only global ones
return [('action_id', '=', False)]
@api.model
def get_filters(self, model, action_id=None):
"""Obtain the list of filters available for the user on the given model.
:param action_id: optional ID of action to restrict filters to this action
plus global filters. If missing only global filters are returned.
The action does not have to correspond to the model, it may only be
a contextual action.
:return: list of :meth:`~osv.read`-like dicts containing the
``name``, ``is_default``, ``domain``, ``user_id`` (m2o tuple),
``action_id`` (m2o tuple) and ``context`` of the matching ``ir.filters``.
"""
# available filters: private filters (user_id=uid) and public filters (uid=NULL),
# and filters for the action (action_id=action_id) or global (action_id=NULL)
action_domain = self._get_action_domain(action_id)
filters = self.search(action_domain + [('model_id', '=', model), ('user_id', 'in', [self._uid, False])])
user_context = self.env.user.context_get()
return filters.with_context(user_context).read(['name', 'is_default', 'domain', 'context', 'user_id', 'sort'])
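    # Illustrative call of get_filters() (model name and values invented for the example):
    #   self.env['ir.filters'].get_filters('sale.order', action_id=42)
    # returns read()-like dicts such as
    #   [{'id': 3, 'name': 'My draft orders', 'is_default': False, 'user_id': (7, 'Alice'),
    #     'domain': "[('state', '=', 'draft')]", 'context': '{}', 'sort': '[]'}]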
@api.model
def _check_global_default(self, vals, matching_filters):
""" _check_global_default(dict, list(dict), dict) -> None
Checks if there is a global default for the model_id requested.
If there is, and the default is different than the record being written
(-> we're not updating the current global default), raise an error
to avoid users unknowingly overwriting existing global defaults (they
have to explicitly remove the current default before setting a new one)
This method should only be called if ``vals`` is trying to set<|fim▁hole|> """
domain = self._get_action_domain(vals.get('action_id'))
defaults = self.search(domain + [
('model_id', '=', vals['model_id']),
('user_id', '=', False),
('is_default', '=', True),
])
if not defaults:
return
if matching_filters and (matching_filters[0]['id'] == defaults.id):
return
raise UserError(_("There is already a shared filter set as default for %(model)s, delete or change it before setting a new default") % {'model': vals.get('model_id')})
@api.model
@api.returns('self', lambda value: value.id)
def create_or_replace(self, vals):
action_id = vals.get('action_id')
current_filters = self.get_filters(vals['model_id'], action_id)
matching_filters = [f for f in current_filters
if f['name'].lower() == vals['name'].lower()
# next line looks for matching user_ids (specific or global), i.e.
# f.user_id is False and vals.user_id is False or missing,
# or f.user_id.id == vals.user_id
if (f['user_id'] and f['user_id'][0]) == vals.get('user_id')]
if vals.get('is_default'):
if vals.get('user_id'):
# Setting new default: any other default that belongs to the user
# should be turned off
domain = self._get_action_domain(action_id)
defaults = self.search(domain + [
('model_id', '=', vals['model_id']),
('user_id', '=', vals['user_id']),
('is_default', '=', True),
])
if defaults:
defaults.write({'is_default': False})
else:
self._check_global_default(vals, matching_filters)
# When a filter exists for the same (name, model, user) triple, we simply
# replace its definition (considering action_id irrelevant here)
if matching_filters:
matching_filter = self.browse(matching_filters[0]['id'])
matching_filter.write(vals)
return matching_filter
return self.create(vals)
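    # Illustrative use of create_or_replace() (values invented): calling it twice with the
    # same (name, model_id, user_id) updates the first record instead of creating a second
    # one, because the name comparison above is case-insensitive.
    #   Filters = self.env['ir.filters']
    #   first = Filters.create_or_replace({'name': 'Open orders', 'model_id': 'sale.order',
    #                                      'user_id': self.env.uid, 'domain': "[('state', '=', 'draft')]"})
    #   second = Filters.create_or_replace({'name': 'open orders', 'model_id': 'sale.order',
    #                                       'user_id': self.env.uid, 'domain': "[]"})
    #   assert first.id == second.id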
_sql_constraints = [
# Partial constraint, complemented by unique index (see below). Still
# useful to keep because it provides a proper error message when a
# violation occurs, as it shares the same prefix as the unique index.
('name_model_uid_unique', 'unique (name, model_id, user_id, action_id)', 'Filter names must be unique'),
]
@api.model_cr_context
def _auto_init(self):
result = super(IrFilters, self)._auto_init()
# Use unique index to implement unique constraint on the lowercase name (not possible using a constraint)
self._cr.execute("DROP INDEX IF EXISTS ir_filters_name_model_uid_unique_index") # drop old index w/o action
self._cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = 'ir_filters_name_model_uid_unique_action_index'")
if not self._cr.fetchone():
self._cr.execute("""CREATE UNIQUE INDEX "ir_filters_name_model_uid_unique_action_index" ON ir_filters
(lower(name), model_id, COALESCE(user_id,-1), COALESCE(action_id,-1))""")
return result<|fim▁end|> | ``is_default``
:raises odoo.exceptions.UserError: if there is an existing default and
we're not updating it |
<|file_name|>edge.py<|end_file_name|><|fim▁begin|>from pseudoregion import *
class Edge(PseudoRegion):
"""EDGE Fringe field and other kicks for hard-edged field models
1) edge type (A4) {SOL, DIP, HDIP, DIP3, QUAD, SQUA, SEX, BSOL, FACE}
2.1) model # (I) {1}
2.2-5) p1, p2, p3,p4 (R) model-dependent parameters
Edge type = SOL
p1: BS [T]
If the main solenoid field is B, use p1=-B for the entrance edge and p1=+B for the exit edge.
Edge type = DIP
p1: BY [T]
Edge type = HDIP
p1: BX [T]
Edge type = DIP3
p1: rotation angle [deg]
p2: BY0 [T]
p3: flag 1:in 2:out
Edge type = QUAD
p1: gradient [T/m]
Edge type = SQUA
p1: gradient [T/m]
Edge type = SEX
p1: b2 [T/m2] (cf. C. Wang & L. Teng, MC 207)
Edge type = BSOL
p1: BS [T]
p2: BY [T]
p3: 0 for entrance face, 1 for exit face
Edge type = FACE
This gives vertical focusing from rotated pole faces.
p1: pole face angle [deg]<|fim▁hole|> The DIP, HDIP, QUAD, SQUA, SEX and BSOL edge types use Scott Berg's HRDEND routine to find the change in transverse
position and transverse momentum due to the fringe field.
"""
def __init__(
self,
edge_type,
model,
model_parameters_list,
name=None,
metadata=None):
PseudoRegion.__init__(self, name, metadata)
self.edge_type = edge_type
self.model = model
        self.model_parameters = model_parameters_list
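# Illustrative construction of the simple Edge wrapper above (numbers are invented, not taken
# from the ICOOL manual): a solenoid entrance edge with BS = -2.5 T under model 1 might be
#   entrance = Edge('SOL', 1, [-2.5, 0.0, 0.0, 0.0], name='sol_entrance')
# and the matching exit edge would flip the sign of p1.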
class Edge(Field):
"""
EDGE
1) edge type (A4) {SOL, DIP, HDIP,DIP3,QUAD,SQUA,SEX, BSOL,FACE}
2.1) model # (I) {1}
2.2-5) p1, p2, p3,p4 (R) model-dependent parameters
Edge type = SOL
p1: BS [T]
If the main solenoid field is B, use p1=-B for the entrance edge and p1=+B for the exit edge.
Edge type = DIP
p1: BY [T]
Edge type = HDIP
p1: BX [T]
Edge type = DIP3
p1: rotation angle [deg]
p2: BY0 [T]
p3: flag 1:in 2:out
Edge type = QUAD
p1: gradient [T/m]
Edge type = SQUA
p1: gradient [T/m]
Edge type = SEX
p1: b2 [T/m2] (cf. C. Wang & L. Teng, MC 207)
Edge type = BSOL
p1: BS [T]
p2: BY [T]
p3: 0 for entrance face, 1 for exit face
Edge type = FACE
This gives vertical focusing from rotated pole faces.
p1: pole face angle [deg]
p2: radius of curvature of reference particle [m]
p3: if not 0 => correct kick by the factor 1 / (1+δ)
p4: if not 0 => apply horizontal focus with strength = (-vertical strength)
If a FACE command is used before and after a sector dipole ( DIP ), you can approximate a rectangular dipole field.
The DIP, HDIP, QUAD, SQUA, SEX and BSOL edge types use Scott Berg’s HRDEND routine to find the change in
transverse position and transverse momentum due to the fringe field.
"""
begtag = 'EDGE'
endtag = ''
models = {
'model_descriptor': {
'desc': 'Name of model parameter descriptor',
'name': 'model',
'num_parms': 6,
'for001_format': {
'line_splits': [
1,
5]}},
'sol': {
'desc': 'Solenoid',
'doc': '',
'icool_model_name': 'SOL',
'parms': {
'model': {
'pos': 1,
'type': 'String',
'doc': ''},
'bs': {
'pos': 3,
'type': 'Real',
'doc': 'p1: BS [T] '
'If the main solenoid field is B, use p1=-B for the entrance edge and p1=+B for the '
'exit edge. (You can use this to get a tapered field profile)'}}},
}
def __init__(self, **kwargs):
Field.__init__(self, 'EDGE', kwargs)
def __call__(self, **kwargs):
Field.__call__(self, kwargs)
def __setattr__(self, name, value):
if name == 'ftag':
if value == 'EDGE':
object.__setattr__(self, name, value)
else:
# Should raise exception here
print '\n Illegal attempt to set incorrect ftag.\n'
else:
Field.__setattr__(self, name, value)
def __str__(self):
return Field.__str__(self)
def gen_fparm(self):
Field.gen_fparm(self)<|fim▁end|> | p2: radius of curvature of reference particle [m]
p3: if not 0 => correct kick by factor 1/(1+delta)
p4: if not 0 ==> apply horizontal focus with strength = (-vertical strength)
If a FACE command is used before and after a sector dipole (DIP), you can approximate a rectangular dipole field. |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# CodeBug Tether documentation build configuration file, created by
# sphinx-quickstart on Thu Jun 20 15:23:09 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions<|fim▁hole|># Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'CodeBug Tether'
copyright = '2015, OpenLX'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# NOTE: this is the version of the firmware download too so don't change
# this with minor changes in the software if the firmware hasn't changed!
version = '0.8.5'
# The full version, including alpha/beta/rc tags.
release = '0.8.5'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# html_theme = 'haiku'
# html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'codebug_tetherdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'codebug_tether.tex', 'CodeBug Tether Documentation',
'Thomas Preston', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'codebug_tether', 'CodeBug Tether Documentation',
['Thomas Preston'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'codebug_tether', 'CodeBug Tether Documentation',
'Thomas Preston', 'codebug_tether', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
todo_include_todos = True
# A string of reStructuredText that will be included at the end of every
# source file that is read. This is the right place to add substitutions
# that should be available in every file.
rst_epilog = """
.. |firmwaredownload| raw:: html
<a href="https://github.com/codebugtools/codebug_tether/blob/master/firmware/codebug_tether_v{version}.cbg?raw=true">download</a>
""".format(version=version)<|fim▁end|> | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.pngmath']
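# Illustrative effect of the rst_epilog substitution defined above (page name is hypothetical):
# a source file such as usage.rst can simply write
#   Click |firmwaredownload| to fetch firmware v0.8.5.
# and Sphinx appends the raw-HTML link definition to every source file at build time.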
|
<|file_name|>Hydra.py<|end_file_name|><|fim▁begin|>'''
Created on Feb 3, 2013
@author: bpurgaso
'''
from twisted.words.protocols import irc
from twisted.internet import protocol
from twisted.internet import reactor
from twisted.internet import threads
from ConfigManager import ConfigManager
from Authenticator import Authenticator
from subprocess import PIPE, STDOUT, Popen
class bot(irc.IRCClient):
"""
irc bots, yay
"""
def _get_nickname(self):
return self.factory.nickname
nickname = property(_get_nickname)
def reloadConfig(self):
self.config = self.configManager.getConfig()
def signedOn(self):
#Initial Setup
self.configManager = self.factory.configManager
self.configManager.registerListener(self)
self.config = self.configManager.getConfig()
self.auth = self.factory.auth
print "Signed on as %s." % (self.nickname)
for i in self.config['channels'].keys():
if self.config['channels'][i]['autojoin']:
irc.IRCClient.join(self, i, self.config['channels'][i]['key'])
def joined(self, channel):
print "Joined %s." % (channel)
def irc_INVITE(self, prefix, params):
""" called by twisted,
if the bot was invited
"""
channel = params[-1].lower().replace('#', '')
if channel not in self.config['channels'].keys():
self.auth.createChannelEntry(channel)
self.join(channel, self.config['channels'][channel]['key'])
def privmsg(self, user, channel, msg):
'''
Called whenever an inbound message arrives
'''
print user, channel, msg
user = user.rsplit('!', 1)[0]
# Check to see if they're sending me a private message
if channel == self.nickname:
channel = user
index = 0
else:
index = 1
        # See if the message is directed at me
if msg.startswith(self.nickname + ":") or index == 0:
'''
embedded commands go here
'''
command = msg.rsplit()[index].lower()
#REGISTER
if command == 'register':
if self.auth.isUserAuthorized('register', user):
self.msg(channel, self.auth.registerUser(user, 'default'))
else:
self.msg(channel, "You aren't authorized for register.")
#PROMOTE
elif command == 'promote':
if self.auth.isUserAuthorized('promote', user):
try:
target_uname = msg.rsplit()[index + 1].lower()
target_group = msg.rsplit()[index + 2].lower()
if self.auth.getPowerOfUser(user) <=\
self.auth.getPowerOfGroup(target_group):
self.postToIRC((channel, [self.auth.registerUser(\
target_uname, target_group)]))
else:
self.postToIRC((channel, ['%s, your power level'\
' is'\
' insufficient.' % user]))
except:
self.postToIRC((channel, ['Check your formatting and'\
' try again.']))
else:
self.msg(channel, "You aren't authorized for register.")
#WHOAMI
elif command == 'whoami':
if self.auth.isUserAuthorized('whoami', user):
self.postToIRC((channel, [self.auth.whoami(user)]))
else:
self.msg(channel, "You aren't authorized for register.")
#OPME
elif command == 'opme':
if self.auth.isUserAuthorized('opme', user):
self.mode(channel, set, 'o', None, user)
else:
self.msg(channel, "You aren't authorized for opme.")
#AUTOOP
elif command == 'autoop':
if self.auth.isUserAuthorized('autoop', user):
if msg.rsplit()[2].lower() == 'on':
self.postToIRC((channel, self.auth.toggleAutoOp(\
user, channel, True)))
else:
self.postToIRC((channel, self.auth.toggleAutoOp(\
user, channel, False)))
else:
self.msg(channel, "You aren't authorized for autoop.")
#HELP
elif command == 'help':
if self.auth.isUserAuthorized('help', user):
for i in self.auth.getAvailableCommandsForUser(user):
self.msg(user, '%s: %s' %\
(i, self.auth.getHelpForCommand(i)))
self.msg(channel, 'I\'ve sent you a pm.')
else:
self.msg(channel, "You aren't authorized for help.")
#RELOAD
elif command == 'reload':
if self.auth.isUserAuthorized('reload', user):
self.configManager.reload()
self.msg(channel, "Configuration Reloaded")
if not self.auth.sanityCheck(False):
self.msg(channel, "Configuration Sanity is suspect, "\
"rolling back.")
else:
self.msg(channel, "You aren't authorized for reload.")
#KICK
elif command == 'kick':
if self.auth.isUserAuthorized('kick', user):
if self.nickname not in msg.rsplit()[index + 1:]:
for i in msg.rsplit()[index + 1:]:
self.kick(channel, i, 'Later broseph.')
else:
self.msg(channel, "Nope, not happening.")
else:
self.kick(channel, user, 'Sorry bro, nothing personal.')
else:
'''
External script execution goes here
'''
if self.auth.isUserAuthorized(msg.rsplit()[index].lower(),\
user):
#kick off the async call
#channel, command, params
self.invokeCommand(channel,\
command,\
(" ".join(msg.rsplit()[index + 1:])))
else:
self.msg(channel, "You aren't authorized for %s." %\
(command))
else:
'''
filter processing go here
'''
pass
def invokeCommand(self, channel, command, params):
tmp = threads.deferToThread(self.__shellCall, channel, command, params)
tmp.addCallback(self.postToIRC)
def __shellCall(self, channel, command, params):
command = self.sanitize(command)
params = self.sanitize(params)
command = "exec python ./bin/%s.py %s 2> /dev/null" % (command, params)
self.p = Popen(
command,
stderr=STDOUT,
stdout=PIPE,
close_fds=True,
shell=True)
out, err = self.p.communicate() # @UnusedVariable
return (channel, out.splitlines())
def sanitize(self, s):
for i in self.config['sanitize']:
s = s.replace(i, '')<|fim▁hole|> def postToIRC(self, tpl):
for i in tpl[1]:
self.msg(tpl[0], i)
def userJoined(self, user, channel):
channel_dict = channel.replace('#', '')
if self.config['channels'][channel_dict]['enable_autoop'] and\
user in self.config['channels'][channel_dict]['autoop']:
self.mode(channel, set, 'o', None, user)
if self.config['channels'][channel_dict]['enable_greeting']:
self.msg(channel, "%s: %s" % (user,\
self.config['channels'][channel_dict]['greeting']))
def kickedFrom(self, channel, kicker, message):
""" called by twisted,
if the bot was kicked
"""
channel = channel.replace('#', '')
if channel in self.config['channels'].keys() and\
self.config['channels'][channel]['autojoin']:
self.join(channel, self.config['channels'][channel]['key'])
self.msg(kicker, "Why would you do that to me brah?")
class botFactory(protocol.ClientFactory):
"""
Factory for producing "bot"
"""
protocol = bot
def __init__(self, channel, configManager, auth):
self.startChannel = channel
self.configManager = configManager
self.config = self.configManager.getConfig()
self.auth = auth
#required
self.nickname = self.config['nick']
def clientConnectionLost(self, connector, reason):
print "Lost connection (%s), reconnecting." % (reason)
connector.connect()
def clientConnectionFailed(self, connector, reason):
print "Could not connect: %s" % (reason)
class Hydra(object):
'''
The big bad scary bot
'''
def __init__(self):
self.startChannel = '#hydra'
self.configManager = ConfigManager()
self.config = self.configManager.getConfig()
self.configManager.registerListener(self)
self.auth = Authenticator(self.configManager)
n = self.config['network']
p = self.config['port']
b = botFactory(self.startChannel, self.configManager, self.auth)
reactor.connectTCP(n, p, b) # @UndefinedVariable
reactor.run() # @UndefinedVariable
def reloadConfig(self):
self.config = self.configManager.getConfig()
### dummy code below
h = Hydra()<|fim▁end|> | return s
|
<|file_name|>compile_and_link.rs<|end_file_name|><|fim▁begin|>#[cfg(test)]
mod compile_and_link_tests {
use lucet_wasi_sdk::*;
use std::path::PathBuf;
use tempfile::TempDir;
fn test_file(name: &str) -> PathBuf {
let mut p = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
p.push("tests");
p.push(name);
assert!(p.exists(), "test file does not exist");
p
}
#[test]
fn compile_a() {
let tmp = TempDir::new().expect("create temporary directory");
let compiler = Compile::new(test_file("a.c"));
let objfile = tmp.path().join("a.o");
compiler.compile(objfile.clone()).expect("compile a.c");
assert!(objfile.exists(), "object file created");
let mut linker = Link::new(&[objfile]);
linker.cflag("-nostartfiles");
linker.link_opt(LinkOpt::NoDefaultEntryPoint);
let wasmfile = tmp.path().join("a.wasm");
linker.link(wasmfile.clone()).expect("link a.wasm");
assert!(wasmfile.exists(), "wasm file created");
}
#[test]
fn compile_b() {
let tmp = TempDir::new().expect("create temporary directory");
let compiler = Compile::new(test_file("b.c"));
let objfile = tmp.path().join("b.o");
compiler.compile(objfile.clone()).expect("compile b.c");
assert!(objfile.exists(), "object file created");
let mut linker = Link::new(&[objfile]);
linker.cflag("-nostartfiles");
linker.link_opt(LinkOpt::NoDefaultEntryPoint);
linker.link_opt(LinkOpt::AllowUndefinedAll);
let wasmfile = tmp.path().join("b.wasm");
linker.link(wasmfile.clone()).expect("link b.wasm");
assert!(wasmfile.exists(), "wasm file created");
}
#[test]<|fim▁hole|> fn compile_a_and_b() {
let tmp = TempDir::new().expect("create temporary directory");
let mut linker = Link::new(&[test_file("a.c"), test_file("b.c")]);
linker.cflag("-nostartfiles");
linker.link_opt(LinkOpt::NoDefaultEntryPoint);
let wasmfile = tmp.path().join("ab.wasm");
linker.link(wasmfile.clone()).expect("link ab.wasm");
assert!(wasmfile.exists(), "wasm file created");
}
#[test]
fn compile_to_lucet() {
let tmp = TempDir::new().expect("create temporary directory");
let mut lucetc = Lucetc::new(&[test_file("a.c"), test_file("b.c")]);
lucetc.cflag("-nostartfiles");
lucetc.link_opt(LinkOpt::NoDefaultEntryPoint);
let so_file = tmp.path().join("ab.so");
lucetc.build(&so_file).expect("compile ab.so");
assert!(so_file.exists(), "so file created");
}
}<|fim▁end|> | |
<|file_name|>graph.go<|end_file_name|><|fim▁begin|>package graph
import (
"compress/gzip"
"crypto/sha256"
"fmt"
"io"
"io/ioutil"
"os"
"path"
"path/filepath"
"runtime"
"strings"
"syscall"
"time"
"github.com/Sirupsen/logrus"
"github.com/docker/distribution/digest"
"github.com/docker/docker/autogen/dockerversion"
"github.com/docker/docker/daemon/graphdriver"
"github.com/docker/docker/image"
"github.com/docker/docker/pkg/archive"
"github.com/docker/docker/pkg/progressreader"
"github.com/docker/docker/pkg/streamformatter"
"github.com/docker/docker/pkg/stringid"
"github.com/docker/docker/pkg/truncindex"
"github.com/docker/docker/runconfig"
"github.com/docker/docker/utils"
)
// A Graph is a store for versioned filesystem images and the relationship between them.
type Graph struct {
Root string
idIndex *truncindex.TruncIndex
driver graphdriver.Driver
}
// NewGraph instantiates a new graph at the given root path in the filesystem.
// `root` will be created if it doesn't exist.
func NewGraph(root string, driver graphdriver.Driver) (*Graph, error) {
abspath, err := filepath.Abs(root)
if err != nil {
return nil, err
}
// Create the root directory if it doesn't exists
if err := os.MkdirAll(root, 0700); err != nil && !os.IsExist(err) {
return nil, err
}
graph := &Graph{
Root: abspath,
idIndex: truncindex.NewTruncIndex([]string{}),
driver: driver,
}
if err := graph.restore(); err != nil {
return nil, err
}
return graph, nil
}
func (graph *Graph) restore() error {
dir, err := ioutil.ReadDir(graph.Root)
if err != nil {
return err
}
var ids = []string{}
for _, v := range dir {
id := v.Name()
if graph.driver.Exists(id) {
ids = append(ids, id)
}
}
graph.idIndex = truncindex.NewTruncIndex(ids)
logrus.Debugf("Restored %d elements", len(dir))
return nil
}
// FIXME: Implement error subclass instead of looking at the error text
// Note: This is the way golang implements os.IsNotExists on Plan9
func (graph *Graph) IsNotExist(err error, id string) bool {
return err != nil && (strings.Contains(strings.ToLower(err.Error()), "does not exist") || strings.Contains(strings.ToLower(err.Error()), "no such")) && strings.Contains(err.Error(), id)
}
// Exists returns true if an image is registered at the given id.
// If the image doesn't exist or if an error is encountered, false is returned.
func (graph *Graph) Exists(id string) bool {
if _, err := graph.Get(id); err != nil {
return false
}
return true
}
// Get returns the image with the given id, or an error if the image doesn't exist.
func (graph *Graph) Get(name string) (*image.Image, error) {
id, err := graph.idIndex.Get(name)
if err != nil {
return nil, fmt.Errorf("could not find image: %v", err)
}
img, err := image.LoadImage(graph.ImageRoot(id))
if err != nil {
return nil, err
}
if img.ID != id {
return nil, fmt.Errorf("Image stored at '%s' has wrong id '%s'", id, img.ID)
}
img.SetGraph(graph)
if img.Size < 0 {
size, err := graph.driver.DiffSize(img.ID, img.Parent)
if err != nil {
return nil, fmt.Errorf("unable to calculate size of image id %q: %s", img.ID, err)
}
img.Size = size
if err := img.SaveSize(graph.ImageRoot(id)); err != nil {
return nil, err
}
}
return img, nil
}
// Create creates a new image and registers it in the graph.
func (graph *Graph) Create(layerData archive.ArchiveReader, containerID, containerImage, comment, author string, containerConfig, config *runconfig.Config) (*image.Image, error) {
img := &image.Image{
ID: stringid.GenerateRandomID(),
Comment: comment,
Created: time.Now().UTC(),
DockerVersion: dockerversion.VERSION,
Author: author,
Config: config,
Architecture: runtime.GOARCH,
OS: runtime.GOOS,
}
if containerID != "" {
img.Parent = containerImage
img.Container = containerID
img.ContainerConfig = *containerConfig
}
if err := graph.Register(img, layerData); err != nil {
return nil, err
}
return img, nil
}
// Register imports a pre-existing image into the graph.
func (graph *Graph) Register(img *image.Image, layerData archive.ArchiveReader) (err error) {
defer func() {
// If any error occurs, remove the new dir from the driver.
// Don't check for errors since the dir might not have been created.
// FIXME: this leaves a possible race condition.
if err != nil {
graph.driver.Remove(img.ID)
}
}()
if err := utils.ValidateID(img.ID); err != nil {
return err
}
// (This is a convenience to save time. Race conditions are taken care of by os.Rename)
if graph.Exists(img.ID) {
return fmt.Errorf("Image %s already exists", img.ID)
}
// Ensure that the image root does not exist on the filesystem
// when it is not registered in the graph.
// This is common when you switch from one graph driver to another
if err := os.RemoveAll(graph.ImageRoot(img.ID)); err != nil && !os.IsNotExist(err) {
return err
}
// If the driver has this ID but the graph doesn't, remove it from the driver to start fresh.
// (the graph is the source of truth).
// Ignore errors, since we don't know if the driver correctly returns ErrNotExist.
// (FIXME: make that mandatory for drivers).
graph.driver.Remove(img.ID)
tmp, err := graph.Mktemp("")
defer os.RemoveAll(tmp)
if err != nil {
return fmt.Errorf("Mktemp failed: %s", err)
}
// Create root filesystem in the driver
if err := graph.driver.Create(img.ID, img.Parent); err != nil {
return fmt.Errorf("Driver %s failed to create image rootfs %s: %s", graph.driver, img.ID, err)
}
// Apply the diff/layer
img.SetGraph(graph)
if err := image.StoreImage(img, layerData, tmp); err != nil {
return err
}
// Commit
if err := os.Rename(tmp, graph.ImageRoot(img.ID)); err != nil {
return err
}
graph.idIndex.Add(img.ID)
return nil
}
// TempLayerArchive creates a temporary archive of the given image's filesystem layer.
// The archive is stored on disk and will be automatically deleted as soon as has been read.
// If output is not nil, a human-readable progress bar will be written to it.
// FIXME: does this belong in Graph? How about MktempFile, let the caller use it for archives?
func (graph *Graph) TempLayerArchive(id string, sf *streamformatter.StreamFormatter, output io.Writer) (*archive.TempArchive, error) {
image, err := graph.Get(id)
if err != nil {
return nil, err
}
tmp, err := graph.Mktemp("")
if err != nil {
return nil, err
}
a, err := image.TarLayer()
if err != nil {
return nil, err
}
progressReader := progressreader.New(progressreader.Config{
In: a,
Out: output,
Formatter: sf,
Size: 0,
NewLines: false,
ID: stringid.TruncateID(id),
Action: "Buffering to disk",
})
defer progressReader.Close()
return archive.NewTempArchive(progressReader, tmp)
}
// Mktemp creates a temporary sub-directory inside the graph's filesystem.
func (graph *Graph) Mktemp(id string) (string, error) {
dir := path.Join(graph.Root, "_tmp", stringid.GenerateRandomID())
if err := os.MkdirAll(dir, 0700); err != nil {
return "", err
}
return dir, nil
}
func (graph *Graph) newTempFile() (*os.File, error) {
tmp, err := graph.Mktemp("")
if err != nil {
return nil, err
}
return ioutil.TempFile(tmp, "")
}
func bufferToFile(f *os.File, src io.Reader) (int64, digest.Digest, error) {
var (
h = sha256.New()
w = gzip.NewWriter(io.MultiWriter(f, h))
)
_, err := io.Copy(w, src)
w.Close()
if err != nil {
return 0, "", err
}
if err = f.Sync(); err != nil {
return 0, "", err
}
n, err := f.Seek(0, os.SEEK_CUR)
if err != nil {
return 0, "", err
}
if _, err := f.Seek(0, 0); err != nil {
return 0, "", err
}
return n, digest.NewDigest("sha256", h), nil
}
// setupInitLayer populates a directory with mountpoints suitable
// for bind-mounting dockerinit into the container. The mountpoint is simply an
// empty file at /.dockerinit
//
// This extra layer is used by all containers as the top-most ro layer. It protects
// the container from unwanted side-effects on the rw layer.
func SetupInitLayer(initLayer string) error {
for pth, typ := range map[string]string{
"/dev/pts": "dir",
"/dev/shm": "dir",
"/proc": "dir",
"/sys": "dir",
"/.dockerinit": "file",
"/.dockerenv": "file",
"/etc/resolv.conf": "file",
"/etc/hosts": "file",
"/etc/hostname": "file",
"/dev/console": "file",
"/etc/mtab": "/proc/mounts",
} {
parts := strings.Split(pth, "/")
prev := "/"
for _, p := range parts[1:] {
prev = path.Join(prev, p)
syscall.Unlink(path.Join(initLayer, prev))
}
if _, err := os.Stat(path.Join(initLayer, pth)); err != nil {
if os.IsNotExist(err) {
if err := os.MkdirAll(path.Join(initLayer, path.Dir(pth)), 0755); err != nil {
return err
}
switch typ {
case "dir":
if err := os.MkdirAll(path.Join(initLayer, pth), 0755); err != nil {
return err
}
case "file":
f, err := os.OpenFile(path.Join(initLayer, pth), os.O_CREATE, 0755)
if err != nil {
return err
}
f.Close()
default:
if err := os.Symlink(typ, path.Join(initLayer, pth)); err != nil {
return err
}
}
} else {
return err
}
}
}
// Layer is ready to use, if it wasn't before.
return nil
}
// Check if given error is "not empty".
// Note: this is the way golang does it internally with os.IsNotExists.
func isNotEmpty(err error) bool {
switch pe := err.(type) {
case nil:
return false
case *os.PathError:
err = pe.Err
case *os.LinkError:
err = pe.Err
}
return strings.Contains(err.Error(), " not empty")
}
// Delete atomically removes an image from the graph.
func (graph *Graph) Delete(name string) error {
id, err := graph.idIndex.Get(name)
if err != nil {
return err
}
tmp, err := graph.Mktemp("")
graph.idIndex.Delete(id)
if err == nil {
err = os.Rename(graph.ImageRoot(id), tmp)
// On err make tmp point to old dir and cleanup unused tmp dir
if err != nil {
os.RemoveAll(tmp)
tmp = graph.ImageRoot(id)
}
} else {
// On err make tmp point to old dir for cleanup
tmp = graph.ImageRoot(id)
}
// Remove rootfs data from the driver
graph.driver.Remove(id)
// Remove the trashed image directory
return os.RemoveAll(tmp)
}
// Map returns a list of all images in the graph, addressable by ID.
func (graph *Graph) Map() (map[string]*image.Image, error) {
images := make(map[string]*image.Image)
err := graph.walkAll(func(image *image.Image) {
images[image.ID] = image
})
if err != nil {
return nil, err
}
return images, nil
}
// walkAll iterates over each image in the graph, and passes it to a handler.
// The walking order is undetermined.
func (graph *Graph) walkAll(handler func(*image.Image)) error {
files, err := ioutil.ReadDir(graph.Root)
if err != nil {
return err
}
for _, st := range files {
if img, err := graph.Get(st.Name()); err != nil {
// Skip image
continue
} else if handler != nil {
handler(img)
}
}
return nil
}
// ByParent returns a lookup table of images by their parent.
// If an image of id ID has 3 children images, then the value for key ID
// will be a list of 3 images.
// If an image has no children, it will not have an entry in the table.
func (graph *Graph) ByParent() (map[string][]*image.Image, error) {
byParent := make(map[string][]*image.Image)
err := graph.walkAll(func(img *image.Image) {
parent, err := graph.Get(img.Parent)
if err != nil {<|fim▁hole|> } else {
byParent[parent.ID] = []*image.Image{img}
}
})
return byParent, err
}
// Heads returns all heads in the graph, keyed by id.
// A head is an image which is not the parent of another image in the graph.
func (graph *Graph) Heads() (map[string]*image.Image, error) {
heads := make(map[string]*image.Image)
byParent, err := graph.ByParent()
if err != nil {
return nil, err
}
err = graph.walkAll(func(image *image.Image) {
// If it's not in the byParent lookup table, then
// it's not a parent -> so it's a head!
if _, exists := byParent[image.ID]; !exists {
heads[image.ID] = image
}
})
return heads, err
}
func (graph *Graph) ImageRoot(id string) string {
return path.Join(graph.Root, id)
}
func (graph *Graph) Driver() graphdriver.Driver {
return graph.driver
}<|fim▁end|> | return
}
if children, exists := byParent[parent.ID]; exists {
byParent[parent.ID] = append(children, img) |
<|file_name|>ODataMetaModel.qunit.js<|end_file_name|><|fim▁begin|>/*!
* ${copyright}
*/
sap.ui.require([
"jquery.sap.global",
"sap/ui/base/SyncPromise",
"sap/ui/model/BindingMode",
"sap/ui/model/ChangeReason",
"sap/ui/model/ClientListBinding",
"sap/ui/model/Context",
"sap/ui/model/ContextBinding",
"sap/ui/model/Filter",
"sap/ui/model/MetaModel",
"sap/ui/model/PropertyBinding",
"sap/ui/model/Sorter",
"sap/ui/model/odata/OperationMode",
"sap/ui/model/odata/type/Int64",
"sap/ui/model/odata/type/Raw",
"sap/ui/model/odata/v4/AnnotationHelper",
"sap/ui/model/odata/v4/Context",
"sap/ui/model/odata/v4/lib/_Helper",
"sap/ui/model/odata/v4/ODataMetaModel",
"sap/ui/model/odata/v4/ODataModel",
"sap/ui/model/odata/v4/ValueListType",
"sap/ui/test/TestUtils",
"sap/ui/thirdparty/URI"
], function (jQuery, SyncPromise, BindingMode, ChangeReason, ClientListBinding, BaseContext,
ContextBinding, Filter, MetaModel, PropertyBinding, Sorter, OperationMode, Int64, Raw,
AnnotationHelper, Context, _Helper, ODataMetaModel, ODataModel, ValueListType, TestUtils,
URI) {
/*global QUnit, sinon */
/*eslint max-nested-callbacks: 0, no-loop-func: 0, no-warning-comments: 0 */
"use strict";
// Common := com.sap.vocabularies.Common.v1
// tea_busi := com.sap.gateway.default.iwbep.tea_busi.v0001
// tea_busi_product.v0001 := com.sap.gateway.default.iwbep.tea_busi_product.v0001
// tea_busi_supplier.v0001 := com.sap.gateway.default.iwbep.tea_busi_supplier.v0001
// UI := com.sap.vocabularies.UI.v1
var mMostlyEmptyScope = {
"$EntityContainer" : "empty.DefaultContainer",
"$Version" : "4.0",
"empty." : {
"$kind" : "Schema"
},
"empty.DefaultContainer" : {
"$kind" : "EntityContainer"
}
},
sODataMetaModel = "sap.ui.model.odata.v4.ODataMetaModel",
mProductScope = {
"$EntityContainer" : "tea_busi_product.v0001.DefaultContainer",
"$Reference" : {
"../../../../default/iwbep/tea_busi_supplier/0001/$metadata" : {
"$Include" : [
"tea_busi_supplier.v0001."
]
}
},
"$Version" : "4.0",
"tea_busi_product.v0001." : {
"$kind" : "Schema",
"$Annotations" : { // Note: simulate result of _MetadataRequestor#read
"tea_busi_product.v0001.Category/CategoryName" : {
"@Common.Label" : "CategoryName from tea_busi_product.v0001."
}
}
},
"tea_busi_product.v0001.Category" : {
"$kind" : "EntityType",
"CategoryName" : {
"$kind" : "Property",
"$Type" : "Edm.String"
}
},
"tea_busi_product.v0001.DefaultContainer" : {
"$kind" : "EntityContainer"
},
"tea_busi_product.v0001.Product" : {
"$kind" : "EntityType",
"Name" : {
"$kind" : "Property",
"$Type" : "Edm.String"
},
"PRODUCT_2_CATEGORY" : {
"$kind" : "NavigationProperty",
"$Type" : "tea_busi_product.v0001.Category"
},
"PRODUCT_2_SUPPLIER" : {
"$kind" : "NavigationProperty",
"$Type" : "tea_busi_supplier.v0001.Supplier"
}
}
},
sSampleServiceUrl
= "/sap/opu/odata4/sap/zui5_testv4/default/sap/zui5_epm_sample/0002/",
mScope = {
"$Annotations" : {
"name.space.Id" : {
"@Common.Label" : "ID"
},
"tea_busi.DefaultContainer" : {
"@DefaultContainer" : {}
},
"tea_busi.DefaultContainer/T€AMS" : {
"@T€AMS" : {}
},
"tea_busi.TEAM" : {
"@Common.Text" : {
"$Path" : "Name"
},
"@[email protected]" : {
"$EnumMember" : "UI.TextArrangementType/TextLast"
},
"@UI.Badge" : {
"@Common.Label" : "Label inside",
"$Type" : "UI.BadgeType",
"HeadLine" : {
"$Type" : "UI.DataField",
"Value" : {
"$Path" : "Name"
}
},
"Title" : {
"$Type" : "UI.DataField",
"Value" : {
"$Path" : "Team_Id"
}
}
},
"@[email protected]" : "Best Badge Ever!",
"@UI.LineItem" : [{
"@UI.Importance" : {
"$EnumMember" : "UI.ImportanceType/High"
},
"$Type" : "UI.DataField",
"Label" : "Team ID",
"[email protected]" : "Team ID's Label",
"Value" : {
"$Path" : "Team_Id"
}
}]
},
"tea_busi.TEAM/Team_Id" : {
"@Common.Label" : "Team ID",
"@Common.Text" : {
"$Path" : "Name"
},
"@[email protected]" : {
"$EnumMember" : "UI.TextArrangementType/TextLast"
}
},
"tea_busi.Worker" : {
"@UI.Facets" : [{
"$Type" : "UI.ReferenceFacet",
"Target" : {
// term cast
"$AnnotationPath" : "@UI.LineItem"
}
}, {
"$Type" : "UI.ReferenceFacet",
"Target" : {
// term cast at navigation property itself
"$AnnotationPath" : "[email protected]"
}
}, {
"$Type" : "UI.ReferenceFacet",
"Target" : {
// navigation property and term cast
"$AnnotationPath" : "EMPLOYEE_2_TEAM/@UI.LineItem"
}
}, {
"$Type" : "UI.ReferenceFacet",
"Target" : {
// type cast, navigation properties and term cast (at its type)
"$AnnotationPath"
: "tea_busi.TEAM/TEAM_2_EMPLOYEES/EMPLOYEE_2_TEAM/@UI.LineItem"
}
}],
"@UI.LineItem" : [{
"$Type" : "UI.DataField",
"Label" : "Team ID",
"Value" : {
"$Path" : "EMPLOYEE_2_TEAM/Team_Id"
}
}]
},
"tea_busi.Worker/EMPLOYEE_2_TEAM" : {
"@Common.Label" : "Employee's Team"
}
},
"$EntityContainer" : "tea_busi.DefaultContainer",
"empty." : {
"$kind" : "Schema"
},
"name.space." : {
"$kind" : "Schema"
},
"tea_busi." : {
"$kind" : "Schema",
"@Schema" : {}
},
"empty.Container" : {
"$kind" : "EntityContainer"
},
"name.space.BadContainer" : {
"$kind" : "EntityContainer",
"DanglingActionImport" : {
"$kind" : "ActionImport",
"$Action" : "not.Found"
},
"DanglingFunctionImport" : {
"$kind" : "FunctionImport",
"$Function" : "not.Found"
}
},
"name.space.Broken" : {
"$kind" : "Term",
"$Type" : "not.Found"
},
"name.space.BrokenFunction" : [{
"$kind" : "Function",
"$ReturnType" : {
"$Type" : "not.Found"
}
}],
"name.space.BrokenOverloads" : [{
"$kind" : "Operation"
}],
"name.space.DerivedPrimitiveFunction" : [{
"$kind" : "Function",
"$ReturnType" : {
"$Type" : "name.space.Id"
}
}],
"name.space.EmptyOverloads" : [],
"name.space.Id" : {
"$kind" : "TypeDefinition",
"$UnderlyingType" : "Edm.String",
"$MaxLength" : 10
},
"name.space.Term" : { // only case with a qualified name and a $Type
"$kind" : "Term",
"$Type" : "tea_busi.Worker"
},
"name.space.OverloadedAction" : [{
"$kind" : "Action",
"$IsBound" : true,
"$Parameter" : [{
// "$Name" : "_it",
"$Type" : "tea_busi.EQUIPMENT"
}],
"$ReturnType" : {
"$Type" : "tea_busi.EQUIPMENT"
}
}, {
"$kind" : "Action",
"$IsBound" : true,
"$Parameter" : [{
// "$Name" : "_it",
"$Type" : "tea_busi.TEAM"
}],
"$ReturnType" : {
"$Type" : "tea_busi.TEAM"
}
}, { // "An unbound action MAY have the same name as a bound action."
"$kind" : "Action",
"$ReturnType" : {
"$Type" : "tea_busi.ComplexType_Salary"
}
}, {
"$kind" : "Action",
"$IsBound" : true,
"$Parameter" : [{
// "$Name" : "_it",
"$Type" : "tea_busi.Worker"
}],
"$ReturnType" : {
"$Type" : "tea_busi.Worker"
}
}],
"name.space.OverloadedFunction" : [{
"$kind" : "Function",
"$ReturnType" : {
"$Type" : "Edm.String"
}
}, {
"$kind" : "Function",
"$ReturnType" : {
"$Type" : "Edm.String"
}
}],
"name.space.VoidAction" : [{
"$kind" : "Action"
}],
"tea_busi.AcChangeManagerOfTeam" : [{
"$kind" : "Action",
"$ReturnType" : {
"$Type" : "tea_busi.TEAM",
"@Common.Label" : "Hail to the Chief"
}
}],
"tea_busi.ComplexType_Salary" : {
"$kind" : "ComplexType",
"AMOUNT" : {
"$kind" : "Property",
"$Type" : "Edm.Decimal"
},
"CURRENCY" : {
"$kind" : "Property",
"$Type" : "Edm.String"
}
},
"tea_busi.ContainedC" : {
"$kind" : "EntityType",
"$Key" : ["Id"],
"Id" : {
"$kind" : "Property",
"$Type" : "Edm.String"
},
"C_2_EMPLOYEE" : {
"$kind" : "NavigationProperty",
"$Type" : "tea_busi.Worker"
},
"C_2_S" : {
"$ContainsTarget" : true,
"$kind" : "NavigationProperty",
"$Type" : "tea_busi.ContainedS"
}
},
"tea_busi.ContainedS" : {
"$kind" : "EntityType",
"$Key" : ["Id"],
"Id" : {
"$kind" : "Property",
"$Type" : "Edm.String"
},
"S_2_C" : {
"$ContainsTarget" : true,
"$kind" : "NavigationProperty",
"$isCollection" : true,
"$Type" : "tea_busi.ContainedC"
},
"S_2_EMPLOYEE" : {
"$kind" : "NavigationProperty",
"$Type" : "tea_busi.Worker"
}
},
"tea_busi.DefaultContainer" : {
"$kind" : "EntityContainer",
"ChangeManagerOfTeam" : {
"$kind" : "ActionImport",
"$Action" : "tea_busi.AcChangeManagerOfTeam"
},
"EMPLOYEES" : {
"$kind" : "EntitySet",
"$NavigationPropertyBinding" : {
"EMPLOYEE_2_TEAM" : "T€AMS",
"EMPLOYEE_2_EQUIPM€NTS" : "EQUIPM€NTS"
},
"$Type" : "tea_busi.Worker"
},
"EQUIPM€NTS" : {
"$kind" : "EntitySet",
"$Type" : "tea_busi.EQUIPMENT"
},
"GetEmployeeMaxAge" : {
"$kind" : "FunctionImport",
"$Function" : "tea_busi.FuGetEmployeeMaxAge"
},
"Me" : {
"$kind" : "Singleton",
"$NavigationPropertyBinding" : {
"EMPLOYEE_2_TEAM" : "T€AMS",
"EMPLOYEE_2_EQUIPM€NTS" : "EQUIPM€NTS"
},
"$Type" : "tea_busi.Worker"
},
"OverloadedAction" : {
"$kind" : "ActionImport",
"$Action" : "name.space.OverloadedAction"
},
"TEAMS" : {
"$kind" : "EntitySet",
"$NavigationPropertyBinding" : {
"TEAM_2_EMPLOYEES" : "EMPLOYEES",
"TEAM_2_CONTAINED_S/S_2_EMPLOYEE" : "EMPLOYEES"
},
"$Type" : "tea_busi.TEAM"
},
"T€AMS" : {
"$kind" : "EntitySet",
"$NavigationPropertyBinding" : {
"TEAM_2_EMPLOYEES" : "EMPLOYEES"
},
"$Type" : "tea_busi.TEAM"
},
"VoidAction" : {
"$kind" : "ActionImport",
"$Action" : "name.space.VoidAction"
}
},
"tea_busi.EQUIPMENT" : {
"$kind" : "EntityType",
"$Key" : ["ID"],
"ID" : {
"$kind" : "Property",
"$Type" : "Edm.Int32",
"$Nullable" : false
}
},
"tea_busi.FuGetEmployeeMaxAge" : [{
"$kind" : "Function",
"$ReturnType" : {
"$Type" : "Edm.Int16"
}
}],
"tea_busi.TEAM" : {
"$kind" : "EntityType",
"$Key" : ["Team_Id"],
"Team_Id" : {
"$kind" : "Property",
"$Type" : "name.space.Id",
"$Nullable" : false,
"$MaxLength" : 10
},
"Name" : {
"$kind" : "Property",
"$Type" : "Edm.String",
"$Nullable" : false,
"$MaxLength" : 40
},
"TEAM_2_EMPLOYEES" : {
"$kind" : "NavigationProperty",
"$isCollection" : true,
"$OnDelete" : "None",
"[email protected]" : "None of my business",
"$ReferentialConstraint" : {
"foo" : "bar",
"[email protected]" : "Just a Gigolo"
},
"$Type" : "tea_busi.Worker"
},
"TEAM_2_CONTAINED_S" : {
"$ContainsTarget" : true,
"$kind" : "NavigationProperty",
"$Type" : "tea_busi.ContainedS"
},
"TEAM_2_CONTAINED_C" : {
"$ContainsTarget" : true,
"$kind" : "NavigationProperty",
"$isCollection" : true,
"$Type" : "tea_busi.ContainedC"
},
// Note: "value" is a symbolic name for an operation's return type iff. it is
// primitive
"value" : {
"$kind" : "Property",
"$Type" : "Edm.String"
}
},
"tea_busi.Worker" : {
"$kind" : "EntityType",
"$Key" : ["ID"],
"ID" : {
"$kind" : "Property",
"$Type" : "Edm.String",
"$Nullable" : false,
"$MaxLength" : 4
},
"AGE" : {
"$kind" : "Property",
"$Type" : "Edm.Int16",
"$Nullable" : false
},
"EMPLOYEE_2_CONTAINED_S" : {
"$ContainsTarget" : true,
"$kind" : "NavigationProperty",
"$Type" : "tea_busi.ContainedS"
},
"EMPLOYEE_2_EQUIPM€NTS" : {
"$kind" : "NavigationProperty",
"$isCollection" : true,
"$Type" : "tea_busi.EQUIPMENT",
"$Nullable" : false
},
"EMPLOYEE_2_TEAM" : {
"$kind" : "NavigationProperty",
"$Type" : "tea_busi.TEAM",
"$Nullable" : false
},
"SALÃRY" : {
"$kind" : "Property",
"$Type" : "tea_busi.ComplexType_Salary"
}
},
"$$Loop" : "$$Loop/", // some endless loop
"$$Term" : "name.space.Term" // replacement for any reference to the term
},
oContainerData = mScope["tea_busi.DefaultContainer"],
aOverloadedAction = mScope["name.space.OverloadedAction"],
mSupplierScope = {
"$Version" : "4.0",
"tea_busi_supplier.v0001." : {
"$kind" : "Schema"
},
"tea_busi_supplier.v0001.Supplier" : {
"$kind" : "EntityType",
"Supplier_Name" : {
"$kind" : "Property",
"$Type" : "Edm.String"
}
}
},
oTeamData = mScope["tea_busi.TEAM"],
oTeamLineItem = mScope.$Annotations["tea_busi.TEAM"]["@UI.LineItem"],
oWorkerData = mScope["tea_busi.Worker"],
mXServiceScope = {
"$Version" : "4.0",
"$Annotations" : {}, // simulate ODataMetaModel#_mergeAnnotations
"$EntityContainer" : "tea_busi.v0001.DefaultContainer",
"$Reference" : {
// Note: Do not reference tea_busi_supplier directly from here! We want to test the
// special case that it is only indirectly referenced.
"../../../../default/iwbep/tea_busi_foo/0001/$metadata" : {
"$Include" : [
"tea_busi_foo.v0001."
]
},
"../../../../default/iwbep/tea_busi_product/0001/$metadata" : {
"$Include" : [
"ignore.me.",
"tea_busi_product.v0001."
]
},
"/empty/$metadata" : {
"$Include" : [
"empty.",
"I.still.haven't.found.what.I'm.looking.for."
]
}
},
"tea_busi.v0001." : {
"$kind" : "Schema"
},
"tea_busi.v0001.DefaultContainer" : {
"$kind" : "EntityContainer",
"EQUIPM€NTS" : {
"$kind" : "EntitySet",
"$Type" : "tea_busi.v0001.EQUIPMENT"
}
},
"tea_busi.v0001.EQUIPMENT" : {
"$kind" : "EntityType",
"EQUIPMENT_2_PRODUCT" : {
"$kind" : "NavigationProperty",
"$Type" : "tea_busi_product.v0001.Product"
}
}
},
aAllScopes = [
mMostlyEmptyScope,
mProductScope,
mScope,
mSupplierScope,
mXServiceScope
];
/**
* Checks the "get*" and "request*" methods corresponding to the named "fetch*" method,
* using the given arguments.
*
* @param {object} oTestContext
* the QUnit "this" object
* @param {object} assert
* the QUnit "assert" object
* @param {string} sMethodName
* method name "fetch*"
* @param {object[]} aArguments
* method arguments
* @param {boolean} [bThrow=false]
* whether the "get*" method throws if the promise is not fulfilled
* @returns {Promise}
* the "request*" method's promise
*/
function checkGetAndRequest(oTestContext, assert, sMethodName, aArguments, bThrow) {
var oExpectation,
sGetMethodName = sMethodName.replace("fetch", "get"),
oMetaModel = oTestContext.oMetaModel,
oReason = new Error("rejected"),
oRejectedPromise = Promise.reject(oReason),
sRequestMethodName = sMethodName.replace("fetch", "request"),
oResult = {},
oSyncPromise = SyncPromise.resolve(oRejectedPromise);
// resolve...
oExpectation = oTestContext.mock(oMetaModel).expects(sMethodName).exactly(4);
oExpectation = oExpectation.withExactArgs.apply(oExpectation, aArguments);
oExpectation.returns(SyncPromise.resolve(oResult));
// get: fulfilled
assert.strictEqual(oMetaModel[sGetMethodName].apply(oMetaModel, aArguments), oResult);
// reject...
oExpectation.returns(oSyncPromise);
oTestContext.mock(Promise).expects("resolve")
.withExactArgs(sinon.match.same(oSyncPromise))
.returns(oRejectedPromise); // return any promise (this is not unwrapping!)
// request (promise still pending!)
assert.strictEqual(oMetaModel[sRequestMethodName].apply(oMetaModel, aArguments),
oRejectedPromise);
// get: pending
if (bThrow) {
assert.throws(function () {
oMetaModel[sGetMethodName].apply(oMetaModel, aArguments);
}, new Error("Result pending"));
} else {
assert.strictEqual(oMetaModel[sGetMethodName].apply(oMetaModel, aArguments), undefined,
"pending");
}
return oSyncPromise.catch(function () {
// get: rejected
if (bThrow) {
assert.throws(function () {
oMetaModel[sGetMethodName].apply(oMetaModel, aArguments);
}, oReason);
} else {
assert.strictEqual(oMetaModel[sGetMethodName].apply(oMetaModel, aArguments),
undefined, "rejected");
}
});
}
/**
* Returns a clone, that is a deep copy, of the given object.
*
* @param {object} o
* any serializable object
* @returns {object}
* a deep copy of <code>o</code>
*/
function clone(o) {
return JSON.parse(JSON.stringify(o));
}
/**
* Runs the given test for each name/value pair in the given fixture. The name is interpreted
* as a path "[<sContextPath>'|']<sMetaPath>" and cut accordingly. The test is called with
* an almost resolved sPath (just '|' replaced by '/').
*
* @param {object} mFixture
* map<string, any>
* @param {function} fnTest
* function(string sPath, any vResult, string sContextPath, string sMetaPath)
*/
function forEach(mFixture, fnTest) {
var sPath;
for (sPath in mFixture) {
var i = sPath.indexOf("|"),
sContextPath = "",
sMetaPath = sPath.slice(i + 1),
vValue = mFixture[sPath];
if (i >= 0) {
sContextPath = sPath.slice(0, i);
sPath = sContextPath + "/" + sMetaPath;
}
fnTest(sPath, vValue, sContextPath, sMetaPath);
}
}
//*********************************************************************************************
QUnit.module("sap.ui.model.odata.v4.ODataMetaModel", {
// remember copy to ensure test isolation
mOriginalScopes : clone(aAllScopes),
afterEach : function (assert) {
assert.deepEqual(aAllScopes, this.mOriginalScopes, "metadata unchanged");
},
/*
* Allow warnings if told to; always suppress debug messages.
*/
allowWarnings : function (assert, bWarn) {
this.mock(jQuery.sap.log).expects("isLoggable").atLeast(1)
.withExactArgs(sinon.match.number, sODataMetaModel)
.callsFake(function (iLogLevel) {
switch (iLogLevel) {
case jQuery.sap.log.Level.DEBUG:
return false;
case jQuery.sap.log.Level.WARNING:
return bWarn;
default:
return true;
}
});
},
beforeEach : function () {
var oMetadataRequestor = {
read : function () { throw new Error(); }
},
sUrl = "/a/b/c/d/e/$metadata";
this.oLogMock = this.mock(jQuery.sap.log);
this.oLogMock.expects("warning").never();
this.oLogMock.expects("error").never();
this.oMetaModel = new ODataMetaModel(oMetadataRequestor, sUrl);
this.oMetaModelMock = this.mock(this.oMetaModel);
this.oModel = {
reportError : function () {
throw new Error("Unsupported operation");
},
resolve : ODataModel.prototype.resolve
};
},
/*
* Expect the given debug message with the given path, but only if debug level is on.
*/
expectDebug : function (bDebug, sMessage, sPath) {
this.oLogMock.expects("isLoggable")
.withExactArgs(jQuery.sap.log.Level.DEBUG, sODataMetaModel).returns(bDebug);
this.oLogMock.expects("debug").exactly(bDebug ? 1 : 0)
.withExactArgs(sMessage, sPath, sODataMetaModel);
},
/*
* Expects "fetchEntityContainer" to be called at least once on the current meta model,
* returning a clone of the given scope.
*
* @param {object} mScope
*/
expectFetchEntityContainer : function (mScope) {
mScope = clone(mScope);
this.oMetaModel.validate("n/a", mScope); // fill mSchema2MetadataUrl!
this.oMetaModelMock.expects("fetchEntityContainer").atLeast(1)
.returns(SyncPromise.resolve(mScope));
}
});
//*********************************************************************************************
QUnit.test("basics", function (assert) {
var sAnnotationUri = "my/annotation.xml",
aAnnotationUris = [ sAnnotationUri, "uri2.xml"],
oModel = {},
oMetadataRequestor = this.oMetaModel.oRequestor,
sUrl = "/~/$metadata",
oMetaModel;
// code under test
assert.strictEqual(ODataMetaModel.prototype.$$valueAsPromise, true);
// code under test
oMetaModel = new ODataMetaModel(oMetadataRequestor, sUrl);
assert.ok(oMetaModel instanceof MetaModel);
assert.strictEqual(oMetaModel.aAnnotationUris, undefined);
assert.ok(oMetaModel.hasOwnProperty("aAnnotationUris"), "own property aAnnotationUris");
assert.strictEqual(oMetaModel.oRequestor, oMetadataRequestor);
assert.strictEqual(oMetaModel.sUrl, sUrl);
assert.strictEqual(oMetaModel.getDefaultBindingMode(), BindingMode.OneTime);
assert.strictEqual(oMetaModel.toString(),
"sap.ui.model.odata.v4.ODataMetaModel: /~/$metadata");
// code under test
oMetaModel.setDefaultBindingMode(BindingMode.OneWay);
assert.strictEqual(oMetaModel.getDefaultBindingMode(), BindingMode.OneWay);
// code under test
oMetaModel = new ODataMetaModel(oMetadataRequestor, sUrl, aAnnotationUris);
assert.strictEqual(oMetaModel.aAnnotationUris, aAnnotationUris, "arrays are passed");
// code under test
oMetaModel = new ODataMetaModel(oMetadataRequestor, sUrl, sAnnotationUri);
assert.deepEqual(oMetaModel.aAnnotationUris, [sAnnotationUri],
"single annotation is wrapped");
// code under test
oMetaModel = new ODataMetaModel(null, null, null, oModel);
// code under test
assert.strictEqual(oMetaModel.getAdapterFactoryModulePath(),
"sap/ui/model/odata/v4/meta/ODataAdapterFactory");
});
//*********************************************************************************************
QUnit.test("forbidden", function (assert) {
assert.throws(function () { //TODO implement
this.oMetaModel.bindTree();
}, new Error("Unsupported operation: v4.ODataMetaModel#bindTree"));
assert.throws(function () {
this.oMetaModel.getOriginalProperty();
}, new Error("Unsupported operation: v4.ODataMetaModel#getOriginalProperty"));
assert.throws(function () { //TODO implement
this.oMetaModel.isList();
}, new Error("Unsupported operation: v4.ODataMetaModel#isList"));
assert.throws(function () {
this.oMetaModel.refresh();
}, new Error("Unsupported operation: v4.ODataMetaModel#refresh"));
assert.throws(function () {
this.oMetaModel.setLegacySyntax(); // argument does not matter!
}, new Error("Unsupported operation: v4.ODataMetaModel#setLegacySyntax"));
assert.throws(function () {
this.oMetaModel.setDefaultBindingMode(BindingMode.TwoWay);
});
});
//*********************************************************************************************
[
undefined,
["/my/annotation.xml"],
["/my/annotation.xml", "/another/annotation.xml"]
].forEach(function (aAnnotationURI) {
var title = "fetchEntityContainer - " + JSON.stringify(aAnnotationURI);
QUnit.test(title, function (assert) {
var oRequestorMock = this.mock(this.oMetaModel.oRequestor),
aReadResults,
mRootScope = {},
oSyncPromise,
that = this;
function expectReads(bPrefetch) {
oRequestorMock.expects("read")
.withExactArgs(that.oMetaModel.sUrl, false, bPrefetch)
.returns(Promise.resolve(mRootScope));
aReadResults = [];
(aAnnotationURI || []).forEach(function (sAnnotationUrl) {
var oAnnotationResult = {};
aReadResults.push(oAnnotationResult);
oRequestorMock.expects("read")
.withExactArgs(sAnnotationUrl, true, bPrefetch)
.returns(Promise.resolve(oAnnotationResult));
});
}
this.oMetaModel.aAnnotationUris = aAnnotationURI;
this.oMetaModelMock.expects("_mergeAnnotations").never();
expectReads(true);
// code under test
assert.strictEqual(this.oMetaModel.fetchEntityContainer(true), null);
// bPrefetch => no caching
expectReads(true);
// code under test
assert.strictEqual(this.oMetaModel.fetchEntityContainer(true), null);
// now test [bPrefetch=false]
expectReads();
this.oMetaModelMock.expects("_mergeAnnotations")
.withExactArgs(mRootScope, aReadResults);
// code under test
oSyncPromise = this.oMetaModel.fetchEntityContainer();
// pending
assert.strictEqual(oSyncPromise.isPending(), true);
// already caching
assert.strictEqual(this.oMetaModel.fetchEntityContainer(), oSyncPromise);
assert.strictEqual(this.oMetaModel.fetchEntityContainer(true), oSyncPromise,
"now bPrefetch makes no difference");
return oSyncPromise.then(function (mRootScope0) {
assert.strictEqual(mRootScope0, mRootScope);
// still caching
assert.strictEqual(that.oMetaModel.fetchEntityContainer(), oSyncPromise);
});
});
});
//TODO later support "$Extends" : "<13.1.2 EntityContainer Extends>"
//*********************************************************************************************
QUnit.test("fetchEntityContainer: _mergeAnnotations fails", function (assert) {
var oError = new Error();
this.mock(this.oMetaModel.oRequestor).expects("read")
.withExactArgs(this.oMetaModel.sUrl, false, undefined)
.returns(Promise.resolve({}));
this.oMetaModelMock.expects("_mergeAnnotations").throws(oError);
return this.oMetaModel.fetchEntityContainer().then(function () {
assert.ok(false, "unexpected success");
}, function (oError0) {
assert.strictEqual(oError0, oError);
});
});
//*********************************************************************************************
QUnit.test("getMetaContext", function (assert) {
var oMetaContext;
this.oMetaModelMock.expects("getMetaPath")
.withExactArgs("/Foo/-1/bar")
.returns("/Foo/bar");
// code under test
oMetaContext = this.oMetaModel.getMetaContext("/Foo/-1/bar");
assert.strictEqual(oMetaContext.getModel(), this.oMetaModel);
assert.strictEqual(oMetaContext.getPath(), "/Foo/bar");
});
//*********************************************************************************************
QUnit.test("getMetaPath", function (assert) {
var sMetaPath = {},
sPath = {};
this.mock(_Helper).expects("getMetaPath")
.withExactArgs(sinon.match.same(sPath)).returns(sMetaPath);
assert.strictEqual(this.oMetaModel.getMetaPath(sPath), sMetaPath);
});
//*********************************************************************************************
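// Note: each key below encodes the context path and the path to resolve, separated by "|";
// the forEach helper splits the key and passes both parts on to the test callback below.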
forEach({
// absolute path
"/" : "/",
"/foo/bar|/" : "/", // context is ignored
// relative path
"" : undefined, // w/o context --> important for MetaModel#createBindingContext etc.
"|foo/bar" : undefined, // w/o context
"/|" : "/",
"/|foo/bar" : "/foo/bar",
"/foo|bar" : "/foo/bar",
"/foo/bar|" : "/foo/bar",
"/foo/|bar" : "/foo/bar",
// trailing slash is preserved
"/foo/bar/" : "/foo/bar/",
"/foo|bar/" : "/foo/bar/",
// relative path that starts with a dot
"/foo/bar|./" : "/foo/bar/",
"/foo|./bar/" : "/foo/bar/",
"/foo/|./bar/" : "/foo/bar/",
// annotations
"/foo|@bar" : "/foo@bar",
"/foo/|@bar" : "/foo/@bar",
"/foo|./@bar" : "/foo/@bar",
"/foo/|./@bar" : "/foo/@bar",
// technical properties
"/foo|$kind" : "/foo/$kind",
"/foo/|$kind" : "/foo/$kind",
"/foo|./$kind" : "/foo/$kind",
"/foo/|./$kind" : "/foo/$kind"
}, function (sPath, sResolvedPath, sContextPath, sMetaPath) {
QUnit.test("resolve: " + sContextPath + " > " + sMetaPath, function (assert) {
var oContext = sContextPath && this.oMetaModel.getContext(sContextPath);
assert.strictEqual(this.oMetaModel.resolve(sMetaPath, oContext), sResolvedPath);
});
});
//TODO make sure that Context objects are only created for absolute paths?!
//*********************************************************************************************
[".bar", ".@bar", ".$kind"].forEach(function (sPath) {
QUnit.test("resolve: unsupported relative path " + sPath, function (assert) {
var oContext = this.oMetaModel.getContext("/foo");
assert.raises(function () {
this.oMetaModel.resolve(sPath, oContext);
}, new Error("Unsupported relative path: " + sPath));
});
});
//*********************************************************************************************
QUnit.test("resolve: undefined", function (assert) {
assert.strictEqual(
this.oMetaModel.resolve(undefined, this.oMetaModel.getContext("/")),
"/");
});
//*********************************************************************************************
//TODO better map meta model path to pure JSON path (look up inside JsonModel)?
// what about @sapui.name then, which requires a literal as expected result?
// --> we could distinguish "/<path>" from "<literal>"
forEach({
// "JSON" drill-down ----------------------------------------------------------------------
"/$EntityContainer" : "tea_busi.DefaultContainer",
"/tea_busi./$kind" : "Schema",
"/tea_busi.DefaultContainer/$kind" : "EntityContainer",
// trailing slash: object vs. name --------------------------------------------------------
"/" : oContainerData,
"/$EntityContainer/" : oContainerData,
"/T€AMS/" : oTeamData,
"/T€AMS/$Type/" : oTeamData,
// scope lookup ("17.3 QualifiedName") ----------------------------------------------------
"/$EntityContainer/$kind" : "EntityContainer",
"/$EntityContainer/T€AMS/$Type" : "tea_busi.TEAM",
"/$EntityContainer/T€AMS/$Type/Team_Id" : oTeamData.Team_Id,
// "17.3 QualifiedName", e.g. type cast ---------------------------------------------------
"/tea_busi." : mScope["tea_busi."], // access to schema
"/tea_busi.DefaultContainer/EMPLOYEES/tea_busi.Worker/AGE" : oWorkerData.AGE,
// implicit $Type insertion ---------------------------------------------------------------
"/T€AMS/Team_Id" : oTeamData.Team_Id,
"/T€AMS/TEAM_2_EMPLOYEES" : oTeamData.TEAM_2_EMPLOYEES,
"/T€AMS/TEAM_2_EMPLOYEES/AGE" : oWorkerData.AGE,
// scope lookup, then implicit $Type insertion!
"/$$Term/AGE" : oWorkerData.AGE,
// "17.2 SimpleIdentifier": lookup inside current schema child ----------------------------
"/T€AMS" : oContainerData["T€AMS"],
"/T€AMS/$NavigationPropertyBinding/TEAM_2_EMPLOYEES/" : oWorkerData,
"/T€AMS/$NavigationPropertyBinding/TEAM_2_EMPLOYEES/$Type" : "tea_busi.Worker",
"/T€AMS/$NavigationPropertyBinding/TEAM_2_EMPLOYEES/AGE" : oWorkerData.AGE,
// operations -----------------------------------------------------------------------------
"/OverloadedAction" : oContainerData["OverloadedAction"],
"/OverloadedAction/$Action" : "name.space.OverloadedAction",
"/ChangeManagerOfTeam/" : oTeamData,
//TODO mScope[mScope["..."][0].$ReturnType.$Type] is where the next OData simple identifier
// would live in case of entity/complex type, but we would like to avoid warnings for
// primitive types - how to tell the difference?
// "/GetEmployeeMaxAge/" : "Edm.Int16",
// Note: "value" is a symbolic name for the whole return type iff. it is primitive
"/GetEmployeeMaxAge/value" : mScope["tea_busi.FuGetEmployeeMaxAge"][0].$ReturnType,
"/GetEmployeeMaxAge/value/$Type" : "Edm.Int16", // path may continue!
"/tea_busi.FuGetEmployeeMaxAge/value"
: mScope["tea_busi.FuGetEmployeeMaxAge"][0].$ReturnType,
"/name.space.DerivedPrimitiveFunction/value"
//TODO merge facets of return type and type definition?!
: mScope["name.space.DerivedPrimitiveFunction"][0].$ReturnType,
"/ChangeManagerOfTeam/value" : oTeamData.value,
// action overloads -----------------------------------------------------------------------
//TODO @$ui5.overload: support for split segments? etc.
"/OverloadedAction/@$ui5.overload" : sinon.match.array.deepEquals([aOverloadedAction[2]]),
"/OverloadedAction/@$ui5.overload/0" : aOverloadedAction[2],
// Note: trailing slash does not make a difference in "JSON" drill-down
"/OverloadedAction/@$ui5.overload/0/$ReturnType/" : aOverloadedAction[2].$ReturnType,
"/OverloadedAction/@$ui5.overload/0/$ReturnType/$Type" : "tea_busi.ComplexType_Salary",
"/OverloadedAction/" : mScope["tea_busi.ComplexType_Salary"],
"/name.space.OverloadedAction" : aOverloadedAction,
"/T€AMS/NotFound/name.space.OverloadedAction" : aOverloadedAction,
"/name.space.OverloadedAction/1" : aOverloadedAction[1],
"/OverloadedAction/$Action/1" : aOverloadedAction[1],
"/OverloadedAction/@$ui5.overload/AMOUNT" : mScope["tea_busi.ComplexType_Salary"].AMOUNT,
"/OverloadedAction/AMOUNT" : mScope["tea_busi.ComplexType_Salary"].AMOUNT,
"/T€AMS/name.space.OverloadedAction/Team_Id" : oTeamData.Team_Id,
"/T€AMS/name.space.OverloadedAction/@$ui5.overload"
: sinon.match.array.deepEquals([aOverloadedAction[1]]),
"/name.space.OverloadedAction/@$ui5.overload" : sinon.match.array.deepEquals([]),
// only "Action" and "Function" is expected as $kind, but others are not filtered out!
"/name.space.BrokenOverloads"
: sinon.match.array.deepEquals(mScope["name.space.BrokenOverloads"]),
// annotations ----------------------------------------------------------------------------
"/@DefaultContainer"
: mScope.$Annotations["tea_busi.DefaultContainer"]["@DefaultContainer"],
"/tea_busi.DefaultContainer@DefaultContainer"
: mScope.$Annotations["tea_busi.DefaultContainer"]["@DefaultContainer"],
"/tea_busi.DefaultContainer/@DefaultContainer" // w/o $Type, slash makes no difference!
: mScope.$Annotations["tea_busi.DefaultContainer"]["@DefaultContainer"],
"/$EntityContainer@DefaultContainer" // Note: we could change this
: mScope.$Annotations["tea_busi.DefaultContainer"]["@DefaultContainer"],
"/$EntityContainer/@DefaultContainer" // w/o $Type, slash makes no difference!
: mScope.$Annotations["tea_busi.DefaultContainer"]["@DefaultContainer"],
"/T€AMS/$Type/@UI.LineItem" : oTeamLineItem,
"/T€AMS/@UI.LineItem" : oTeamLineItem,
"/T€AMS/@UI.LineItem/0/Label" : oTeamLineItem[0].Label,
"/T€AMS/@UI.LineItem/0/@UI.Importance" : oTeamLineItem[0]["@UI.Importance"],
"/T€AMS@T€AMS"
: mScope.$Annotations["tea_busi.DefaultContainer/T€AMS"]["@T€AMS"],
"/T€AMS/@Common.Text"
: mScope.$Annotations["tea_busi.TEAM"]["@Common.Text"],
"/T€AMS/@[email protected]"
: mScope.$Annotations["tea_busi.TEAM"]["@[email protected]"],
"/T€AMS/[email protected]"
: mScope.$Annotations["tea_busi.TEAM/Team_Id"]["@Common.Text"],
"/T€AMS/[email protected]@UI.TextArrangement"
: mScope.$Annotations["tea_busi.TEAM/Team_Id"]["@[email protected]"],
"/tea_busi./@Schema" : mScope["tea_busi."]["@Schema"],
// inline annotations
"/ChangeManagerOfTeam/$Action/0/$ReturnType/@Common.Label" : "Hail to the Chief",
"/T€AMS/TEAM_2_EMPLOYEES/[email protected]" : "None of my business",
"/T€AMS/TEAM_2_EMPLOYEES/$ReferentialConstraint/[email protected]" : "Just a Gigolo",
"/T€AMS/@UI.LineItem/0/[email protected]" : "Team ID's Label",
"/T€AMS/@[email protected]" : "Best Badge Ever!", // annotation of annotation
"/T€AMS/@UI.Badge/@Common.Label" : "Label inside", // annotation of record
// "@" to access to all annotations, e.g. for iteration
"/T€AMS@" : mScope.$Annotations["tea_busi.DefaultContainer/T€AMS"],
"/T€AMS/@" : mScope.$Annotations["tea_busi.TEAM"],
"/T€AMS/Team_Id@" : mScope.$Annotations["tea_busi.TEAM/Team_Id"],
// "14.5.12 Expression edm:Path"
// Note: see integration test "{field>Value/$Path@Common.Label}"
"/T€AMS/@UI.LineItem/0/Value/[email protected]"
: mScope.$Annotations["tea_busi.TEAM/Team_Id"]["@Common.Text"],
"/T€AMS/@UI.LineItem/0/Value/$Path/@Common.Label"
: mScope.$Annotations["name.space.Id"]["@Common.Label"],
"/EMPLOYEES/@UI.LineItem/0/Value/[email protected]"
: mScope.$Annotations["tea_busi.TEAM/Team_Id"]["@Common.Text"],
// "14.5.2 Expression edm:AnnotationPath"
"/EMPLOYEES/@UI.Facets/0/Target/$AnnotationPath/"
: mScope.$Annotations["tea_busi.Worker"]["@UI.LineItem"],
"/EMPLOYEES/@UI.Facets/1/Target/$AnnotationPath/"
: mScope.$Annotations["tea_busi.Worker/EMPLOYEE_2_TEAM"]["@Common.Label"],
"/EMPLOYEES/@UI.Facets/2/Target/$AnnotationPath/"
: mScope.$Annotations["tea_busi.TEAM"]["@UI.LineItem"],
"/EMPLOYEES/@UI.Facets/3/Target/$AnnotationPath/"
: mScope.$Annotations["tea_busi.TEAM"]["@UI.LineItem"],
// @sapui.name ----------------------------------------------------------------------------
"/@sapui.name" : "tea_busi.DefaultContainer",
"/[email protected]" : "tea_busi.DefaultContainer",
"/tea_busi.DefaultContainer/@sapui.name" : "tea_busi.DefaultContainer", // no $Type here!
"/$EntityContainer/@sapui.name" : "tea_busi.DefaultContainer",
"/T€[email protected]" : "T€AMS",
"/T€AMS/@sapui.name" : "tea_busi.TEAM",
"/T€AMS/[email protected]" : "Team_Id",
"/T€AMS/[email protected]" : "TEAM_2_EMPLOYEES",
"/T€AMS/$NavigationPropertyBinding/TEAM_2_EMPLOYEES/@sapui.name" : "tea_busi.Worker",
"/T€AMS/$NavigationPropertyBinding/TEAM_2_EMPLOYEES/[email protected]" : "AGE",
"/T€AMS@T€[email protected]" : "@T€AMS",
"/T€AMS@/@T€[email protected]" : "@T€AMS",
"/T€AMS@T€AMS/@sapui.name" : "@T€AMS", // no $Type inside @T€AMS, / makes no difference!
"/T€AMS@/@T€AMS/@sapui.name" : "@T€AMS", // dito
"/T€AMS/@UI.LineItem/0/@UI.Importance/@sapui.name" : "@UI.Importance", // in "JSON" mode
"/T€AMS/Team_Id@/@[email protected]" : "@Common.Label" // avoid indirection here!
}, function (sPath, vResult) {
QUnit.test("fetchObject: " + sPath, function (assert) {
var oSyncPromise;
this.oMetaModelMock.expects("fetchEntityContainer")
.returns(SyncPromise.resolve(mScope));
// code under test
oSyncPromise = this.oMetaModel.fetchObject(sPath);
assert.strictEqual(oSyncPromise.isFulfilled(), true);
if (vResult && typeof vResult === "object" && "test" in vResult) {
// Sinon.JS matcher
assert.ok(vResult.test(oSyncPromise.getResult()), vResult);
} else {
assert.strictEqual(oSyncPromise.getResult(), vResult);
}
// self-guard against a complex right-hand side accidentally evaluating to undefined
assert.notStrictEqual(vResult, undefined, "use this test for defined results only!");
});
});
//TODO annotations at enum member ".../<10.2.1 Member Name>@..." (Note: "<10.2.2 Member Value>"
// might be a string! Avoid indirection!)
//TODO special cases where inline and external targeting annotations need to be merged!
//TODO support also external targeting from a different schema!
//TODO MySchema.MyFunction/MyParameter --> requires search in array?!
//TODO $count?
//TODO "For annotations targeting a property of an entity type or complex type, the path
// expression is evaluated starting at the outermost entity type or complex type named in the
// Target of the enclosing edm:Annotations element, i.e. an empty path resolves to the
// outermost type, and the first segment of a non-empty path MUST be a property or navigation
// property of the outermost type, a type cast, or a term cast." --> consequences for us?
//*********************************************************************************************
[
// "JSON" drill-down ----------------------------------------------------------------------
"/$missing",
"/tea_busi.DefaultContainer/$missing",
"/tea_busi.DefaultContainer/missing", // "17.2 SimpleIdentifier" treated like any property
"/tea_busi.FuGetEmployeeMaxAge/0/tea_busi.FuGetEmployeeMaxAge", // "0" switches to JSON
"/tea_busi.TEAM/$Key/this.is.missing",
"/tea_busi.Worker/missing", // entity container (see above) treated like any schema child
// scope lookup ("17.3 QualifiedName") ----------------------------------------------------
"/$EntityContainer/$missing",
"/$EntityContainer/missing",
// implicit $Type insertion ---------------------------------------------------------------
"/T€AMS/$Key", // avoid $Type insertion for following $ segments
"/T€AMS/missing",
"/T€AMS/$missing",
// annotations ----------------------------------------------------------------------------
"/tea_busi.Worker@missing",
"/tea_busi.Worker/@missing",
// "@" to access to all annotations, e.g. for iteration
"/tea_busi.Worker/@/@missing",
// operations -----------------------------------------------------------------------------
"/VoidAction/"
].forEach(function (sPath) {
QUnit.test("fetchObject: " + sPath + " --> undefined", function (assert) {
var oSyncPromise;
this.oMetaModelMock.expects("fetchEntityContainer")
.returns(SyncPromise.resolve(mScope));
// code under test
oSyncPromise = this.oMetaModel.fetchObject(sPath);
assert.strictEqual(oSyncPromise.isFulfilled(), true);
assert.strictEqual(oSyncPromise.getResult(), undefined);
});
});
//*********************************************************************************************
QUnit.test("fetchObject: Invalid relative path w/o context", function (assert) {
var sMetaPath = "some/relative/path",
oSyncPromise;
this.oLogMock.expects("error").withExactArgs("Invalid relative path w/o context", sMetaPath,
sODataMetaModel);
// code under test
oSyncPromise = this.oMetaModel.fetchObject(sMetaPath, null);
assert.strictEqual(oSyncPromise.isFulfilled(), true);
assert.strictEqual(oSyncPromise.getResult(), null);
});
//*********************************************************************************************
["/empty.Container/@", "/T€AMS/Name@"].forEach(function (sPath) {
QUnit.test("fetchObject returns {} (anonymous empty object): " + sPath, function (assert) {
var oSyncPromise;
this.oMetaModelMock.expects("fetchEntityContainer")
.returns(SyncPromise.resolve(mScope));
// code under test
oSyncPromise = this.oMetaModel.fetchObject(sPath);
assert.strictEqual(oSyncPromise.isFulfilled(), true);
assert.deepEqual(oSyncPromise.getResult(), {}); // strictEqual would not work!
});
});
//*********************************************************************************************
QUnit.test("fetchObject without $Annotations", function (assert) {
var oSyncPromise;
this.oMetaModelMock.expects("fetchEntityContainer")
.returns(SyncPromise.resolve(mMostlyEmptyScope));
// code under test
oSyncPromise = this.oMetaModel.fetchObject("/@DefaultContainer");
assert.strictEqual(oSyncPromise.isFulfilled(), true);
assert.deepEqual(oSyncPromise.getResult(), undefined); // strictEqual would not work!
});
//TODO if no annotations exist for an external target, avoid {} internally unless "@" is used?
//*********************************************************************************************
[false, true].forEach(function (bWarn) {
forEach({
"/$$Loop/" : "Invalid recursion at /$$Loop",
// Invalid segment (warning) ----------------------------------------------------------
"//$Foo" : "Invalid empty segment",
"/tea_busi./$Annotations" : "Invalid segment: $Annotations", // entrance forbidden!
// Unknown ... ------------------------------------------------------------------------
"/not.Found" : "Unknown qualified name not.Found",
"/Me/not.Found" : "Unknown qualified name not.Found", // no "at /.../undefined"!
"/not.Found@missing" : "Unknown qualified name not.Found",
"/." : "Unknown child . of tea_busi.DefaultContainer",
"/Foo" : "Unknown child Foo of tea_busi.DefaultContainer",
"/$EntityContainer/$kind/" : "Unknown child EntityContainer"
+ " of tea_busi.DefaultContainer at /$EntityContainer/$kind",
// implicit $Action, $Function, $Type insertion
"/name.space.BadContainer/DanglingActionImport/" : "Unknown qualified name not.Found"
+ " at /name.space.BadContainer/DanglingActionImport/$Action",
"/name.space.BadContainer/DanglingFunctionImport/" :
"Unknown qualified name not.Found"
+ " at /name.space.BadContainer/DanglingFunctionImport/$Function",
"/name.space.Broken/" :
"Unknown qualified name not.Found at /name.space.Broken/$Type",
"/name.space.BrokenFunction/" : "Unknown qualified name not.Found"
+ " at /name.space.BrokenFunction/0/$ReturnType/$Type",
//TODO align with "/GetEmployeeMaxAge/" : "Edm.Int16"
"/GetEmployeeMaxAge/@sapui.name" : "Unknown qualified name Edm.Int16"
+ " at /tea_busi.FuGetEmployeeMaxAge/0/$ReturnType/$Type",
"/GetEmployeeMaxAge/value/@sapui.name" : "Unknown qualified name Edm.Int16"
+ " at /tea_busi.FuGetEmployeeMaxAge/0/$ReturnType/$Type",
// implicit scope lookup
"/name.space.Broken/$Type/" :
"Unknown qualified name not.Found at /name.space.Broken/$Type",
"/tea_busi.DefaultContainer/$kind/@sapui.name" : "Unknown child EntityContainer"
+ " of tea_busi.DefaultContainer at /tea_busi.DefaultContainer/$kind",
// Unsupported path before @sapui.name ------------------------------------------------
"/[email protected]" : "Unsupported path before @sapui.name",
"/tea_busi.FuGetEmployeeMaxAge/[email protected]" : "Unsupported path before @sapui.name",
"/tea_busi.TEAM/$Key/not.Found/@sapui.name" : "Unsupported path before @sapui.name",
"/GetEmployeeMaxAge/[email protected]" : "Unsupported path before @sapui.name",
// Unsupported path after @sapui.name -------------------------------------------------
"/@sapui.name/foo" : "Unsupported path after @sapui.name",
"/$EntityContainer/T€AMS/@sapui.name/foo" : "Unsupported path after @sapui.name",
// Unsupported path after @@... -------------------------------------------------------
"/EMPLOYEES/@UI.Facets/1/Target/$AnnotationPath@@this.is.ignored/foo"
: "Unsupported path after @@this.is.ignored",
"/EMPLOYEES/@UI.Facets/1/Target/$AnnotationPath/@@this.is.ignored@foo"
: "Unsupported path after @@this.is.ignored",
"/EMPLOYEES/@UI.Facets/1/Target/$AnnotationPath@@[email protected]"
: "Unsupported path after @@this.is.ignored",
// ...is not a function but... --------------------------------------------------------
"/@@sap.ui.model.odata.v4.AnnotationHelper.invalid"
: "sap.ui.model.odata.v4.AnnotationHelper.invalid is not a function but: undefined",
"/@@sap.ui.model.odata.v4.AnnotationHelper"
: "sap.ui.model.odata.v4.AnnotationHelper is not a function but: "
+ sap.ui.model.odata.v4.AnnotationHelper,
// Unsupported overloads --------------------------------------------------------------
"/name.space.EmptyOverloads/" : "Unsupported overloads",
"/name.space.OverloadedAction/" : "Unsupported overloads",
"/name.space.OverloadedFunction/" : "Unsupported overloads"
}, function (sPath, sWarning) {
QUnit.test("fetchObject fails: " + sPath + ", warn = " + bWarn, function (assert) {
var oSyncPromise;
this.oMetaModelMock.expects("fetchEntityContainer")
.returns(SyncPromise.resolve(mScope));
this.oLogMock.expects("isLoggable")
.withExactArgs(jQuery.sap.log.Level.WARNING, sODataMetaModel).returns(bWarn);
this.oLogMock.expects("warning").exactly(bWarn ? 1 : 0)
.withExactArgs(sWarning, sPath, sODataMetaModel);
// code under test
oSyncPromise = this.oMetaModel.fetchObject(sPath);
assert.strictEqual(oSyncPromise.isFulfilled(), true);
assert.deepEqual(oSyncPromise.getResult(), undefined);
});
});
});
//*********************************************************************************************
[false, true].forEach(function (bDebug) {
forEach({
// Invalid segment (debug) ------------------------------------------------------------
"/$Foo/@bar" : "Invalid segment: @bar",
"/$Foo/$Bar" : "Invalid segment: $Bar",
"/$Foo/$Bar/$Baz" : "Invalid segment: $Bar",
"/$EntityContainer/T€AMS/Team_Id/$MaxLength/." : "Invalid segment: .",
"/$EntityContainer/T€AMS/Team_Id/$Nullable/." : "Invalid segment: .",
"/$EntityContainer/T€AMS/Team_Id/NotFound/Invalid" : "Invalid segment: Invalid"
}, function (sPath, sMessage) {
QUnit.test("fetchObject fails: " + sPath + ", debug = " + bDebug, function (assert) {
var oSyncPromise;
this.oMetaModelMock.expects("fetchEntityContainer")
.returns(SyncPromise.resolve(mScope));
this.oLogMock.expects("isLoggable")
.withExactArgs(jQuery.sap.log.Level.DEBUG, sODataMetaModel).returns(bDebug);
this.oLogMock.expects("debug").exactly(bDebug ? 1 : 0)
.withExactArgs(sMessage, sPath, sODataMetaModel);
// code under test
oSyncPromise = this.oMetaModel.fetchObject(sPath);
assert.strictEqual(oSyncPromise.isFulfilled(), true);
assert.deepEqual(oSyncPromise.getResult(), undefined);
});
});
});
//*********************************************************************************************
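// "@@<function name>" appended to a path makes fetchObject call that function with the value
// found so far plus a details object (context and schema child name) instead of returning the
// value itself - see the expectations on AnnotationHelper.isMultiple below.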
[
"/EMPLOYEES/@UI.Facets/1/Target/$AnnotationPath",
"/EMPLOYEES/@UI.Facets/1/Target/$AnnotationPath/"
].forEach(function (sPath) {
QUnit.test("fetchObject: " + sPath + "@@...isMultiple", function (assert) {
var oContext,
oInput,
fnIsMultiple = this.mock(AnnotationHelper).expects("isMultiple"),
oResult = {},
oSyncPromise;
this.oMetaModelMock.expects("fetchEntityContainer").atLeast(1) // see oInput
.returns(SyncPromise.resolve(mScope));
oInput = this.oMetaModel.getObject(sPath);
fnIsMultiple
.withExactArgs(oInput, sinon.match({
context : sinon.match.object,
schemaChildName : "tea_busi.Worker"
})).returns(oResult);
// code under test
oSyncPromise = this.oMetaModel.fetchObject(sPath
+ "@@sap.ui.model.odata.v4.AnnotationHelper.isMultiple");
assert.strictEqual(oSyncPromise.isFulfilled(), true);
assert.strictEqual(oSyncPromise.getResult(), oResult);
oContext = fnIsMultiple.args[0][1].context;
assert.ok(oContext instanceof BaseContext);
assert.strictEqual(oContext.getModel(), this.oMetaModel);
assert.strictEqual(oContext.getPath(), sPath);
assert.strictEqual(oContext.getObject(), oInput);
});
});
//*********************************************************************************************
(function () {
var mPathPrefix2SchemaChildName = {
"/EMPLOYEES/@UI.Facets/1/Target/$AnnotationPath" : "tea_busi.Worker",
"/T€AMS/@UI.LineItem/0/Value/$Path@Common.Text" : "tea_busi.TEAM",
"/T€AMS/@UI.LineItem/0/Value/$Path/@Common.Label" : "name.space.Id"
};
// use forEach so that each QUnit.test callback closes over its own sPathPrefix and
// sSchemaChildName
Object.keys(mPathPrefix2SchemaChildName).forEach(function (sPathPrefix) {
var sPath = sPathPrefix + "@@.computedAnnotation",
sSchemaChildName = mPathPrefix2SchemaChildName[sPathPrefix];
QUnit.test("fetchObject: " + sPath, function (assert) {
var fnComputedAnnotation,
oContext,
oInput,
oResult = {},
oScope = {
computedAnnotation : function () {}
},
oSyncPromise;
this.oMetaModelMock.expects("fetchEntityContainer").atLeast(1) // see oInput
.returns(SyncPromise.resolve(mScope));
oInput = this.oMetaModel.getObject(sPathPrefix);
fnComputedAnnotation = this.mock(oScope).expects("computedAnnotation");
fnComputedAnnotation
.withExactArgs(oInput, sinon.match({
context : sinon.match.object,
schemaChildName : sSchemaChildName
})).returns(oResult);
// code under test
oSyncPromise = this.oMetaModel.fetchObject(sPath, null, {scope : oScope});
assert.strictEqual(oSyncPromise.isFulfilled(), true);
assert.strictEqual(oSyncPromise.getResult(), oResult);
oContext = fnComputedAnnotation.args[0][1].context;
assert.ok(oContext instanceof BaseContext);
assert.strictEqual(oContext.getModel(), this.oMetaModel);
assert.strictEqual(oContext.getPath(), sPathPrefix);
assert.strictEqual(oContext.getObject(), oInput);
});
});
}());
//*********************************************************************************************
[false, true].forEach(function (bWarn) {
QUnit.test("fetchObject: " + "...@@... throws", function (assert) {
var oError = new Error("This call failed intentionally"),
sPath = "/@@sap.ui.model.odata.v4.AnnotationHelper.isMultiple",
oSyncPromise;
this.oMetaModelMock.expects("fetchEntityContainer")
.returns(SyncPromise.resolve(mScope));
this.mock(AnnotationHelper).expects("isMultiple")
.throws(oError);
this.oLogMock.expects("isLoggable")
.withExactArgs(jQuery.sap.log.Level.WARNING, sODataMetaModel).returns(bWarn);
this.oLogMock.expects("warning").exactly(bWarn ? 1 : 0).withExactArgs(
"Error calling sap.ui.model.odata.v4.AnnotationHelper.isMultiple: " + oError,
sPath, sODataMetaModel);
// code under test
oSyncPromise = this.oMetaModel.fetchObject(sPath);
assert.strictEqual(oSyncPromise.isFulfilled(), true);
assert.strictEqual(oSyncPromise.getResult(), undefined);
});
});
//*********************************************************************************************
[false, true].forEach(function (bDebug) {
QUnit.test("fetchObject: cross-service reference, bDebug = " + bDebug, function (assert) {
var mClonedProductScope = clone(mProductScope),
aPromises = [],
oRequestorMock = this.mock(this.oMetaModel.oRequestor),
that = this;
/*
* Expect the given debug message with the given path.
*/
function expectDebug(sMessage, sPath) {
that.expectDebug(bDebug, sMessage, sPath);
}
/*
* Code under test: ODataMetaModel#fetchObject with the given path should yield the
* given expected result.
*/
function codeUnderTest(sPath, vExpectedResult) {
aPromises.push(that.oMetaModel.fetchObject(sPath).then(function (vResult) {
assert.strictEqual(vResult, vExpectedResult);
}));
}
this.expectFetchEntityContainer(mXServiceScope);
oRequestorMock.expects("read")
.withExactArgs("/a/default/iwbep/tea_busi_product/0001/$metadata")
.returns(Promise.resolve(mClonedProductScope));
oRequestorMock.expects("read")
.withExactArgs("/a/default/iwbep/tea_busi_supplier/0001/$metadata")
.returns(Promise.resolve(mSupplierScope));
oRequestorMock.expects("read")
.withExactArgs("/empty/$metadata")
.returns(Promise.resolve(mMostlyEmptyScope));
expectDebug("Namespace tea_busi_product.v0001. found in $Include"
+ " of /a/default/iwbep/tea_busi_product/0001/$metadata"
+ " at /tea_busi.v0001.EQUIPMENT/EQUIPMENT_2_PRODUCT/$Type",
"/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/Name");
expectDebug("Reading /a/default/iwbep/tea_busi_product/0001/$metadata"
+ " at /tea_busi.v0001.EQUIPMENT/EQUIPMENT_2_PRODUCT/$Type",
"/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/Name");
expectDebug("Waiting for tea_busi_product.v0001."
+ " at /tea_busi.v0001.EQUIPMENT/EQUIPMENT_2_PRODUCT/$Type",
"/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/Name");
codeUnderTest("/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/Name",
mClonedProductScope["tea_busi_product.v0001.Product"].Name);
expectDebug("Waiting for tea_busi_product.v0001."
+ " at /tea_busi.v0001.EQUIPMENT/EQUIPMENT_2_PRODUCT/$Type",
"/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_CATEGORY/CategoryName");
codeUnderTest("/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_CATEGORY/CategoryName",
mClonedProductScope["tea_busi_product.v0001.Category"].CategoryName);
expectDebug("Waiting for tea_busi_product.v0001.",
"/tea_busi_product.v0001.Category/CategoryName");
codeUnderTest("/tea_busi_product.v0001.Category/CategoryName",
mClonedProductScope["tea_busi_product.v0001.Category"].CategoryName);
expectDebug("Waiting for tea_busi_product.v0001.",
"/tea_busi_product.v0001.Category/[email protected]");
codeUnderTest("/tea_busi_product.v0001.Category/[email protected]",
"CategoryName from tea_busi_product.v0001.");
expectDebug("Waiting for tea_busi_product.v0001."
+ " at /tea_busi.v0001.EQUIPMENT/EQUIPMENT_2_PRODUCT/$Type",
"/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_SUPPLIER/Supplier_Name");
codeUnderTest("/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_SUPPLIER/Supplier_Name",
mSupplierScope["tea_busi_supplier.v0001.Supplier"].Supplier_Name);
expectDebug("Namespace empty. found in $Include of /empty/$metadata",
"/empty.DefaultContainer");
expectDebug("Reading /empty/$metadata", "/empty.DefaultContainer");
expectDebug("Waiting for empty.",
"/empty.DefaultContainer");
codeUnderTest("/empty.DefaultContainer", mMostlyEmptyScope["empty.DefaultContainer"]);
// Note: these are logged asynchronously!
expectDebug("Including tea_busi_product.v0001."
+ " from /a/default/iwbep/tea_busi_product/0001/$metadata"
+ " at /tea_busi.v0001.EQUIPMENT/EQUIPMENT_2_PRODUCT/$Type",
"/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/Name");
expectDebug("Including empty. from /empty/$metadata",
"/empty.DefaultContainer");
expectDebug("Namespace tea_busi_supplier.v0001. found in $Include"
+ " of /a/default/iwbep/tea_busi_supplier/0001/$metadata"
+ " at /tea_busi_product.v0001.Product/PRODUCT_2_SUPPLIER/$Type",
"/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_SUPPLIER/Supplier_Name");
expectDebug("Reading /a/default/iwbep/tea_busi_supplier/0001/$metadata"
+ " at /tea_busi_product.v0001.Product/PRODUCT_2_SUPPLIER/$Type",
"/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_SUPPLIER/Supplier_Name");
expectDebug("Waiting for tea_busi_supplier.v0001."
+ " at /tea_busi_product.v0001.Product/PRODUCT_2_SUPPLIER/$Type",
"/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_SUPPLIER/Supplier_Name");
expectDebug("Including tea_busi_supplier.v0001."
+ " from /a/default/iwbep/tea_busi_supplier/0001/$metadata"
+ " at /tea_busi_product.v0001.Product/PRODUCT_2_SUPPLIER/$Type",
"/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_SUPPLIER/Supplier_Name");
return Promise.all(aPromises);
});
});
//TODO Decision: It is an error if a namespace is referenced multiple times with different URIs.
// This should be checked even when load-on-demand is used.
// (It should not even be included multiple times with the same URI!)
//TODO Check that no namespace is included which is already present!
//TODO API to load "transitive closure"
//TODO support for sync. XML Templating
//*********************************************************************************************
[false, true].forEach(function (bWarn) {
var sTitle = "fetchObject: missing cross-service reference, bWarn = " + bWarn;
QUnit.test(sTitle, function (assert) {
var sPath = "/not.found",
oSyncPromise;
this.expectFetchEntityContainer(mMostlyEmptyScope);
this.oLogMock.expects("isLoggable")
.withExactArgs(jQuery.sap.log.Level.WARNING, sODataMetaModel).returns(bWarn);
this.oLogMock.expects("warning").exactly(bWarn ? 1 : 0)
.withExactArgs("Unknown qualified name not.found", sPath, sODataMetaModel);
// code under test
oSyncPromise = this.oMetaModel.fetchObject(sPath);
assert.strictEqual(oSyncPromise.isFulfilled(), true);
assert.deepEqual(oSyncPromise.getResult(), undefined);
});
});
//*********************************************************************************************
[false, true].forEach(function (bWarn) {
var sTitle = "fetchObject: referenced metadata does not contain included schema, bWarn = "
+ bWarn;
QUnit.test(sTitle, function (assert) {
var sSchemaName = "I.still.haven't.found.what.I'm.looking.for.",
sQualifiedName = sSchemaName + "Child",
sPath = "/" + sQualifiedName;
this.expectFetchEntityContainer(mXServiceScope);
this.mock(this.oMetaModel.oRequestor).expects("read")
.withExactArgs("/empty/$metadata")
.returns(Promise.resolve(mMostlyEmptyScope));
this.allowWarnings(assert, bWarn);
this.oLogMock.expects("warning").exactly(bWarn ? 1 : 0)
.withExactArgs("/empty/$metadata does not contain " + sSchemaName, sPath,
sODataMetaModel);
this.oLogMock.expects("warning").exactly(bWarn ? 1 : 0)
.withExactArgs("Unknown qualified name " + sQualifiedName, sPath, sODataMetaModel);
// code under test
return this.oMetaModel.fetchObject(sPath).then(function (vResult) {
assert.deepEqual(vResult, undefined);
});
});
});
//*********************************************************************************************
[false, true].forEach(function (bWarn) {
var sTitle = "fetchObject: cross-service reference, respect $Include; bWarn = " + bWarn;
QUnit.test(sTitle, function (assert) {
var mScope0 = {
"$Version" : "4.0",
"$Reference" : {
"../../../../default/iwbep/tea_busi_product/0001/$metadata" : {
"$Include" : [
"not.found.",
"tea_busi_product.v0001.",
"tea_busi_supplier.v0001."
]
}
}
},
mReferencedScope = {
"$Version" : "4.0",
"must.not.be.included." : {
"$kind" : "Schema"
},
"tea_busi_product.v0001." : {
"$kind" : "Schema"
},
"tea_busi_supplier.v0001." : {
"$kind" : "Schema"
}
},
oRequestorMock = this.mock(this.oMetaModel.oRequestor),
that = this;
this.expectFetchEntityContainer(mScope0);
oRequestorMock.expects("read")
.withExactArgs("/a/default/iwbep/tea_busi_product/0001/$metadata")
.returns(Promise.resolve(mReferencedScope));
this.allowWarnings(assert, bWarn);
// code under test
return this.oMetaModel.fetchObject("/tea_busi_product.v0001.").then(function (vResult) {
var oSyncPromise;
assert.strictEqual(vResult, mReferencedScope["tea_busi_product.v0001."]);
assert.ok(that.oMetaModel.mSchema2MetadataUrl["tea_busi_product.v0001."]
["/a/default/iwbep/tea_busi_product/0001/$metadata"],
"document marked as read");
that.oLogMock.expects("warning").exactly(bWarn ? 1 : 0)
.withExactArgs("Unknown qualified name must.not.be.included.",
"/must.not.be.included.", sODataMetaModel);
assert.strictEqual(that.oMetaModel.getObject("/must.not.be.included."),
undefined,
"must not include schemata which are not mentioned in edmx:Include");
assert.strictEqual(that.oMetaModel.getObject("/tea_busi_supplier.v0001."),
mReferencedScope["tea_busi_supplier.v0001."]);
// now check that "not.found." does not trigger another read(),
// does finish synchronously and logs a warning
that.oLogMock.expects("warning").exactly(bWarn ? 1 : 0)
.withExactArgs("/a/default/iwbep/tea_busi_product/0001/$metadata"
+ " does not contain not.found.",
"/not.found.", sODataMetaModel);
that.oLogMock.expects("warning").exactly(bWarn ? 1 : 0)
.withExactArgs("Unknown qualified name not.found.",
"/not.found.", sODataMetaModel);
// code under test
oSyncPromise = that.oMetaModel.fetchObject("/not.found.");
assert.strictEqual(oSyncPromise.isFulfilled(), true);
assert.strictEqual(oSyncPromise.getResult(), undefined);
});
});
});
//*********************************************************************************************
QUnit.test("fetchObject: cross-service reference - validation failure", function (assert) {
var oError = new Error(),
mReferencedScope = {},
sUrl = "/a/default/iwbep/tea_busi_product/0001/$metadata";
this.expectFetchEntityContainer(mXServiceScope);
this.mock(this.oMetaModel.oRequestor).expects("read").withExactArgs(sUrl)
.returns(Promise.resolve(mReferencedScope));
this.oMetaModelMock.expects("validate")
.withExactArgs(sUrl, mReferencedScope)
.throws(oError);
return this.oMetaModel.fetchObject("/tea_busi_product.v0001.Product").then(function () {
assert.ok(false);
}, function (oError0) {
assert.strictEqual(oError0, oError);
});
});
//*********************************************************************************************
QUnit.test("fetchObject: cross-service reference - document loaded from different URI",
function (assert) {
var sMessage = "A schema cannot span more than one document: schema is referenced by"
+ " following URLs: /a/default/iwbep/tea_busi_product/0001/$metadata,"
+ " /second/reference",
sSchema = "tea_busi_product.v0001.";
this.expectFetchEntityContainer(mXServiceScope);
this.oLogMock.expects("error")
.withExactArgs(sMessage, sSchema, sODataMetaModel);
// simulate 2 references for a schema
this.oMetaModel.mSchema2MetadataUrl["tea_busi_product.v0001."]["/second/reference"] = false;
// code under test
return this.oMetaModel.fetchObject("/tea_busi_product.v0001.Product").then(function () {
assert.ok(false);
}, function (oError0) {
assert.strictEqual(oError0.message, sSchema + ": " + sMessage);
});
});
//*********************************************************************************************
QUnit.test("fetchObject: cross-service reference - duplicate include", function (assert) {
var oRequestorMock = this.mock(this.oMetaModel.oRequestor),
// root service includes both A and B, A also includes B
mScope0 = {
"$Version" : "4.0",
"$Reference" : {
"/A/$metadata" : {
"$Include" : [
"A."
]
},
"/B/$metadata" : {
"$Include" : [
"B."
]
}
}
},
mScopeA = {
"$Version" : "4.0",
"$Reference" : {
"/B/$metadata" : {
"$Include" : [
"B.",
"B.B." // includes additional namespace from already read document
]
}
},
"A." : {
"$kind" : "Schema"
}
},
mScopeB = {
"$Version" : "4.0",
"B." : {
"$kind" : "Schema"
},
"B.B." : {
"$kind" : "Schema"
}
},
that = this;
this.expectFetchEntityContainer(mScope0);
oRequestorMock.expects("read").withExactArgs("/A/$metadata")
.returns(Promise.resolve(mScopeA));
oRequestorMock.expects("read").withExactArgs("/B/$metadata")
.returns(Promise.resolve(mScopeB));
return this.oMetaModel.fetchObject("/B.")
.then(function (vResult) {
assert.strictEqual(vResult, mScopeB["B."]);
// code under test - we must not overwrite our "$ui5.read" promise!
return that.oMetaModel.fetchObject("/A.")
.then(function (vResult) {
assert.strictEqual(vResult, mScopeA["A."]);
// Note: must not trigger read() again!
return that.oMetaModel.fetchObject("/B.B.")
.then(function (vResult) {
assert.strictEqual(vResult, mScopeB["B.B."]);
});
});
});
});
//TODO Implement consistency checks that the same namespace is always included from the same
// reference URI, no matter which referencing document.
//*********************************************************************************************
[undefined, false, true].forEach(function (bSupportReferences) {
var sTitle = "fetchObject: cross-service reference - supportReferences: "
+ bSupportReferences;
QUnit.test(sTitle, function (assert) {
var mClonedProductScope = clone(mProductScope),
oModel = new ODataModel({ // code under test
serviceUrl : "/a/b/c/d/e/",
supportReferences : bSupportReferences,
synchronizationMode : "None"
}),
sPath = "/tea_busi_product.v0001.Product",
sUrl = "/a/default/iwbep/tea_busi_product/0001/$metadata";
this.oMetaModel = oModel.getMetaModel();
this.oMetaModelMock = this.mock(this.oMetaModel);
bSupportReferences = bSupportReferences !== false; // default is true!
assert.strictEqual(this.oMetaModel.bSupportReferences, bSupportReferences);
this.expectFetchEntityContainer(mXServiceScope);
this.mock(this.oMetaModel.oRequestor).expects("read")
.exactly(bSupportReferences ? 1 : 0)
.withExactArgs(sUrl)
.returns(Promise.resolve(mClonedProductScope));
this.allowWarnings(assert, true);
this.oLogMock.expects("warning").exactly(bSupportReferences ? 0 : 1)
.withExactArgs("Unknown qualified name " + sPath.slice(1), sPath, sODataMetaModel);
// code under test
return this.oMetaModel.fetchObject(sPath).then(function (vResult) {
assert.strictEqual(vResult, bSupportReferences
? mClonedProductScope["tea_busi_product.v0001.Product"]
: undefined);
});
});
});
//*********************************************************************************************
QUnit.test("getObject, requestObject", function (assert) {
return checkGetAndRequest(this, assert, "fetchObject", ["sPath", {/*oContext*/}]);
});
//*********************************************************************************************
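// Each fixture below is the property metadata handed to fetchUI5Type; "__constraints" holds
// the constraints expected on the resulting UI5 type and is removed from the cloned property
// before the test runs (see the cloning code below).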
[{
$Type : "Edm.Boolean"
},{
$Type : "Edm.Byte"
}, {
$Type : "Edm.Date"
}, {
$Type : "Edm.DateTimeOffset"
},{
$Precision : 7,
$Type : "Edm.DateTimeOffset",
__constraints : {precision : 7}
}, {
$Type : "Edm.Decimal"
}, {
$Precision : 20,
$Scale : 5,
$Type : "Edm.Decimal",
__constraints : {maximum : "100.00", maximumExclusive : true, minimum : "0.00",
precision : 20, scale : 5}
}, {
$Precision : 20,
$Scale : "variable",
$Type : "Edm.Decimal",
__constraints : {precision : 20, scale : Infinity}
}, {
$Type : "Edm.Double"
}, {
$Type : "Edm.Guid"
}, {
$Type : "Edm.Int16"
}, {
$Type : "Edm.Int32"
}, {
$Type : "Edm.Int64"
}, {
$Type : "Edm.SByte"
}, {
$Type : "Edm.Single"
}, {
$Type : "Edm.Stream"
}, {
$Type : "Edm.String"
}, {
$MaxLength : 255,
$Type : "Edm.String",
__constraints : {maxLength : 255}
}, {
$Type : "Edm.String",
__constraints : {isDigitSequence : true}
}, {
$Type : "Edm.TimeOfDay"
}, {
$Precision : 3,
$Type : "Edm.TimeOfDay",
__constraints : {precision : 3}
}].forEach(function (oProperty0) {
// Note: take care not to modify oProperty0, clone it first!
[false, true].forEach(function (bNullable) {
// Note: JSON.parse(JSON.stringify(...)) cannot clone Infinity!
var oProperty = jQuery.extend(true, {}, oProperty0),
oConstraints = oProperty.__constraints;
delete oProperty.__constraints;
if (!bNullable) {
oProperty.$Nullable = false;
oConstraints = oConstraints || {};
oConstraints.nullable = false;
}
QUnit.test("fetchUI5Type: " + JSON.stringify(oProperty), function (assert) {
// Note: just spy on fetchModule() to make sure that the real types are used
// which check correctness of constraints
var fnFetchModuleSpy = this.spy(this.oMetaModel, "fetchModule"),
sPath = "/EMPLOYEES/0/ENTRYDATE",
oMetaContext = this.oMetaModel.getMetaContext(sPath),
that = this;
this.oMetaModelMock.expects("fetchObject").twice()
.withExactArgs(undefined, oMetaContext)
.returns(SyncPromise.resolve(oProperty));
if (oProperty.$Type === "Edm.String") { // simulate annotation for strings
this.oMetaModelMock.expects("fetchObject")
.withExactArgs("@com.sap.vocabularies.Common.v1.IsDigitSequence",
oMetaContext)
.returns(
SyncPromise.resolve(oConstraints && oConstraints.isDigitSequence));
} else if (oProperty.$Type === "Edm.Decimal") { // simulate annotation for decimals
this.oMetaModelMock.expects("fetchObject")
.withExactArgs("@Org.OData.Validation.V1.Minimum/$Decimal", oMetaContext)
.returns(
SyncPromise.resolve(oConstraints && oConstraints.minimum));
this.oMetaModelMock.expects("fetchObject")
.withExactArgs(
"@Org.OData.Validation.V1.Minimum@Org.OData.Validation.V1.Exclusive",
oMetaContext)
.returns(
SyncPromise.resolve(oConstraints && oConstraints.minimumExclusive));
this.oMetaModelMock.expects("fetchObject")
.withExactArgs("@Org.OData.Validation.V1.Maximum/$Decimal", oMetaContext)
.returns(
SyncPromise.resolve(oConstraints && oConstraints.maximum));
this.oMetaModelMock.expects("fetchObject")
.withExactArgs(
"@Org.OData.Validation.V1.Maximum@Org.OData.Validation.V1.Exclusive",
oMetaContext)
.returns(
SyncPromise.resolve(oConstraints && oConstraints.maximumExclusive));
}
// code under test
return this.oMetaModel.fetchUI5Type(sPath).then(function (oType) {
var sExpectedTypeName = "sap.ui.model.odata.type."
+ oProperty.$Type.slice(4)/*cut off "Edm."*/;
assert.strictEqual(fnFetchModuleSpy.callCount, 1);
assert.ok(fnFetchModuleSpy.calledOn(that.oMetaModel));
assert.ok(fnFetchModuleSpy.calledWithExactly(sExpectedTypeName),
fnFetchModuleSpy.printf("%C"));
assert.strictEqual(oType.getName(), sExpectedTypeName);
assert.deepEqual(oType.oConstraints, oConstraints);
assert.strictEqual(that.oMetaModel.getUI5Type(sPath), oType, "cached");
});
});
});
});
//TODO later: support for facet DefaultValue?
//*********************************************************************************************
QUnit.test("fetchUI5Type: $count", function (assert) {
var sPath = "/T€AMS/$count",
oType;
// code under test
oType = this.oMetaModel.fetchUI5Type(sPath).getResult();
assert.strictEqual(oType.getName(), "sap.ui.model.odata.type.Int64");
assert.strictEqual(this.oMetaModel.getUI5Type(sPath), oType, "cached");
});
//*********************************************************************************************
QUnit.test("fetchUI5Type: collection", function (assert) {
var sPath = "/EMPLOYEES/0/foo",
that = this;
this.oMetaModelMock.expects("fetchObject").thrice()
.withExactArgs(undefined, this.oMetaModel.getMetaContext(sPath))
.returns(SyncPromise.resolve({
$isCollection : true,
$Nullable : false, // must not be turned into a constraint for Raw!
$Type : "Edm.String"
}));
this.oLogMock.expects("warning").withExactArgs(
"Unsupported collection type, using sap.ui.model.odata.type.Raw",
sPath, sODataMetaModel);
return Promise.all([
// code under test
this.oMetaModel.fetchUI5Type(sPath).then(function (oType) {
assert.strictEqual(oType.getName(), "sap.ui.model.odata.type.Raw");
assert.strictEqual(that.oMetaModel.getUI5Type(sPath), oType, "cached");
}),
// code under test
this.oMetaModel.fetchUI5Type(sPath).then(function (oType) {
assert.strictEqual(oType.getName(), "sap.ui.model.odata.type.Raw");
})
]);
});
//*********************************************************************************************
//TODO make Edm.Duration work with OData V4
["acme.Type", "Edm.Duration", "Edm.GeographyPoint"].forEach(function (sQualifiedName) {
QUnit.test("fetchUI5Type: unsupported type " + sQualifiedName, function (assert) {
var sPath = "/EMPLOYEES/0/foo",
that = this;
this.oMetaModelMock.expects("fetchObject").twice()
.withExactArgs(undefined, this.oMetaModel.getMetaContext(sPath))
.returns(SyncPromise.resolve({
$Nullable : false, // must not be turned into a constraint for Raw!
$Type : sQualifiedName
}));
this.oLogMock.expects("warning").withExactArgs(
"Unsupported type '" + sQualifiedName + "', using sap.ui.model.odata.type.Raw",
sPath, sODataMetaModel);
// code under test
return this.oMetaModel.fetchUI5Type(sPath).then(function (oType) {
assert.strictEqual(oType.getName(), "sap.ui.model.odata.type.Raw");
assert.strictEqual(that.oMetaModel.getUI5Type(sPath), oType, "cached");
});
});
});
//*********************************************************************************************
QUnit.test("fetchUI5Type: invalid path", function (assert) {
var sPath = "/EMPLOYEES/0/invalid",
that = this;
this.oMetaModelMock.expects("fetchObject").twice()
.withExactArgs(undefined, this.oMetaModel.getMetaContext(sPath))
.returns(SyncPromise.resolve(/*no property metadata for path*/));
this.oLogMock.expects("warning").twice().withExactArgs(
"No metadata for path '" + sPath + "', using sap.ui.model.odata.type.Raw",
undefined, sODataMetaModel);
// code under test
return this.oMetaModel.fetchUI5Type(sPath).then(function (oType) {
assert.strictEqual(oType.getName(), "sap.ui.model.odata.type.Raw");
// code under test
assert.strictEqual(that.oMetaModel.getUI5Type(sPath), oType, "Type is cached");
});
});
//*********************************************************************************************
QUnit.test("getUI5Type, requestUI5Type", function (assert) {
return checkGetAndRequest(this, assert, "fetchUI5Type", ["sPath"], true);
});
//*********************************************************************************************
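// Each fixture lists the fetchValue calls expected while the canonical URL is computed: for
// every entry in "requests" the context returns an entity instance carrying the given key
// predicate, which then shows up as "(~n)" in "canonicalUrl".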
[{ // simple entity from a set
dataPath : "/TEAMS/0",
canonicalUrl : "/TEAMS(~1)",
requests : [{
entityType : "tea_busi.TEAM",
predicate : "(~1)"
}]
}, { // simple entity in transient context
dataPath : "/TEAMS/-1",
canonicalUrl : "/TEAMS(~1)",
requests : [{
entityType : "tea_busi.TEAM",
// TODO a transient entity does not necessarily have all key properties, but this is
// required to create a dependent cache
predicate : "(~1)"
}]
}, { // simple entity by key predicate
dataPath : "/TEAMS('4%3D2')",
canonicalUrl : "/TEAMS('4%3D2')",
requests : []
}, { // simple singleton
dataPath : "/Me",
canonicalUrl : "/Me",
requests : []
}, { // navigation to root entity
dataPath : "/TEAMS/0/TEAM_2_EMPLOYEES/1",
canonicalUrl : "/EMPLOYEES(~1)",
requests : [{
entityType : "tea_busi.Worker",
predicate : "(~1)"
}]
}, { // navigation to root entity
dataPath : "/TEAMS('42')/TEAM_2_EMPLOYEES/1",
canonicalUrl : "/EMPLOYEES(~1)",
requests : [{
entityType : "tea_busi.Worker",
predicate : "(~1)"
}]
}, { // navigation to root entity with key predicate
dataPath : "/TEAMS('42')/TEAM_2_EMPLOYEES('23')",
canonicalUrl : "/EMPLOYEES('23')",
requests : []
}, { // multiple navigation to root entity
dataPath : "/TEAMS/0/TEAM_2_EMPLOYEES/1/EMPLOYEE_2_TEAM",
canonicalUrl : "/T%E2%82%ACAMS(~1)",
requests : [{
entityType : "tea_busi.TEAM",
predicate : "(~1)"
}]
}, { // navigation from entity set to single contained entity
dataPath : "/TEAMS/0/TEAM_2_CONTAINED_S",
canonicalUrl : "/TEAMS(~1)/TEAM_2_CONTAINED_S",
requests : [{
entityType : "tea_busi.TEAM",
path : "/TEAMS/0",
predicate : "(~1)"
}]
}, { // navigation from singleton to single contained entity
dataPath : "/Me/EMPLOYEE_2_CONTAINED_S",
canonicalUrl : "/Me/EMPLOYEE_2_CONTAINED_S",
requests : []
}, { // navigation to contained entity within a collection
dataPath : "/TEAMS/0/TEAM_2_CONTAINED_C/1",
canonicalUrl : "/TEAMS(~1)/TEAM_2_CONTAINED_C(~2)",
requests : [{
entityType : "tea_busi.TEAM",
path : "/TEAMS/0",
predicate : "(~1)"
}, {
entityType : "tea_busi.ContainedC",
path : "/TEAMS/0/TEAM_2_CONTAINED_C/1",
predicate : "(~2)"
}]
}, { // navigation to contained entity with a key predicate
dataPath : "/TEAMS('42')/TEAM_2_CONTAINED_C('foo')",
canonicalUrl : "/TEAMS('42')/TEAM_2_CONTAINED_C('foo')",
requests : []
}, { // navigation from contained entity to contained entity
dataPath : "/TEAMS/0/TEAM_2_CONTAINED_S/S_2_C/1",
canonicalUrl : "/TEAMS(~1)/TEAM_2_CONTAINED_S/S_2_C(~2)",
requests : [{
entityType : "tea_busi.TEAM",
path : "/TEAMS/0",
predicate : "(~1)"
}, {
entityType : "tea_busi.ContainedC",
path : "/TEAMS/0/TEAM_2_CONTAINED_S/S_2_C/1",
predicate : "(~2)"
}]
}, { // navigation from contained to root entity
// must be appended nevertheless since we only have a type, but no set
dataPath : "/TEAMS/0/TEAM_2_CONTAINED_C/5/C_2_EMPLOYEE",
canonicalUrl : "/TEAMS(~1)/TEAM_2_CONTAINED_C(~2)/C_2_EMPLOYEE",
requests : [{
entityType : "tea_busi.TEAM",
path : "/TEAMS/0",
predicate : "(~1)"
}, {
entityType : "tea_busi.ContainedC",
path : "/TEAMS/0/TEAM_2_CONTAINED_C/5",
predicate : "(~2)"
}]
}, { // navigation from entity w/ key predicate to contained to root entity
dataPath : "/TEAMS('42')/TEAM_2_CONTAINED_C/5/C_2_EMPLOYEE",
canonicalUrl : "/TEAMS('42')/TEAM_2_CONTAINED_C(~1)/C_2_EMPLOYEE",
requests : [{
entityType : "tea_busi.ContainedC",
path : "/TEAMS('42')/TEAM_2_CONTAINED_C/5",
predicate : "(~1)"
}]
}, { // decode entity set initially, encode it finally
dataPath : "/T%E2%82%ACAMS/0",
canonicalUrl : "/T%E2%82%ACAMS(~1)",
requests : [{
entityType : "tea_busi.TEAM",
predicate : "(~1)"
}]
}, { // decode navigation property, encode entity set when building sCandidate
dataPath : "/EMPLOYEES('7')/EMPLOYEE_2_EQUIPM%E2%82%ACNTS(42)",
canonicalUrl : "/EQUIPM%E2%82%ACNTS(42)",
requests : []
}].forEach(function (oFixture) {
QUnit.test("fetchCanonicalPath: " + oFixture.dataPath, function (assert) {
var oContext = Context.create(this.oModel, undefined, oFixture.dataPath),
oContextMock = this.mock(oContext),
oPromise;
this.oMetaModelMock.expects("getMetaPath").withExactArgs(oFixture.dataPath)
.returns("metapath");
this.oMetaModelMock.expects("fetchObject").withExactArgs("metapath")
.returns(SyncPromise.resolve());
this.oMetaModelMock.expects("fetchEntityContainer")
.returns(SyncPromise.resolve(mScope));
oFixture.requests.forEach(function (oRequest) {
var oEntityInstance = {"@$ui5._" : {"predicate" : oRequest.predicate}};
oContextMock.expects("fetchValue")
.withExactArgs(oRequest.path || oFixture.dataPath)
.returns(SyncPromise.resolve(oEntityInstance));
});
// code under test
oPromise = this.oMetaModel.fetchCanonicalPath(oContext);
assert.ok(!oPromise.isRejected());
return oPromise.then(function (sCanonicalUrl) {
assert.strictEqual(sCanonicalUrl, oFixture.canonicalUrl);
});
});
});
//*********************************************************************************************
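// Each fixture's "path" is "<context path>|<property path>"; "fetchPredicates" maps the entity
// paths for which the context is asked for an instance (so that a key predicate "(~n)" can be
// computed for the edit URL) to their entity types.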
[{ // simple singleton
path : "/Me|ID",
editUrl : "Me"
}, { // simple entity by key predicate
path : "/TEAMS('42')|Name",
editUrl : "TEAMS('42')"
}, { // simple entity from a set
path : "/TEAMS/0|Name",
fetchPredicates : {
"/TEAMS/0" : "tea_busi.TEAM"
},
editUrl : "TEAMS(~0)"
}, { // simple entity from a set, complex property
path : "/EMPLOYEES/0|SAL%C3%83RY/CURRENCY",
fetchPredicates : {
"/EMPLOYEES/0" : "tea_busi.Worker"
},
editUrl : "EMPLOYEES(~0)"
}, { // navigation to root entity
path : "/TEAMS/0/TEAM_2_EMPLOYEES/1|ID",
fetchPredicates : {
"/TEAMS/0/TEAM_2_EMPLOYEES/1" : "tea_busi.Worker"
},
editUrl : "EMPLOYEES(~0)"
}, { // navigation to root entity
path : "/TEAMS('42')/TEAM_2_EMPLOYEES/1|ID",
fetchPredicates : {
"/TEAMS('42')/TEAM_2_EMPLOYEES/1" : "tea_busi.Worker"
},
editUrl : "EMPLOYEES(~0)"
}, { // navigation to root entity with key predicate
path : "/TEAMS('42')/TEAM_2_EMPLOYEES('23')|ID",
editUrl : "EMPLOYEES('23')"
}, { // multiple navigation to root entity
path : "/TEAMS/0/TEAM_2_EMPLOYEES/1/EMPLOYEE_2_TEAM|Name",
fetchPredicates : {
"/TEAMS/0/TEAM_2_EMPLOYEES/1/EMPLOYEE_2_TEAM" : "tea_busi.TEAM"
},
editUrl : "T%E2%82%ACAMS(~0)"
}, { // navigation from entity set to single contained entity
path : "/TEAMS/0/TEAM_2_CONTAINED_S|Id",
fetchPredicates : {
"/TEAMS/0" : "tea_busi.TEAM"
},
editUrl : "TEAMS(~0)/TEAM_2_CONTAINED_S"
}, { // navigation from singleton to single contained entity
path : "/Me/EMPLOYEE_2_CONTAINED_S|Id",
editUrl : "Me/EMPLOYEE_2_CONTAINED_S"
}, { // navigation to contained entity within a collection
path : "/TEAMS/0/TEAM_2_CONTAINED_C/1|Id",
fetchPredicates : {
"/TEAMS/0" : "tea_busi.TEAM",
"/TEAMS/0/TEAM_2_CONTAINED_C/1" : "tea_busi.ContainedC"
},
editUrl : "TEAMS(~0)/TEAM_2_CONTAINED_C(~1)"
}, { // navigation to contained entity with a key predicate
path : "/TEAMS('42')/TEAM_2_CONTAINED_C('foo')|Id",
editUrl : "TEAMS('42')/TEAM_2_CONTAINED_C('foo')"
}, { // navigation from contained entity to contained entity
path : "/TEAMS/0/TEAM_2_CONTAINED_S/S_2_C/1|Id",
fetchPredicates : {
"/TEAMS/0" : "tea_busi.TEAM",
"/TEAMS/0/TEAM_2_CONTAINED_S/S_2_C/1" : "tea_busi.ContainedC"
},
editUrl : "TEAMS(~0)/TEAM_2_CONTAINED_S/S_2_C(~1)"
}, { // navigation from contained to root entity, resolved via navigation property binding path
path : "/TEAMS/0/TEAM_2_CONTAINED_S/S_2_EMPLOYEE|ID",
fetchPredicates : {
"/TEAMS/0/TEAM_2_CONTAINED_S/S_2_EMPLOYEE" : "tea_busi.Worker"
},
editUrl : "EMPLOYEES(~0)"
}, { // navigation from entity w/ key predicate to contained to root entity
path : "/TEAMS('42')/TEAM_2_CONTAINED_C/5/C_2_EMPLOYEE|ID",
fetchPredicates : {
"/TEAMS('42')/TEAM_2_CONTAINED_C/5" : "tea_busi.ContainedC"
},
editUrl : "TEAMS('42')/TEAM_2_CONTAINED_C(~0)/C_2_EMPLOYEE"
}, { // decode entity set initially, encode it finally
path : "/T%E2%82%ACAMS/0|Name",
fetchPredicates : {
"/T%E2%82%ACAMS/0" : "tea_busi.TEAM"
},
editUrl : "T%E2%82%ACAMS(~0)"
}, { // decode navigation property, encode entity set
path : "/EMPLOYEES('7')/EMPLOYEE_2_EQUIPM%E2%82%ACNTS(42)|ID",
editUrl : "EQUIPM%E2%82%ACNTS(42)"
}].forEach(function (oFixture) {
QUnit.test("fetchUpdateData: " + oFixture.path, function (assert) {
var i = oFixture.path.indexOf("|"),
sContextPath = oFixture.path.slice(0, i),
sPropertyPath = oFixture.path.slice(i + 1),
oContext = Context.create(this.oModel, undefined, sContextPath),
oContextMock = this.mock(oContext),
oPromise,
that = this;
this.oMetaModelMock.expects("getMetaPath")
.withExactArgs(oFixture.path.replace("|", "/")).returns("~");
this.oMetaModelMock.expects("fetchObject").withExactArgs("~")
.returns(SyncPromise.resolve(Promise.resolve()).then(function () {
that.oMetaModelMock.expects("fetchEntityContainer")
.returns(SyncPromise.resolve(mScope));
Object.keys(oFixture.fetchPredicates || {}).forEach(function (sPath, i) {
var oEntityInstance = {"@$ui5._" : {"predicate" : "(~" + i + ")"}};
// Note: the entity instance is delivered asynchronously
oContextMock.expects("fetchValue")
.withExactArgs(sPath)
.returns(SyncPromise.resolve(Promise.resolve(oEntityInstance)));
});
}));
// code under test
oPromise = this.oMetaModel.fetchUpdateData(sPropertyPath, oContext);
assert.ok(!oPromise.isRejected());
return oPromise.then(function (oResult) {
assert.strictEqual(oResult.editUrl, oFixture.editUrl);
assert.strictEqual(oResult.entityPath, sContextPath);
assert.strictEqual(oResult.propertyPath, sPropertyPath);
});
});
});
//TODO support collection properties (-> path containing index not leading to predicate)
//TODO prefer instance annotation at payload for "odata.editLink"?!
//TODO target URLs like "com.sap.gateway.default.iwbep.tea_busi_product.v0001.Container/Products(...)"?
//TODO type casts, operations?
//*********************************************************************************************
QUnit.test("fetchUpdateData: transient entity", function(assert) {
var oContext = Context.create(this.oModel, undefined, "/TEAMS/-1"),
sPropertyPath = "Name";
this.oMetaModelMock.expects("fetchEntityContainer").twice()
.returns(SyncPromise.resolve(mScope));
this.mock(oContext).expects("fetchValue").withExactArgs("/TEAMS/-1")
.returns(SyncPromise.resolve({"@$ui5._" : {"transient" : "update"}}));
// code under test
return this.oMetaModel.fetchUpdateData(sPropertyPath, oContext).then(function (oResult) {
assert.deepEqual(oResult, {
entityPath : "/TEAMS/-1",
editUrl : undefined,
propertyPath : "Name"
});
});
});
//*********************************************************************************************
QUnit.test("fetchUpdateData: fetchObject fails", function(assert) {
var oModel = this.oModel,
oContext = {
getModel : function () { return oModel; }
},
oExpectedError = new Error(),
oMetaModelMock = this.mock(this.oMetaModel),
sPath = "some/invalid/path/to/a/property";
this.mock(oModel).expects("resolve")
.withExactArgs(sPath, sinon.match.same(oContext))
.returns("~1");
oMetaModelMock.expects("getMetaPath").withExactArgs("~1").returns("~2");
oMetaModelMock.expects("fetchObject").withExactArgs("~2")
.returns(Promise.reject(oExpectedError));
// code under test
return this.oMetaModel.fetchUpdateData(sPath, oContext).then(function () {
assert.ok(false);
}, function (oError) {
assert.strictEqual(oError, oExpectedError);
});
});
//*********************************************************************************************
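	// Error scenarios for #fetchUpdateData: "message" is the expected error text; "instance"
	// (if present) is the value delivered by Context#fetchValue; "warning" expects an
	// additional log warning.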
[{
dataPath : "/Foo/Bar",
message : "Not an entity set: Foo",
warning : "Unknown child Foo of tea_busi.DefaultContainer"
}, {
dataPath : "/TEAMS/0/Foo/Bar",
message : "Not a (navigation) property: Foo"
}, {
dataPath : "/TEAMS/0/TEAM_2_CONTAINED_S",
instance : undefined,
message : "No instance to calculate key predicate at /TEAMS/0"
}, {
dataPath : "/TEAMS/0/TEAM_2_CONTAINED_S",
instance : {},
message : "No key predicate known at /TEAMS/0"
}, {
dataPath : "/TEAMS/0/TEAM_2_CONTAINED_S",
instance : new Error("failed to load team"),
message : "failed to load team at /TEAMS/0"
}].forEach(function (oFixture) {
QUnit.test("fetchUpdateData: " + oFixture.message, function (assert) {
var oContext = Context.create(this.oModel, undefined, oFixture.dataPath),
oPromise;
this.oMetaModelMock.expects("fetchEntityContainer").atLeast(1)
.returns(SyncPromise.resolve(mScope));
if ("instance" in oFixture) {
this.mock(oContext).expects("fetchValue")
.returns(oFixture.instance instanceof Error
? SyncPromise.reject(oFixture.instance)
: SyncPromise.resolve(oFixture.instance));
}
if (oFixture.warning) {
this.oLogMock.expects("isLoggable")
.withExactArgs(jQuery.sap.log.Level.WARNING, sODataMetaModel)
.returns(true);
this.oLogMock.expects("warning")
.withExactArgs(oFixture.warning, oFixture.dataPath, sODataMetaModel);
}
this.mock(this.oModel).expects("reportError")
.withExactArgs(oFixture.message, sODataMetaModel, sinon.match({
message : oFixture.dataPath + ": " + oFixture.message,
name : "Error"
}));
oPromise = this.oMetaModel.fetchUpdateData("", oContext);
assert.ok(oPromise.isRejected());
assert.strictEqual(oPromise.getResult().message,
oFixture.dataPath + ": " + oFixture.message);
oPromise.caught(); // avoid "Uncaught (in promise)"
});
});
//*********************************************************************************************
QUnit.test("fetchCanonicalPath: success", function(assert) {
var oContext = {};
this.mock(this.oMetaModel).expects("fetchUpdateData")
.withExactArgs("", sinon.match.same(oContext))
.returns(SyncPromise.resolve(Promise.resolve({
editUrl : "edit('URL')",
propertyPath : ""
})));
// code under test
return this.oMetaModel.fetchCanonicalPath(oContext).then(function (oCanonicalPath) {
assert.strictEqual(oCanonicalPath, "/edit('URL')");
});
});
//*********************************************************************************************
QUnit.test("fetchCanonicalPath: not an entity", function(assert) {
var oContext = {
getPath : function () { return "/TEAMS('4711')/Name"; }
};
this.mock(this.oMetaModel).expects("fetchUpdateData")
.withExactArgs("", sinon.match.same(oContext))
.returns(SyncPromise.resolve(Promise.resolve({
entityPath : "/TEAMS('4711')",
editUrl : "TEAMS('4711')",
propertyPath : "Name"
})));
// code under test
return this.oMetaModel.fetchCanonicalPath(oContext).then(function () {
assert.ok(false);
}, function (oError) {
assert.strictEqual(oError.message, "Context " + oContext.getPath()
+ " does not point to an entity. It should be " + "/TEAMS('4711')");
});
});
//*********************************************************************************************
QUnit.test("fetchCanonicalPath: fetchUpdateData fails", function(assert) {
var oContext = {},
oExpectedError = new Error();
this.mock(this.oMetaModel).expects("fetchUpdateData")
.withExactArgs("", sinon.match.same(oContext))
.returns(SyncPromise.resolve(Promise.reject(oExpectedError)));
// code under test
return this.oMetaModel.fetchCanonicalPath(oContext).then(function () {
assert.ok(false);
}, function (oError) {
assert.strictEqual(oError, oExpectedError);
});
});
//*********************************************************************************************
QUnit.test("getProperty = getObject", function (assert) {
assert.strictEqual(this.oMetaModel.getProperty, this.oMetaModel.getObject);
});
//*********************************************************************************************
QUnit.test("bindProperty", function (assert) {
var oBinding,
oContext = {},
mParameters = {},
sPath = "foo";
// code under test
oBinding = this.oMetaModel.bindProperty(sPath, oContext, mParameters);
assert.ok(oBinding instanceof PropertyBinding);
assert.ok(oBinding.hasOwnProperty("vValue"));
assert.strictEqual(oBinding.getContext(), oContext);
assert.strictEqual(oBinding.getModel(), this.oMetaModel);
assert.strictEqual(oBinding.getPath(), sPath);
assert.strictEqual(oBinding.mParameters, mParameters, "mParameters available internally");
assert.strictEqual(oBinding.getValue(), undefined);
// code under test: must not call getProperty() again!
assert.strictEqual(oBinding.getExternalValue(), undefined);
// code under test
assert.throws(function () {
oBinding.setExternalValue("foo");
}, /Unsupported operation: ODataMetaPropertyBinding#setValue/);
});
//*********************************************************************************************
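	// checkUpdate without $$valueAsPromise (or with it set to false): the new value is only
	// available asynchronously, once the fetchObject promise has resolved.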
[undefined, {}, {$$valueAsPromise : false}].forEach(function (mParameters, i) {
QUnit.test("ODataMetaPropertyBinding#checkUpdate: " + i, function (assert) {
var oBinding,
oContext = {},
sPath = "foo",
oValue = {},
oPromise = SyncPromise.resolve(Promise.resolve(oValue));
oBinding = this.oMetaModel.bindProperty(sPath, oContext, mParameters);
this.oMetaModelMock.expects("fetchObject")
.withExactArgs(sPath, sinon.match.same(oContext), sinon.match.same(mParameters))
.returns(oPromise);
this.mock(oBinding).expects("_fireChange")
.withExactArgs({reason : ChangeReason.Change});
// code under test
oBinding.checkUpdate();
assert.strictEqual(oBinding.getValue(), undefined);
oPromise.then(function () {
assert.strictEqual(oBinding.getValue(), oValue);
});
return oPromise;
});
});
//*********************************************************************************************
QUnit.test("ODataMetaPropertyBinding#checkUpdate: $$valueAsPromise=true, sync",
function (assert) {
var oBinding,
oContext = {},
mParameters = {$$valueAsPromise : true},
sPath = "foo",
oValue = {},
oPromise = SyncPromise.resolve(oValue);
oBinding = this.oMetaModel.bindProperty(sPath, oContext, mParameters);
this.oMetaModelMock.expects("fetchObject")
.withExactArgs(sPath, sinon.match.same(oContext), sinon.match.same(mParameters))
.returns(oPromise);
this.mock(oBinding).expects("_fireChange").withExactArgs({reason : ChangeReason.Change});
// code under test
oBinding.checkUpdate();
assert.strictEqual(oBinding.getValue(), oValue, "Value sync");
return oPromise;
});
//*********************************************************************************************
QUnit.test("ODataMetaPropertyBinding#checkUpdate: no event", function (assert) {
var oBinding,
oContext = {},
mParameters = {},
sPath = "foo",
oValue = {},
oPromise = SyncPromise.resolve(Promise.resolve(oValue));
oBinding = this.oMetaModel.bindProperty(sPath, oContext, mParameters);
oBinding.vValue = oValue;
this.oMetaModelMock.expects("fetchObject")
.withExactArgs(sPath, sinon.match.same(oContext), sinon.match.same(mParameters))
.returns(oPromise);
this.mock(oBinding).expects("_fireChange").never();
// code under test
oBinding.checkUpdate();
return oPromise;
});
//*********************************************************************************************
QUnit.test("ODataMetaPropertyBinding#checkUpdate: bForceUpdate, sChangeReason",
function (assert) {
var oBinding,
oContext = {},
mParameters = {},
sPath = "foo",
oValue = {},
oPromise = SyncPromise.resolve(Promise.resolve(oValue));
oBinding = this.oMetaModel.bindProperty(sPath, oContext, mParameters);
oBinding.vValue = oValue;
this.oMetaModelMock.expects("fetchObject")
.withExactArgs(sPath, sinon.match.same(oContext), sinon.match.same(mParameters))
.returns(oPromise);
this.mock(oBinding).expects("_fireChange").withExactArgs({reason : "Foo"});
// code under test
oBinding.checkUpdate(true, "Foo");
return oPromise;
});
//*********************************************************************************************
QUnit.test("ODataMetaPropertyBinding#checkUpdate: $$valueAsPromise = true", function (assert) {
var oBinding,
oContext = {},
mParameters = {
$$valueAsPromise : true
},
sPath = "foo",
oValue = {},
oPromise = SyncPromise.resolve(Promise.resolve(oValue));
oBinding = this.oMetaModel.bindProperty(sPath, oContext, mParameters);
oBinding.vValue = oValue;
this.oMetaModelMock.expects("fetchObject")
.withExactArgs(sPath, sinon.match.same(oContext), sinon.match.same(mParameters))
.returns(oPromise);
this.mock(oBinding).expects("_fireChange")
.withExactArgs({reason : "Foo"})
.twice()
.onFirstCall().callsFake(function () {
assert.ok(oBinding.getValue().isPending(), "Value is still a pending SyncPromise");
})
.onSecondCall().callsFake(function () {
assert.strictEqual(oBinding.getValue(), oValue, "Value resolved");
});
// code under test
oBinding.checkUpdate(false, "Foo");
assert.ok(oBinding.getValue().isPending(), "Value is a pending SyncPromise");
return oBinding.getValue().then(function (oResult) {
assert.strictEqual(oResult, oValue);
assert.strictEqual(oBinding.getValue(), oValue);
});
});
//*********************************************************************************************
QUnit.test("ODataMetaPropertyBinding#setContext", function (assert) {
var oBinding,
oBindingMock,
oContext = {};
oBinding = this.oMetaModel.bindProperty("Foo", oContext);
oBindingMock = this.mock(oBinding);
oBindingMock.expects("checkUpdate").never();
// code under test
oBinding.setContext(oContext);
oBindingMock.expects("checkUpdate").withExactArgs(false, ChangeReason.Context);
// code under test
oBinding.setContext(undefined);
assert.strictEqual(oBinding.getContext(), undefined);
oBinding = this.oMetaModel.bindProperty("/Foo");
this.mock(oBinding).expects("checkUpdate").never();
// code under test
oBinding.setContext(oContext);
});
//*********************************************************************************************
["ENTRYDATE", "/EMPLOYEES/ENTRYDATE"].forEach(function (sPath) {
QUnit.test("bindContext: " + sPath, function (assert) {
var bAbsolutePath = sPath[0] === "/",
oBinding,
oBoundContext,
iChangeCount = 0,
oContext = this.oMetaModel.getMetaContext("/EMPLOYEES"),
oContextCopy = this.oMetaModel.getMetaContext("/EMPLOYEES"),
oNewContext = this.oMetaModel.getMetaContext("/T€AMS");
// without context
oBinding = this.oMetaModel.bindContext(sPath, null);
assert.ok(oBinding instanceof ContextBinding);
assert.strictEqual(oBinding.getModel(), this.oMetaModel);
assert.strictEqual(oBinding.getPath(), sPath);
assert.strictEqual(oBinding.getContext(), null);
assert.strictEqual(oBinding.isInitial(), true);
assert.strictEqual(oBinding.getBoundContext(), null);
// with context
oBinding = this.oMetaModel.bindContext(sPath, oContextCopy);
assert.ok(oBinding instanceof ContextBinding);
assert.strictEqual(oBinding.getModel(), this.oMetaModel);
assert.strictEqual(oBinding.getPath(), sPath);
assert.strictEqual(oBinding.getContext(), oContextCopy);
assert.strictEqual(oBinding.isInitial(), true);
assert.strictEqual(oBinding.getBoundContext(), null);
// setContext **********
oBinding.attachChange(function (oEvent) {
assert.strictEqual(oEvent.getId(), "change");
iChangeCount += 1;
});
// code under test
oBinding.setContext(oContext);
assert.strictEqual(iChangeCount, 0, "still initial");
assert.strictEqual(oBinding.isInitial(), true);
assert.strictEqual(oBinding.getBoundContext(), null);
assert.strictEqual(oBinding.getContext(), oContext);
// code under test
oBinding.initialize();
assert.strictEqual(iChangeCount, 1, "ManagedObject relies on 'change' event!");
assert.strictEqual(oBinding.isInitial(), false);
oBoundContext = oBinding.getBoundContext();
assert.strictEqual(oBoundContext.getModel(), this.oMetaModel);
assert.strictEqual(oBoundContext.getPath(),
bAbsolutePath ? sPath : oContext.getPath() + "/" + sPath);
// code under test - same context
oBinding.setContext(oContext);
assert.strictEqual(iChangeCount, 1, "context unchanged");
assert.strictEqual(oBinding.getBoundContext(), oBoundContext);
// code under test
oBinding.setContext(oContextCopy);
assert.strictEqual(iChangeCount, 1, "context unchanged");
assert.strictEqual(oBinding.getBoundContext(), oBoundContext);
// code under test
// Note: checks equality on resolved path, not simply object identity of context!
oBinding.setContext(oNewContext);
if (bAbsolutePath) {
assert.strictEqual(iChangeCount, 1, "context unchanged");
assert.strictEqual(oBinding.getBoundContext(), oBoundContext);
} else {
assert.strictEqual(iChangeCount, 2, "context changed");
oBoundContext = oBinding.getBoundContext();
assert.strictEqual(oBoundContext.getModel(), this.oMetaModel);
assert.strictEqual(oBoundContext.getPath(), oNewContext.getPath() + "/" + sPath);
}
// code under test
oBinding.setContext(null);
if (bAbsolutePath) {
assert.strictEqual(iChangeCount, 1, "context unchanged");
assert.strictEqual(oBinding.getBoundContext(), oBoundContext);
} else {
assert.strictEqual(iChangeCount, 3, "context changed");
assert.strictEqual(oBinding.isInitial(), false);
assert.strictEqual(oBinding.getBoundContext(), null);
}
});
});
//*********************************************************************************************
QUnit.test("bindList", function (assert) {
var oBinding,
oContext = this.oMetaModel.getContext("/EMPLOYEES"),
aFilters = [],
sPath = "@",
aSorters = [];
// avoid request to backend during initialization
this.oMetaModelMock.expects("fetchObject").returns(SyncPromise.resolve());
// code under test
oBinding = this.oMetaModel.bindList(sPath, oContext, aSorters, aFilters);
assert.ok(oBinding instanceof ClientListBinding);
assert.strictEqual(oBinding.getModel(), this.oMetaModel);
assert.strictEqual(oBinding.getPath(), sPath);
assert.strictEqual(oBinding.getContext(), oContext);
assert.strictEqual(oBinding.aSorters, aSorters);
assert.strictEqual(oBinding.aApplicationFilters, aFilters);
});
//*********************************************************************************************
QUnit.test("ODataMetaListBinding#setContexts", function (assert) {
var oBinding,
oBindingMock,
oContext = this.oMetaModel.getContext("/EMPLOYEES"),
aContexts = [],
sPath = "path";
// avoid request to backend during initialization
this.oMetaModelMock.expects("fetchObject").returns(SyncPromise.resolve());
oBinding = this.oMetaModel.bindList(sPath, oContext);
oBindingMock = this.mock(oBinding);
oBindingMock.expects("updateIndices").withExactArgs();
oBindingMock.expects("applyFilter").withExactArgs();
oBindingMock.expects("applySort").withExactArgs();
oBindingMock.expects("_getLength").withExactArgs().returns(42);
// code under test
oBinding.setContexts(aContexts);
assert.strictEqual(oBinding.oList, aContexts);
assert.strictEqual(oBinding.iLength, 42);
});
//*********************************************************************************************
QUnit.test("ODataMetaListBinding#update (sync)", function (assert) {
var oBinding,
oBindingMock,
oContext = this.oMetaModel.getContext("/EMPLOYEES"),
aContexts = [{}],
sPath = "path";
// avoid request to backend during initialization
this.oMetaModelMock.expects("fetchObject").returns(SyncPromise.resolve());
oBinding = this.oMetaModel.bindList(sPath, oContext);
oBindingMock = this.mock(oBinding);
oBindingMock.expects("fetchContexts").withExactArgs()
.returns(SyncPromise.resolve(aContexts));
oBindingMock.expects("setContexts").withExactArgs(sinon.match.same(aContexts));
oBindingMock.expects("_fireChange").never();
// code under test
oBinding.update();
});
//*********************************************************************************************
QUnit.test("ODataMetaListBinding#update (async)", function (assert) {
var oBinding,
oBindingMock,
oContext = this.oMetaModel.getContext("/EMPLOYEES"),
aContexts = [{}],
sPath = "path",
oFetchPromise = SyncPromise.resolve(Promise.resolve()).then(function () {
// This is expected to happen after the promise is resolved
oBindingMock.expects("setContexts").withExactArgs(sinon.match.same(aContexts));
oBindingMock.expects("_fireChange").withExactArgs({reason : ChangeReason.Change});
return aContexts;
});
// avoid request to backend during initialization
this.oMetaModelMock.expects("fetchObject").returns(SyncPromise.resolve());
oBinding = this.oMetaModel.bindList(sPath, oContext);
oBindingMock = this.mock(oBinding);
oBindingMock.expects("fetchContexts").withExactArgs().returns(oFetchPromise);
oBindingMock.expects("setContexts").withExactArgs([]);
oBindingMock.expects("_fireChange").never(); // initially
// code under test
oBinding.update();
return oFetchPromise;
});
//*********************************************************************************************
QUnit.test("ODataMetaListBinding#checkUpdate", function (assert) {
var oBinding,
oBindingMock,
oContext = this.oMetaModel.getContext("/"),
sPath = "";
// avoid request to backend during initialization
this.oMetaModelMock.expects("fetchObject").returns(SyncPromise.resolve());
oBinding = this.oMetaModel.bindList(sPath, oContext);
oBindingMock = this.mock(oBinding);
this.mock(oBinding).expects("update").thrice().callsFake(function () {
this.oList = [{/*a context*/}];
});
oBindingMock.expects("_fireChange").withExactArgs({reason : ChangeReason.Change});
// code under test
oBinding.checkUpdate();
// code under test: The second call must call update, but not fire an event
oBinding.checkUpdate();
oBindingMock.expects("_fireChange").withExactArgs({reason : ChangeReason.Change});
// code under test: Must fire a change event
oBinding.checkUpdate(true);
});
//*********************************************************************************************
QUnit.test("ODataMetaListBinding#getContexts, getCurrentContexts", function (assert) {
var oBinding,
oMetaModel = this.oMetaModel, // instead of "that = this"
oContext = oMetaModel.getMetaContext("/EMPLOYEES"),
sPath = "";
function assertContextPaths(aContexts, aPaths) {
assert.notOk("diff" in aContexts, "extended change detection is ignored");
assert.deepEqual(aContexts.map(function (oContext) {
assert.strictEqual(oContext.getModel(), oMetaModel);
return oContext.getPath().replace("/EMPLOYEES/", "");
}), aPaths);
assert.deepEqual(oBinding.getCurrentContexts(), aContexts);
}
this.oMetaModelMock.expects("fetchEntityContainer").atLeast(1)
.returns(SyncPromise.resolve(mScope));
oBinding = oMetaModel.bindList(sPath, oContext);
// code under test: should be ignored
oBinding.enableExtendedChangeDetection();
assertContextPaths(oBinding.getContexts(0, 2), ["ID", "AGE"]);
assertContextPaths(oBinding.getContexts(1, 2), ["AGE", "EMPLOYEE_2_CONTAINED_S"]);
assertContextPaths(oBinding.getContexts(), ["ID", "AGE", "EMPLOYEE_2_CONTAINED_S",
"EMPLOYEE_2_EQUIPM€NTS", "EMPLOYEE_2_TEAM", "SALÃRY"]);
assertContextPaths(oBinding.getContexts(0, 10), ["ID", "AGE", "EMPLOYEE_2_CONTAINED_S",
"EMPLOYEE_2_EQUIPM€NTS", "EMPLOYEE_2_TEAM", "SALÃRY"]);
oMetaModel.setSizeLimit(2);
assertContextPaths(oBinding.getContexts(), ["ID", "AGE"]);
oBinding.attachEvent("sort", function () {
assert.ok(false, "unexpected sort event");
});
oMetaModel.setSizeLimit(100);
oBinding.sort(new Sorter("@sapui.name"));
assertContextPaths(oBinding.getContexts(), ["AGE", "EMPLOYEE_2_CONTAINED_S",
"EMPLOYEE_2_EQUIPM€NTS", "EMPLOYEE_2_TEAM", "ID", "SALÃRY"]);
oBinding.attachEvent("filter", function () {
assert.ok(false, "unexpected filter event");
});
oBinding.filter(new Filter("$kind", "EQ", "Property"));
assertContextPaths(oBinding.getContexts(), ["AGE", "ID", "SALÃRY"]);
});
//*********************************************************************************************
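	// Fixtures for ODataMetaListBinding#fetchContexts: bind "metaPath" relative to
	// "contextPath" and expect the paths of the resulting contexts in "result".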
[{
contextPath : undefined,
metaPath : "@",
result : []
}, {
// <template:repeat list="{entitySet>}" ...>
// Iterate all OData path segments, i.e. (navigation) properties.
// Implicit $Type insertion happens here!
//TODO support for $BaseType
contextPath : "/EMPLOYEES",
metaPath : "",
result : [
"/EMPLOYEES/ID",
"/EMPLOYEES/AGE",
"/EMPLOYEES/EMPLOYEE_2_CONTAINED_S",
"/EMPLOYEES/EMPLOYEE_2_EQUIPM€NTS",
"/EMPLOYEES/EMPLOYEE_2_TEAM",
"/EMPLOYEES/SALÃRY"
]
}, {
// <template:repeat list="{meta>EMPLOYEES}" ...>
// same as before, but with non-empty path
contextPath : "/",
metaPath : "EMPLOYEES",
result : [
"/EMPLOYEES/ID",
"/EMPLOYEES/AGE",
"/EMPLOYEES/EMPLOYEE_2_CONTAINED_S",
"/EMPLOYEES/EMPLOYEE_2_EQUIPM€NTS",
"/EMPLOYEES/EMPLOYEE_2_TEAM",
"/EMPLOYEES/SALÃRY"
]
}, {
// <template:repeat list="{meta>/}" ...>
// Iterate all OData path segments, i.e. entity sets and imports.
// Implicit scope lookup happens here!
metaPath : "/",
		result : [
"/ChangeManagerOfTeam",
"/EMPLOYEES",
"/EQUIPM€NTS",
"/GetEmployeeMaxAge",
"/Me",
"/OverloadedAction",
"/TEAMS",
"/T€AMS",
"/VoidAction"
]
}, {
// <template:repeat list="{property>@}" ...>
// Iterate all external targeting annotations.
contextPath : "/T€AMS/Team_Id",
metaPath : "@",
result : [
"/T€AMS/[email protected]",
"/T€AMS/[email protected]",
"/T€AMS/[email protected]@UI.TextArrangement"
]
}, {
// <template:repeat list="{property>@}" ...>
// Iterate all external targeting annotations.
contextPath : "/T€AMS/Name",
metaPath : "@",
result : []
}, {
// <template:repeat list="{field>./@}" ...>
// Iterate all inline annotations.
contextPath : "/T€AMS/$Type/@UI.LineItem/0",
metaPath : "./@",
result : [
"/T€AMS/$Type/@UI.LineItem/0/@UI.Importance"
]
}, {
// <template:repeat list="{at>}" ...>
// Iterate all inline annotations (edge case with empty relative path).
contextPath : "/T€AMS/$Type/@UI.LineItem/0/@",
metaPath : "",
result : [
"/T€AMS/$Type/@UI.LineItem/0/@UI.Importance"
]
}, {
contextPath : undefined,
metaPath : "/Unknown",
result : [],
warning : ["Unknown child Unknown of tea_busi.DefaultContainer", "/Unknown/"]
}].forEach(function (oFixture) {
var sPath = oFixture.contextPath
? oFixture.contextPath + "|"/*make cut more visible*/ + oFixture.metaPath
: oFixture.metaPath;
QUnit.test("ODataMetaListBinding#fetchContexts (sync): " + sPath, function (assert) {
var oBinding,
oMetaModel = this.oMetaModel, // instead of "that = this"
oContext = oFixture.contextPath && oMetaModel.getContext(oFixture.contextPath);
if (oFixture.warning) {
			// Note that fetchContexts is called twice in this test: once from bindList via the
			// constructor, once directly from the test
this.oLogMock.expects("isLoggable").twice()
.withExactArgs(jQuery.sap.log.Level.WARNING, sODataMetaModel)
.returns(true);
this.oLogMock.expects("warning").twice()
.withExactArgs(oFixture.warning[0], oFixture.warning[1], sODataMetaModel);
}
this.oMetaModelMock.expects("fetchEntityContainer").atLeast(0)
.returns(SyncPromise.resolve(mScope));
oBinding = this.oMetaModel.bindList(oFixture.metaPath, oContext);
// code under test
assert.deepEqual(oBinding.fetchContexts().getResult().map(function (oContext) {
assert.strictEqual(oContext.getModel(), oMetaModel);
return oContext.getPath();
}), oFixture.result);
});
});
//*********************************************************************************************
QUnit.test("ODataMetaListBinding#fetchContexts (async)", function (assert) {
var oBinding,
oMetaModel = this.oMetaModel,
sPath = "/foo";
// Note that fetchObject is called twice in this test: once from bindList via the
// constructor, once from fetchContexts
this.oMetaModelMock.expects("fetchObject").twice()
.withExactArgs(sPath + "/")
.returns(SyncPromise.resolve(Promise.resolve({bar: "", baz: ""})));
oBinding = this.oMetaModel.bindList(sPath);
return oBinding.fetchContexts().then(function (oResult) {
assert.deepEqual(oResult.map(function (oContext) {
assert.strictEqual(oContext.getModel(), oMetaModel);
return oContext.getPath();
}), ["/foo/bar", "/foo/baz"]);
});
});
//TODO iterate mix of inline and external targeting annotations
//TODO iterate annotations like "foo@..." for our special cases, e.g. annotations of annotation
//*********************************************************************************************
QUnit.test("events", function (assert) {
assert.throws(function () {
this.oMetaModel.attachParseError();
}, new Error("Unsupported event 'parseError': v4.ODataMetaModel#attachEvent"));
assert.throws(function () {
this.oMetaModel.attachRequestCompleted();
}, new Error("Unsupported event 'requestCompleted': v4.ODataMetaModel#attachEvent"));
assert.throws(function () {
this.oMetaModel.attachRequestFailed();
}, new Error("Unsupported event 'requestFailed': v4.ODataMetaModel#attachEvent"));
assert.throws(function () {
this.oMetaModel.attachRequestSent();
}, new Error("Unsupported event 'requestSent': v4.ODataMetaModel#attachEvent"));
});
//*********************************************************************************************
QUnit.test("validate: mSchema2MetadataUrl", function (assert) {
var mScope = {
"$Version" : "4.0",
"$Reference" : {
"/A/$metadata" : {
"$Include" : [
"A.", "A.A."
]
},
"/B/$metadata" : {
"$Include" : [
"B.", "B.B."
]
},
"/C/$metadata" : {
"$Include" : ["C."]
},
"../../../../default/iwbep/tea_busi_product/0001/$metadata" : {
"$Include" : [
"tea_busi_product."
]
}
}
},
sUrl = "/~/$metadata";
assert.deepEqual(this.oMetaModel.mSchema2MetadataUrl, {});
// simulate a previous reference to a schema with the _same_ reference URI --> allowed!
this.oMetaModel.mSchema2MetadataUrl["A."] = {"/A/$metadata" : false};
// simulate a previous reference to a schema with the _different_ reference URI
// --> allowed as long as the document is not yet read (and will never be read)
this.oMetaModel.mSchema2MetadataUrl["B.B."] = {"/B/V2/$metadata" : false};
// simulate a previous reference to a schema with the _same_ reference URI, already loaded
this.oMetaModel.mSchema2MetadataUrl["C."] = {"/C/$metadata" : true};
// code under test
assert.strictEqual(this.oMetaModel.validate(sUrl, mScope), mScope);
assert.deepEqual(this.oMetaModel.mSchema2MetadataUrl, {
"A." : {"/A/$metadata" : false},
"A.A." : {"/A/$metadata" : false},
"B." : {"/B/$metadata" : false},
"B.B." : {
"/B/$metadata" : false,
"/B/V2/$metadata" : false
},
"C." : {"/C/$metadata" : true},
"tea_busi_product." : {"/a/default/iwbep/tea_busi_product/0001/$metadata" : false}
});
});
//*********************************************************************************************
QUnit.test("getLastModified", function (assert) {
var mEmptyScope = {
"$Version" : "4.0"
},
mNewScope = {
"$Version" : "4.0",
"$Date" : "Tue, 18 Apr 2017 14:40:29 GMT"
},
iNow = Date.now(),
mOldScope = {
"$Version" : "4.0",
"$Date" : "Tue, 18 Apr 2017 14:40:29 GMT", // $LastModified wins!
"$LastModified" : "Fri, 07 Apr 2017 11:21:50 GMT"
},
mOldScopeClone = clone(mOldScope),
sUrl = "/~/$metadata"; // Note: in real life, each URL is read at most once!
// code under test (together with c'tor)
assert.strictEqual(this.oMetaModel.getLastModified().getTime(), 0, "initial value");
// code under test
assert.strictEqual(this.oMetaModel.validate(sUrl, mOldScope), mOldScope);
assert.strictEqual(this.oMetaModel.getLastModified().toISOString(),
"2017-04-07T11:21:50.000Z", "old $LastModified is used");
assert.notOk("$LastModified" in mOldScope);
// code under test
assert.strictEqual(this.oMetaModel.validate(sUrl, mNewScope), mNewScope);
assert.strictEqual(this.oMetaModel.getLastModified().toISOString(),
"2017-04-18T14:40:29.000Z", "new $Date is used");
assert.notOk("$Date" in mNewScope);
// code under test
assert.strictEqual(this.oMetaModel.validate(sUrl, mOldScopeClone), mOldScopeClone);
assert.strictEqual(this.oMetaModel.getLastModified().toISOString(),
"2017-04-18T14:40:29.000Z", "new $Date wins, old $LastModified is ignored");
assert.notOk("$LastModified" in mOldScopeClone);
// code under test
assert.strictEqual(this.oMetaModel.validate(sUrl, mEmptyScope), mEmptyScope);
assert.ok(this.oMetaModel.getLastModified().getTime() >= iNow,
"missing $Date/$LastModified is like 'now': " + this.oMetaModel.getLastModified());
});
//*********************************************************************************************
QUnit.test("getETags", function (assert) {
var sETag = 'W/"..."',
mETags,
that = this;
function codeUnderTest(sUrl, mScope) {
// code under test
assert.strictEqual(that.oMetaModel.validate(sUrl, mScope), mScope);
assert.notOk("$ETag" in mScope);
assert.notOk("$LastModified" in mScope);
}
// code under test (together with c'tor)
assert.deepEqual(this.oMetaModel.getETags(), {}, "initial value");
codeUnderTest("/~/A", {
"$Version" : "4.0",
"$LastModified" : "Fri, 07 Apr 2017 11:21:50 GMT"
});
codeUnderTest("/~/B", {
"$Version" : "4.0",
"$LastModified" : "Tue, 18 Apr 2017 14:40:29 GMT"
});
codeUnderTest("/~/C", {
"$Version" : "4.0"
});
codeUnderTest("/~/D", {
"$Version" : "4.0",
"$ETag" : sETag
});
// code under test
mETags = this.oMetaModel.getETags();
assert.deepEqual(mETags, {
"/~/A" : new Date(Date.UTC(2017, 3, 7, 11, 21, 50)),
"/~/B" : new Date(Date.UTC(2017, 3, 18, 14, 40, 29)),
"/~/C" : null,
"/~/D" : sETag // wins over null!
});
});
//*********************************************************************************************
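	// Invalid $metadata scopes and the error message expected from #validate; the error is
	// only thrown when references are supported (bSupportReferences = true), see test body.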
[{
message : "Unsupported IncludeAnnotations",
scope : {
"$Version" : "4.0",
"$Reference" : {
"/A/$metadata" : {
"$Include" : [
"A."
]
},
"/B/$metadata" : {
"$IncludeAnnotations" : [{
"$TermNamespace" : "com.sap.vocabularies.Common.v1"
}]
}
}
}
}, {
message : "A schema cannot span more than one document: tea_busi."
+ " - is both included and defined",
scope : {
"$Version" : "4.0",
"$Reference" : {
"/B/$metadata" : {
"$Include" : [
"foo.", "tea_busi."
]
}
},
"tea_busi." : {
"$kind" : "Schema"
}
}
}, {
message : "A schema cannot span more than one document: existing."
+ " - expected reference URI /B/v1/$metadata but instead saw /B/v2/$metadata",
scope : {
"$Version" : "4.0",
"$Reference" : {
"/A/$metadata" : {
"$Include" : [
"foo.", "bar."
]
},
"/B/v2/$metadata" : {
"$Include" : [
"baz.", "existing."
]
}
}
}
}].forEach(function (oFixture) {
[false, true].forEach(function (bSupportReferences) {
var sMessage = oFixture.message,
sTitle = "validate: " + sMessage + ", supportReferences: " + bSupportReferences;
QUnit.test(sTitle, function (assert) {
var sUrl = "/~/$metadata",
that = this;
function codeUnderTest() {
var oResult = that.oMetaModel.validate(sUrl, oFixture.scope);
assert.strictEqual(oResult, oFixture.scope);
}
this.oMetaModel.bSupportReferences = bSupportReferences;
// simulate a schema that has been loaded or referenced before
this.oMetaModel.mSchema2MetadataUrl = {
// simulate schema that is already read
"existing." : {"/B/v1/$metadata" : true}
};
if (bSupportReferences) {
this.oLogMock.expects("error")
.withExactArgs(sMessage, sUrl, sODataMetaModel);
}
if (bSupportReferences) {
assert.throws(codeUnderTest, new Error(sUrl + ": " + sMessage));
} else {
codeUnderTest();
}
});
});
});
//*********************************************************************************************
QUnit.test("_mergeAnnotations: without annotation files", function (assert) {
// Note: target elements have been omitted for brevity
var mExpectedAnnotations = {
"same.target" : {
"@Common.Description" : "",
"@Common.Label" : {
"old" : true // Note: no aggregation of properties here!
},
"@Common.Text" : ""
},
"another.target" : {
"@Common.Label" : ""
}
},
mScope = {
"A." : {
"$kind" : "Schema",
"$Annotations" : {
"same.target" : {
"@Common.Label" : {
"old" : true
},
"@Common.Text" : ""
}
}
},
"B." : {
"$kind" : "Schema",
"$Annotations" : {
"same.target" : {
"@Common.Description" : "",
"@Common.Label" : { // illegal overwrite within $metadata, ignored!
"new" : true
}
},
"another.target" : {
"@Common.Label" : ""
}
}
},
"B.B" : {}
};
this.oMetaModelMock.expects("validate")
.withExactArgs(this.oMetaModel.sUrl, mScope);
assert.deepEqual(this.oMetaModel.mSchema2MetadataUrl, {});
// code under test
this.oMetaModel._mergeAnnotations(mScope, []);
assert.deepEqual(mScope.$Annotations, mExpectedAnnotations,
"$Annotations have been shifted and merged from schemas to root");
assert.notOk("$Annotations" in mScope["A."], "$Annotations removed from schema");
assert.notOk("$Annotations" in mScope["B."], "$Annotations removed from schema");
assert.deepEqual(this.oMetaModel.mSchema2MetadataUrl, {
"A." : {"/a/b/c/d/e/$metadata" : false},
"B." : {"/a/b/c/d/e/$metadata" : false}
});
});
//*********************************************************************************************
QUnit.test("_mergeAnnotations: validation failure for $metadata", function (assert) {
var oError = new Error(),
mScope = {};
this.oMetaModelMock.expects("validate")
.withExactArgs(this.oMetaModel.sUrl, mScope)
.throws(oError);
assert.throws(function () {
// code under test
this.oMetaModel._mergeAnnotations(mScope, []);
}, oError);
});
//*********************************************************************************************
QUnit.test("_mergeAnnotations: validation failure in annotation file", function (assert) {
var oError = new Error(),
mScope = {},
mAnnotationScope1 = {},
mAnnotationScope2 = {};
this.oMetaModel.aAnnotationUris = ["n/a", "/my/annotation.xml"];
this.oMetaModelMock.expects("validate")
.withExactArgs(this.oMetaModel.sUrl, mScope);
this.oMetaModelMock.expects("validate")
.withExactArgs("n/a", mAnnotationScope1);
this.oMetaModelMock.expects("validate")
.withExactArgs("/my/annotation.xml", mAnnotationScope2)
.throws(oError);
assert.throws(function () {
// code under test
this.oMetaModel._mergeAnnotations(mScope, [mAnnotationScope1, mAnnotationScope2]);
}, oError);
});
//*********************************************************************************************
QUnit.test("_mergeAnnotations: with annotation files (legacy)", function (assert) {
var sNamespace = "com.sap.gateway.default.iwbep.tea_busi.v0001.",
sWorker = sNamespace + "Worker/",
sBasicSalaryCurr = sWorker + "SALARY/BASIC_SALARY_CURR",
sBasicSalaryCurr2 = "another.schema.2.SALARY/BASIC_SALARY_CURR",
sBonusCurr = sWorker + "SALARY/BONUS_CURR",
sCommonLabel = "@com.sap.vocabularies.Common.v1.Label",
sCommonQuickInfo = "@com.sap.vocabularies.Common.v1.QuickInfo",
sCommonText = "@com.sap.vocabularies.Common.v1.Text",
sBaseUrl = "/" + window.location.pathname.split("/")[1]
+ "/test-resources/sap/ui/core/qunit/odata/v4/data/",
oMetadata = jQuery.sap.sjax({url : sBaseUrl + "metadata.json", dataType : 'json'}).data,
oExpectedResult = clone(oMetadata),
oAnnotation = jQuery.sap.sjax({
url : sBaseUrl + "legacy_annotations.json",
dataType : 'json'
}).data,
oAnnotationCopy = clone(oAnnotation);
// the examples are unrealistic and only need to work in 'legacy mode'
this.oMetaModel.bSupportReferences = false;
this.oMetaModel.aAnnotationUris = ["n/a"];
this.oMetaModelMock.expects("validate")
.withExactArgs(this.oMetaModel.sUrl, oMetadata);
this.oMetaModelMock.expects("validate")
.withExactArgs("n/a", oAnnotation);
oExpectedResult.$Annotations = oMetadata[sNamespace].$Annotations;
delete oExpectedResult[sNamespace].$Annotations;
// all entries with $kind are merged
oExpectedResult["my.schema.2.FuGetEmployeeMaxAge"] =
oAnnotationCopy["my.schema.2.FuGetEmployeeMaxAge"];
oExpectedResult["my.schema.2.Entity"] =
oAnnotationCopy["my.schema.2.Entity"];
oExpectedResult["my.schema.2.DefaultContainer"] =
oAnnotationCopy["my.schema.2.DefaultContainer"];
oExpectedResult["my.schema.2."] =
oAnnotationCopy["my.schema.2."];
oExpectedResult["another.schema.2."] =
oAnnotationCopy["another.schema.2."];
// update annotations
oExpectedResult.$Annotations[sBasicSalaryCurr][sCommonLabel]
= oAnnotationCopy["my.schema.2."].$Annotations[sBasicSalaryCurr][sCommonLabel];
oExpectedResult.$Annotations[sBasicSalaryCurr][sCommonQuickInfo]
= oAnnotationCopy["my.schema.2."].$Annotations[sBasicSalaryCurr][sCommonQuickInfo];
oExpectedResult.$Annotations[sBonusCurr][sCommonText]
= oAnnotationCopy["my.schema.2."].$Annotations[sBonusCurr][sCommonText];
oExpectedResult.$Annotations[sBasicSalaryCurr2]
= oAnnotationCopy["another.schema.2."].$Annotations[sBasicSalaryCurr2];
delete oExpectedResult["my.schema.2."].$Annotations;
delete oExpectedResult["another.schema.2."].$Annotations;
// code under test
this.oMetaModel._mergeAnnotations(oMetadata, [oAnnotation]);
assert.deepEqual(oMetadata, oExpectedResult, "merged metadata as expected");
});
//*********************************************************************************************
QUnit.test("_mergeAnnotations: with annotation files", function (assert) {
var mScope0 = {
"$EntityContainer" : "tea_busi.DefaultContainer",
"$Reference" : {
"../../../../default/iwbep/tea_busi_foo/0001/$metadata" : {
"$Include" : [
"tea_busi_foo.v0001."
]
}
},
"$Version" : "4.0",
"tea_busi." : {
"$kind" : "Schema",
"$Annotations" : {
"tea_busi.DefaultContainer" : {
"@A" : "from $metadata",
"@B" : "from $metadata",
"@C" : "from $metadata"
},
"tea_busi.TEAM" : {
"@D" : ["from $metadata"],
"@E" : ["from $metadata"],
"@F" : ["from $metadata"]
}
}
},
"tea_busi.DefaultContainer" : {
"$kind" : "EntityContainer"
},
"tea_busi.EQUIPMENT" : {
"$kind" : "EntityType"
},
"tea_busi.TEAM" : {
"$kind" : "EntityType"
},
"tea_busi.Worker" : {
"$kind" : "EntityType"
}
},
mScope1 = {
"$Version" : "4.0",
"tea_busi_foo.v0001." : {
"$kind" : "Schema",
"$Annotations" : {
"tea_busi_foo.v0001.Product/Name" : {
"@Common.Label" : "from $metadata"
}
}
},
"tea_busi_foo.v0001.Product" : {
"$kind" : "EntityType",
"Name" : {
"$kind" : "Property",
"$Type" : "Edm.String"
}
}
},
mAnnotationScope1 = {
"$Version" : "4.0",
"foo." : {
"$kind" : "Schema",
"$Annotations" : {
"tea_busi.DefaultContainer" : {
"@B" : "from annotation #1",
"@C" : "from annotation #1"
},
"tea_busi.TEAM" : {
"@E" : ["from annotation #1"],
"@F" : ["from annotation #1"]
},
"tea_busi.Worker" : {
"@From.Annotation" : {
"$Type" : "some.Record",
"Label" : "from annotation #1"
},
"@From.Annotation1" : "from annotation #1"
}
}
}
},
mAnnotationScope2 = {
"$Version" : "4.0",
"bar." : {
"$kind" : "Schema",
"$Annotations" : {
"tea_busi.DefaultContainer" : {
"@C" : "from annotation #2"
},
"tea_busi.EQUIPMENT" : {
"@From.Annotation2" : "from annotation #2"
},
"tea_busi.TEAM" : {
"@F" : ["from annotation #2"]
},
"tea_busi.Worker" : {
"@From.Annotation" : {
"$Type" : "some.Record",
"Value" : "from annotation #2"
}
},
"tea_busi_foo.v0001.Product/Name" : {
"@Common.Label" : "from annotation #2"
}
}
}
},
mExpectedScope = {
"$Annotations" : {
"tea_busi.DefaultContainer" : {
"@A" : "from $metadata",
"@B" : "from annotation #1",
"@C" : "from annotation #2"
},
"tea_busi.EQUIPMENT" : {
"@From.Annotation2" : "from annotation #2"
},
"tea_busi.TEAM" : { // Note: no aggregation of array elements here!
"@D" : ["from $metadata"],
"@E" : ["from annotation #1"],
"@F" : ["from annotation #2"]
},
"tea_busi.Worker" : {
"@From.Annotation" : {
"$Type" : "some.Record",
// Note: no "Label" here!
"Value" : "from annotation #2"
},
"@From.Annotation1" : "from annotation #1"
},
"tea_busi_foo.v0001.Product/Name" : {
"@Common.Label" : "from annotation #2"
}
},
"$EntityContainer" : "tea_busi.DefaultContainer",
"$Reference" : {
"../../../../default/iwbep/tea_busi_foo/0001/$metadata" : {
"$Include" : [
"tea_busi_foo.v0001."
]
}
},
"$Version" : "4.0",
"bar." : {
"$kind" : "Schema"
},
"foo." : {
"$kind" : "Schema"
},
"tea_busi." : {
"$kind" : "Schema"
},
"tea_busi.DefaultContainer" : {
"$kind" : "EntityContainer"
},
"tea_busi.EQUIPMENT" : {
"$kind" : "EntityType"
},
"tea_busi.TEAM" : {
"$kind" : "EntityType"
},
"tea_busi.Worker" : {
"$kind" : "EntityType"
}
};
this.oMetaModel.aAnnotationUris = ["/URI/1", "/URI/2"];
this.oMetaModelMock.expects("validate")
.withExactArgs(this.oMetaModel.sUrl, mScope0);
this.oMetaModelMock.expects("validate")
.withExactArgs("/URI/1", mAnnotationScope1);
this.oMetaModelMock.expects("validate")
.withExactArgs("/URI/2", mAnnotationScope2);
assert.deepEqual(this.oMetaModel.mSchema2MetadataUrl, {});
// code under test
this.oMetaModel._mergeAnnotations(mScope0, [mAnnotationScope1, mAnnotationScope2]);
assert.deepEqual(mScope0, mExpectedScope);
assert.strictEqual(mScope0["tea_busi."].$Annotations, undefined);
assert.strictEqual(mAnnotationScope1["foo."].$Annotations, undefined);
assert.strictEqual(mAnnotationScope2["bar."].$Annotations, undefined);
assert.deepEqual(this.oMetaModel.mSchema2MetadataUrl, {
"bar." : {"/URI/2" : false},
"foo." : {"/URI/1" : false},
"tea_busi." : {"/a/b/c/d/e/$metadata" : false}
});
// prepare to load "cross-service reference"
// simulate #validate of mScope0
this.oMetaModel.mSchema2MetadataUrl["tea_busi_foo.v0001."]
= {"/a/default/iwbep/tea_busi_foo/0001/$metadata" : false};
this.oMetaModelMock.expects("fetchEntityContainer").atLeast(1)
.returns(SyncPromise.resolve(mScope0));
this.mock(this.oMetaModel.oRequestor).expects("read")
.withExactArgs("/a/default/iwbep/tea_busi_foo/0001/$metadata")
.returns(Promise.resolve(mScope1));
this.oMetaModelMock.expects("validate")
.withExactArgs("/a/default/iwbep/tea_busi_foo/0001/$metadata", mScope1)
.returns(mScope1);
// code under test
return this.oMetaModel.fetchObject("/tea_busi_foo.v0001.Product/[email protected]")
.then(function (sLabel) {
assert.strictEqual(sLabel, "from annotation #2", "not overwritten by $metadata");
});
});
//*********************************************************************************************
QUnit.test("_mergeAnnotations - error (legacy)", function (assert) {
var oAnnotation1 = {
"tea_busi.NewType1" : {
"$kind" : "EntityType"
}
},
oAnnotation2 = {
"tea_busi.NewType2" : {
"$kind" : "EntityType"
},
"tea_busi.ExistingType" : {
"$kind" : "EntityType"
}
},
sMessage = "A schema cannot span more than one document: tea_busi.ExistingType",
oMetadata = {
"tea_busi.ExistingType" : {
"$kind" : "EntityType"
}
};
this.oMetaModel.aAnnotationUris = ["n/a", "/my/annotation.xml"];
// legacy behavior: $Version is not checked, tea_busi.NewType2 is allowed
this.oMetaModel.bSupportReferences = false;
this.oMetaModelMock.expects("validate")
.withExactArgs(this.oMetaModel.sUrl, oMetadata);
this.oMetaModelMock.expects("validate")
.withExactArgs("n/a", oAnnotation1);
this.oMetaModelMock.expects("validate")
.withExactArgs("/my/annotation.xml", oAnnotation2);
this.oLogMock.expects("error")
.withExactArgs(sMessage, "/my/annotation.xml", sODataMetaModel);
assert.throws(function () {
// code under test
this.oMetaModel._mergeAnnotations(oMetadata, [oAnnotation1, oAnnotation2]);
}, new Error("/my/annotation.xml: " + sMessage));
});
//*********************************************************************************************
QUnit.test("_mergeAnnotations - a schema cannot span more than one document",
function (assert) {
var oAnnotation = {
"$Version" : "4.0",
"tea_busi." : {
"$kind" : "Schema"
}
},
sMessage = "A schema cannot span more than one document: tea_busi.",
oMetadata = {
"$Version" : "4.0",
"tea_busi." : {
"$kind" : "Schema"
}
};
this.oMetaModel.aAnnotationUris = ["n/a", "/my/annotation.xml"];
this.oLogMock.expects("error")
.withExactArgs(sMessage, "/my/annotation.xml", sODataMetaModel);
assert.throws(function () {
// code under test
this.oMetaModel._mergeAnnotations(oMetadata, [{"$Version" : "4.0"}, oAnnotation]);
}, new Error("/my/annotation.xml: " + sMessage));
}
);
//*********************************************************************************************
QUnit.test("getOrCreateValueListModel", function (assert) {
var oModel = new ODataModel({
serviceUrl : "/Foo/DataService/",
synchronizationMode : "None"
}),
oMetaModel = oModel.getMetaModel(),
oValueListModel;
oModel.oRequestor.mHeaders["X-CSRF-Token"] = "xyz";
// code under test
oValueListModel = oMetaModel.getOrCreateValueListModel("../ValueListService/$metadata");
assert.ok(oValueListModel instanceof ODataModel);
assert.strictEqual(oValueListModel.sServiceUrl, "/Foo/ValueListService/");
assert.strictEqual(oValueListModel.getDefaultBindingMode(), BindingMode.OneWay);
assert.strictEqual(oValueListModel.sOperationMode, OperationMode.Server);
assert.strictEqual(oValueListModel.oRequestor.mHeaders["X-CSRF-Token"], "xyz");
// code under test
assert.strictEqual(oMetaModel.getOrCreateValueListModel("/Foo/ValueListService/$metadata"),
oValueListModel);
// code under test
assert.strictEqual(oValueListModel.getMetaModel()
.getOrCreateValueListModel("/Foo/ValueListService/$metadata"),
oValueListModel);
// code under test
assert.strictEqual(oValueListModel.getMetaModel().getOrCreateValueListModel("$metadata"),
oValueListModel);
oModel = new ODataModel({
serviceUrl : "/Foo/DataService2/",
synchronizationMode : "None"
});
// code under test - even a totally different model gets the very same value list model
assert.strictEqual(oModel.getMetaModel()
.getOrCreateValueListModel("../ValueListService/$metadata"),
oValueListModel);
});
//*********************************************************************************************
QUnit.test("getOrCreateValueListModel: relative data service URL", function (assert) {
var sRelativePath = "../../../DataService/",
sAbsolutePath =
new URI(sRelativePath).absoluteTo(document.baseURI).pathname().toString(),
oModel = new ODataModel({
serviceUrl : sRelativePath,
synchronizationMode : "None"
}),
oValueListModel;
// code under test
oValueListModel = oModel.getMetaModel()
.getOrCreateValueListModel("../ValueListService/$metadata");
assert.strictEqual(oValueListModel.sServiceUrl,
new URI("../ValueListService/").absoluteTo(sAbsolutePath).toString());
});
//*********************************************************************************************
QUnit.test("fetchValueListType: unknown property", function (assert) {
var oContext = {},
sPath = "/Products('HT-1000')/Foo";
this.oMetaModelMock.expects("getMetaContext").withExactArgs(sPath).returns(oContext);
this.oMetaModelMock.expects("fetchObject")
.withExactArgs(undefined, sinon.match.same(oContext))
.returns(Promise.resolve());
// code under test
return this.oMetaModel.fetchValueListType(sPath).then(function () {
assert.ok(false);
}, function (oError) {
assert.ok(oError.message, "No metadata for " + sPath);
});
});
//*********************************************************************************************
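	// Each fixture maps the annotations found at the property to the expected ValueListType.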
[{
mAnnotations : {
"@some.other.Annotation" : true
},
sValueListType : ValueListType.None
}, {
mAnnotations : {
"@com.sap.vocabularies.Common.v1.ValueListReferences" : [],
"@com.sap.vocabularies.Common.v1.ValueListWithFixedValues" : true
},
sValueListType : ValueListType.Fixed
}, {
mAnnotations : {
"@com.sap.vocabularies.Common.v1.ValueListReferences" : []
},
sValueListType : ValueListType.Standard
}, {
mAnnotations : {
"@com.sap.vocabularies.Common.v1.ValueListReferences#foo" : [],
"@com.sap.vocabularies.Common.v1.ValueListWithFixedValues" : false
},
sValueListType : ValueListType.Standard
}, {
mAnnotations : {
"@com.sap.vocabularies.Common.v1.ValueListMapping#foo" : {},
"@com.sap.vocabularies.Common.v1.ValueListWithFixedValues" : false
},
sValueListType : ValueListType.Standard
}].forEach(function (oFixture) {
QUnit.test("fetchValueListType: " + oFixture.sValueListType, function (assert) {
var oContext = {},
sPropertyPath = "/ProductList('HT-1000')/Status";
this.oMetaModelMock.expects("getMetaContext")
.withExactArgs(sPropertyPath).returns(oContext);
this.oMetaModelMock.expects("fetchObject")
.withExactArgs(undefined, sinon.match.same(oContext))
.returns(SyncPromise.resolve({}));
this.oMetaModelMock.expects("getObject")
.withExactArgs("@", sinon.match.same(oContext))
.returns(oFixture.mAnnotations);
// code under test
			return this.oMetaModel.fetchValueListType(sPropertyPath).then(function (sValueListType) {
assert.strictEqual(sValueListType, oFixture.sValueListType);
});
});
});
//*********************************************************************************************
QUnit.test("getValueListType, requestValueListType", function (assert) {
return checkGetAndRequest(this, assert, "fetchValueListType", ["sPath"], true);
});
//*********************************************************************************************
QUnit.test("fetchValueListMappings: success", function (assert) {
var oModel = new ODataModel({
serviceUrl : "/Foo/DataService/",
synchronizationMode : "None"
}),
oMetaModelMock = this.mock(oModel.getMetaModel()),
oDefaultMapping = {
"CollectionPath" : "VH_Category1Set",
"Parameters" : [{"p1" : "foo"}]
},
oFooMapping = {
"CollectionPath" : "VH_Category2Set",
"Parameters" : [{"p2" : "bar"}]
},
oProperty = {},
oValueListMetadata = {
"$Annotations" : {
"zui5_epm_sample.Product/Category" : {
"@com.sap.vocabularies.Common.v1.ValueListMapping" : oDefaultMapping,
"@com.sap.vocabularies.Common.v1.ValueListMapping#foo" : oFooMapping
},
"some.other.Target" : {}
}
},
oValueListModel = {
getMetaModel : function () {
return {
fetchEntityContainer : function () {
return Promise.resolve(oValueListMetadata);
}
};
}
};
oMetaModelMock.expects("getObject")
.withExactArgs("/zui5_epm_sample.Product/Category")
.returns(oProperty);
// code under test
return oModel.getMetaModel()
.fetchValueListMappings(oValueListModel, "zui5_epm_sample", oProperty)
.then(function (oValueListMappings) {
assert.deepEqual(oValueListMappings, {
"" : oDefaultMapping,
"foo" : oFooMapping
});
});
});
//*********************************************************************************************
[{
annotations : {
"zui5_epm_sample.Product/CurrencyCode/type.cast" : true
},
error : "Unexpected annotation target 'zui5_epm_sample.Product/CurrencyCode/type.cast' " +
"with namespace of data service in /Foo/ValueListService"
}, {
annotations : {
"zui5_epm_sample.Product/Category" : {
"@some.other.Term" : true
}
},
error : "Unexpected annotation 'some.other.Term' for target "
+ "'zui5_epm_sample.Product/Category' with namespace of data service "
+ "in /Foo/ValueListService"
}, {
annotations : {},
error : "No annotation 'com.sap.vocabularies.Common.v1.ValueListMapping' "
+ "in /Foo/ValueListService"
}].forEach(function (oFixture) {
QUnit.test("fetchValueListMappings: " + oFixture.error, function (assert) {
var oModel = new ODataModel({
serviceUrl : "/Foo/DataService/",
synchronizationMode : "None"
}),
oMetaModel = oModel.getMetaModel(),
oMetaModelMock = this.mock(oMetaModel),
oProperty = {},
oValueListMetadata = {
"$Annotations" : oFixture.annotations
},
oValueListModel = {
getMetaModel : function () {
return {
fetchEntityContainer : function () {
return Promise.resolve(oValueListMetadata);
}
};
},
sServiceUrl : "/Foo/ValueListService"
},
sTarget = Object.keys(oFixture.annotations)[0];
oMetaModelMock.expects("getObject").atLeast(0)
.withExactArgs("/" + sTarget)
.returns(sTarget === "zui5_epm_sample.Product/Category" ? oProperty : undefined);
// code under test
return oMetaModel
.fetchValueListMappings(oValueListModel, "zui5_epm_sample", oProperty)
.then(function () {
assert.ok(false);
}, function (oError) {
assert.strictEqual(oError.message, oFixture.error);
});
});
});
//*********************************************************************************************
QUnit.test("fetchValueListMappings: value list model is data model", function (assert) {
var oModel = new ODataModel({
serviceUrl : "/Foo/DataService/",
synchronizationMode : "None"
}),
oMetaModelMock = this.mock(oModel.getMetaModel()),
oMapping = {
"CollectionPath" : "VH_CountrySet",
"Parameters" : [{"p1" : "foo"}]
},
oProperty = {
"$kind" : "Property"
},
oMetadata = {
"$EntityContainer" : "value_list.Container",
"value_list.VH_BusinessPartner" : {
"$kind" : "Entity",
"Country" : oProperty
},
"$Annotations" : {
// value list on value list
"value_list.VH_BusinessPartner/Country" : {
"@com.sap.vocabularies.Common.v1.Label" : "Country",
"@com.sap.vocabularies.Common.v1.ValueListMapping" : oMapping
},
"value_list.VH_BusinessPartner/Foo" : {/* some other field w/ value list*/}
}
};
oMetaModelMock.expects("fetchEntityContainer").atLeast(1)
.returns(SyncPromise.resolve(oMetadata));
// code under test
return oModel.getMetaModel()
.fetchValueListMappings(oModel, "value_list", oProperty)
.then(function (oValueListMappings) {
assert.deepEqual(oValueListMappings, {
"" : oMapping
});
});
});
//*********************************************************************************************
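	// Error scenarios for #requestValueListInfo: the property is unknown resp. has no value
	// list references.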
[{
sPropertyPath : "/EMPLOYEES/unknown",
sExpectedError : "No metadata"
}, {
sPropertyPath : "/EMPLOYEES/AGE",
sExpectedError : "No annotation 'com.sap.vocabularies.Common.v1.ValueListReferences'"
}].forEach(function (oFixture) {
QUnit.test("requestValueListInfo: " + oFixture.sExpectedError, function (assert) {
var oModel = new ODataModel({
serviceUrl : "/~/",
synchronizationMode : "None"
});
this.mock(oModel.getMetaModel()).expects("fetchEntityContainer").atLeast(1)
.returns(SyncPromise.resolve(mScope));
// code under test
return oModel.getMetaModel().requestValueListInfo(oFixture.sPropertyPath)
.then(function () {
assert.ok(false);
}, function (oError) {
assert.strictEqual(oError.message,
oFixture.sExpectedError + " for " + oFixture.sPropertyPath);
});
});
});
//*********************************************************************************************
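	// bDuplicate = true lets two value list services contribute a mapping with the same
	// qualifier "" for the property, which must be reported as an error.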
[false, true].forEach(function (bDuplicate) {
QUnit.test("requestValueListInfo: duplicate=" + bDuplicate, function (assert) {
var sMappingUrl1 = "../ValueListService1/$metadata",
sMappingUrl2 = "../ValueListService2/$metadata",
sMappingUrlBar = "../ValueListServiceBar/$metadata",
oModel = new ODataModel({
serviceUrl : "/Foo/DataService/",
synchronizationMode : "None"
}),
oMetaModelMock = this.mock(oModel.getMetaModel()),
oProperty = {
"$kind" : "Property"
},
sPropertyPath = "/ProductList('HT-1000')/Category",
oMetadata = {
"$EntityContainer" : "zui5_epm_sample.Container",
"zui5_epm_sample.Product" : {
"$kind" : "Entity",
"Category" : oProperty
},
"$Annotations" : {
"zui5_epm_sample.Product/Category" : {
"@com.sap.vocabularies.Common.v1.ValueListReferences" :
[sMappingUrl1, sMappingUrl2],
"@com.sap.vocabularies.Common.v1.ValueListReferences#bar" :
[sMappingUrlBar],
"@com.sap.vocabularies.Common.v1.ValueListReferences#[email protected]"
: true,
"@some.other.Annotation" : true
}
},
"zui5_epm_sample.Container" : {
"ProductList" : {
"$kind" : "EntitySet",
"$Type" : "zui5_epm_sample.Product"
}
}
},
oValueListMappings1 = {
"" : {CollectionPath : ""}
},
oValueListMappings2 = {
"foo" : {CollectionPath : "foo"}
},
oValueListMappingsBar = {},
oValueListModel1 = {sServiceUrl : sMappingUrl1},
oValueListModel2 = {sServiceUrl : sMappingUrl2},
oValueListModelBar = {sServiceUrl : sMappingUrlBar};
oValueListMappingsBar[bDuplicate ? "" : "bar"] = {CollectionPath : "bar"};
oMetaModelMock.expects("fetchEntityContainer").atLeast(1)
.returns(SyncPromise.resolve(oMetadata));
oMetaModelMock.expects("getOrCreateValueListModel")
.withExactArgs(sMappingUrl1)
.returns(oValueListModel1);
oMetaModelMock.expects("fetchValueListMappings")
.withExactArgs(sinon.match.same(oValueListModel1), "zui5_epm_sample",
sinon.match.same(oProperty))
.returns(Promise.resolve(oValueListMappings1));
oMetaModelMock.expects("getOrCreateValueListModel")
.withExactArgs(sMappingUrl2)
.returns(oValueListModel2);
oMetaModelMock.expects("fetchValueListMappings")
.withExactArgs(sinon.match.same(oValueListModel2), "zui5_epm_sample",
sinon.match.same(oProperty))
.returns(Promise.resolve(oValueListMappings2));
oMetaModelMock.expects("getOrCreateValueListModel")
.withExactArgs(sMappingUrlBar)
.returns(oValueListModelBar);
oMetaModelMock.expects("fetchValueListMappings")
.withExactArgs(sinon.match.same(oValueListModelBar), "zui5_epm_sample",
sinon.match.same(oProperty))
.returns(SyncPromise.resolve(oValueListMappingsBar));
// code under test
return oModel.getMetaModel()
.requestValueListInfo(sPropertyPath)
.then(function (oResult) {
assert.ok(!bDuplicate);
assert.deepEqual(oResult, {
"" : {
$model : oValueListModel1,
CollectionPath : ""
},
"foo" : {
$model : oValueListModel2,
CollectionPath : "foo"
},
"bar" : {
$model : oValueListModelBar,
CollectionPath : "bar"
}
});
}, function (oError) {
assert.ok(bDuplicate);
assert.strictEqual(oError.message,
"Annotations 'com.sap.vocabularies.Common.v1.ValueListMapping' with "
+ "identical qualifier '' for property " + sPropertyPath
+ " in " + sMappingUrlBar + " and " + sMappingUrl1);
});
});
});<|fim▁hole|>
//*********************************************************************************************
QUnit.test("requestValueListInfo: same model w/o reference", function (assert) {
var oProperty = {
"$kind" : "Property"
},
oValueListMappingFoo = {CollectionPath : "foo"},
oMetadata = {
"$EntityContainer" : "value_list.Container",
"value_list.Container" : {
"$kind" : "EntityContainer",
"VH_BusinessPartnerSet" : {
"$kind" : "EntitySet",
"$Type" : "value_list.VH_BusinessPartner"
}
},
"value_list.VH_BusinessPartner" : {
"$kind" : "Entity",
"Country" : oProperty
},
"$Annotations" : {
"value_list.VH_BusinessPartner/Country" : {
"@com.sap.vocabularies.Common.v1.ValueListMapping#foo" :
oValueListMappingFoo,
"@com.sap.vocabularies.Common.v1.ValueListMapping#bar" :
{CollectionPath : "bar"}
}
}
},
oModel = new ODataModel({
serviceUrl : "/Foo/ValueListService/",
synchronizationMode : "None"
}),
oMetaModelMock = this.mock(oModel.getMetaModel()),
sPropertyPath = "/VH_BusinessPartnerSet('0100000000')/Country";
oMetaModelMock.expects("fetchEntityContainer").atLeast(1)
.returns(SyncPromise.resolve(oMetadata));
// code under test
return oModel.getMetaModel().requestValueListInfo(sPropertyPath).then(function (oResult) {
assert.strictEqual(oResult.foo.$model, oModel);
assert.strictEqual(oResult.bar.$model, oModel);
assert.notOk("$model" in oValueListMappingFoo);
delete oResult.foo.$model;
delete oResult.bar.$model;
assert.deepEqual(oResult, {
"foo" : {CollectionPath : "foo"},
"bar" : {CollectionPath : "bar"}
});
});
});
//*********************************************************************************************
[false, true].forEach(function (bDuplicate) {
var sTitle = "requestValueListInfo: fixed values: duplicate=" + bDuplicate;
QUnit.test(sTitle, function (assert) {
var oValueListMapping = {CollectionPath : "foo"},
oAnnotations = {
"@com.sap.vocabularies.Common.v1.ValueListWithFixedValues" : true,
"@com.sap.vocabularies.Common.v1.ValueListMapping#foo" : oValueListMapping
},
oMetadata = {
"$EntityContainer" : "value_list.Container",
"value_list.Container" : {
"$kind" : "EntityContainer",
"VH_BusinessPartnerSet" : {
"$kind" : "EntitySet",
"$Type" : "value_list.VH_BusinessPartner"
}
},
"value_list.VH_BusinessPartner" : {
"$kind" : "Entity",
"Country" : {}
},
"$Annotations" : {
"value_list.VH_BusinessPartner/Country" : oAnnotations
}
},
oModel = new ODataModel({
serviceUrl : "/Foo/ValueListService/",
synchronizationMode : "None"
}),
sPropertyPath = "/VH_BusinessPartnerSet('42')/Country";
if (bDuplicate) {
oAnnotations["@com.sap.vocabularies.Common.v1.ValueListMapping#bar"] = {};
}
this.mock(oModel.getMetaModel()).expects("fetchEntityContainer").atLeast(1)
.returns(SyncPromise.resolve(oMetadata));
// code under test
return oModel.getMetaModel().requestValueListInfo(sPropertyPath)
.then(function (oResult) {
assert.notOk(bDuplicate);
assert.strictEqual(oResult[""].$model, oModel);
delete oResult[""].$model;
assert.deepEqual(oResult, {
"" : {CollectionPath : "foo"}
});
}, function (oError) {
assert.ok(bDuplicate);
assert.strictEqual(oError.message, "Annotation "
+ "'com.sap.vocabularies.Common.v1.ValueListWithFixedValues' but multiple "
+ "'com.sap.vocabularies.Common.v1.ValueListMapping' for property "
+ sPropertyPath);
});
});
});
// *********************************************************************************************
QUnit.test("requestValueListInfo: property in cross-service reference", function (assert) {
var sMappingUrl = "../ValueListService/$metadata",
oModel = new ODataModel({
serviceUrl : "/Foo/DataService/",
synchronizationMode : "None"
}),
oMetaModelMock = this.mock(oModel.getMetaModel()),
oProperty = {
"$kind" : "Property"
},
oMetadata = {
"$Version" : "4.0",
"$Reference" : {
"/Foo/EpmSample/$metadata" : {
"$Include" : ["zui5_epm_sample."]
}
},
"$EntityContainer" : "base.Container",
"base.Container" : {
"BusinessPartnerList" : {
"$kind" : "EntitySet",
"$Type" : "base.BusinessPartner"
}
},
"base.BusinessPartner" : {
"$kind" : "EntityType",
"BP_2_PRODUCT" : {
"$kind" : "NavigationProperty",
"$Type" : "zui5_epm_sample.Product"
}
}
},
oMetadataProduct = {
"$Version" : "4.0",
"zui5_epm_sample.Product" : {
"$kind" : "Entity",
"Category" : oProperty
},
"zui5_epm_sample." : {
"$kind" : "Schema",
"$Annotations" : {
"zui5_epm_sample.Product/Category" : {
"@com.sap.vocabularies.Common.v1.ValueListReferences" : [sMappingUrl]
}
}
}
},
sPropertyPath = "/BusinessPartnerList('0100000000')/BP_2_PRODUCT('HT-1000')/Category",
oRequestorMock = this.mock(oModel.oMetaModel.oRequestor),
oValueListMappings = {
"" : {CollectionPath : ""}
},
oValueListModel = {sServiceUrl : sMappingUrl};
oRequestorMock.expects("read").withExactArgs("/Foo/DataService/$metadata", false, undefined)
.returns(Promise.resolve(oMetadata));
oRequestorMock.expects("read").withExactArgs("/Foo/EpmSample/$metadata")
.returns(Promise.resolve(oMetadataProduct));
oMetaModelMock.expects("getOrCreateValueListModel")
.withExactArgs(sMappingUrl)
.returns(oValueListModel);
oMetaModelMock.expects("fetchValueListMappings")
.withExactArgs(sinon.match.same(oValueListModel), "zui5_epm_sample",
sinon.match.same(oProperty))
.returns(Promise.resolve(oValueListMappings));
// code under test
return oModel.getMetaModel().requestValueListInfo(sPropertyPath).then(function (oResult) {
assert.deepEqual(oResult, {
"" : {
$model : oValueListModel,
CollectionPath : ""
}
});
});
});
// *********************************************************************************************
QUnit.test("requestValueListInfo: same qualifier in reference and local", function (assert) {
var sMappingUrl = "../ValueListService/$metadata",
oProperty = {
"$kind" : "Property"
},
oMetadata = {
"$EntityContainer" : "zui5_epm_sample.Container",
"zui5_epm_sample.Container" : {
"$kind" : "EntityContainer",
"ProductList" : {
"$kind" : "EntitySet",
"$Type" : "zui5_epm_sample.Product"
}
},
"zui5_epm_sample.Product" : {
"$kind" : "Entity",
"Category" : oProperty
},
"$Annotations" : {
"zui5_epm_sample.Product/Category" : {
"@com.sap.vocabularies.Common.v1.ValueListReferences" : [sMappingUrl],
"@com.sap.vocabularies.Common.v1.ValueListMapping#foo" : {}
}
}
},
oModel = new ODataModel({
serviceUrl : "/Foo/ValueListService/",
synchronizationMode : "None"
}),
oMetaModelMock = this.mock(oModel.getMetaModel()),
sPropertyPath = "/ProductList('HT-1000')/Category",
oValueListModel = {};
oMetaModelMock.expects("fetchEntityContainer").atLeast(1)
.returns(SyncPromise.resolve(oMetadata));
oMetaModelMock.expects("getOrCreateValueListModel")
.withExactArgs(sMappingUrl)
.returns(oValueListModel);
oMetaModelMock.expects("fetchValueListMappings")
.withExactArgs(sinon.match.same(oValueListModel), "zui5_epm_sample",
sinon.match.same(oProperty))
.returns(Promise.resolve({"foo" : {}}));
// code under test
return oModel.getMetaModel().requestValueListInfo(sPropertyPath).then(function () {
assert.ok(false);
}, function (oError) {
assert.strictEqual(oError.message,
"Annotations 'com.sap.vocabularies.Common.v1.ValueListMapping' with identical "
+ "qualifier 'foo' for property " + sPropertyPath + " in "
+ oModel.sServiceUrl + "$metadata and " + sMappingUrl);
});
});
// *********************************************************************************************
QUnit.test("fetchModule: synchronously", function (assert) {
var vModule = {};
this.mock(sap.ui).expects("require")
.withExactArgs("sap/ui/model/odata/type/Int")
.returns(vModule); // requested module already loaded
// code under test
assert.strictEqual(this.oMetaModel.fetchModule("sap.ui.model.odata.type.Int").getResult(),
vModule);
});
// *********************************************************************************************
QUnit.test("fetchModule, asynchronous", function (assert) {
var vModule = {},
sModuleName = "sap/ui/model/odata/type/Int64",
oSapUiMock = this.mock(sap.ui);
oSapUiMock.expects("require")
.withExactArgs(sModuleName)
.returns(undefined); // requested module not yet loaded
oSapUiMock.expects("require")
.withExactArgs([sModuleName], sinon.match.func)
.callsArgWithAsync(1, vModule);
// code under test
return this.oMetaModel.fetchModule("sap.ui.model.odata.type.Int64")
.then(function (oResult) {
assert.strictEqual(oResult, vModule);
});
});
//*********************************************************************************************
if (TestUtils.isRealOData()) {
//*****************************************************************************************
QUnit.test("getValueListType, requestValueListInfo: realOData", function (assert) {
var sPath = new URI(TestUtils.proxy(sSampleServiceUrl))
.absoluteTo(window.location.pathname).toString(),
oModel = new ODataModel({
serviceUrl : sPath,
synchronizationMode : "None"
}),
oMetaModel = oModel.getMetaModel(),
sPropertyPath = "/ProductList('HT-1000')/Category";
return oMetaModel.requestObject("/ProductList/").then(function () {
assert.strictEqual(oMetaModel.getValueListType(
"/com.sap.gateway.default.zui5_epm_sample.v0002.Contact/Sex"),
ValueListType.Fixed);
assert.strictEqual(oMetaModel.getValueListType(sPropertyPath),
ValueListType.Standard);
return oMetaModel.requestValueListInfo(sPropertyPath).then(function (oResult) {
var oValueListInfo = oResult[""];
assert.strictEqual(oValueListInfo.CollectionPath, "H_EPM_PD_CATS_SH_Set");
});
});
});
//*****************************************************************************************
QUnit.test("requestValueListInfo: same model w/o reference, realOData", function (assert) {
var oModel = new ODataModel({
serviceUrl : TestUtils.proxy(sSampleServiceUrl),
synchronizationMode : "None"
}),
oMetaModel = oModel.getMetaModel(),
sPropertyPath = "/ProductList/0/CurrencyCode",
oValueListMetaModel;
return oMetaModel.requestObject("/ProductList/").then(function () {
// value list in the data service
assert.strictEqual(oMetaModel.getValueListType(sPropertyPath),
ValueListType.Standard);
return oMetaModel.requestValueListInfo(sPropertyPath);
}).then(function (oValueListInfo) {
var sPropertyPath2 = "/H_TCURC_SH_Set/1/WAERS";
// value list in the value list service
oValueListMetaModel = oValueListInfo[""].$model.getMetaModel();
assert.strictEqual(oValueListMetaModel.getValueListType(sPropertyPath2),
ValueListType.Standard);
assert.strictEqual(oValueListInfo[""].CollectionPath, "H_TCURC_SH_Set");
return oValueListMetaModel.requestValueListInfo(sPropertyPath2);
}).then(function (oValueListInfo) {
assert.strictEqual(oValueListInfo[""].$model.getMetaModel(), oValueListMetaModel);
assert.strictEqual(oValueListInfo[""].CollectionPath, "TCURC_CT_Set");
});
});
}
});
//TODO getContext vs. createBindingContext; map of "singletons" vs. memory leak<|fim▁end|> | |
<|file_name|>test_cluster_config.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import telnetlib
from savanna.tests.integration import base
import savanna.tests.integration.configs.parameters as param
def _add_config(body, config):
if config in [param.NAMENODE_CONFIG, param.DATANODE_CONFIG]:
body['node_configs']['HDFS'] = config
elif config == param.GENERAL_CONFIG:
body['cluster_configs']['general'] = config
elif config == param.CLUSTER_HDFS_CONFIG:
body['cluster_configs']['HDFS'] = config
elif config == param.CLUSTER_MAPREDUCE_CONFIG:
body['cluster_configs']['MapReduce'] = config
else:
body['node_configs']['MapReduce'] = config
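# Illustrative note (not part of the original test module): given a hypothetical
# body = {'node_configs': {}, 'cluster_configs': {}}, _add_config(body, param.GENERAL_CONFIG)
# stores the dict under body['cluster_configs']['general'], while node-level configs such as
# param.NAMENODE_CONFIG end up under body['node_configs']['HDFS'].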
class ClusterConfigTest(base.ITestCase):
def setUp(self):
super(ClusterConfigTest, self).setUp()
telnetlib.Telnet(self.host, self.port)
def assertConfigs(self, get_config, param_config):
self.assertEqual(get_config, param_config,
msg='configs are not equal: %s != %s'
% (str(get_config), str(param_config)))
def assertConfigOnNode(self, host, config, value):
conf = config.replace(' ', '')
com = self.execute_command(host, './script.sh %s -val %s -url %s' %
(conf, value, param.OS_AUTH_URL))
self.assertEqual(com[0], 0,
msg='host: %s, config %s is not equal: %s'
% (host, config, value))
def _cluster_config_testing(self, cluster_body):
cluster_id = None
try:
_add_config(cluster_body, param.GENERAL_CONFIG)
_add_config(cluster_body, param.CLUSTER_HDFS_CONFIG)
_add_config(cluster_body, param.CLUSTER_MAPREDUCE_CONFIG)
cluster_id = self.create_cluster_and_get_id(cluster_body)<|fim▁hole|> cluster_id, 200, True)
get_data = get_data['cluster']
self.assertConfigs(get_data['cluster_configs']['general'],
param.GENERAL_CONFIG)
self.assertConfigs(get_data['cluster_configs']['HDFS'],
param.CLUSTER_HDFS_CONFIG)
self.assertConfigs(get_data['cluster_configs']['MapReduce'],
param.CLUSTER_MAPREDUCE_CONFIG)
node_groups = get_data['node_groups']
ip_instances = {}
process_map = {
'namenode': {
'service': 'HDFS', 'param': param.NAMENODE_CONFIG},
'jobtracker': {
'service': 'MapReduce', 'param': param.JOBTRACKER_CONFIG},
'datanode': {
'service': 'HDFS', 'param': param.DATANODE_CONFIG},
'tasktracker': {
'service': 'MapReduce', 'param': param.TASKTRACKER_CONFIG}
}
def get_node_configs(node_group, process):
return \
node_group['node_configs'][process_map[process]['service']]
def get_param(process):
return process_map[process]['param']
for node_group in node_groups:
for process in node_group['node_processes']:
self.assertConfigs(
get_node_configs(node_group,
process), get_param(process))
instances = node_group['instances']
for instans in instances:
management_ip = instans['management_ip']
self.transfer_script_to_node(
management_ip, 'test_config/config_test_script.sh')
ip_instances[management_ip] = node_group[
'node_processes']
try:
for key, processes in ip_instances.items():
telnetlib.Telnet(key, '22')
for conf, value in param.CLUSTER_MAPREDUCE_CONFIG.items():
self.assertConfigOnNode(key, conf, value)
for conf, value in param.CLUSTER_HDFS_CONFIG.items():
self.assertConfigOnNode(key, conf, value)
for process in processes:
for sec_key, sec_value in get_param(process).items():
self.assertConfigOnNode(key, sec_key, sec_value)
if 'namenode' in processes:
for sec_key, sec_value in param.GENERAL_CONFIG.items():
self.assertConfigOnNode(
key, sec_key, sec_value)
except Exception as e:
self.fail(e.message)
except Exception as e:
self.fail(e.message)
finally:
self.del_object(self.url_cluster_with_slash, cluster_id, 204)
def test_cluster_config_nnjt_ttdn(self):
id_master_ngt = None
id_worker_ngt = None
try:
master_ngt_body = self.make_node_group_template(
'master-ngt', 'qa probe', 'JT+NN')
_add_config(master_ngt_body, param.NAMENODE_CONFIG)
_add_config(master_ngt_body, param.JOBTRACKER_CONFIG)
id_master_ngt = self.get_object_id(
'node_group_template', self.post_object(self.url_ngt,
master_ngt_body, 202))
worker_ngt_body = self.make_node_group_template(
'worker-ngt', 'qa probe', 'TT+DN')
_add_config(worker_ngt_body, param.DATANODE_CONFIG)
_add_config(worker_ngt_body, param.TASKTRACKER_CONFIG)
id_worker_ngt = self.get_object_id(
'node_group_template', self.post_object(self.url_ngt,
worker_ngt_body, 202))
ngt_id_list = {id_master_ngt: 1, id_worker_ngt: 2}
cl_body = self.make_cl_body_node_group_templates(ngt_id_list)
self._cluster_config_testing(cl_body)
except Exception as e:
self.fail(str(e))
finally:
self.del_object(self.url_ngt_with_slash, id_master_ngt, 204)
self.del_object(self.url_ngt_with_slash, id_worker_ngt, 204)<|fim▁end|> | get_data = self.get_object(self.url_cluster_with_slash, |
<|file_name|>uiSlider.js<|end_file_name|><|fim▁begin|>function CreateUiSlider(args) {
var instanceSlider = Titanium.UI.createSlider({
height:"auto",
width:"auto",
//top:100,<|fim▁hole|> max:10 //Maximum value for the slider (needed for Android)
});
if(args.min){
instanceSlider.min = args.min;
}
if(args.max){
instanceSlider.max = args.max;
}
return instanceSlider;
};
exports.CreateUiSlider = CreateUiSlider;<|fim▁end|> | min:0, //Minimum value for the slider (needed for Android) |
<|file_name|>numberFormatter.py<|end_file_name|><|fim▁begin|>import math
def formatAmount(val, prec=3, lowest=0, highest=0, currency=False, forceSign=False):
"""
Add suffix to value, transform value to match new suffix and round it.
Keyword arguments:
val -- value to process
prec -- precision of final number (number of significant positions to show)
lowest -- lowest order for suffixizing for numbers 0 < |num| < 1
highest -- highest order for suffixizing for numbers |num| > 1
currency -- if currency, billion suffix will be B instead of G
forceSign -- if True, positive numbers are signed too
"""
if val is None:
return ""
# Define suffix maps
posSuffixMap = {3: "k", 6: "M", 9: "B" if currency is True else "G"}
negSuffixMap = {-6: '\u03bc', -3: "m"}
# Define tuple of the map keys
# As we're going to go from the biggest order of abs(key), sort
# them differently due to one set of values being negative
# and the other positive
posOrders = tuple(sorted(iter(posSuffixMap.keys()), reverse=True))
negOrders = tuple(sorted(iter(negSuffixMap.keys()), reverse=False))
# Find the least abs(key)
posLowest = min(posOrders)
negHighest = max(negOrders)
# By default, mantissa takes just value and no suffix
mantissa, suffix = val, ""
# Positive suffixes
if abs(val) > 1 and highest >= posLowest:
# Start from highest possible suffix
for key in posOrders:
# Find first suitable suffix and check if it's not above highest order
if abs(val) >= 10 ** key and key <= highest:
mantissa, suffix = val / float(10 ** key), posSuffixMap[key]
# Do additional step to eliminate results like 999999 => 1000k
# If we're already using our greatest order, we can't do anything useful
if posOrders.index(key) == 0:
break
else:
# Get order greater than current
prevKey = posOrders[posOrders.index(key) - 1]
# Check if the key to which we potentially can change is greater
# than our highest boundary<|fim▁hole|> if prevKey > highest:
# If it is, bail - we already have acceptable results
break
# Find multiplier to get from one order to another
orderDiff = 10 ** (prevKey - key)
# If rounded mantissa according to our specifications is greater than
# or equal to multiplier
if roundToPrec(mantissa, prec) >= orderDiff:
# Divide mantissa and use suffix of greater order
mantissa, suffix = mantissa / orderDiff, posSuffixMap[prevKey]
# Otherwise consider current results as acceptable
break
# Take numbers between 0 and 1 when the requested lowest order allows a sub-unit (negative) suffix
elif abs(val) < 1 and val != 0 and lowest <= negHighest:
# Start from lowest possible suffix
for key in negOrders:
# Get next order
try:
nextKey = negOrders[negOrders.index(key) + 1]
except IndexError:
nextKey = 0
# Check if mantissa with next suffix is in range [1, 1000)
if abs(val) < 10 ** nextKey and key >= lowest:
mantissa, suffix = val / float(10 ** key), negSuffixMap[key]
# Do additional step to eliminate results like 0.9999 => 1000m
# Check if the key we're potentially switching to is greater than our
# upper boundary
if nextKey > highest:
# If it is, leave loop with results we already have
break
# Find the multiplier between current and next order
orderDiff = 10 ** (nextKey - key)
# If rounded mantissa according to our specifications is greater than
# or equal to multiplier
if roundToPrec(mantissa, prec) >= orderDiff:
# Divide mantissa and use suffix of greater order
# Use special handling of zero key as it's not on the map
mantissa, suffix = mantissa / orderDiff, posSuffixMap[nextKey] if nextKey != 0 else ""
# Otherwise consider current results as acceptable
break
# Round mantissa according to our prec variable
mantissa = roundToPrec(mantissa, prec)
sign = "+" if forceSign is True and mantissa > 0 else ""
# Round mantissa and add suffix
result = "{0}{1}{2}".format(sign, mantissa, suffix)
return result
def roundToPrec(val, prec):
# We're not rounding integers anyway
# Also make sure that we do not ask to calculate logarithm of zero
if int(val) == val:
return int(val)
# Find round factor, taking into consideration that we want to keep at least prec
# positions for fractions with zero integer part (e.g. 0.0000354 for prec=3)
roundFactor = int(prec - math.ceil(math.log10(abs(val))))
# But we don't want to round away digits in the integer part
if roundFactor < 0:
roundFactor = 0
# Do actual rounding
val = round(val, roundFactor)
# Make sure numbers with .0 part designating float don't get through
if int(val) == val:
val = int(val)
return val
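# Hedged sanity examples for roundToPrec (added for clarity, not original code):
#   roundToPrec(123.456, 3)   -> 123       (three significant digits, integer result)
#   roundToPrec(999.9, 3)     -> 1000      (rounding may carry into the next order,
#                                           which formatAmount's extra checks account for)
#   roundToPrec(0.0000354, 3) -> 0.0000354 (small fractions keep prec significant digits)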
def roundDec(val, prec):
if int(val) == val:
return int(val)
return round(val, prec)<|fim▁end|> | |
<|file_name|>DefaultCharacteristicTest.java<|end_file_name|><|fim▁begin|>/*
* SonarQube, open source software quality management tool.
* Copyright (C) 2008-2014 SonarSource
* mailto:contact AT sonarsource DOT com
*
* SonarQube is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* SonarQube is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.sonar.api.technicaldebt.server.internal;
import org.junit.Test;
import org.sonar.api.rule.RuleKey;
import org.sonar.api.utils.WorkUnit;
import org.sonar.api.utils.internal.WorkDuration;
import static org.fest.assertions.Assertions.assertThat;
public class DefaultCharacteristicTest {
@Test
public void test_setters_and_getters_for_characteristic() throws Exception {
DefaultCharacteristic characteristic = new DefaultCharacteristic()
.setId(1)
.setKey("NETWORK_USE")
.setName("Network use")
.setOrder(5)
.setParentId(2)
.setRootId(2);
assertThat(characteristic.id()).isEqualTo(1);
assertThat(characteristic.key()).isEqualTo("NETWORK_USE");
assertThat(characteristic.name()).isEqualTo("Network use");
assertThat(characteristic.order()).isEqualTo(5);
assertThat(characteristic.ruleKey()).isNull();
assertThat(characteristic.function()).isNull();
assertThat(characteristic.factorValue()).isNull();
assertThat(characteristic.factorUnit()).isNull();
assertThat(characteristic.offsetValue()).isNull();
assertThat(characteristic.offsetUnit()).isNull();
assertThat(characteristic.parentId()).isEqualTo(2);
assertThat(characteristic.rootId()).isEqualTo(2);
}
@Test
public void test_setters_and_getters_for_requirement() throws Exception {
DefaultCharacteristic requirement = new DefaultCharacteristic()
.setId(1)
.setRuleKey(RuleKey.of("repo", "rule"))
.setFunction("linear_offset")
.setFactorValue(2)
.setFactorUnit(WorkDuration.UNIT.MINUTES)
.setOffsetValue(1)
.setOffsetUnit(WorkDuration.UNIT.HOURS)<|fim▁hole|> assertThat(requirement.key()).isNull();
assertThat(requirement.name()).isNull();
assertThat(requirement.order()).isNull();
assertThat(requirement.ruleKey()).isEqualTo(RuleKey.of("repo", "rule"));
assertThat(requirement.function()).isEqualTo("linear_offset");
assertThat(requirement.factorValue()).isEqualTo(2);
assertThat(requirement.factorUnit()).isEqualTo(WorkDuration.UNIT.MINUTES);
assertThat(requirement.offsetValue()).isEqualTo(1);
assertThat(requirement.offsetUnit()).isEqualTo(WorkDuration.UNIT.HOURS);
assertThat(requirement.parentId()).isEqualTo(2);
assertThat(requirement.rootId()).isEqualTo(3);
}
@Test
public void is_root() throws Exception {
DefaultCharacteristic characteristic = new DefaultCharacteristic()
.setId(1)
.setKey("NETWORK_USE")
.setName("Network use")
.setOrder(5)
.setParentId(null)
.setRootId(null);
assertThat(characteristic.isRoot()).isTrue();
}
@Test
public void is_requirement() throws Exception {
DefaultCharacteristic requirement = new DefaultCharacteristic()
.setId(1)
.setRuleKey(RuleKey.of("repo", "rule"))
.setFunction("linear_offset")
.setFactorValue(2)
.setFactorUnit(WorkDuration.UNIT.MINUTES)
.setOffsetValue(1)
.setOffsetUnit(WorkDuration.UNIT.HOURS)
.setRootId(3)
.setParentId(2);
assertThat(requirement.isRequirement()).isTrue();
}
@Test
public void test_equals() throws Exception {
assertThat(new DefaultCharacteristic().setKey("NETWORK_USE")).isEqualTo(new DefaultCharacteristic().setKey("NETWORK_USE"));
assertThat(new DefaultCharacteristic().setKey("NETWORK_USE")).isNotEqualTo(new DefaultCharacteristic().setKey("MAINTABILITY"));
assertThat(new DefaultCharacteristic().setRuleKey(RuleKey.of("repo", "rule"))).isEqualTo(new DefaultCharacteristic().setRuleKey(RuleKey.of("repo", "rule")));
assertThat(new DefaultCharacteristic().setRuleKey(RuleKey.of("repo", "rule"))).isNotEqualTo(new DefaultCharacteristic().setRuleKey(RuleKey.of("repo2", "rule2")));
}
@Test
public void test_hashcode() throws Exception {
assertThat(new DefaultCharacteristic().setKey("NETWORK_USE").hashCode()).isEqualTo(new DefaultCharacteristic().setKey("NETWORK_USE").hashCode());
assertThat(new DefaultCharacteristic().setKey("NETWORK_USE").hashCode()).isNotEqualTo(new DefaultCharacteristic().setKey("MAINTABILITY").hashCode());
assertThat(new DefaultCharacteristic().setRuleKey(RuleKey.of("repo", "rule")).hashCode()).isEqualTo(new DefaultCharacteristic().setRuleKey(RuleKey.of("repo", "rule")).hashCode());
assertThat(new DefaultCharacteristic().setRuleKey(RuleKey.of("repo", "rule")).hashCode()).isNotEqualTo(new DefaultCharacteristic().setRuleKey(RuleKey.of("repo2", "rule2")).hashCode());
}
@Test
public void test_deprecated_setters_and_getters_for_characteristic() throws Exception {
DefaultCharacteristic requirement = new DefaultCharacteristic()
.setId(1)
.setRuleKey(RuleKey.of("repo", "rule"))
.setFunction("linear_offset")
.setFactor(WorkUnit.create(2d, WorkUnit.MINUTES))
.setOffset(WorkUnit.create(1d, WorkUnit.HOURS));
assertThat(requirement.factor()).isEqualTo(WorkUnit.create(2d, WorkUnit.MINUTES));
assertThat(requirement.offset()).isEqualTo(WorkUnit.create(1d, WorkUnit.HOURS));
assertThat(new DefaultCharacteristic()
.setId(1)
.setRuleKey(RuleKey.of("repo", "rule"))
.setFunction("linear")
.setFactor(WorkUnit.create(2d, WorkUnit.DAYS))
.factor()).isEqualTo(WorkUnit.create(2d, WorkUnit.DAYS));
}
}<|fim▁end|> | .setRootId(3)
.setParentId(2);
assertThat(requirement.id()).isEqualTo(1); |
<|file_name|>test_pheno2sql.py<|end_file_name|><|fim▁begin|>import os
import tempfile
import unittest
import numpy as np
import pandas as pd
from sqlalchemy import create_engine
from tests.settings import POSTGRESQL_ENGINE, SQLITE_ENGINE
from tests.utils import get_repository_path, DBTest
from ukbrest.common.pheno2sql import Pheno2SQL
class Pheno2SQLTest(DBTest):
@unittest.skip('sqlite being removed')
def test_sqlite_default_values(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example01.csv')
db_engine = SQLITE_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'sqlite'
## Check table exists
tmp = pd.read_sql("SELECT name FROM sqlite_master WHERE type='table' AND name='{}';".format('ukb_pheno_0_00'), create_engine(db_engine))
assert not tmp.empty
## Check columns are correct
tmp = pd.read_sql('select * from ukb_pheno_0_00', create_engine(db_engine))
expected_columns = ["eid","c21_0_0","c21_1_0","c21_2_0","c31_0_0","c34_0_0","c46_0_0","c47_0_0","c48_0_0"]
assert len(tmp.columns) == len(expected_columns)
assert all(x in expected_columns for x in tmp.columns)
## Check data is correct
tmp = pd.read_sql('select * from ukb_pheno_0_00', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 2
assert tmp.loc[1, 'c21_0_0'] == 'Option number 1'
assert tmp.loc[1, 'c21_1_0'] == 'No response'
assert tmp.loc[1, 'c21_2_0'] == 'Yes'
assert tmp.loc[1, 'c31_0_0'] == '2012-01-05'
assert int(tmp.loc[1, 'c34_0_0']) == 21
assert int(tmp.loc[1, 'c46_0_0']) == -9
assert tmp.loc[1, 'c47_0_0'].round(5) == 45.55412
assert tmp.loc[1, 'c48_0_0'] == '2011-08-14'
assert tmp.loc[2, 'c21_0_0'] == 'Option number 2'
assert pd.isnull(tmp.loc[2, 'c21_1_0'])
assert tmp.loc[2, 'c21_2_0'] == 'No'
assert tmp.loc[2, 'c31_0_0'] == '2015-12-30'
assert int(tmp.loc[2, 'c34_0_0']) == 12
assert int(tmp.loc[2, 'c46_0_0']) == -2
assert tmp.loc[2, 'c47_0_0'].round(5) == -0.55461
assert tmp.loc[2, 'c48_0_0'] == '2010-03-29'
def test_postgresql_default_values(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example01.csv')
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'postgresql'
## Check table exists
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('ukb_pheno_0_00'), create_engine(db_engine))
assert table.iloc[0, 0]
## Check columns are correct
tmp = pd.read_sql('select * from ukb_pheno_0_00', create_engine(db_engine))
expected_columns = ["eid","c21_0_0","c21_1_0","c21_2_0","c31_0_0","c34_0_0","c46_0_0","c47_0_0","c48_0_0"]
assert len(tmp.columns) == len(expected_columns)
assert all(x in expected_columns for x in tmp.columns)
## Check data is correct
tmp = pd.read_sql('select * from ukb_pheno_0_00', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 2
assert tmp.loc[1, 'c21_0_0'] == 'Option number 1'
assert tmp.loc[1, 'c21_1_0'] == 'No response'
assert tmp.loc[1, 'c21_2_0'] == 'Yes'
assert tmp.loc[1, 'c31_0_0'].strftime('%Y-%m-%d') == '2012-01-05'
assert int(tmp.loc[1, 'c34_0_0']) == 21
assert int(tmp.loc[1, 'c46_0_0']) == -9
assert tmp.loc[1, 'c47_0_0'].round(5) == 45.55412
assert tmp.loc[1, 'c48_0_0'].strftime('%Y-%m-%d') == '2011-08-14'
assert tmp.loc[2, 'c21_0_0'] == 'Option number 2'
assert pd.isnull(tmp.loc[2, 'c21_1_0'])
assert tmp.loc[2, 'c21_2_0'] == 'No'
assert tmp.loc[2, 'c31_0_0'].strftime('%Y-%m-%d') == '2015-12-30'
assert int(tmp.loc[2, 'c34_0_0']) == 12
assert int(tmp.loc[2, 'c46_0_0']) == -2
assert tmp.loc[2, 'c47_0_0'].round(5) == -0.55461
assert tmp.loc[2, 'c48_0_0'].strftime('%Y-%m-%d') == '2010-03-29'
def test_exit(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example01.csv')
db_engine = POSTGRESQL_ENGINE
temp_dir = tempfile.mkdtemp()
# Run
with Pheno2SQL(csv_file, db_engine, tmpdir=temp_dir) as p2sql:
p2sql.load_data()
# Validate
## Check table exists
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('ukb_pheno_0_00'), create_engine(db_engine))
assert table.iloc[0, 0]
## Check columns are correct
tmp = pd.read_sql('select * from ukb_pheno_0_00', create_engine(db_engine))
expected_columns = ["eid","c21_0_0","c21_1_0","c21_2_0","c31_0_0","c34_0_0","c46_0_0","c47_0_0","c48_0_0"]
assert len(tmp.columns) == len(expected_columns)
assert all(x in expected_columns for x in tmp.columns)
## Check data is correct
tmp = pd.read_sql('select * from ukb_pheno_0_00', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 2
## Check that temporary files were deleted
assert len(os.listdir(temp_dir)) == 0
@unittest.skip('sqlite being removed')
def test_sqlite_less_columns_per_table(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example01.csv')
db_engine = SQLITE_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=3)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'sqlite'
## Check tables exist
table = pd.read_sql("SELECT name FROM sqlite_master WHERE type='table' AND name='{}';".format('ukb_pheno_0_00'), create_engine(db_engine))
assert not table.empty
table = pd.read_sql("SELECT name FROM sqlite_master WHERE type='table' AND name='{}';".format('ukb_pheno_0_01'), create_engine(db_engine))
assert not table.empty
table = pd.read_sql("SELECT name FROM sqlite_master WHERE type='table' AND name='{}';".format('ukb_pheno_0_02'), create_engine(db_engine))
assert not table.empty
## Check columns are correct
tmp = pd.read_sql('select * from ukb_pheno_0_00', create_engine(db_engine))
expected_columns = ["eid","c21_0_0","c21_1_0","c21_2_0"]
assert len(tmp.columns) == len(expected_columns)
assert all(x in expected_columns for x in tmp.columns)
tmp = pd.read_sql('select * from ukb_pheno_0_01', create_engine(db_engine))
expected_columns = ["eid","c31_0_0","c34_0_0","c46_0_0"]
assert len(tmp.columns) == len(expected_columns)
assert all(x in expected_columns for x in tmp.columns)
tmp = pd.read_sql('select * from ukb_pheno_0_02', create_engine(db_engine))
expected_columns = ["eid","c47_0_0","c48_0_0"]
assert len(tmp.columns) == len(expected_columns)
assert all(x in expected_columns for x in tmp.columns)
## Check data is correct
tmp = pd.read_sql('select * from ukb_pheno_0_00', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 2
assert tmp.loc[1, 'c21_0_0'] == 'Option number 1'
assert tmp.loc[1, 'c21_1_0'] == 'No response'
assert tmp.loc[1, 'c21_2_0'] == 'Yes'
assert tmp.loc[2, 'c21_0_0'] == 'Option number 2'
assert pd.isnull(tmp.loc[2, 'c21_1_0'])
assert tmp.loc[2, 'c21_2_0'] == 'No'
tmp = pd.read_sql('select * from ukb_pheno_0_01', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 2
assert tmp.loc[1, 'c31_0_0'] == '2012-01-05'
assert int(tmp.loc[1, 'c34_0_0']) == 21
assert int(tmp.loc[1, 'c46_0_0']) == -9
assert tmp.loc[2, 'c31_0_0'] == '2015-12-30'
assert int(tmp.loc[2, 'c34_0_0']) == 12
assert int(tmp.loc[2, 'c46_0_0']) == -2
tmp = pd.read_sql('select * from ukb_pheno_0_02', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 2
assert tmp.loc[1, 'c47_0_0'].round(5) == 45.55412
assert tmp.loc[1, 'c48_0_0'] == '2011-08-14'
assert tmp.loc[2, 'c47_0_0'].round(5) == -0.55461
assert tmp.loc[2, 'c48_0_0'] == '2010-03-29'
def test_postgresql_less_columns_per_table(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example01.csv')
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=3)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'postgresql'
## Check tables exist
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('ukb_pheno_0_00'), create_engine(db_engine))
assert table.iloc[0, 0]
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('ukb_pheno_0_01'), create_engine(db_engine))
assert table.iloc[0, 0]
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('ukb_pheno_0_02'), create_engine(db_engine))
assert table.iloc[0, 0]
## Check columns are correct
tmp = pd.read_sql('select * from ukb_pheno_0_00', create_engine(db_engine))
expected_columns = ["eid","c21_0_0","c21_1_0","c21_2_0"]
assert len(tmp.columns) == len(expected_columns)
assert all(x in expected_columns for x in tmp.columns)
tmp = pd.read_sql('select * from ukb_pheno_0_01', create_engine(db_engine))
expected_columns = ["eid","c31_0_0","c34_0_0","c46_0_0"]
assert len(tmp.columns) == len(expected_columns)
assert all(x in expected_columns for x in tmp.columns)
tmp = pd.read_sql('select * from ukb_pheno_0_02', create_engine(db_engine))
expected_columns = ["eid","c47_0_0","c48_0_0"]
assert len(tmp.columns) == len(expected_columns)
assert all(x in expected_columns for x in tmp.columns)
## Check data is correct
tmp = pd.read_sql('select * from ukb_pheno_0_00', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 2
assert tmp.loc[1, 'c21_0_0'] == 'Option number 1'
assert tmp.loc[1, 'c21_1_0'] == 'No response'
assert tmp.loc[1, 'c21_2_0'] == 'Yes'
assert tmp.loc[2, 'c21_0_0'] == 'Option number 2'
assert pd.isnull(tmp.loc[2, 'c21_1_0'])
assert tmp.loc[2, 'c21_2_0'] == 'No'
tmp = pd.read_sql('select * from ukb_pheno_0_01', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 2
assert tmp.loc[1, 'c31_0_0'].strftime('%Y-%m-%d') == '2012-01-05'
assert int(tmp.loc[1, 'c34_0_0']) == 21
assert int(tmp.loc[1, 'c46_0_0']) == -9
assert tmp.loc[2, 'c31_0_0'].strftime('%Y-%m-%d') == '2015-12-30'
assert int(tmp.loc[2, 'c34_0_0']) == 12
assert int(tmp.loc[2, 'c46_0_0']) == -2
tmp = pd.read_sql('select * from ukb_pheno_0_02', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 2
assert tmp.loc[1, 'c47_0_0'].round(5) == 45.55412
assert tmp.loc[1, 'c48_0_0'].strftime('%Y-%m-%d') == '2011-08-14'
assert tmp.loc[2, 'c47_0_0'].round(5) == -0.55461
assert tmp.loc[2, 'c48_0_0'].strftime('%Y-%m-%d') == '2010-03-29'
def test_custom_tmpdir(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example01.csv')
db_engine = POSTGRESQL_ENGINE
with Pheno2SQL(csv_file, db_engine, tmpdir='/tmp/custom/directory/here', delete_temp_csv=False) as p2sql:
# Run
p2sql.load_data()
# Validate
## Check table exists
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('ukb_pheno_0_00'), create_engine(db_engine))
assert table.iloc[0, 0]
## Check columns are correct
tmp = pd.read_sql('select * from ukb_pheno_0_00', create_engine(db_engine))
expected_columns = ["eid","c21_0_0","c21_1_0","c21_2_0","c31_0_0","c34_0_0","c46_0_0","c47_0_0","c48_0_0"]
assert len(tmp.columns) == len(expected_columns)
assert all(x in expected_columns for x in tmp.columns)
## Check data is correct
tmp = pd.read_sql('select * from ukb_pheno_0_00', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 2
## Check that temporary files are still there
assert len(os.listdir('/tmp/custom/directory/here')) > 0
## Check that the temporary directory is now clean
assert len(os.listdir('/tmp/custom/directory/here')) == 0
@unittest.skip('sqlite being removed')
def test_sqlite_auxiliary_table_is_created(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example01.csv')
db_engine = SQLITE_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=3)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'sqlite'
## Check tables exist
table = pd.read_sql("SELECT name FROM sqlite_master WHERE type='table' AND name='{}';".format('ukb_pheno_0_00'), create_engine(db_engine))
assert not table.empty
table = pd.read_sql("SELECT name FROM sqlite_master WHERE type='table' AND name='{}';".format('ukb_pheno_0_01'), create_engine(db_engine))
assert not table.empty
table = pd.read_sql("SELECT name FROM sqlite_master WHERE type='table' AND name='{}';".format('ukb_pheno_0_02'), create_engine(db_engine))
assert not table.empty
## Check columns are correct
tmp = pd.read_sql('select * from ukb_pheno_0_00', create_engine(db_engine))
expected_columns = ["eid","c21_0_0","c21_1_0","c21_2_0"]
assert len(tmp.columns) == len(expected_columns)
assert all(x in expected_columns for x in tmp.columns)
tmp = pd.read_sql('select * from ukb_pheno_0_01', create_engine(db_engine))
expected_columns = ["eid","c31_0_0","c34_0_0","c46_0_0"]
assert len(tmp.columns) == len(expected_columns)
assert all(x in expected_columns for x in tmp.columns)
tmp = pd.read_sql('select * from ukb_pheno_0_02', create_engine(db_engine))
expected_columns = ["eid","c47_0_0","c48_0_0"]
assert len(tmp.columns) == len(expected_columns)
assert all(x in expected_columns for x in tmp.columns)
## Check auxiliary table existence
table = pd.read_sql("SELECT name FROM sqlite_master WHERE type='table' AND name='{}';".format('fields'), create_engine(db_engine))
assert not table.empty
## Check columns are correct
tmp = pd.read_sql('select * from fields', create_engine(db_engine))
expected_columns = ["column_name", "table_name"]
assert len(tmp.columns) >= len(expected_columns)
assert all(x in tmp.columns for x in expected_columns)
## Check data is correct
tmp = pd.read_sql('select * from fields', create_engine(db_engine), index_col='column_name')
assert not tmp.empty
assert tmp.shape[0] == 8
assert tmp.loc['c21_0_0', 'table_name'] == 'ukb_pheno_0_00'
assert tmp.loc['c21_1_0', 'table_name'] == 'ukb_pheno_0_00'
assert tmp.loc['c21_2_0', 'table_name'] == 'ukb_pheno_0_00'
assert tmp.loc['c31_0_0', 'table_name'] == 'ukb_pheno_0_01'
assert tmp.loc['c34_0_0', 'table_name'] == 'ukb_pheno_0_01'
assert tmp.loc['c46_0_0', 'table_name'] == 'ukb_pheno_0_01'
assert tmp.loc['c47_0_0', 'table_name'] == 'ukb_pheno_0_02'
assert tmp.loc['c48_0_0', 'table_name'] == 'ukb_pheno_0_02'
def test_postgresql_auxiliary_table_is_created_and_has_minimum_data_required(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example01.csv')
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=3, loading_n_jobs=1)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'postgresql'
## Check tables exist
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('ukb_pheno_0_00'), create_engine(db_engine))
assert table.iloc[0, 0]
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('ukb_pheno_0_01'), create_engine(db_engine))
assert table.iloc[0, 0]
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('ukb_pheno_0_02'), create_engine(db_engine))
assert table.iloc[0, 0]
## Check columns are correct
tmp = pd.read_sql('select * from ukb_pheno_0_00', create_engine(db_engine))
expected_columns = ["eid","c21_0_0","c21_1_0","c21_2_0"]
assert len(tmp.columns) == len(expected_columns)
assert all(x in expected_columns for x in tmp.columns)
tmp = pd.read_sql('select * from ukb_pheno_0_01', create_engine(db_engine))
expected_columns = ["eid","c31_0_0","c34_0_0","c46_0_0"]
assert len(tmp.columns) == len(expected_columns)
assert all(x in expected_columns for x in tmp.columns)
tmp = pd.read_sql('select * from ukb_pheno_0_02', create_engine(db_engine))
expected_columns = ["eid","c47_0_0","c48_0_0"]
assert len(tmp.columns) == len(expected_columns)
assert all(x in expected_columns for x in tmp.columns)
## Check auxiliary table existence
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('fields'), create_engine(db_engine))
assert table.iloc[0, 0]
## Check columns are correct
tmp = pd.read_sql('select * from fields', create_engine(db_engine))
expected_columns = ["column_name", "table_name"]
assert len(tmp.columns) >= len(expected_columns)
assert all(x in tmp.columns for x in expected_columns)
## Check data is correct
tmp = pd.read_sql('select * from fields', create_engine(db_engine), index_col='column_name')
assert not tmp.empty
assert tmp.shape[0] == 8
assert tmp.loc['c21_0_0', 'table_name'] == 'ukb_pheno_0_00'
assert tmp.loc['c21_1_0', 'table_name'] == 'ukb_pheno_0_00'
assert tmp.loc['c21_2_0', 'table_name'] == 'ukb_pheno_0_00'
assert tmp.loc['c31_0_0', 'table_name'] == 'ukb_pheno_0_01'
assert tmp.loc['c34_0_0', 'table_name'] == 'ukb_pheno_0_01'
assert tmp.loc['c46_0_0', 'table_name'] == 'ukb_pheno_0_01'
assert tmp.loc['c47_0_0', 'table_name'] == 'ukb_pheno_0_02'
assert tmp.loc['c48_0_0', 'table_name'] == 'ukb_pheno_0_02'
def test_postgresql_auxiliary_table_with_more_information(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example01.csv')
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=3, loading_n_jobs=1)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'postgresql'
## Check tables exist
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('ukb_pheno_0_00'), create_engine(db_engine))
assert table.iloc[0, 0]
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('ukb_pheno_0_01'), create_engine(db_engine))
assert table.iloc[0, 0]
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('ukb_pheno_0_02'), create_engine(db_engine))
assert table.iloc[0, 0]
## Check auxiliary table existence
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('fields'), create_engine(db_engine))
assert table.iloc[0, 0]
## Check columns are correct
tmp = pd.read_sql('select * from fields', create_engine(db_engine))
expected_columns = ["column_name", "field_id", "inst", "arr", "coding", "table_name", "type", "description"]
assert len(tmp.columns) == len(expected_columns), len(tmp.columns)
assert all(x in expected_columns for x in tmp.columns)
## Check data is correct
tmp = pd.read_sql('select * from fields', create_engine(db_engine), index_col='column_name')
assert not tmp.empty
assert tmp.shape[0] == 8
assert tmp.loc['c21_0_0', 'field_id'] == '21'
assert tmp.loc['c21_0_0', 'inst'] == 0
assert tmp.loc['c21_0_0', 'arr'] == 0
assert tmp.loc['c21_0_0', 'coding'] == 100261
assert tmp.loc['c21_0_0', 'table_name'] == 'ukb_pheno_0_00'
assert tmp.loc['c21_0_0', 'type'] == 'Categorical (single)'
assert tmp.loc['c21_0_0', 'description'] == 'An string value'
assert tmp.loc['c21_1_0', 'field_id'] == '21'
assert tmp.loc['c21_1_0', 'inst'] == 1
assert tmp.loc['c21_1_0', 'arr'] == 0
assert tmp.loc['c21_1_0', 'coding'] == 100261
assert tmp.loc['c21_1_0', 'table_name'] == 'ukb_pheno_0_00'
assert tmp.loc['c21_1_0', 'type'] == 'Categorical (single)'
assert tmp.loc['c21_1_0', 'description'] == 'An string value'
assert tmp.loc['c21_2_0', 'field_id'] == '21'
assert tmp.loc['c21_2_0', 'inst'] == 2
assert tmp.loc['c21_2_0', 'arr'] == 0
assert tmp.loc['c21_2_0', 'coding'] == 100261
assert tmp.loc['c21_2_0', 'table_name'] == 'ukb_pheno_0_00'
assert tmp.loc['c21_2_0', 'type'] == 'Categorical (single)'
assert tmp.loc['c21_2_0', 'description'] == 'An string value'
assert tmp.loc['c31_0_0', 'field_id'] == '31'
assert tmp.loc['c31_0_0', 'inst'] == 0
assert tmp.loc['c31_0_0', 'arr'] == 0
assert pd.isnull(tmp.loc['c31_0_0', 'coding'])
assert tmp.loc['c31_0_0', 'table_name'] == 'ukb_pheno_0_01'
assert tmp.loc['c31_0_0', 'type'] == 'Date'
assert tmp.loc['c31_0_0', 'description'] == 'A date'
assert tmp.loc['c34_0_0', 'field_id'] == '34'
assert tmp.loc['c34_0_0', 'inst'] == 0
assert tmp.loc['c34_0_0', 'arr'] == 0
assert tmp.loc['c34_0_0', 'coding'] == 9
assert tmp.loc['c34_0_0', 'table_name'] == 'ukb_pheno_0_01'
assert tmp.loc['c34_0_0', 'type'] == 'Integer'
assert tmp.loc['c34_0_0', 'description'] == 'Some integer'
assert tmp.loc['c46_0_0', 'field_id'] == '46'
assert tmp.loc['c46_0_0', 'inst'] == 0
assert tmp.loc['c46_0_0', 'arr'] == 0
assert pd.isnull(tmp.loc['c46_0_0', 'coding'])
assert tmp.loc['c46_0_0', 'table_name'] == 'ukb_pheno_0_01'
assert tmp.loc['c46_0_0', 'type'] == 'Integer'
assert tmp.loc['c46_0_0', 'description'] == 'Some another integer'
assert tmp.loc['c47_0_0', 'field_id'] == '47'
assert tmp.loc['c47_0_0', 'inst'] == 0
assert tmp.loc['c47_0_0', 'arr'] == 0
assert pd.isnull(tmp.loc['c47_0_0', 'coding'])
assert tmp.loc['c47_0_0', 'table_name'] == 'ukb_pheno_0_02'
assert tmp.loc['c47_0_0', 'type'] == 'Continuous'
assert tmp.loc['c47_0_0', 'description'] == 'Some continuous value'
assert tmp.loc['c48_0_0', 'field_id'] == '48'
assert tmp.loc['c48_0_0', 'inst'] == 0
assert tmp.loc['c48_0_0', 'arr'] == 0
assert pd.isnull(tmp.loc['c48_0_0', 'coding'])
assert tmp.loc['c48_0_0', 'table_name'] == 'ukb_pheno_0_02'
assert tmp.loc['c48_0_0', 'type'] == 'Time'
assert tmp.loc['c48_0_0', 'description'] == 'Some time'
def test_postgresql_auxiliary_table_check_types(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example01.csv')
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=3, loading_n_jobs=1)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'postgresql'
## Check auxiliary table existence
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('fields'), create_engine(db_engine))
assert table.iloc[0, 0]
## Check columns are correct
tmp = pd.read_sql('select * from fields', create_engine(db_engine))
expected_columns = ["column_name", "field_id", "inst", "arr", "coding", "table_name", "type", "description"]
assert len(tmp.columns) == len(expected_columns), len(tmp.columns)
assert all(x in expected_columns for x in tmp.columns)
## Check data is correct
sql_types = """
select column_name, data_type
from information_schema.columns
where table_name = 'fields';
"""
tmp = pd.read_sql(sql_types, create_engine(db_engine), index_col='column_name')
assert not tmp.empty
assert tmp.shape[0] == 8
assert tmp.loc['field_id', 'data_type'] == 'text'
assert tmp.loc['inst', 'data_type'] == 'bigint'
assert tmp.loc['arr', 'data_type'] == 'bigint'
assert tmp.loc['coding', 'data_type'] == 'bigint'
assert tmp.loc['table_name', 'data_type'] == 'text'
assert tmp.loc['type', 'data_type'] == 'text'
assert tmp.loc['description', 'data_type'] == 'text'
def test_postgresql_auxiliary_table_constraints(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example01.csv')
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=3, loading_n_jobs=1)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'postgresql'
## Check auxiliary table existence
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('fields'), create_engine(db_engine))
assert table.iloc[0, 0]
# primary key
constraint_sql = self._get_table_contrains('fields', column_query='column_name', relationship_query='pk_%%')
constraints_results = pd.read_sql(constraint_sql, create_engine(db_engine))
assert constraints_results is not None
assert not constraints_results.empty
# indexes on the 'fields' table columns
constraint_sql = self._get_table_contrains('fields', relationship_query='ix_%%')
constraints_results = pd.read_sql(constraint_sql, create_engine(db_engine))
assert constraints_results is not None
assert not constraints_results.empty
columns = constraints_results['column_name'].tolist()
assert len(columns) == 6
assert 'arr' in columns
assert 'field_id' in columns
assert 'inst' in columns
assert 'table_name' in columns
assert 'type' in columns
assert 'coding' in columns
def test_postgresql_two_csv_files(self):
# Prepare
csv01 = get_repository_path('pheno2sql/example08_01.csv')
csv02 = get_repository_path('pheno2sql/example08_02.csv')
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL((csv01, csv02), db_engine)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'postgresql'
## Check tables exist
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('ukb_pheno_0_00'), create_engine(db_engine))
assert table.iloc[0, 0]
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('ukb_pheno_1_00'), create_engine(db_engine))
assert table.iloc[0, 0]
## Check columns are correct
tmp = pd.read_sql('select * from ukb_pheno_0_00', create_engine(db_engine))
expected_columns = ["eid","c21_0_0","c21_1_0","c21_2_0","c31_0_0","c34_0_0","c46_0_0","c47_0_0","c48_0_0"]
assert len(tmp.columns) == len(expected_columns)
assert all(x in expected_columns for x in tmp.columns)
tmp = pd.read_sql('select * from ukb_pheno_1_00', create_engine(db_engine))
expected_columns = ["eid","c100_0_0", "c100_1_0", "c100_2_0", "c110_0_0", "c120_0_0", "c130_0_0", "c140_0_0", "c150_0_0"]
assert len(tmp.columns) == len(expected_columns)
assert all(x in expected_columns for x in tmp.columns)
## Check data is correct
tmp = pd.read_sql('select * from ukb_pheno_0_00', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 5
assert tmp.loc[1, 'c21_0_0'] == 'Option number 1'
assert tmp.loc[1, 'c21_1_0'] == 'No response'
assert tmp.loc[1, 'c21_2_0'] == 'Yes'
assert tmp.loc[1, 'c31_0_0'].strftime('%Y-%m-%d') == '2011-03-07'
assert int(tmp.loc[1, 'c34_0_0']) == -33
assert int(tmp.loc[1, 'c46_0_0']) == -9
assert tmp.loc[1, 'c47_0_0'].round(5) == 41.55312
assert tmp.loc[1, 'c48_0_0'].strftime('%Y-%m-%d') == '2010-07-14'
assert tmp.loc[5, 'c21_0_0'] == 'Option number 5'
assert tmp.loc[5, 'c21_1_0'] == 'Maybe'
assert tmp.loc[5, 'c21_2_0'] == 'Probably'
assert pd.isnull(tmp.loc[5, 'c31_0_0'])
assert int(tmp.loc[5, 'c34_0_0']) == -4
assert int(tmp.loc[5, 'c46_0_0']) == 1
assert pd.isnull(tmp.loc[5, 'c47_0_0'])
assert tmp.loc[5, 'c48_0_0'].strftime('%Y-%m-%d') == '1999-10-11'
tmp = pd.read_sql('select * from ukb_pheno_1_00', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 3
assert int(tmp.loc[1, 'c100_0_0']) == -9
assert int(tmp.loc[1, 'c100_1_0']) == 3
assert pd.isnull(tmp.loc[1, 'c100_2_0'])
assert tmp.loc[1, 'c110_0_0'].round(5) == 42.55312
assert int(tmp.loc[1, 'c120_0_0']) == -33
assert tmp.loc[1, 'c130_0_0'] == 'Option number 1'
assert tmp.loc[1, 'c140_0_0'].strftime('%Y-%m-%d') == '2011-03-07'
assert tmp.loc[1, 'c150_0_0'].strftime('%Y-%m-%d') == '2010-07-14'
assert pd.isnull(tmp.loc[3, 'c100_0_0'])
assert int(tmp.loc[3, 'c100_1_0']) == -4
assert int(tmp.loc[3, 'c100_2_0']) == -10
assert tmp.loc[3, 'c110_0_0'].round(5) == -35.31471
assert int(tmp.loc[3, 'c120_0_0']) == 0
assert tmp.loc[3, 'c130_0_0'] == 'Option number 3'
assert tmp.loc[3, 'c140_0_0'].strftime('%Y-%m-%d') == '1997-04-15'
assert pd.isnull(tmp.loc[3, 'c150_0_0'])
@unittest.skip('sqlite being removed')
def test_sqlite_query_single_table(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example02.csv')
db_engine = SQLITE_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=999999)
p2sql.load_data()
# Run
columns = ['c21_0_0', 'c21_2_0', 'c48_0_0']
query_result = next(p2sql.query(columns))
# Validate
assert query_result is not None
assert query_result.index.name == 'eid'
assert len(query_result.index) == 4
assert all(x in query_result.index for x in range(1, 4 + 1))
assert len(query_result.columns) == len(columns)
assert all(x in columns for x in query_result.columns)
assert not query_result.empty
assert query_result.shape[0] == 4
assert query_result.loc[1, 'c21_0_0'] == 'Option number 1'
assert query_result.loc[2, 'c21_0_0'] == 'Option number 2'
assert query_result.loc[3, 'c21_0_0'] == 'Option number 3'
assert query_result.loc[4, 'c21_0_0'] == 'Option number 4'
assert query_result.loc[1, 'c21_2_0'] == 'Yes'
assert query_result.loc[2, 'c21_2_0'] == 'No'
assert query_result.loc[3, 'c21_2_0'] == 'Maybe'
assert pd.isnull(query_result.loc[4, 'c21_2_0'])
assert query_result.loc[1, 'c48_0_0'] == '2011-08-14'
assert query_result.loc[2, 'c48_0_0'] == '2016-11-30'
assert query_result.loc[3, 'c48_0_0'] == '2010-01-01'
assert query_result.loc[4, 'c48_0_0'] == '2011-02-15'
def test_postgresql_query_single_table(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example02.csv')
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=999999)
p2sql.load_data()
# Run
columns = ['c21_0_0', 'c21_2_0', 'c48_0_0']
query_result = next(p2sql.query(columns))
# Validate
assert query_result is not None
assert query_result.index.name == 'eid'
assert len(query_result.index) == 4
assert all(x in query_result.index for x in range(1, 4 + 1))
assert len(query_result.columns) == len(columns)
assert all(x in columns for x in query_result.columns)
assert not query_result.empty
assert query_result.shape[0] == 4
assert query_result.loc[1, 'c21_0_0'] == 'Option number 1'
assert query_result.loc[2, 'c21_0_0'] == 'Option number 2'
assert query_result.loc[3, 'c21_0_0'] == 'Option number 3'
assert query_result.loc[4, 'c21_0_0'] == 'Option number 4'
assert query_result.loc[1, 'c21_2_0'] == 'Yes'
assert query_result.loc[2, 'c21_2_0'] == 'No'
assert query_result.loc[3, 'c21_2_0'] == 'Maybe'
assert pd.isnull(query_result.loc[4, 'c21_2_0'])
assert query_result.loc[1, 'c48_0_0'].strftime('%Y-%m-%d') == '2011-08-14'
assert query_result.loc[2, 'c48_0_0'].strftime('%Y-%m-%d') == '2016-11-30'
assert query_result.loc[3, 'c48_0_0'].strftime('%Y-%m-%d') == '2010-01-01'
assert query_result.loc[4, 'c48_0_0'].strftime('%Y-%m-%d') == '2011-02-15'
def test_postgresql_two_csv_files_query_single_table(self):
# Prepare
csv01 = get_repository_path('pheno2sql/example08_01.csv')
csv02 = get_repository_path('pheno2sql/example08_02.csv')
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL((csv01, csv02), db_engine, n_columns_per_table=999999)
p2sql.load_data()
# Run
columns = ['c21_0_0', 'c21_2_0', 'c48_0_0']
query_result = next(p2sql.query(columns))
# Validate
assert query_result is not None
assert query_result.index.name == 'eid'
assert len(query_result.index) == 5
assert all(x in query_result.index for x in range(1, 5 + 1))
assert len(query_result.columns) == len(columns)
assert all(x in columns for x in query_result.columns)
assert not query_result.empty
assert query_result.shape[0] == 5
assert query_result.loc[1, 'c21_0_0'] == 'Option number 1'
assert query_result.loc[2, 'c21_0_0'] == 'Option number 2'
assert query_result.loc[3, 'c21_0_0'] == 'Option number 3'
assert query_result.loc[4, 'c21_0_0'] == 'Option number 4'
assert query_result.loc[5, 'c21_0_0'] == 'Option number 5'
assert query_result.loc[1, 'c21_2_0'] == 'Yes'
assert query_result.loc[2, 'c21_2_0'] == 'No'
assert query_result.loc[3, 'c21_2_0'] == 'Maybe'
assert pd.isnull(query_result.loc[4, 'c21_2_0'])
assert query_result.loc[5, 'c21_2_0'] == 'Probably'
assert query_result.loc[1, 'c48_0_0'].strftime('%Y-%m-%d') == '2010-07-14'
assert query_result.loc[2, 'c48_0_0'].strftime('%Y-%m-%d') == '2017-11-30'
assert query_result.loc[3, 'c48_0_0'].strftime('%Y-%m-%d') == '2020-01-01'
assert query_result.loc[4, 'c48_0_0'].strftime('%Y-%m-%d') == '1990-02-15'
assert query_result.loc[5, 'c48_0_0'].strftime('%Y-%m-%d') == '1999-10-11'
@unittest.skip('sqlite being removed')
def test_sqlite_query_multiple_tables(self):
# RIGHT and FULL OUTER JOINs are not currently supported
# Prepare
csv_file = get_repository_path('pheno2sql/example02.csv')
db_engine = SQLITE_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=2)
p2sql.load_data()
# Run
columns = ['c21_0_0', 'c21_2_0', 'c48_0_0']
query_result = p2sql.query(columns)
# Validate
assert query_result is not None
assert query_result.index.name == 'eid'
assert len(query_result.index) == 4
assert all(x in query_result.index for x in range(1, 4 + 1))
assert len(query_result.columns) == len(columns)
assert all(x in columns for x in query_result.columns)
assert not query_result.empty
assert query_result.shape[0] == 4
assert query_result.loc[1, 'c21_0_0'] == 'Option number 1'
assert query_result.loc[2, 'c21_0_0'] == 'Option number 2'
assert query_result.loc[3, 'c21_0_0'] == 'Option number 3'
assert query_result.loc[4, 'c21_0_0'] == 'Option number 4'
assert query_result.loc[1, 'c21_2_0'] == 'Yes'
assert query_result.loc[2, 'c21_2_0'] == 'No'
assert query_result.loc[3, 'c21_2_0'] == 'Maybe'
assert pd.isnull(query_result.loc[4, 'c21_2_0'])
assert query_result.loc[1, 'c48_0_0'] == '2011-08-14'
assert query_result.loc[2, 'c48_0_0'] == '2016-11-30'
assert query_result.loc[3, 'c48_0_0'] == '2010-01-01'
assert query_result.loc[4, 'c48_0_0'] == '2011-02-15'
def test_postgresql_query_multiple_tables(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example02.csv')
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=2)
p2sql.load_data()
# Run
columns = ['c21_0_0', 'c21_2_0', 'c48_0_0']
query_result = next(p2sql.query(columns))
# Validate
assert query_result is not None
assert query_result.index.name == 'eid'
assert len(query_result.index) == 4
assert all(x in query_result.index for x in range(1, 4 + 1))
assert len(query_result.columns) == len(columns)
assert all(x in columns for x in query_result.columns)
assert not query_result.empty
assert query_result.shape[0] == 4
assert query_result.loc[1, 'c21_0_0'] == 'Option number 1'
assert query_result.loc[2, 'c21_0_0'] == 'Option number 2'
assert query_result.loc[3, 'c21_0_0'] == 'Option number 3'
assert query_result.loc[4, 'c21_0_0'] == 'Option number 4'
assert query_result.loc[1, 'c21_2_0'] == 'Yes'
assert query_result.loc[2, 'c21_2_0'] == 'No'
assert query_result.loc[3, 'c21_2_0'] == 'Maybe'
assert pd.isnull(query_result.loc[4, 'c21_2_0'])
assert query_result.loc[1, 'c48_0_0'].strftime('%Y-%m-%d') == '2011-08-14'
assert query_result.loc[2, 'c48_0_0'].strftime('%Y-%m-%d') == '2016-11-30'
assert query_result.loc[3, 'c48_0_0'].strftime('%Y-%m-%d') == '2010-01-01'
assert query_result.loc[4, 'c48_0_0'].strftime('%Y-%m-%d') == '2011-02-15'
def test_postgresql_two_csv_files_query_multiple_tables(self):
# Prepare
csv01 = get_repository_path('pheno2sql/example08_01.csv')
csv02 = get_repository_path('pheno2sql/example08_02.csv')
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL((csv01, csv02), db_engine, n_columns_per_table=999999)
p2sql.load_data()
# Run
columns = ['c21_0_0', 'c21_2_0', 'c110_0_0', 'c150_0_0']
query_result = next(p2sql.query(columns))
# Validate
assert query_result is not None
assert query_result.index.name == 'eid'
assert len(query_result.index) == 5
assert all(x in query_result.index for x in range(1, 5 + 1))
assert len(query_result.columns) == len(columns)
assert all(x in columns for x in query_result.columns)
assert not query_result.empty
assert query_result.shape[0] == 5
assert query_result.loc[1, 'c21_0_0'] == 'Option number 1'
assert query_result.loc[2, 'c21_0_0'] == 'Option number 2'
assert query_result.loc[3, 'c21_0_0'] == 'Option number 3'
assert query_result.loc[4, 'c21_0_0'] == 'Option number 4'
assert query_result.loc[5, 'c21_0_0'] == 'Option number 5'
assert query_result.loc[1, 'c21_2_0'] == 'Yes'
assert query_result.loc[2, 'c21_2_0'] == 'No'
assert query_result.loc[3, 'c21_2_0'] == 'Maybe'
assert pd.isnull(query_result.loc[4, 'c21_2_0'])
assert query_result.loc[5, 'c21_2_0'] == 'Probably'
assert query_result.loc[1, 'c110_0_0'].round(5) == 42.55312
assert pd.isnull(query_result.loc[2, 'c110_0_0'])
assert query_result.loc[3, 'c110_0_0'].round(5) == -35.31471
assert pd.isnull(query_result.loc[4, 'c110_0_0'])
assert pd.isnull(query_result.loc[5, 'c110_0_0'])
assert query_result.loc[1, 'c150_0_0'].strftime('%Y-%m-%d') == '2010-07-14'
assert query_result.loc[2, 'c150_0_0'].strftime('%Y-%m-%d') == '2017-11-30'
assert pd.isnull(query_result.loc[3, 'c150_0_0'])
assert pd.isnull(query_result.loc[4, 'c150_0_0'])
assert pd.isnull(query_result.loc[5, 'c150_0_0'])
def test_postgresql_two_csv_files_flipped_query_multiple_tables(self):
# Prepare
# In this test the files are just flipped
csv01 = get_repository_path('pheno2sql/example08_01.csv')
csv02 = get_repository_path('pheno2sql/example08_02.csv')
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL((csv02, csv01), db_engine, n_columns_per_table=999999)
p2sql.load_data()
# Run
columns = ['c21_0_0', 'c21_2_0', 'c110_0_0', 'c150_0_0']
query_result = next(p2sql.query(columns))
# Validate
assert query_result is not None
assert query_result.index.name == 'eid'
assert len(query_result.index) == 5
assert all(x in query_result.index for x in range(1, 5 + 1))
assert len(query_result.columns) == len(columns)
assert all(x in columns for x in query_result.columns)
assert not query_result.empty
assert query_result.shape[0] == 5
assert query_result.loc[1, 'c21_0_0'] == 'Option number 1'
assert query_result.loc[2, 'c21_0_0'] == 'Option number 2'
assert query_result.loc[3, 'c21_0_0'] == 'Option number 3'
assert query_result.loc[4, 'c21_0_0'] == 'Option number 4'
assert query_result.loc[5, 'c21_0_0'] == 'Option number 5'
assert query_result.loc[1, 'c21_2_0'] == 'Yes'
assert query_result.loc[2, 'c21_2_0'] == 'No'
assert query_result.loc[3, 'c21_2_0'] == 'Maybe'
assert pd.isnull(query_result.loc[4, 'c21_2_0'])
assert query_result.loc[5, 'c21_2_0'] == 'Probably'
assert query_result.loc[1, 'c110_0_0'].round(5) == 42.55312
assert pd.isnull(query_result.loc[2, 'c110_0_0'])
assert query_result.loc[3, 'c110_0_0'].round(5) == -35.31471
assert pd.isnull(query_result.loc[4, 'c110_0_0'])
assert pd.isnull(query_result.loc[5, 'c110_0_0'])
assert query_result.loc[1, 'c150_0_0'].strftime('%Y-%m-%d') == '2010-07-14'
assert query_result.loc[2, 'c150_0_0'].strftime('%Y-%m-%d') == '2017-11-30'
assert pd.isnull(query_result.loc[3, 'c150_0_0'])
assert pd.isnull(query_result.loc[4, 'c150_0_0'])
assert pd.isnull(query_result.loc[5, 'c150_0_0'])
@unittest.skip('sqlite being removed')
def test_sqlite_query_custom_columns(self):
# SQLite is very limited when selecting variables, renaming, doing math operations, etc
pass
def test_postgresql_query_custom_columns(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example02.csv')
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=2)
p2sql.load_data()
# Run
columns = ['c21_0_0', 'c21_2_0', 'c47_0_0', '(c47_0_0 ^ 2.0) as c47_squared']
query_result = next(p2sql.query(columns))
# Validate
assert query_result is not None
assert query_result.index.name == 'eid'
assert len(query_result.index) == 4
assert all(x in query_result.index for x in range(1, 4 + 1))
assert len(query_result.columns) == len(columns)
assert all(x in ['c21_0_0', 'c21_2_0', 'c47_0_0', 'c47_squared'] for x in query_result.columns)
assert not query_result.empty
assert query_result.shape[0] == 4
assert query_result.loc[1, 'c21_0_0'] == 'Option number 1'
assert query_result.loc[2, 'c21_0_0'] == 'Option number 2'
assert query_result.loc[3, 'c21_0_0'] == 'Option number 3'
assert query_result.loc[4, 'c21_0_0'] == 'Option number 4'
assert query_result.loc[1, 'c21_2_0'] == 'Yes'
assert query_result.loc[2, 'c21_2_0'] == 'No'
assert query_result.loc[3, 'c21_2_0'] == 'Maybe'
assert pd.isnull(query_result.loc[4, 'c21_2_0'])
assert query_result.loc[1, 'c47_0_0'].round(5) == 45.55412
assert query_result.loc[2, 'c47_0_0'].round(5) == -0.55461
assert query_result.loc[3, 'c47_0_0'].round(5) == -5.32471
assert query_result.loc[4, 'c47_0_0'].round(5) == 55.19832
assert query_result.loc[1, 'c47_squared'].round(5) == round(45.55412 ** 2, 5)
assert query_result.loc[2, 'c47_squared'].round(5) == round((-0.55461) ** 2, 5)
assert query_result.loc[3, 'c47_squared'].round(5) == round((-5.32471) ** 2, 5)
assert query_result.loc[4, 'c47_squared'].round(5) == round(55.19832 ** 2, 5)
@unittest.skip('sqlite being removed')
def test_sqlite_query_single_filter(self):
# RIGHT and FULL OUTER JOINs are not currently supported
# Prepare
csv_file = get_repository_path('pheno2sql/example02.csv')
db_engine = SQLITE_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=2)
p2sql.load_data()
# Run
columns = ['c21_0_0', 'c21_2_0', 'c47_0_0']
filter = ['c47_0_0 > 0']
query_result = p2sql.query(columns, filter)
# Validate
assert query_result is not None
assert query_result.index.name == 'eid'
assert len(query_result.index) == 2
assert all(x in query_result.index for x in (1, 4))
assert len(query_result.columns) == len(columns)
assert all(x in columns for x in query_result.columns)
assert not query_result.empty
assert query_result.shape[0] == 2
assert query_result.loc[1, 'c21_0_0'] == 'Option number 1'
assert query_result.loc[4, 'c21_0_0'] == 'Option number 4'
assert query_result.loc[1, 'c21_2_0'] == 'Yes'
assert pd.isnull(query_result.loc[4, 'c21_2_0'])
assert query_result.loc[1, 'c47_0_0'].round(5) == 45.55412
assert query_result.loc[4, 'c47_0_0'].round(5) == 55.19832
def test_postgresql_query_single_filter(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example02.csv')
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=2)
p2sql.load_data()
# Run
columns = ['c21_0_0', 'c21_2_0', 'c47_0_0']
filter = ['c47_0_0 > 0']
query_result = next(p2sql.query(columns, filterings=filter))
# Validate
assert query_result is not None
assert query_result.index.name == 'eid'
assert len(query_result.index) == 2
assert all(x in query_result.index for x in (1, 4))
assert len(query_result.columns) == len(columns)
assert all(x in columns for x in query_result.columns)
assert not query_result.empty
assert query_result.shape[0] == 2
assert query_result.loc[1, 'c21_0_0'] == 'Option number 1'
assert query_result.loc[4, 'c21_0_0'] == 'Option number 4'
assert query_result.loc[1, 'c21_2_0'] == 'Yes'
assert pd.isnull(query_result.loc[4, 'c21_2_0'])
assert query_result.loc[1, 'c47_0_0'].round(5) == 45.55412
assert query_result.loc[4, 'c47_0_0'].round(5) == 55.19832
@unittest.skip('sqlite being removed')
def test_sqlite_query_multiple_and_filter(self):
# 'RIGHT and FULL OUTER JOINs are not currently supported'
# Prepare
csv_file = get_repository_path('pheno2sql/example02.csv')
db_engine = SQLITE_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=2)
p2sql.load_data()
# Run
columns = ['c21_0_0', 'c21_2_0', 'c47_0_0', 'c48_0_0']
filter = ["c48_0_0 > '2011-01-01'", "c21_2_0 <> ''"]
query_result = p2sql.query(columns, filter)
# Validate
assert query_result is not None
assert query_result.index.name == 'eid'
assert len(query_result.index) == 2
assert all(x in query_result.index for x in (1, 2))
assert len(query_result.columns) == len(columns)
assert all(x in columns for x in query_result.columns)
assert not query_result.empty
assert query_result.shape[0] == 2
assert query_result.loc[1, 'c21_0_0'] == 'Option number 1'
assert query_result.loc[2, 'c21_0_0'] == 'Option number 2'
assert query_result.loc[1, 'c21_2_0'] == 'Yes'
assert query_result.loc[2, 'c21_2_0'] == 'No'
assert query_result.loc[1, 'c47_0_0'].round(5) == 45.55412
assert query_result.loc[2, 'c47_0_0'].round(5) == -0.55461
def test_postgresql_query_multiple_and_filter(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example02.csv')
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=2)
p2sql.load_data()
# Run
columns = ['c21_0_0', 'c21_2_0', 'c47_0_0', 'c48_0_0']
filter = ["c48_0_0 > '2011-01-01'", "c21_2_0 <> ''"]
query_result = next(p2sql.query(columns, filterings=filter))
# Validate
assert query_result is not None
assert query_result.index.name == 'eid'
assert len(query_result.index) == 2
assert all(x in query_result.index for x in (1, 2))
assert len(query_result.columns) == len(columns)
assert all(x in columns for x in query_result.columns)
assert not query_result.empty
assert query_result.shape[0] == 2
assert query_result.loc[1, 'c21_0_0'] == 'Option number 1'
assert query_result.loc[2, 'c21_0_0'] == 'Option number 2'
assert query_result.loc[1, 'c21_2_0'] == 'Yes'
assert query_result.loc[2, 'c21_2_0'] == 'No'
assert query_result.loc[1, 'c47_0_0'].round(5) == 45.55412
assert query_result.loc[2, 'c47_0_0'].round(5) == -0.55461
assert query_result.loc[1, 'c48_0_0'].strftime('%Y-%m-%d') == '2011-08-14'
assert query_result.loc[2, 'c48_0_0'].strftime('%Y-%m-%d') == '2016-11-30'
@unittest.skip('sqlite being removed')
def test_sqlite_float_is_empty(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example03.csv')
db_engine = SQLITE_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=3, loading_n_jobs=1)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'sqlite'
## Check data is correct
tmp = pd.read_sql('select * from ukb_pheno_0_00', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 4
assert tmp.loc[1, 'c21_0_0'] == 'Option number 1'
assert tmp.loc[1, 'c21_1_0'] == 'No response'
assert tmp.loc[1, 'c21_2_0'] == 'Yes'
assert tmp.loc[2, 'c21_0_0'] == 'Option number 2'
assert pd.isnull(tmp.loc[2, 'c21_1_0'])
assert tmp.loc[2, 'c21_2_0'] == 'No'
assert tmp.loc[3, 'c21_0_0'] == 'Option number 3'
assert tmp.loc[3, 'c21_1_0'] == 'Of course'
assert tmp.loc[3, 'c21_2_0'] == 'Maybe'
assert pd.isnull(tmp.loc[4, 'c21_2_0'])
tmp = pd.read_sql('select * from ukb_pheno_0_01', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 4
assert tmp.loc[1, 'c31_0_0'] == '2012-01-05'
assert int(tmp.loc[1, 'c34_0_0']) == 21
assert int(tmp.loc[1, 'c46_0_0']) == -9
assert tmp.loc[2, 'c31_0_0'] == '2015-12-30'
assert int(tmp.loc[2, 'c34_0_0']) == 12
assert int(tmp.loc[2, 'c46_0_0']) == -2
assert tmp.loc[3, 'c31_0_0'] == '2007-03-19'
assert int(tmp.loc[3, 'c34_0_0']) == 1
assert int(tmp.loc[3, 'c46_0_0']) == -7
tmp = pd.read_sql('select * from ukb_pheno_0_02', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 4
# FIXME: this is strange, data type in this particular case needs np.round
assert np.round(tmp.loc[1, 'c47_0_0'], 5) == 45.55412
assert tmp.loc[1, 'c48_0_0'] == '2011-08-14'
assert tmp.loc[2, 'c47_0_0'] == -0.55461
assert tmp.loc[2, 'c48_0_0'] == '2016-11-30'
assert pd.isnull(tmp.loc[3, 'c47_0_0'])
assert tmp.loc[3, 'c48_0_0'] == '2010-01-01'
def test_postgresql_float_is_empty(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example03.csv')
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=3, loading_n_jobs=1)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'postgresql'
## Check data is correct
tmp = pd.read_sql('select * from ukb_pheno_0_00', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 4
assert tmp.loc[1, 'c21_0_0'] == 'Option number 1'
assert tmp.loc[1, 'c21_1_0'] == 'No response'
assert tmp.loc[1, 'c21_2_0'] == 'Yes'
assert tmp.loc[2, 'c21_0_0'] == 'Option number 2'
assert pd.isnull(tmp.loc[2, 'c21_1_0'])
assert tmp.loc[2, 'c21_2_0'] == 'No'
assert tmp.loc[3, 'c21_0_0'] == 'Option number 3'
assert tmp.loc[3, 'c21_1_0'] == 'Of course'
assert tmp.loc[3, 'c21_2_0'] == 'Maybe'
assert pd.isnull(tmp.loc[4, 'c21_2_0'])
tmp = pd.read_sql('select * from ukb_pheno_0_01', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 4
assert tmp.loc[1, 'c31_0_0'].strftime('%Y-%m-%d') == '2012-01-05'
assert int(tmp.loc[1, 'c34_0_0']) == 21
assert int(tmp.loc[1, 'c46_0_0']) == -9
assert tmp.loc[2, 'c31_0_0'].strftime('%Y-%m-%d') == '2015-12-30'
assert int(tmp.loc[2, 'c34_0_0']) == 12
assert int(tmp.loc[2, 'c46_0_0']) == -2
assert tmp.loc[3, 'c31_0_0'].strftime('%Y-%m-%d') == '2007-03-19'
assert int(tmp.loc[3, 'c34_0_0']) == 1
assert int(tmp.loc[3, 'c46_0_0']) == -7
tmp = pd.read_sql('select * from ukb_pheno_0_02', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 4
assert tmp.loc[1, 'c47_0_0'].round(5) == 45.55412
assert tmp.loc[1, 'c48_0_0'].strftime('%Y-%m-%d') == '2011-08-14'
assert tmp.loc[2, 'c47_0_0'].round(5) == -0.55461
assert tmp.loc[2, 'c48_0_0'].strftime('%Y-%m-%d') == '2016-11-30'
assert pd.isnull(tmp.loc[3, 'c47_0_0'])
assert tmp.loc[3, 'c48_0_0'].strftime('%Y-%m-%d') == '2010-01-01'
def test_postgresql_timestamp_is_empty(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example04.csv')
db_engine = 'postgresql://test:test@localhost:5432/ukb'
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=3, loading_n_jobs=1)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'postgresql'
## Check data is correct
tmp = pd.read_sql('select * from ukb_pheno_0_00', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 4
assert tmp.loc[1, 'c21_0_0'] == 'Option number 1'
assert tmp.loc[1, 'c21_1_0'] == 'No response'
assert tmp.loc[1, 'c21_2_0'] == 'Yes'
assert tmp.loc[2, 'c21_0_0'] == 'Option number 2'
assert pd.isnull(tmp.loc[2, 'c21_1_0'])
assert tmp.loc[2, 'c21_2_0'] == 'No'
assert tmp.loc[3, 'c21_0_0'] == 'Option number 3'
assert tmp.loc[3, 'c21_1_0'] == 'Of course'
assert tmp.loc[3, 'c21_2_0'] == 'Maybe'
assert pd.isnull(tmp.loc[4, 'c21_2_0'])
tmp = pd.read_sql('select * from ukb_pheno_0_01', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 4
assert tmp.loc[1, 'c31_0_0'].strftime('%Y-%m-%d') == '2012-01-05'
assert int(tmp.loc[1, 'c34_0_0']) == 21
assert int(tmp.loc[1, 'c46_0_0']) == -9
assert tmp.loc[2, 'c31_0_0'].strftime('%Y-%m-%d') == '2015-12-30'
assert int(tmp.loc[2, 'c34_0_0']) == 12
assert int(tmp.loc[2, 'c46_0_0']) == -2
assert tmp.loc[3, 'c31_0_0'].strftime('%Y-%m-%d') == '2007-03-19'
assert int(tmp.loc[3, 'c34_0_0']) == 1
assert int(tmp.loc[3, 'c46_0_0']) == -7
assert pd.isnull(tmp.loc[4, 'c31_0_0'])
tmp = pd.read_sql('select * from ukb_pheno_0_02', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 4
assert tmp.loc[1, 'c47_0_0'].round(5) == 45.55412
assert tmp.loc[1, 'c48_0_0'].strftime('%Y-%m-%d') == '2011-08-14'
assert tmp.loc[2, 'c47_0_0'].round(5) == -0.55461
assert pd.isnull(tmp.loc[2, 'c48_0_0'])
assert tmp.loc[3, 'c47_0_0'].round(5) == -5.32471
assert tmp.loc[3, 'c48_0_0'].strftime('%Y-%m-%d') == '2010-01-01'
def test_postgresql_integer_is_nan(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example06_nan_integer.csv')
db_engine = 'postgresql://test:test@localhost:5432/ukb'
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=3, loading_n_jobs=1)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'postgresql'
## Check data is correct
tmp = pd.read_sql('select * from ukb_pheno_0_01', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 4
assert tmp.loc[1, 'c31_0_0'].strftime('%Y-%m-%d') == '2012-01-05'
assert int(tmp.loc[1, 'c34_0_0']) == 21
assert int(tmp.loc[1, 'c46_0_0']) == -9
assert tmp.loc[2, 'c31_0_0'].strftime('%Y-%m-%d') == '2015-12-30'
assert int(tmp.loc[2, 'c34_0_0']) == 12
assert pd.isnull(tmp.loc[2, 'c46_0_0'])
assert tmp.loc[3, 'c31_0_0'].strftime('%Y-%m-%d') == '2007-03-19'
assert int(tmp.loc[3, 'c34_0_0']) == 1
assert int(tmp.loc[3, 'c46_0_0']) == -7
assert pd.isnull(tmp.loc[4, 'c31_0_0'])
def test_postgresql_first_row_is_nan_integer(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example07_first_nan_integer.csv')
db_engine = 'postgresql://test:test@localhost:5432/ukb'
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=3, loading_n_jobs=1)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'postgresql'
## Check data is correct
tmp = pd.read_sql('select * from ukb_pheno_0_01', create_engine(db_engine), index_col='eid')
assert not tmp.empty
assert tmp.shape[0] == 4
assert tmp.loc[1, 'c31_0_0'].strftime('%Y-%m-%d') == '2012-01-05'
assert int(tmp.loc[1, 'c34_0_0']) == 21
assert pd.isnull(tmp.loc[1, 'c46_0_0'])
assert tmp.loc[2, 'c31_0_0'].strftime('%Y-%m-%d') == '2015-12-30'
assert int(tmp.loc[2, 'c34_0_0']) == 12
assert pd.isnull(tmp.loc[2, 'c46_0_0'])
assert tmp.loc[3, 'c31_0_0'].strftime('%Y-%m-%d') == '2007-03-19'
assert int(tmp.loc[3, 'c34_0_0']) == 1
assert int(tmp.loc[3, 'c46_0_0']) == -7
assert pd.isnull(tmp.loc[4, 'c31_0_0'])
def test_postgresql_sql_chunksize01(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example02.csv')
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=3, sql_chunksize=2)
p2sql.load_data()
# Run
columns = ['c21_0_0', 'c21_2_0', 'c48_0_0']
query_result = p2sql.query(columns)
# Validate
assert query_result is not None
import collections.abc
assert isinstance(query_result, collections.abc.Iterable)
index_len_sum = 0
for chunk_idx, chunk in enumerate(query_result):
assert chunk.index.name == 'eid'
index_len_sum += len(chunk.index)
assert len(chunk.index) == 2
if chunk_idx == 0:
indexes = (1, 2)
assert all(x in chunk.index for x in indexes)
else:
indexes = (3, 4)
assert all(x in chunk.index for x in indexes)
assert len(chunk.columns) == len(columns)
assert all(x in columns for x in chunk.columns)
assert not chunk.empty
assert chunk.shape[0] == 2
if chunk_idx == 0:
assert chunk.loc[1, 'c21_0_0'] == 'Option number 1'
assert chunk.loc[2, 'c21_0_0'] == 'Option number 2'
assert chunk.loc[1, 'c21_2_0'] == 'Yes'
assert chunk.loc[2, 'c21_2_0'] == 'No'
assert chunk.loc[1, 'c48_0_0'].strftime('%Y-%m-%d') == '2011-08-14'
assert chunk.loc[2, 'c48_0_0'].strftime('%Y-%m-%d') == '2016-11-30'
else:
assert chunk.loc[3, 'c21_0_0'] == 'Option number 3'
assert chunk.loc[4, 'c21_0_0'] == 'Option number 4'
assert chunk.loc[3, 'c21_2_0'] == 'Maybe'
assert pd.isnull(chunk.loc[4, 'c21_2_0'])
assert chunk.loc[3, 'c48_0_0'].strftime('%Y-%m-%d') == '2010-01-01'
assert chunk.loc[4, 'c48_0_0'].strftime('%Y-%m-%d') == '2011-02-15'
assert index_len_sum == 4
def test_postgresql_sql_chunksize02(self):
# Prepare
csv_file = get_repository_path('pheno2sql/example02.csv')
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, n_columns_per_table=3, sql_chunksize=3)
p2sql.load_data()
# Run
columns = ['c21_0_0', 'c21_2_0', 'c48_0_0']
query_result = p2sql.query(columns)
# Validate
assert query_result is not None
import collections.abc
assert isinstance(query_result, collections.abc.Iterable)
index_len_sum = 0
for chunk_idx, chunk in enumerate(query_result):
assert chunk.index.name == 'eid'
index_len_sum += len(chunk.index)
if chunk_idx == 0:
assert len(chunk.index) == 3
indexes = (1, 2, 3)
assert all(x in chunk.index for x in indexes)
else:
assert len(chunk.index) == 1
indexes = (4,)
assert all(x in chunk.index for x in indexes)
assert len(chunk.columns) == len(columns)
assert all(x in columns for x in chunk.columns)
assert not chunk.empty
if chunk_idx == 0:
assert chunk.shape[0] == 3
assert chunk.loc[1, 'c21_0_0'] == 'Option number 1'
assert chunk.loc[2, 'c21_0_0'] == 'Option number 2'
assert chunk.loc[3, 'c21_0_0'] == 'Option number 3'
assert chunk.loc[1, 'c21_2_0'] == 'Yes'
assert chunk.loc[2, 'c21_2_0'] == 'No'
assert chunk.loc[3, 'c21_2_0'] == 'Maybe'
assert chunk.loc[1, 'c48_0_0'].strftime('%Y-%m-%d') == '2011-08-14'
assert chunk.loc[2, 'c48_0_0'].strftime('%Y-%m-%d') == '2016-11-30'
assert chunk.loc[3, 'c48_0_0'].strftime('%Y-%m-%d') == '2010-01-01'
else:
assert chunk.shape[0] == 1
assert chunk.loc[4, 'c21_0_0'] == 'Option number 4'
assert pd.isnull(chunk.loc[4, 'c21_2_0'])
assert chunk.loc[4, 'c48_0_0'].strftime('%Y-%m-%d') == '2011-02-15'
assert index_len_sum == 4
def test_postgresql_all_eids_table_created(self):
# Prepare
directory = get_repository_path('pheno2sql/example14')
csv_file1 = get_repository_path(os.path.join(directory, 'example14_00.csv'))
csv_file2 = get_repository_path(os.path.join(directory, 'example14_01.csv'))
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL((csv_file1, csv_file2), db_engine, bgen_sample_file=os.path.join(directory, 'impv2.sample'),
n_columns_per_table=2, loading_n_jobs=1)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'postgresql'
## Check samples table exists
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('all_eids'), create_engine(db_engine))
assert table.iloc[0, 0]
## Check columns are correct
all_eids = pd.read_sql('select * from all_eids', create_engine(db_engine))
expected_columns = ["eid"]
assert len(all_eids.columns) == len(expected_columns)
assert all(x in all_eids.columns for x in expected_columns)
## Check data is correct
all_eids = pd.read_sql('select * from all_eids', create_engine(db_engine), index_col='eid')
assert len(all_eids.index) == 6 + 4, len(all_eids.index)
assert 1000010 in all_eids.index
assert 1000020 in all_eids.index
assert 1000021 in all_eids.index
assert 1000030 in all_eids.index
assert 1000040 in all_eids.index
assert 1000041 in all_eids.index
assert 1000050 in all_eids.index
assert 1000060 in all_eids.index
assert 1000061 in all_eids.index
assert 1000070 in all_eids.index
def test_postgresql_all_eids_table_constraints(self):
# Prepare
directory = get_repository_path('pheno2sql/example14')
csv_file1 = get_repository_path(os.path.join(directory, 'example14_00.csv'))
csv_file2 = get_repository_path(os.path.join(directory, 'example14_01.csv'))
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL((csv_file1, csv_file2), db_engine, bgen_sample_file=os.path.join(directory, 'impv2.sample'),
n_columns_per_table=2, loading_n_jobs=1)
# Run<|fim▁hole|>
## Check samples table exists
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('all_eids'), create_engine(db_engine))
assert table.iloc[0, 0]
# primary key
constraint_sql = self._get_table_contrains('all_eids', relationship_query='pk_%%')
constraints_results = pd.read_sql(constraint_sql, create_engine(db_engine))
assert constraints_results is not None
assert not constraints_results.empty
columns = constraints_results['column_name'].tolist()
assert len(columns) == 1
assert 'eid' in columns
def test_postgresql_bgen_samples_table_created(self):
# Prepare
directory = get_repository_path('pheno2sql/example10')
csv_file = get_repository_path(os.path.join(directory, 'example10_diseases.csv'))
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, bgen_sample_file=os.path.join(directory, 'impv2.sample'),
n_columns_per_table=2, loading_n_jobs=1)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'postgresql'
## Check samples table exists
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('bgen_samples'), create_engine(db_engine))
assert table.iloc[0, 0]
## Check columns are correct
samples_data = pd.read_sql('select * from bgen_samples', create_engine(db_engine))
expected_columns = ["index", "eid"]
assert len(samples_data.columns) == len(expected_columns)
assert all(x in samples_data.columns for x in expected_columns)
## Check data is correct
samples_data = pd.read_sql('select * from bgen_samples', create_engine(db_engine), index_col='index')
assert not samples_data.empty
assert samples_data.shape[0] == 5
assert samples_data.loc[1, 'eid'] == 1000050
assert samples_data.loc[2, 'eid'] == 1000030
assert samples_data.loc[3, 'eid'] == 1000040
assert samples_data.loc[4, 'eid'] == 1000010
assert samples_data.loc[5, 'eid'] == 1000020
def test_postgresql_bgen_samples_table_constraints(self):
# Prepare
directory = get_repository_path('pheno2sql/example10')
csv_file = get_repository_path(os.path.join(directory, 'example10_diseases.csv'))
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, bgen_sample_file=os.path.join(directory, 'impv2.sample'),
n_columns_per_table=2, loading_n_jobs=1)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'postgresql'
## Check samples table exists
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('bgen_samples'), create_engine(db_engine))
assert table.iloc[0, 0]
# primary key
constraint_sql = self._get_table_contrains('bgen_samples', relationship_query='pk_%%')
constraints_results = pd.read_sql(constraint_sql, create_engine(db_engine))
assert constraints_results is not None
assert not constraints_results.empty
columns = constraints_results['column_name'].tolist()
assert len(columns) == 2
assert 'eid' in columns
assert 'index' in columns
# indexes
constraint_sql = self._get_table_contrains('bgen_samples', relationship_query='ix_%%')
constraints_results = pd.read_sql(constraint_sql, create_engine(db_engine))
assert constraints_results is not None
assert not constraints_results.empty
columns = constraints_results['column_name'].tolist()
assert len(columns) == 2
assert 'eid' in columns
assert 'index' in columns
def test_postgresql_events_tables_only_one_instance_filled(self):
# Prepare
directory = get_repository_path('pheno2sql/example10')
csv_file = get_repository_path(os.path.join(directory, 'example10_diseases.csv'))
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, bgen_sample_file=os.path.join(directory, 'impv2.sample'),
n_columns_per_table=2, loading_n_jobs=1)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'postgresql'
## Check samples table exists
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('events'), create_engine(db_engine))
assert table.iloc[0, 0]
## Check columns are correct
events_data = pd.read_sql('select * from events order by eid, instance, event', create_engine(db_engine))
expected_columns = ['eid', 'field_id', 'instance', 'event']
assert len(events_data.columns) == len(expected_columns)
assert all(x in events_data.columns for x in expected_columns)
## Check data is correct
assert not events_data.empty
assert events_data.shape[0] == 6
assert events_data.loc[0, 'eid'] == 1000020
assert events_data.loc[0, 'field_id'] == 84
assert events_data.loc[0, 'event'] == 'E103'
assert events_data.loc[1, 'eid'] == 1000020
assert events_data.loc[1, 'field_id'] == 84
assert events_data.loc[1, 'event'] == 'N308'
assert events_data.loc[2, 'eid'] == 1000020
assert events_data.loc[2, 'field_id'] == 84
assert events_data.loc[2, 'event'] == 'Q750'
assert events_data.loc[3, 'eid'] == 1000030
assert events_data.loc[3, 'field_id'] == 84
assert events_data.loc[3, 'event'] == 'N308'
assert events_data.loc[4, 'eid'] == 1000040
assert events_data.loc[4, 'field_id'] == 84
assert events_data.loc[4, 'event'] == 'N308'
assert events_data.loc[5, 'eid'] == 1000050
assert events_data.loc[5, 'field_id'] == 84
assert events_data.loc[5, 'event'] == 'E103'
def test_postgresql_events_tables_only_two_instances_filled(self):
# Prepare
directory = get_repository_path('pheno2sql/example11')
csv_file = get_repository_path(os.path.join(directory, 'example11_diseases.csv'))
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, bgen_sample_file=os.path.join(directory, 'impv2.sample'),
n_columns_per_table=2, loading_n_jobs=1)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'postgresql'
## Check samples table exists
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('events'), create_engine(db_engine))
assert table.iloc[0, 0]
## Check columns are correct
events_data = pd.read_sql('select * from events order by eid, instance, event', create_engine(db_engine))
expected_columns = ['eid', 'field_id', 'instance', 'event']
assert len(events_data.columns) == len(expected_columns)
assert all(x in events_data.columns for x in expected_columns)
## Check data is correct
assert not events_data.empty
assert events_data.shape[0] == 11
cidx = 0
assert events_data.loc[cidx, 'eid'] == 1000010
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 1
assert events_data.loc[cidx, 'event'] == 'E103'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000010
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 1
assert events_data.loc[cidx, 'event'] == 'Q750'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000020
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 0
assert events_data.loc[cidx, 'event'] == 'E103'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000020
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 0
assert events_data.loc[cidx, 'event'] == 'N308'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000020
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 1
assert events_data.loc[cidx, 'event'] == 'J32'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000030
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 0
assert events_data.loc[cidx, 'event'] == 'N308'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000030
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 1
assert events_data.loc[cidx, 'event'] == 'Q750'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000040
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 0
assert events_data.loc[cidx, 'event'] == 'N308'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000040
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 1
assert events_data.loc[cidx, 'event'] == 'E103'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000040
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 1
assert events_data.loc[cidx, 'event'] == 'Q750'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000050
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 0
assert events_data.loc[cidx, 'event'] == 'E103'
def test_postgresql_events_tables_two_categorical_fields_and_two_and_three_instances(self):
# Prepare
directory = get_repository_path('pheno2sql/example12')
csv_file = get_repository_path(os.path.join(directory, 'example12_diseases.csv'))
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, bgen_sample_file=os.path.join(directory, 'impv2.sample'),
n_columns_per_table=2, loading_n_jobs=1)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'postgresql'
## Check samples table exists
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('events'), create_engine(db_engine))
assert table.iloc[0, 0]
## Check columns are correct
events_data = pd.read_sql('select * from events order by eid, field_id, instance, event', create_engine(db_engine))
expected_columns = ['eid', 'field_id', 'instance', 'event']
assert len(events_data.columns) == len(expected_columns)
assert all(x in events_data.columns for x in expected_columns)
## Check total data
assert not events_data.empty
assert events_data.shape[0] == 25
# 1000010
cidx = 0
assert events_data.loc[cidx, 'eid'] == 1000010
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 1
assert events_data.loc[cidx, 'event'] == 'E103'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000010
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 1
assert events_data.loc[cidx, 'event'] == 'Q750'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000010
assert events_data.loc[cidx, 'field_id'] == 85
assert events_data.loc[cidx, 'instance'] == 0
assert events_data.loc[cidx, 'event'] == '1136'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000010
assert events_data.loc[cidx, 'field_id'] == 85
assert events_data.loc[cidx, 'instance'] == 0
assert events_data.loc[cidx, 'event'] == '1434'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000010
assert events_data.loc[cidx, 'field_id'] == 85
assert events_data.loc[cidx, 'instance'] == 2
assert events_data.loc[cidx, 'event'] == '1701'
# 1000020
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000020
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 0
assert events_data.loc[cidx, 'event'] == 'E103'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000020
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 0
assert events_data.loc[cidx, 'event'] == 'N308'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000020
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 1
assert events_data.loc[cidx, 'event'] == 'J32'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000020
assert events_data.loc[cidx, 'field_id'] == 85
assert events_data.loc[cidx, 'instance'] == 0
assert events_data.loc[cidx, 'event'] == '1114'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000020
assert events_data.loc[cidx, 'field_id'] == 85
assert events_data.loc[cidx, 'instance'] == 0
assert events_data.loc[cidx, 'event'] == '1434'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000020
assert events_data.loc[cidx, 'field_id'] == 85
assert events_data.loc[cidx, 'instance'] == 1
assert events_data.loc[cidx, 'event'] == '1136'
# 1000030
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000030
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 0
assert events_data.loc[cidx, 'event'] == 'N308'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000030
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 1
assert events_data.loc[cidx, 'event'] == 'Q750'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000030
assert events_data.loc[cidx, 'field_id'] == 85
assert events_data.loc[cidx, 'instance'] == 1
assert events_data.loc[cidx, 'event'] == '1434'
# 1000040
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000040
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 0
assert events_data.loc[cidx, 'event'] == 'N308'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000040
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 1
assert events_data.loc[cidx, 'event'] == 'E103'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000040
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 1
assert events_data.loc[cidx, 'event'] == 'Q750'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000040
assert events_data.loc[cidx, 'field_id'] == 85
assert events_data.loc[cidx, 'instance'] == 1
assert events_data.loc[cidx, 'event'] == '1114'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000040
assert events_data.loc[cidx, 'field_id'] == 85
assert events_data.loc[cidx, 'instance'] == 1
assert events_data.loc[cidx, 'event'] == '1136'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000040
assert events_data.loc[cidx, 'field_id'] == 85
assert events_data.loc[cidx, 'instance'] == 2
assert events_data.loc[cidx, 'event'] == '457'
# 1000050
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000050
assert events_data.loc[cidx, 'field_id'] == 84
assert events_data.loc[cidx, 'instance'] == 0
assert events_data.loc[cidx, 'event'] == 'E103'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000050
assert events_data.loc[cidx, 'field_id'] == 85
assert events_data.loc[cidx, 'instance'] == 0
assert events_data.loc[cidx, 'event'] == '1434'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000050
assert events_data.loc[cidx, 'field_id'] == 85
assert events_data.loc[cidx, 'instance'] == 1
assert events_data.loc[cidx, 'event'] == '1114'
# 1000060
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000060
assert events_data.loc[cidx, 'field_id'] == 85
assert events_data.loc[cidx, 'instance'] == 2
assert events_data.loc[cidx, 'event'] == '1114'
cidx += 1
assert events_data.loc[cidx, 'eid'] == 1000060
assert events_data.loc[cidx, 'field_id'] == 85
assert events_data.loc[cidx, 'instance'] == 2
assert events_data.loc[cidx, 'event'] == '1136'
def test_postgresql_events_tables_check_constrains_exist(self):
# Prepare
directory = get_repository_path('pheno2sql/example12')
csv_file = get_repository_path(os.path.join(directory, 'example12_diseases.csv'))
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, bgen_sample_file=os.path.join(directory, 'impv2.sample'),
n_columns_per_table=2, loading_n_jobs=1)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'postgresql'
## Check samples table exists
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('events'), create_engine(db_engine))
assert table.iloc[0, 0]
# primary key
constraint_sql = self._get_table_contrains('events', relationship_query='pk_%%')
constraints_results = pd.read_sql(constraint_sql, create_engine(db_engine))
assert constraints_results is not None
assert not constraints_results.empty
columns = constraints_results['column_name'].tolist()
assert len(columns) == 4
assert 'eid' in columns
assert 'field_id' in columns
assert 'instance' in columns
assert 'event' in columns
# index on 'event' column
constraint_sql = self._get_table_contrains('events', relationship_query='ix_%%')
constraints_results = pd.read_sql(constraint_sql, create_engine(db_engine), index_col='index_name')
assert constraints_results is not None
assert not constraints_results.empty
assert constraints_results.shape[0] == 6
assert constraints_results.loc[['ix_events_eid']].shape[0] == 1
assert constraints_results.loc['ix_events_eid', 'column_name'] == 'eid'
assert constraints_results.loc[['ix_events_field_id']].shape[0] == 1
assert constraints_results.loc['ix_events_field_id', 'column_name'] == 'field_id'
assert constraints_results.loc[['ix_events_instance']].shape[0] == 1
assert constraints_results.loc['ix_events_instance', 'column_name'] == 'instance'
assert constraints_results.loc[['ix_events_event']].shape[0] == 1
assert constraints_results.loc['ix_events_event', 'column_name'] == 'event'
assert constraints_results.loc[['ix_events_field_id_event']].shape[0] == 2
assert 'field_id' in constraints_results.loc['ix_events_field_id_event', 'column_name'].tolist()
assert 'event' in constraints_results.loc['ix_events_field_id_event', 'column_name'].tolist()
def test_postgresql_phenotypes_tables_check_constrains_exist(self):
# Prepare
directory = get_repository_path('pheno2sql/example12')
csv_file = get_repository_path(os.path.join(directory, 'example12_diseases.csv'))
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, bgen_sample_file=os.path.join(directory, 'impv2.sample'),
n_columns_per_table=15, loading_n_jobs=1)
# Run
p2sql.load_data()
# Validate
assert p2sql.db_type == 'postgresql'
## Check tables exists
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('ukb_pheno_0_00'), create_engine(db_engine))
assert table.iloc[0, 0]
table = pd.read_sql("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = '{}');".format('ukb_pheno_0_01'), create_engine(db_engine))
assert table.iloc[0, 0]
# primary key
constraint_sql = self._get_table_contrains('ukb_pheno_0_00', column_query='eid', relationship_query='pk_%%')
constraints_results = pd.read_sql(constraint_sql, create_engine(db_engine))
assert constraints_results is not None
assert not constraints_results.empty
constraint_sql = self._get_table_contrains('ukb_pheno_0_01', column_query='eid', relationship_query='pk_%%')
constraints_results = pd.read_sql(constraint_sql, create_engine(db_engine))
assert constraints_results is not None
assert not constraints_results.empty
def test_postgresql_vacuum(self):
# Prepare
directory = get_repository_path('pheno2sql/example12')
csv_file = get_repository_path(os.path.join(directory, 'example12_diseases.csv'))
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL(csv_file, db_engine, bgen_sample_file=os.path.join(directory, 'impv2.sample'),
n_columns_per_table=2, loading_n_jobs=1)
# Run
p2sql.load_data(vacuum=True)
# Validate
vacuum_data = pd.DataFrame()
query_count = 0
# FIXME waits for vacuum to finish
while vacuum_data.empty and query_count < 150:
vacuum_data = pd.read_sql("""
select relname, last_vacuum, last_analyze
from pg_stat_user_tables
where schemaname = 'public' and last_vacuum is not null and last_analyze is not null
""", create_engine(db_engine))
query_count += 1
assert vacuum_data is not None
assert not vacuum_data.empty
def test_postgresql_load_data_non_utf_characters(self):
# Prepare
directory = get_repository_path('pheno2sql/example15')
csv_file1 = get_repository_path(os.path.join(directory, 'example15_00.csv')) # latin1
csv_file2 = get_repository_path(os.path.join(directory, 'example15_01.csv')) # latin1
csv_file3 = get_repository_path(os.path.join(directory, 'example15_02.csv')) # utf-8
db_engine = POSTGRESQL_ENGINE
p2sql = Pheno2SQL((csv_file1, csv_file2, csv_file3), db_engine, bgen_sample_file=os.path.join(directory, 'impv2.sample'),
n_columns_per_table=2, loading_n_jobs=1)
# Run
p2sql.load_data()
columns = ['c21_1_0', 'c21_0_0', 'c103_0_0', 'c104_0_0', 'c221_0_0', 'c221_1_0']
query_result = next(p2sql.query(columns))
# Validate
assert query_result.index.name == 'eid'
assert len(query_result.index) == 10
assert len(query_result.columns) == len(columns)
assert all(x in columns for x in query_result.columns)
assert not query_result.empty
assert query_result.loc[1000041, 'c103_0_0'] == 'Optión 4'
assert query_result.loc[1000041, 'c104_0_0'] == '158'
assert query_result.loc[1000070, 'c21_1_0'] == 'Of course ñ'
assert query_result.loc[1000070, 'c21_0_0'] == 'Option number 7'
assert query_result.loc[1000050, 'c221_0_0'] == 'Option number 25'
assert query_result.loc[1000050, 'c221_1_0'] == 'Maybe ñó'
def test_postgresql_load_data_with_duplicated_data_field(self):
# Prepare
directory = get_repository_path('pheno2sql/example16')
csv_file1 = get_repository_path(os.path.join(directory, 'example1600.csv'))
csv_file2 = get_repository_path(os.path.join(directory, 'example1601.csv'))
db_engine = POSTGRESQL_ENGINE
# intentionally load the "latest" dataset (1601) first, since 1601 > 1600
p2sql = Pheno2SQL((csv_file2, csv_file1), db_engine, bgen_sample_file=os.path.join(directory, 'impv2.sample'),
n_columns_per_table=2, loading_n_jobs=1)
# Run
p2sql.load_data()
columns = ['c103_0_0', 'c47_0_0', 'c50_0_0']
query_result = next(p2sql.query(columns))
# Validate
assert query_result.index.name == 'eid'
assert len(query_result.index) == 7 + 3, len(query_result.index)
assert not query_result.empty
assert query_result.shape[0] == 7 + 3, query_result.shape[0]
assert len(query_result.columns) == len(columns)
assert all(x in columns for x in query_result.columns)
# these individuals should not have data for data-field 50, since we overwrote the old dataset (1600)
assert pd.isnull(query_result.loc[1000021, 'c50_0_0'])
assert pd.isnull(query_result.loc[1000041, 'c50_0_0'])
assert pd.isnull(query_result.loc[1000061, 'c50_0_0'])
# should keep "newest" data (in 1601, csv_file2)
assert query_result.loc[1000010, 'c50_0_0'] == 1.01
assert query_result.loc[1000020, 'c50_0_0'] == 1.05
assert query_result.loc[1000030, 'c50_0_0'] == 1.21
assert query_result.loc[1000040, 'c50_0_0'] == 1.25
assert query_result.loc[1000050, 'c50_0_0'] == 1.41
assert query_result.loc[1000060, 'c50_0_0'] == 1.45
assert query_result.loc[1000070, 'c50_0_0'] == 1.50
# check other data-fields
assert pd.isnull(query_result.loc[1000020, 'c103_0_0'])
assert pd.isnull(query_result.loc[1000040, 'c103_0_0'])
assert pd.isnull(query_result.loc[1000060, 'c103_0_0'])
assert pd.isnull(query_result.loc[1000070, 'c103_0_0'])
assert query_result.loc[1000010, 'c103_0_0'] == 'Option 1'
assert query_result.loc[1000021, 'c103_0_0'] == 'Option 2'
assert query_result.loc[1000030, 'c103_0_0'] == 'Option 3'
assert query_result.loc[1000041, 'c103_0_0'] == 'Option 4'
assert query_result.loc[1000050, 'c103_0_0'] == 'Option 5'
assert query_result.loc[1000061, 'c103_0_0'] == 'Option 6'
assert pd.isnull(query_result.loc[1000021, 'c47_0_0'])
assert pd.isnull(query_result.loc[1000041, 'c47_0_0'])
assert pd.isnull(query_result.loc[1000061, 'c47_0_0'])
assert query_result.loc[1000010, 'c47_0_0'] == 41.55312
assert query_result.loc[1000020, 'c47_0_0'] == -10.51461
assert query_result.loc[1000030, 'c47_0_0'] == -35.31471
assert query_result.loc[1000040, 'c47_0_0'] == 5.20832
assert pd.isnull(query_result.loc[1000050, 'c47_0_0'])
assert query_result.loc[1000060, 'c47_0_0'] == 0.55478
assert pd.isnull(query_result.loc[1000070, 'c47_0_0'])<|fim▁end|> | p2sql.load_data()
# Validate
assert p2sql.db_type == 'postgresql' |
<|file_name|>defineSetByView.js<|end_file_name|><|fim▁begin|>//AppliedRules.defineSetByView<|fim▁hole|><|fim▁end|> | module.exports = (function( viewId ){
return this.defineSetByTag( viewId );
}); |
<|file_name|>region.rs<|end_file_name|><|fim▁begin|>use native;
use rustrt::rtio;
use rustrt::rtio::RtioFileStream;
use std::cell::Cell;
use std::os;
use array::*;
use chunk::{
BiomeId,
BlockState,
Chunk,
ChunkColumn,
EMPTY_CHUNK,
LightLevel,
SIZE
};
use minecraft::nbt::Nbt;
<|fim▁hole|> mmap: os::MemoryMap
}
impl Region {
pub fn open(filename: &Path) -> Region {
let mut file = native::io::file::open(
&filename.as_str().unwrap().to_c_str(),
rtio::Open,
rtio::Read
).ok().unwrap();
let min_len = file.fstat().ok().unwrap().size as uint;
let options = [
os::MapFd(file.fd()),
os::MapReadable
];
Region {
mmap: os::MemoryMap::new(min_len, options).unwrap()
}
}
fn as_slice<'a>(&'a self) -> &'a [u8] {
use std::mem;
use std::raw::Slice;
let slice = Slice {
data: self.mmap.data() as *const u8,
len: self.mmap.len()
};
unsafe { mem::transmute(slice) }
}
pub fn get_chunk_column(&self, x: u8, z: u8) -> Option<ChunkColumn> {
let locations = self.as_slice().slice_to(4096);
let i = 4 * ((x % 32) as uint + (z % 32) as uint * 32);
let start = (locations[i] as uint << 16)
| (locations[i + 1] as uint << 8)
| locations[i + 2] as uint;
let num = locations[i + 3] as uint;
if start == 0 || num == 0 { return None; }
let sectors = self.as_slice().slice(start * 4096, (start + num) * 4096);
let len = (sectors[0] as uint << 24)
| (sectors[1] as uint << 16)
| (sectors[2] as uint << 8)
| sectors[3] as uint;
let nbt = match sectors[4] {
1 => Nbt::from_gzip(sectors.slice(5, 4 + len)),
2 => Nbt::from_zlib(sectors.slice(5, 4 + len)),
c => panic!("unknown region chunk compression method {}", c)
};
let mut c = nbt.unwrap().into_compound().unwrap();
let mut level = c.pop_equiv("Level").unwrap().into_compound().unwrap();
let mut chunks = Vec::new();
for chunk in level.pop_equiv("Sections")
.unwrap().into_compound_list().unwrap().into_iter() {
let y = chunk.find_equiv("Y")
.unwrap().as_byte().unwrap();
let blocks = chunk.find_equiv("Blocks")
.unwrap().as_bytearray().unwrap();
let blocks_top = chunk.find_equiv("Add")
.and_then(|x| x.as_bytearray());
let blocks_data = chunk.find_equiv("Data")
.unwrap().as_bytearray().unwrap();
let block_light = chunk.find_equiv("BlockLight")
.unwrap().as_bytearray().unwrap();
let sky_light = chunk.find_equiv("SkyLight")
.unwrap().as_bytearray().unwrap();
fn array_16x16x16<T>(
f: |uint, uint, uint| -> T
) -> [[[T, ..SIZE], ..SIZE], ..SIZE] {
Array::from_fn(|y| -> [[T, ..SIZE], ..SIZE]
Array::from_fn(|z| -> [T, ..16]
Array::from_fn(|x| f(x, y, z))
)
)
}
let chunk = Chunk {
blocks: array_16x16x16(|x, y, z| {
let i = (y * SIZE + z) * SIZE + x;
let top = match blocks_top {
Some(blocks_top) => {
(blocks_top[i >> 1] >> ((i & 1) * 4)) & 0x0f
}
None => 0
};
let data = (blocks_data[i >> 1] >> ((i & 1) * 4)) & 0x0f;
BlockState {
value: (blocks[i] as u16 << 4)
| (top as u16 << 12)
| (data as u16)
}
}),
light_levels: array_16x16x16(|x, y, z| {
let i = (y * 16 + z) * 16 + x;
let block = (block_light[i >> 1] >> ((i & 1) * 4)) & 0x0f;
let sky = (sky_light[i >> 1] >> ((i & 1) * 4)) & 0x0f;
LightLevel {
value: block | (sky << 4)
}
}),
};
let len = chunks.len();
if y as uint >= len {
chunks.grow(y as uint - len + 1, *EMPTY_CHUNK);
}
chunks[y as uint] = chunk;
}
let biomes = level.find_equiv("Biomes")
.unwrap().as_bytearray().unwrap();
Some(ChunkColumn {
chunks: chunks,
buffers: Array::from_fn(|_| Cell::new(None)),
biomes: Array::from_fn(|z| -> [BiomeId, ..SIZE] Array::from_fn(|x| {
BiomeId {
value: biomes[z * SIZE + x]
}
}))
})
}
}<|fim▁end|> | pub struct Region { |
<|file_name|>LeenoUtils.py<|end_file_name|><|fim▁begin|>'''
Often used utility functions
Copyright 2020 by Massimo Del Fedele
'''
import sys
import uno
from com.sun.star.beans import PropertyValue
from datetime import date
import calendar
import PyPDF2
'''
SOME USEFUL NOTES
The window containing the current document (or component):
desktop.CurrentFrame.ContainerWindow
Nothing changes if a non-modal dialog is open,
it ALWAYS returns the document's frame.
desktop.ContainerWindow returns None -- I don't know what it is for
To get the top windows there is the toolkit...
tk = ctx.ServiceManager.createInstanceWithContext("com.sun.star.awt.Toolkit", ctx)
tk.getTopWindowCount() returns the number of top windows
tk.getTopWindow(i) returns one top window from the list
tk.getActiveTopWindow() returns the active top window
To count as active, the active top window must, indeed, be active, that is, have the focus
When debugging, for example, the active window is likely to be None
So the problem ALWAYS remains of how to center a dialog on the current component.
If no dialogs are running, the newly created dialog takes the ContainerWindow as its parent (supposedly...)
and is therefore positioned relative to it
If a dialog is already open and another one is opened from its event handler, the latter takes the former as its parent,
and is therefore positioned relative to that one and not to the main screen.
So a method is needed to find the size OF THE PARENT WINDOW of a dialog, in order to position it.
The UnoControlDialog object gives access to the XWindowPeer (which is of no use at all), to the XView
(which gives me the size of the dialog but NOT its parent...), and to the UnoControlDialogModel, which provides
the 'DesktopAsParent' property, which ONLY tells me whether the dialog is modal (False) or non-modal (True)
The only solution I can think of is to try tk.getActiveTopWindow() and, if None, fall back to the desktop's window
'''
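# Illustrative sketch only, not part of the original module: one way to apply the
# idea described above (try the toolkit's active top window first, then fall back
# to the desktop frame's container window). The helper name _guessParentWindow is
# hypothetical.
def _guessParentWindow():
    ctx = getComponentContext()
    tk = ctx.ServiceManager.createInstanceWithContext("com.sun.star.awt.Toolkit", ctx)
    win = tk.getActiveTopWindow()
    if win is None:
        # no active top window (e.g. while debugging): use the document window
        win = getDesktop().CurrentFrame.ContainerWindow
    return win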
def getComponentContext():
'''
Get current application's component context
'''
try:
if __global_context__ is not None:
return __global_context__
return uno.getComponentContext()
except Exception:
return uno.getComponentContext()
def getDesktop():
'''
Get current application's LibreOffice desktop
'''
ctx = getComponentContext()
return ctx.ServiceManager.createInstanceWithContext("com.sun.star.frame.Desktop", ctx)
def getDocument():
'''
Get active document
'''
desktop = getDesktop()
# try to activate current frame
    # needed sometimes because UNO doesn't find the correct window
# when debugging.
try:
desktop.getCurrentFrame().activate()
except Exception:
pass
return desktop.getCurrentComponent()
def getServiceManager():
'''
Gets the service manager
'''
return getComponentContext().ServiceManager
def createUnoService(serv):
'''
create an UNO service
'''
return getComponentContext().getServiceManager().createInstance(serv)
def MRI(target):
ctx = getComponentContext()
mri = ctx.ServiceManager.createInstanceWithContext("mytools.Mri", ctx)
mri.inspect(target)
def isLeenoDocument():
'''
check if current document is a LeenO document
'''
try:
return getDocument().getSheets().hasByName('S2')
except Exception:
return False
def DisableDocumentRefresh(oDoc):
'''
    Disable refresh to speed up procedures
'''
oDoc.lockControllers()
oDoc.addActionLock()
def EnableDocumentRefresh(oDoc):
'''
    Re-enable refresh
'''
oDoc.removeActionLock()
oDoc.unlockControllers()
def getGlobalVar(name):
if type(__builtins__) == type(sys):
bDict = __builtins__.__dict__<|fim▁hole|> else:
bDict = __builtins__
return bDict.get('LEENO_GLOBAL_' + name)
def setGlobalVar(name, value):
if type(__builtins__) == type(sys):
bDict = __builtins__.__dict__
else:
bDict = __builtins__
bDict['LEENO_GLOBAL_' + name] = value
def initGlobalVars(dict):
if type(__builtins__) == type(sys):
bDict = __builtins__.__dict__
else:
bDict = __builtins__
for key, value in dict.items():
bDict['LEENO_GLOBAL_' + key] = value
def dictToProperties(values, unoAny=False):
'''
convert a dictionary in a tuple of UNO properties
if unoAny is True, return the result in an UNO Any variable
otherwise use a python tuple
'''
ps = tuple([PropertyValue(Name=n, Value=v) for n, v in values.items()])
if unoAny:
ps = uno.Any('[]com.sun.star.beans.PropertyValue', ps)
return ps
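# Illustrative usage only, not part of the original module: building document
# export properties; the filter name and values here are just an example.
#   exportProps = dictToProperties({'FilterName': 'writer_pdf_Export', 'Overwrite': True})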
def daysInMonth(dat):
'''
returns days in month of date dat
'''
month = dat.month + 1
year = dat.year
if month > 12:
month = 1
year += 1
dat2 = date(year=year, month=month, day=dat.day)
t = dat2 - dat
return t.days
def firstWeekDay(dat):
'''
returns first week day in month from dat
monday is 0
'''
return calendar.weekday(dat.year, dat.month, 1)
DAYNAMES = ['Lun', 'Mar', 'Mer', 'Gio', 'Ven', 'Sab', 'Dom']
MONTHNAMES = [
'Gennaio', 'Febbraio', 'Marzo', 'Aprile',
'Maggio', 'Giugno', 'Luglio', 'Agosto',
'Settembre', 'Ottobre', 'Novembre', 'Dicembre'
]
def date2String(dat, fmt = 0):
'''
    convert a date to a string
fmt = 0 25 Febbraio 2020
fmt = 1 25/2/2020
fmt = 2 25-02-2020
fmt = 3 25.02.2020
'''
d = dat.day
m = dat.month
if m < 10:
ms = '0' + str(m)
else:
ms = str(m)
y = dat.year
if fmt == 1:
return str(d) + '/' + ms + '/' + str(y)
elif fmt == 2:
return str(d) + '-' + ms + '-' + str(y)
elif fmt == 3:
return str(d) + '.' + ms + '.' + str(y)
else:
return str(d) + ' ' + MONTHNAMES[m - 1] + ' ' + str(y)
def string2Date(s):
if '.' in s:
sp = s.split('.')
elif '/' in s:
sp = s.split('/')
elif '-' in s:
sp = s.split('-')
else:
return date.today()
if len(sp) != 3:
raise Exception
day = int(sp[0])
month = int(sp[1])
year = int(sp[2])
return date(day=day, month=month, year=year)
def countPdfPages(path):
'''
Returns the number of pages in a PDF document
using external PyPDF2 module
'''
with open(path, 'rb') as f:
pdf = PyPDF2.PdfFileReader(f)
return pdf.getNumPages()
def replacePatternWithField(oTxt, pattern, oField):
'''
Replaces a string pattern in a Text object
(for example '[PATTERN]') with the given field
'''
# pattern may be there many times...
repl = False
pos = oTxt.String.find(pattern)
while pos >= 0:
#create a cursor
cursor = oTxt.createTextCursor()
# use it to select the pattern
cursor.collapseToStart()
cursor.goRight(pos, False)
cursor.goRight(len(pattern), True)
# remove the pattern from text
cursor.String = ''
# insert the field at cursor's position
cursor.collapseToStart()
oTxt.insertTextContent(cursor, oField, False)
# next occurrence of pattern
pos = oTxt.String.find(pattern)
repl = True
return repl<|fim▁end|> | |
<|file_name|>world.rs<|end_file_name|><|fim▁begin|>use rand::Rng;
use crate::{
geometry::Size,
models::{Bullet, Enemy, Particle, Player, Powerup, Star},
};
const MAX_STARS: usize = 100;
/// A model that contains the other models and renders them
pub struct World {
pub player: Player,
pub particles: Vec<Particle>,
pub powerups: Vec<Powerup>,
pub bullets: Vec<Bullet>,
pub enemies: Vec<Enemy>,
pub stars: Vec<Star>,
pub size: Size,
}
impl World {
/// Returns a new world of the given size
pub fn new<R: Rng>(rng: &mut R, size: Size) -> World {
World {
player: Player::random(rng, size),
particles: Vec::with_capacity(1000),<|fim▁hole|> enemies: vec![],
stars: (0..MAX_STARS).map(|_| Star::new(size, rng)).collect(),
size: size,
}
}
}<|fim▁end|> | powerups: vec![],
bullets: vec![], |
<|file_name|>persona.py<|end_file_name|><|fim▁begin|>"""
BrowserID support
"""
from social.backends.base import BaseAuth
from social.exceptions import AuthFailed, AuthMissingParameter
class PersonaAuth(BaseAuth):
"""BrowserID authentication backend"""
name = 'persona'
def get_user_id(self, details, response):
"""Use BrowserID email as ID"""<|fim▁hole|> # {'status': 'okay',
# 'audience': 'localhost:8000',
# 'expires': 1328983575529,
# 'email': '[email protected]',
# 'issuer': 'browserid.org'}
email = response['email']
return {'username': email.split('@', 1)[0],
'email': email,
'fullname': '',
'first_name': '',
'last_name': ''}
def extra_data(self, user, uid, response, details):
"""Return users extra data"""
return {'audience': response['audience'],
'issuer': response['issuer']}
def auth_complete(self, *args, **kwargs):
"""Completes loging process, must return user instance"""
if not 'assertion' in self.data:
raise AuthMissingParameter(self, 'assertion')
response = self.get_json('https://browserid.org/verify', data={
'assertion': self.data['assertion'],
'audience': self.strategy.request_host()
}, method='POST')
if response.get('status') == 'failure':
raise AuthFailed(self)
kwargs.update({'response': response, 'backend': self})
return self.strategy.authenticate(*args, **kwargs)<|fim▁end|> | return details['email']
def get_user_details(self, response):
"""Return user details, BrowserID only provides Email.""" |
<|file_name|>tuple-style-enum.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-tidy-linelength
// min-lldb-version: 310
// compile-flags:-g
// === GDB TESTS ===================================================================================
// gdb-command:set print union on
// gdb-command:run
// gdb-command:print case1
// gdb-check:$1 = {{RUST$ENUM$DISR = Case1, 0, 31868, 31868, 31868, 31868}, {RUST$ENUM$DISR = Case1, 0, 2088533116, 2088533116}, {RUST$ENUM$DISR = Case1, 0, 8970181431921507452}}
// gdb-command:print case2
// gdb-check:$2 = {{RUST$ENUM$DISR = Case2, 0, 4369, 4369, 4369, 4369}, {RUST$ENUM$DISR = Case2, 0, 286331153, 286331153}, {RUST$ENUM$DISR = Case2, 0, 1229782938247303441}}
// gdb-command:print case3
// gdb-check:$3 = {{RUST$ENUM$DISR = Case3, 0, 22873, 22873, 22873, 22873}, {RUST$ENUM$DISR = Case3, 0, 1499027801, 1499027801}, {RUST$ENUM$DISR = Case3, 0, 6438275382588823897}}
// gdb-command:print univariant
// gdb-check:$4 = {{-1}}
// === LLDB TESTS ==================================================================================
// lldb-command:run
// lldb-command:print case1
// lldb-check:[...]$0 = Case1(0, 31868, 31868, 31868, 31868)
// lldb-command:print case2
// lldb-check:[...]$1 = Case2(0, 286331153, 286331153)
// lldb-command:print case3<|fim▁hole|>// lldb-check:[...]$3 = TheOnlyCase(-1)
#![allow(unused_variables)]
#![omit_gdb_pretty_printer_section]
use self::Regular::{Case1, Case2, Case3};
use self::Univariant::TheOnlyCase;
// The first element is to ensure proper alignment, irrespective of the machines word size. Since
// the size of the discriminant value is machine dependent, this has be taken into account when
// datatype layout should be predictable as in this case.
enum Regular {
Case1(u64, u16, u16, u16, u16),
Case2(u64, u32, u32),
Case3(u64, u64)
}
enum Univariant {
TheOnlyCase(i64)
}
fn main() {
// In order to avoid endianness trouble all of the following test values consist of a single
// repeated byte. This way each interpretation of the union should look the same, no matter if
// this is a big or little endian machine.
// 0b0111110001111100011111000111110001111100011111000111110001111100 = 8970181431921507452
// 0b01111100011111000111110001111100 = 2088533116
// 0b0111110001111100 = 31868
// 0b01111100 = 124
let case1 = Case1(0, 31868, 31868, 31868, 31868);
// 0b0001000100010001000100010001000100010001000100010001000100010001 = 1229782938247303441
// 0b00010001000100010001000100010001 = 286331153
// 0b0001000100010001 = 4369
// 0b00010001 = 17
let case2 = Case2(0, 286331153, 286331153);
// 0b0101100101011001010110010101100101011001010110010101100101011001 = 6438275382588823897
// 0b01011001010110010101100101011001 = 1499027801
// 0b0101100101011001 = 22873
// 0b01011001 = 89
let case3 = Case3(0, 6438275382588823897);
let univariant = TheOnlyCase(-1);
zzz(); // #break
}
fn zzz() {()}<|fim▁end|> | // lldb-check:[...]$2 = Case3(0, 6438275382588823897)
// lldb-command:print univariant |
<|file_name|>watcher.error-no-input.js<|end_file_name|><|fim▁begin|>import { exec } from "child_process"<|fim▁hole|>import test from "tape"
import cliBin from "./utils/cliBin"
test("--watch error if no input files", (t) => {
exec(
`${ cliBin }/testBin --watch`,
(err, stdout, stderr) => {
t.ok(
err,
"should return an error when <input> or <output> are missing when " +
"`--watch` option passed"
)
t.ok(
stderr.includes("--watch requires"),
"should show an explanation when <input> or <output> are missing when" +
" `--watch` option passed"
)
t.end()
}
)
})<|fim▁end|> | |
<|file_name|>test_mimehandler.py<|end_file_name|><|fim▁begin|>from thug.ThugAPI.ThugAPI import ThugAPI
class TestMIMEHandler(object):
def do_perform_test(self, caplog, url, expected, type_ = "remote"):
thug = ThugAPI()<|fim▁hole|> thug.set_ssl_verify()
thug.log_init(url)
m = getattr(thug, "run_{}".format(type_))
m(url)
records = [r.message for r in caplog.records]
matches = 0
for e in expected:
for record in records:
if e in record:
matches += 1
assert matches >= len(expected)
def test_zip_handler(self, caplog):
expected = ['[Window] Alert Text: Foobar']
self.do_perform_test(caplog, "https://github.com/buffer/thug/raw/master/tests/test_files/test.js.zip", expected)<|fim▁end|> |
thug.set_useragent('win7ie90')
thug.set_features_logging() |
<|file_name|>theming-tests.tsx<|end_file_name|><|fim▁begin|>import * as React from "react";
import {
channel,
ContextWithTheme,
Theme,
themeListener,
ThemeProvider,
withTheme
} from "theming";
// Typings currently accept non-plain-objects while they get rejected at runtime.
// There exists currently no typing for plain objects.
const runtimeErrorTheme: Theme = [];
const customTheme = {
color: {
primary: "red",
secondary: "blue"
}
};
type CustomTheme = typeof customTheme;
interface DemoBoxProps {
text: string;
theme: CustomTheme;
}
const DemoBox = ({ text, theme }: DemoBoxProps) => {
return <div style={{ color: theme.color.primary }}>{text}</div>;
};
const ThemedDemoBox = withTheme(DemoBox);
const renderDemoBox = () => <ThemedDemoBox text="Hello, World!" />;
const App = () => {
return (
<ThemeProvider theme={customTheme}>
<ThemedDemoBox text="Theme provided" />
</ThemeProvider>
);
};
const AugmentedApp = () => {<|fim▁hole|> <ThemedDemoBox text="Theme provided" />
</ThemeProvider>
</ThemeProvider>
);
};
function customWithTheme<P>(
// tslint:disable-next-line: no-unnecessary-generics
Component: React.ComponentType<P & { theme: object }>
) {
return class CustomWithTheme extends React.Component<P, { theme: object }> {
static contextTypes = themeListener.contextTypes;
context: any;
setTheme = (theme: object) => this.setState({ theme });
subscription: number | undefined;
constructor(props: P, context: ContextWithTheme<typeof channel>) {
super(props, context);
this.state = { theme: themeListener.initial(context) };
}
componentDidMount() {
this.subscription = themeListener.subscribe(this.context, this.setTheme);
}
componentWillUnmount() {
const { subscription } = this;
if (subscription != null) {
themeListener.unsubscribe(this.context, subscription);
}
}
render() {
const { theme } = this.state;
return <Component theme={theme} {...this.props} />;
}
};
}<|fim▁end|> | return (
<ThemeProvider theme={customTheme}>
<ThemeProvider theme={outerTheme => ({ ...outerTheme, augmented: true })}> |
<|file_name|>actionpotential.py<|end_file_name|><|fim▁begin|>import os
import webapp2
from actions import cronActions
from views import views
import secrets
SECS_PER_WEEK = 60 * 60 * 24 * 7
# Enable ctypes -> Jinja2 tracebacks
PRODUCTION_MODE = not os.environ.get(
'SERVER_SOFTWARE', 'Development').startswith('Development')
ROOT_DIRECTORY = os.path.dirname(__file__)
if not PRODUCTION_MODE:
from google.appengine.tools.devappserver2.python import sandbox
sandbox._WHITE_LIST_C_MODULES += ['_ctypes', 'gestalt']
TEMPLATE_DIRECTORY = os.path.join(ROOT_DIRECTORY, 'src')
else:
TEMPLATE_DIRECTORY = os.path.join(ROOT_DIRECTORY, 'dist')
curr_path = os.path.abspath(os.path.dirname(__file__))
config = {
'webapp2_extras.sessions': {
'secret_key': secrets.COOKIE_KEY,
'session_max_age': SECS_PER_WEEK,
'cookie_args': {'max_age': SECS_PER_WEEK},
'cookie_name': 'echo_sense_session'
},
'webapp2_extras.jinja2': {
'template_path': TEMPLATE_DIRECTORY
}<|fim▁hole|>
app = webapp2.WSGIApplication(
[
# Cron jobs (see cron.yaml)
webapp2.Route('/cron/monthly', handler=cronActions.Monthly),
webapp2.Route(r'/<:.*>', handler=views.ActionPotentialApp, name="ActionPotentialApp"),
], debug=True, config=config)<|fim▁end|> | } |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>from datetime import date
NTESTS = 1
PREV_DAYS = 10
PERCENT_UP = 0.01
PERCENT_DOWN = 0.01
PERIOD = 'Hourly' # [5-min, 15-min, 30-min, Hourly, 2-hour, 6-hour, 12-hour, Daily, Weekly]
MARKET = 'bitstampUSD'
# DATE START
YEAR_START = 2011
MONTH_START = 9
DAY_START = 13<|fim▁hole|>DATE_END = date.today()
URL_DATA_BASE = 'http://bitcoincharts.com/charts/chart.json?'<|fim▁end|> | DATE_START = date(YEAR_START, MONTH_START, DAY_START)
# DATE END |
<|file_name|>Short.java<|end_file_name|><|fim▁begin|><|fim▁hole|> * Convert short to any primitive data type
*/
public class Short {
/**
* Convert short to boolean
*
* @param input short
* @return boolean
*/
public static boolean shortToBoolean(short input) {
return input != 0;
}
/**
* Convert short to byte
*
* @param input short
* @return byte
*/
public static byte shortToByte(short input) {
return (byte) input;
}
/**
* Convert short to byte[]
*
* @param input short
* @return byte[]
*/
public static byte[] shortToByteArray(short input) {
return new byte[]{
(byte) (input >>> 8),
(byte) input};
}
/**
* Convert short to char
*
* @param input short
* @return char
*/
public static char shortToChar(short input) {
return (char) input;
}
/**
* Convert short to double
*
* @param input short
* @return double
*/
public static double shortToDouble(short input) {
return (double) input;
}
/**
* Convert short to float
*
* @param input short
* @return float
*/
public static float shortToFloat(short input) {
return (float) input;
}
/**
* Convert short to int
*
* @param input short
* @return int
*/
public static int shortToInt(short input) {
return (int) input;
}
/**
* Convert short to long
*
* @param input short
* @return long
*/
public static long shortToLong(short input) {
return (long) input;
}
/**
* Convert short to String
*
* @param input short
* @return String
*/
public static java.lang.String shortToString(short input) {
return java.lang.Short.toString(input);
}
}<|fim▁end|> | package com.kashukov.convert;
/** |
<|file_name|>consul_agent_test.go<|end_file_name|><|fim▁begin|>package cloudfoundry_test
import (
"github.com/enaml-ops/omg-product-bundle/products/oss_cf/enaml-gen/consul_agent"
. "github.com/enaml-ops/omg-product-bundle/products/oss_cf/plugin"
"github.com/enaml-ops/omg-product-bundle/products/oss_cf/plugin/config"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("Consul Agent", func() {
Context("when initialized WITH a complete set of arguments", func() {
var cfg *config.Config
BeforeEach(func() {
cfg = &config.Config{
Secret: config.Secret{},
User: config.User{},
Certs: &config.Certs{},
InstanceCount: config.InstanceCount{},
IP: config.IP{},
}
cfg.ConsulAgentCert = "agent-cert"
cfg.ConsulAgentKey = "agent-key"
cfg.ConsulServerCert = "server-cert"
cfg.ConsulEncryptKeys = []string{"encyption-key"}
cfg.ConsulServerKey = "server-key"
cfg.ConsulIPs = []string{"1.0.0.1", "1.0.0.2"}
})
It("then consul with server false", func() {
consulAgent := NewConsulAgent([]string{}, cfg)
Ω(consulAgent.Mode).Should(Equal(""))
})
It("then consul with server true", func() {
consulAgent := NewConsulAgentServer(cfg)
Ω(consulAgent.Mode).Should(Equal("server"))
})
It("then job properties are set properly for server false", func() {<|fim▁hole|> Ω(props.Consul.Agent.Servers.Lan).Should(ConsistOf("1.0.0.1", "1.0.0.2"))
Ω(props.Consul.AgentCert).Should(Equal("agent-cert"))
Ω(props.Consul.AgentKey).Should(Equal("agent-key"))
Ω(props.Consul.ServerCert).Should(Equal("server-cert"))
Ω(props.Consul.ServerKey).Should(Equal("server-key"))
Ω(props.Consul.EncryptKeys).Should(ConsistOf("encyption-key"))
Ω(props.Consul.Agent.Domain).Should(Equal("cf.internal"))
Ω(props.Consul.Agent.Mode).Should(BeNil())
})
It("then job properties are set properly etcd service", func() {
consulAgent := NewConsulAgent([]string{"etcd"}, cfg)
job := consulAgent.CreateJob()
Ω(job).ShouldNot(BeNil())
props := job.Properties.(*consul_agent.ConsulAgentJob)
etcdMap := make(map[string]map[string]string)
etcdMap["etcd"] = make(map[string]string)
Ω(props.Consul.Agent.Services).Should(Equal(etcdMap))
})
It("then job properties are set properly etcd and uaa service", func() {
consulAgent := NewConsulAgent([]string{"etcd", "uaa"}, cfg)
job := consulAgent.CreateJob()
Ω(job).ShouldNot(BeNil())
props := job.Properties.(*consul_agent.ConsulAgentJob)
servicesMap := make(map[string]map[string]string)
servicesMap["etcd"] = make(map[string]string)
servicesMap["uaa"] = make(map[string]string)
Ω(props.Consul.Agent.Services).Should(Equal(servicesMap))
})
It("then job properties are set properly for server true", func() {
consulAgent := NewConsulAgentServer(cfg)
job := consulAgent.CreateJob()
Ω(job).ShouldNot(BeNil())
props := job.Properties.(*consul_agent.ConsulAgentJob)
Ω(props.Consul.Agent.Servers.Lan).Should(ConsistOf("1.0.0.1", "1.0.0.2"))
Ω(props.Consul.AgentCert).Should(Equal("agent-cert"))
Ω(props.Consul.AgentKey).Should(Equal("agent-key"))
Ω(props.Consul.ServerCert).Should(Equal("server-cert"))
Ω(props.Consul.ServerKey).Should(Equal("server-key"))
Ω(props.Consul.EncryptKeys).Should(ConsistOf("encyption-key"))
Ω(props.Consul.Agent.Domain).Should(Equal("cf.internal"))
Ω(props.Consul.Agent.Mode).Should(Equal("server"))
})
})
})<|fim▁end|> | consulAgent := NewConsulAgent([]string{}, cfg)
job := consulAgent.CreateJob()
Ω(job).ShouldNot(BeNil())
props := job.Properties.(*consul_agent.ConsulAgentJob) |
<|file_name|>implicit_cellular.rs<|end_file_name|><|fim▁begin|>/// The documentation is taken from original [C++ library by Joshua Tippetts](http://accidentalnoise.sourceforge.net/docs.html).
use super::implicit_base::ImplicitModuleBase;
use super::ImplicitModule;
use super::cellular_gen::*;
use std::rc::Rc;
use std::cell::RefCell;<|fim▁hole|> generator: Option<Rc<RefCell<CellularGenerator>>>,
coefficients: [f64; 4],
}
impl ImplicitCellular {
pub fn new() -> ImplicitCellular {
ImplicitCellular {
base: Default::default(),
generator: None,
coefficients: [1.0, 0.0, 0.0, 0.0],
}
}
pub fn with_coefficients(a: f64, b: f64, c: f64, d: f64) -> ImplicitCellular {
ImplicitCellular {
base: Default::default(),
generator: None,
coefficients: [a, b, c, d],
}
}
pub fn set_coefficients(&mut self, a: f64, b: f64, c: f64, d: f64) {
self.coefficients[0] = a;
self.coefficients[1] = b;
self.coefficients[2] = c;
self.coefficients[3] = d;
}
pub fn set_cellular_source(&mut self, source: Option<Rc<RefCell<CellularGenerator>>>) {
self.generator = source;
}
}
impl ImplicitModule for ImplicitCellular {
fn set_seed(&mut self, seed: u32) {
if let Some(ref mut g) = self.generator {
g.borrow_mut().set_seed(seed);
}
}
fn get_2d(&mut self, x: f64, y: f64) -> f64 {
match self.generator {
None => 0.0,
Some(ref mut g) => {
let mut b = g.borrow_mut();
let c = b.get_2d(x, y);
c.f[0] * self.coefficients[0] + c.f[1] * self.coefficients[1] + c.f[2] * self.coefficients[2] + c.f[3] * self.coefficients[3]
}
}
}
fn get_3d(&mut self, x: f64, y: f64, z: f64) -> f64 {
match self.generator {
None => 0.0,
Some(ref mut g) => {
let mut b = g.borrow_mut();
let c = b.get_3d(x, y, z);
c.f[0] * self.coefficients[0] + c.f[1] * self.coefficients[1] + c.f[2] * self.coefficients[2] + c.f[3] * self.coefficients[3]
}
}
}
fn get_4d(&mut self, x: f64, y: f64, z: f64, w: f64) -> f64 {
match self.generator {
None => 0.0,
Some(ref mut g) => {
let mut b = g.borrow_mut();
let c = b.get_4d(x, y, z, w);
c.f[0] * self.coefficients[0] + c.f[1] * self.coefficients[1] + c.f[2] * self.coefficients[2] + c.f[3] * self.coefficients[3]
}
}
}
fn get_6d(&mut self, x: f64, y: f64, z: f64, w: f64, u: f64, v: f64) -> f64 {
match self.generator {
None => 0.0,
Some(ref mut g) => {
let mut b = g.borrow_mut();
let c = b.get_6d(x, y, z, w, u, v);
c.f[0] * self.coefficients[0] + c.f[1] * self.coefficients[1] + c.f[2] * self.coefficients[2] + c.f[3] * self.coefficients[3]
}
}
}
fn set_deriv_spacing(&mut self, s: f64) {
self.base.spacing = s;
}
fn spacing(&self) -> f64 {
self.base.spacing
}
}<|fim▁end|> |
/// The Cellular function provides an interface to a [`CellularGenerator`](struct.CellularGenerator.html) to generate cellular noise based on the function originally written by Steve Worley. The function calculates F1,F2,F3 and F4 as distances to the first, second, third and fourth nearest seed points, applies the specified coefficients to the distances and sums them together, returning the result.
pub struct ImplicitCellular {
base: ImplicitModuleBase, |
<|file_name|>exception_sink.py<|end_file_name|><|fim▁begin|># Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import datetime
import faulthandler
import logging
import os
import signal
import sys
import threading
import traceback
from contextlib import contextmanager
from typing import Callable, Iterator, Optional
import setproctitle
from pants.base.exiter import Exiter
from pants.util.dirutil import safe_mkdir, safe_open
from pants.util.osutil import Pid
logger = logging.getLogger(__name__)
class SignalHandler:
"""A specification for how to handle a fixed set of nonfatal signals.
This is subclassed and registered with ExceptionSink.reset_signal_handler() whenever the signal
handling behavior is modified for different pants processes, for example in the remote client when
pantsd is enabled. The default behavior is to exit "gracefully" by leaving a detailed log of which
signal was received, then exiting with failure.
Note that the terminal will convert a ctrl-c from the user into a SIGINT.
"""
@property
def signal_handler_mapping(self):
"""A dict mapping (signal number) -> (a method handling the signal)."""
# Could use an enum here, but we never end up doing any matching on the specific signal value,
# instead just iterating over the registered signals to set handlers, so a dict is probably
# better.
return {
signal.SIGINT: self._handle_sigint_if_enabled,
signal.SIGQUIT: self.handle_sigquit,
signal.SIGTERM: self.handle_sigterm,
}
def __init__(self):
self._ignore_sigint_lock = threading.Lock()
self._threads_ignoring_sigint = 0
self._ignoring_sigint_v2_engine = False
def _check_sigint_gate_is_correct(self):
assert (
self._threads_ignoring_sigint >= 0
), "This should never happen, someone must have modified the counter outside of SignalHandler."
def _handle_sigint_if_enabled(self, signum, _frame):
with self._ignore_sigint_lock:
self._check_sigint_gate_is_correct()
threads_ignoring_sigint = self._threads_ignoring_sigint
ignoring_sigint_v2_engine = self._ignoring_sigint_v2_engine
if threads_ignoring_sigint == 0 and not ignoring_sigint_v2_engine:
self.handle_sigint(signum, _frame)
def _toggle_ignoring_sigint_v2_engine(self, toggle: bool):
with self._ignore_sigint_lock:
self._ignoring_sigint_v2_engine = toggle
@contextmanager
def _ignoring_sigint(self):
with self._ignore_sigint_lock:
self._check_sigint_gate_is_correct()
self._threads_ignoring_sigint += 1
try:
yield
finally:
with self._ignore_sigint_lock:
self._threads_ignoring_sigint -= 1
self._check_sigint_gate_is_correct()
def handle_sigint(self, signum, _frame):
raise KeyboardInterrupt("User interrupted execution with control-c!")
# TODO(#7406): figure out how to let sys.exit work in a signal handler instead of having to raise
# this exception!
class SignalHandledNonLocalExit(Exception):
"""Raised in handlers for non-fatal signals to overcome Python limitations.
When waiting on a subprocess and in a signal handler, sys.exit appears to be ignored, and
causes the signal handler to return. We want to (eventually) exit after these signals, not
ignore them, so we raise this exception instead and check it in our sys.excepthook override.
"""
def __init__(self, signum, signame):
self.signum = signum
self.signame = signame
self.traceback_lines = traceback.format_stack()
super(SignalHandler.SignalHandledNonLocalExit, self).__init__()
def handle_sigquit(self, signum, _frame):
raise self.SignalHandledNonLocalExit(signum, "SIGQUIT")
def handle_sigterm(self, signum, _frame):
raise self.SignalHandledNonLocalExit(signum, "SIGTERM")
class ExceptionSink:
"""A mutable singleton object representing where exceptions should be logged to."""
# NB: see the bottom of this file where we call reset_log_location() and other mutators in order
# to properly setup global state.
_log_dir = None
# We need an exiter in order to know what to do after we log a fatal exception or handle a
# catchable signal.
_exiter: Optional[Exiter] = None
# Where to log stacktraces to in a SIGUSR2 handler.
_interactive_output_stream = None
# Whether to print a stacktrace in any fatal error message printed to the terminal.
_should_print_backtrace_to_terminal = True
# An instance of `SignalHandler` which is invoked to handle a static set of specific
# nonfatal signals (these signal handlers are allowed to make pants exit, but unlike SIGSEGV they
# don't need to exit immediately).
_signal_handler: Optional[SignalHandler] = None
# These persistent open file descriptors are kept so the signal handler can do almost no work
# (and lets faulthandler figure out signal safety).
_pid_specific_error_fileobj = None
_shared_error_fileobj = None
def __new__(cls, *args, **kwargs):
raise TypeError("Instances of {} are not allowed to be constructed!".format(cls.__name__))
class ExceptionSinkError(Exception):
pass
@classmethod
def reset_should_print_backtrace_to_terminal(cls, should_print_backtrace):
"""Set whether a backtrace gets printed to the terminal error stream on a fatal error.
Class state:
- Overwrites `cls._should_print_backtrace_to_terminal`.
"""
cls._should_print_backtrace_to_terminal = should_print_backtrace
# All reset_* methods are ~idempotent!
@classmethod
def reset_log_location(cls, new_log_location: str) -> None:
"""Re-acquire file handles to error logs based in the new location.
Class state:
- Overwrites `cls._log_dir`, `cls._pid_specific_error_fileobj`, and
`cls._shared_error_fileobj`.
OS state:
- May create a new directory.
- Overwrites signal handlers for many fatal and non-fatal signals (but not SIGUSR2).
:raises: :class:`ExceptionSink.ExceptionSinkError` if the directory does not exist or is not
writable.
"""
# We could no-op here if the log locations are the same, but there's no reason not to have the
# additional safety of re-acquiring file descriptors each time (and erroring out early if the
# location is no longer writable).
try:
safe_mkdir(new_log_location)
except Exception as e:
raise cls.ExceptionSinkError(
"The provided log location path at '{}' is not writable or could not be created: {}.".format(
new_log_location, str(e)
),
e,
)
pid = os.getpid()
pid_specific_log_path = cls.exceptions_log_path(for_pid=pid, in_dir=new_log_location)
shared_log_path = cls.exceptions_log_path(in_dir=new_log_location)
assert pid_specific_log_path != shared_log_path
try:
pid_specific_error_stream = safe_open(pid_specific_log_path, mode="w")
shared_error_stream = safe_open(shared_log_path, mode="a")
except Exception as e:
raise cls.ExceptionSinkError(
"Error opening fatal error log streams for log location '{}': {}".format(
new_log_location, str(e)
)
)
# NB: mutate process-global state!
if faulthandler.is_enabled():
logger.debug("re-enabling faulthandler")
# Call Py_CLEAR() on the previous error stream:
# https://github.com/vstinner/faulthandler/blob/master/faulthandler.c
faulthandler.disable()
# Send a stacktrace to this file if interrupted by a fatal error.
faulthandler.enable(file=pid_specific_error_stream, all_threads=True)
# NB: mutate the class variables!
cls._log_dir = new_log_location
cls._pid_specific_error_fileobj = pid_specific_error_stream
cls._shared_error_fileobj = shared_error_stream
class AccessGlobalExiterMixin:
@property
def _exiter(self) -> Optional[Exiter]:
return ExceptionSink.get_global_exiter()
@classmethod
def get_global_exiter(cls) -> Optional[Exiter]:
return cls._exiter
@classmethod
@contextmanager
def exiter_as(cls, new_exiter_fun: Callable[[Optional[Exiter]], Exiter]) -> Iterator[None]:
"""Temporarily override the global exiter.
NB: We don't want to try/finally here, because we want exceptions to propagate
with the most recent exiter installed in sys.excepthook.
If we wrap this in a try:finally, exceptions will be caught and exiters unset.
"""
previous_exiter = cls._exiter
new_exiter = new_exiter_fun(previous_exiter)<|fim▁hole|> @classmethod
@contextmanager
def exiter_as_until_exception(
cls, new_exiter_fun: Callable[[Optional[Exiter]], Exiter]
) -> Iterator[None]:
"""Temporarily override the global exiter, except this will unset it when an exception
happens."""
previous_exiter = cls._exiter
new_exiter = new_exiter_fun(previous_exiter)
try:
cls._reset_exiter(new_exiter)
yield
finally:
cls._reset_exiter(previous_exiter)
@classmethod
def _reset_exiter(cls, exiter: Optional[Exiter]) -> None:
"""Class state:
- Overwrites `cls._exiter`.
Python state:
- Overwrites sys.excepthook.
"""
logger.debug(f"overriding the global exiter with {exiter} (from {cls._exiter})")
# NB: mutate the class variables! This is done before mutating the exception hook, because the
# uncaught exception handler uses cls._exiter to exit.
cls._exiter = exiter
# NB: mutate process-global state!
sys.excepthook = cls._log_unhandled_exception_and_exit
@classmethod
def reset_interactive_output_stream(
cls, interactive_output_stream, override_faulthandler_destination=True
):
"""Class state:
- Overwrites `cls._interactive_output_stream`.
OS state:
- Overwrites the SIGUSR2 handler.
This method registers a SIGUSR2 handler, which permits a non-fatal `kill -31 <pants pid>` for
stacktrace retrieval. This is also where the the error message on fatal exit will be printed to.
"""
try:
# NB: mutate process-global state!
# This permits a non-fatal `kill -31 <pants pid>` for stacktrace retrieval.
if override_faulthandler_destination:
faulthandler.register(
signal.SIGUSR2, interactive_output_stream, all_threads=True, chain=False
)
# NB: mutate the class variables!
cls._interactive_output_stream = interactive_output_stream
except ValueError:
# Warn about "ValueError: IO on closed file" when the stream is closed.
cls.log_exception(
"Cannot reset interactive_output_stream -- stream (probably stderr) is closed"
)
@classmethod
def exceptions_log_path(cls, for_pid=None, in_dir=None):
"""Get the path to either the shared or pid-specific fatal errors log file."""
if for_pid is None:
intermediate_filename_component = ""
else:
assert isinstance(for_pid, Pid)
intermediate_filename_component = ".{}".format(for_pid)
in_dir = in_dir or cls._log_dir
return os.path.join(
in_dir, ".pids", "exceptions{}.log".format(intermediate_filename_component)
)
@classmethod
def log_exception(cls, msg):
"""Try to log an error message to this process's error log and the shared error log.
NB: Doesn't raise (logs an error instead).
"""
pid = os.getpid()
fatal_error_log_entry = cls._format_exception_message(msg, pid)
# We care more about this log than the shared log, so write to it first.
try:
cls._try_write_with_flush(cls._pid_specific_error_fileobj, fatal_error_log_entry)
except Exception as e:
logger.error(
"Error logging the message '{}' to the pid-specific file handle for {} at pid {}:\n{}".format(
msg, cls._log_dir, pid, e
)
)
# Write to the shared log.
try:
# TODO: we should probably guard this against concurrent modification by other pants
# subprocesses somehow.
cls._try_write_with_flush(cls._shared_error_fileobj, fatal_error_log_entry)
except Exception as e:
logger.error(
"Error logging the message '{}' to the shared file handle for {} at pid {}:\n{}".format(
msg, cls._log_dir, pid, e
)
)
@classmethod
def _try_write_with_flush(cls, fileobj, payload):
"""This method is here so that it can be patched to simulate write errors.
This is because mock can't patch primitive objects like file objects.
"""
fileobj.write(payload)
fileobj.flush()
@classmethod
def reset_signal_handler(cls, signal_handler):
"""Class state:
- Overwrites `cls._signal_handler`.
OS state:
- Overwrites signal handlers for SIGINT, SIGQUIT, and SIGTERM.
NB: This method calls signal.signal(), which will crash if not called from the main thread!
:returns: The :class:`SignalHandler` that was previously registered, or None if this is
the first time this method was called.
"""
assert isinstance(signal_handler, SignalHandler)
# NB: Modify process-global state!
for signum, handler in signal_handler.signal_handler_mapping.items():
signal.signal(signum, handler)
# Retry any system calls interrupted by any of the signals we just installed handlers for
# (instead of having them raise EINTR). siginterrupt(3) says this is the default behavior on
# Linux and OSX.
signal.siginterrupt(signum, False)
previous_signal_handler = cls._signal_handler
# NB: Mutate the class variables!
cls._signal_handler = signal_handler
return previous_signal_handler
@classmethod
@contextmanager
def trapped_signals(cls, new_signal_handler):
"""A contextmanager which temporarily overrides signal handling.
NB: This method calls signal.signal(), which will crash if not called from the main thread!
"""
previous_signal_handler = cls.reset_signal_handler(new_signal_handler)
try:
yield
finally:
cls.reset_signal_handler(previous_signal_handler)
@classmethod
@contextmanager
def ignoring_sigint(cls):
"""A contextmanager which disables handling sigint in the current signal handler. This
allows threads that are not the main thread to ignore sigint.
NB: Only use this if you can't use ExceptionSink.trapped_signals().
Class state:
- Toggles `self._ignore_sigint` in `cls._signal_handler`.
"""
with cls._signal_handler._ignoring_sigint():
yield
@classmethod
def toggle_ignoring_sigint_v2_engine(cls, toggle: bool) -> None:
assert cls._signal_handler is not None
cls._signal_handler._toggle_ignoring_sigint_v2_engine(toggle)
@classmethod
def _iso_timestamp_for_now(cls):
return datetime.datetime.now().isoformat()
# NB: This includes a trailing newline, but no leading newline.
_EXCEPTION_LOG_FORMAT = """\
timestamp: {timestamp}
process title: {process_title}
sys.argv: {args}
pid: {pid}
{message}
"""
@classmethod
def _format_exception_message(cls, msg, pid):
return cls._EXCEPTION_LOG_FORMAT.format(
timestamp=cls._iso_timestamp_for_now(),
process_title=setproctitle.getproctitle(),
args=sys.argv,
pid=pid,
message=msg,
)
_traceback_omitted_default_text = "(backtrace omitted)"
@classmethod
def _format_traceback(cls, traceback_lines, should_print_backtrace):
if should_print_backtrace:
traceback_string = "\n{}".format("".join(traceback_lines))
else:
traceback_string = " {}".format(cls._traceback_omitted_default_text)
return traceback_string
_UNHANDLED_EXCEPTION_LOG_FORMAT = """\
Exception caught: ({exception_type}){backtrace}
Exception message: {exception_message}{maybe_newline}
"""
@classmethod
def _format_unhandled_exception_log(cls, exc, tb, add_newline, should_print_backtrace):
exc_type = type(exc)
exception_full_name = "{}.{}".format(exc_type.__module__, exc_type.__name__)
exception_message = str(exc) if exc else "(no message)"
maybe_newline = "\n" if add_newline else ""
return cls._UNHANDLED_EXCEPTION_LOG_FORMAT.format(
exception_type=exception_full_name,
backtrace=cls._format_traceback(
traceback_lines=traceback.format_tb(tb),
should_print_backtrace=should_print_backtrace,
),
exception_message=exception_message,
maybe_newline=maybe_newline,
)
_EXIT_FAILURE_TERMINAL_MESSAGE_FORMAT = """\
{timestamp_msg}{terminal_msg}{details_msg}
"""
@classmethod
def _exit_with_failure(cls, terminal_msg):
timestamp_msg = (
f"timestamp: {cls._iso_timestamp_for_now()}\n"
if cls._should_print_backtrace_to_terminal
else ""
)
details_msg = (
""
if cls._should_print_backtrace_to_terminal
else "\n\n(Use --print-exception-stacktrace to see more error details.)"
)
terminal_msg = terminal_msg or "<no exit reason provided>"
formatted_terminal_msg = cls._EXIT_FAILURE_TERMINAL_MESSAGE_FORMAT.format(
timestamp_msg=timestamp_msg, terminal_msg=terminal_msg, details_msg=details_msg
)
# Exit with failure, printing a message to the terminal (or whatever the interactive stream is).
cls._exiter.exit_and_fail(msg=formatted_terminal_msg, out=cls._interactive_output_stream)
@classmethod
def _log_unhandled_exception_and_exit(
cls, exc_class=None, exc=None, tb=None, add_newline=False
):
"""A sys.excepthook implementation which logs the error and exits with failure."""
exc_class = exc_class or sys.exc_info()[0]
exc = exc or sys.exc_info()[1]
tb = tb or sys.exc_info()[2]
# This exception was raised by a signal handler with the intent to exit the program.
if exc_class == SignalHandler.SignalHandledNonLocalExit:
return cls._handle_signal_gracefully(exc.signum, exc.signame, exc.traceback_lines)
extra_err_msg = None
try:
# Always output the unhandled exception details into a log file, including the traceback.
exception_log_entry = cls._format_unhandled_exception_log(
exc, tb, add_newline, should_print_backtrace=True
)
cls.log_exception(exception_log_entry)
except Exception as e:
extra_err_msg = "Additional error logging unhandled exception {}: {}".format(exc, e)
logger.error(extra_err_msg)
# Generate an unhandled exception report fit to be printed to the terminal (respecting the
# Exiter's should_print_backtrace field).
if cls._should_print_backtrace_to_terminal:
stderr_printed_error = cls._format_unhandled_exception_log(
exc, tb, add_newline, should_print_backtrace=cls._should_print_backtrace_to_terminal
)
if extra_err_msg:
stderr_printed_error = "{}\n{}".format(stderr_printed_error, extra_err_msg)
else:
# If the user didn't ask for a backtrace, show a succinct error message without
# all the exception-related preamble. A power-user/pants developer can still
# get all the preamble info along with the backtrace, but the end user shouldn't
# see that boilerplate by default.
error_msgs = getattr(exc, "end_user_messages", lambda: [str(exc)])()
stderr_printed_error = "\n" + "\n".join(f"ERROR: {msg}" for msg in error_msgs)
cls._exit_with_failure(stderr_printed_error)
_CATCHABLE_SIGNAL_ERROR_LOG_FORMAT = """\
Signal {signum} ({signame}) was raised. Exiting with failure.{formatted_traceback}
"""
@classmethod
def _handle_signal_gracefully(cls, signum, signame, traceback_lines):
"""Signal handler for non-fatal signals which raises or logs an error and exits with
failure."""
# Extract the stack, and format an entry to be written to the exception log.
formatted_traceback = cls._format_traceback(
traceback_lines=traceback_lines, should_print_backtrace=True
)
signal_error_log_entry = cls._CATCHABLE_SIGNAL_ERROR_LOG_FORMAT.format(
signum=signum, signame=signame, formatted_traceback=formatted_traceback
)
# TODO: determine the appropriate signal-safe behavior here (to avoid writing to our file
# descriptors re-entrantly, which raises an IOError).
# This method catches any exceptions raised within it.
cls.log_exception(signal_error_log_entry)
# Create a potentially-abbreviated traceback for the terminal or other interactive stream.
formatted_traceback_for_terminal = cls._format_traceback(
traceback_lines=traceback_lines,
should_print_backtrace=cls._should_print_backtrace_to_terminal,
)
terminal_log_entry = cls._CATCHABLE_SIGNAL_ERROR_LOG_FORMAT.format(
signum=signum, signame=signame, formatted_traceback=formatted_traceback_for_terminal
)
# Exit, printing the output to the terminal.
cls._exit_with_failure(terminal_log_entry)
# Setup global state such as signal handlers and sys.excepthook with probably-safe values at module
# import time.
# Set the log location for writing logs before bootstrap options are parsed.
ExceptionSink.reset_log_location(os.getcwd())
# Sets except hook for exceptions at import time.
ExceptionSink._reset_exiter(Exiter(exiter=sys.exit))
# Sets a SIGUSR2 handler.
ExceptionSink.reset_interactive_output_stream(sys.stderr.buffer)
# Sets a handler that logs nonfatal signals to the exception sink before exiting.
ExceptionSink.reset_signal_handler(SignalHandler())
# Set whether to print stacktraces on exceptions or signals during import time.
# NB: This will be overridden by bootstrap options in PantsRunner, so we avoid printing out a full
# stacktrace when a user presses control-c during import time unless the environment variable is set
# to explicitly request it. The exception log will have any stacktraces regardless so this should
# not hamper debugging.
ExceptionSink.reset_should_print_backtrace_to_terminal(
should_print_backtrace=os.environ.get("PANTS_PRINT_EXCEPTION_STACKTRACE", "True") == "True"
)<|fim▁end|> | cls._reset_exiter(new_exiter)
yield
cls._reset_exiter(previous_exiter)
|
<|file_name|>receitas-app.js<|end_file_name|><|fim▁begin|>angular.module('ReceitasApp', ['ngRoute', 'minhasDiretivas'])
.config(function ($routeProvider) {
$routeProvider.when('/', {
templateUrl: 'home.html',
});
$routeProvider.when('/adicionar', {
templateUrl: 'adicionar.html',
});<|fim▁hole|> DbFactory.loadDb();
}]);<|fim▁end|> | })
.run(['DbFactory', function (DbFactory) { |
<|file_name|>ClassDemo03.java<|end_file_name|><|fim▁begin|>package com.jikexueyuan.classdemo;
/**
* Created by zmzp on 14-12-4.
*/
class Student{<|fim▁hole|>public class ClassDemo03 {
public static void main(String[] args) {
// Student stu = new Student();
// stu.tell();
//匿名对象
new Student().tell();
}
}<|fim▁end|> | public void tell(){
System.out.println("Hello Jikexueyuan");
}
} |
<|file_name|>music.py<|end_file_name|><|fim▁begin|>#Music Class and support functions
import pygame
import parameters
from filemanager import filemanager
from pygame.locals import *
from pygame import *
from pygame.mixer import *
#Pygame Module for Music and Sound
pigmusic = None
currentStdMusic=None
currentMenuMusic=None
currentType = None
def initmusic():
global pigmusic
#Init pygame mixer and music
print "music init GO"
try:
if pygame.mixer and not pygame.mixer.get_init():
pygame.mixer.init()
if not pygame.mixer:
print 'Warning, sound disabled'
else:
pigmusic=pygame.mixer.music
except (pygame.error):
print 'Warning, unable to init music'
print "music init OUT ",pigmusic
def upmusic():
global pigmusic
if not pigmusic:
return
vol=pigmusic.get_volume()
if vol <= 0.9:
pigmusic.set_volume(vol+0.1)
def downmusic():
global pigmusic
if not pigmusic:
return
vol=pigmusic.get_volume()
if vol > 0.0:
pigmusic.set_volume(vol-0.1)
def stopmusic():
global pigmusic
if not pygame.mixer.get_init():
return
if not pigmusic:
return
if pigmusic.get_busy():
pigmusic.stop()
def setvolume(vol):
global pigmusic
pigmusic.set_volume(vol)
def getcurrentStdMusic():
global currentStdMusic
return currentStdMusic
def getcurrentMenuMusic():
global currentMenuMusic
return currentMenuMusic
def returtostdmusic():
#called when we want to force the music to play std music
cur=currentStdMusic
cur.playmusic()
class Music:
def __init__(self, name, filename, musictype='std', vol=0.5):
self._name=name
self._file=filename
self._type=musictype
self._vol=vol
def playmusic(self,loop=-1):
global pigmusic,currentStdMusic,currentMenuMusic,currentType
print "music play",self._file
if not pigmusic:
initmusic()
if self._type == 'std':
#print "music std type current is ",currentType
if not currentStdMusic:
#print "music std no currentStdMusic, we create it with ",self._file
currentStdMusic=self
#print "is pigmusic busy ? ",pigmusic.get_busy()
if pigmusic.get_busy():
#print "music std, music is busy"
if currentType == 'std':
#print "music std, currentType is std isn't it : ",currentType
if currentStdMusic.getfile()==self._file:
#print "music std, same music don't do anything"
return
else:
#print "music std, not the same we change, currentStdMusic=",self._file
currentStdMusic=self
#print "is pigmusic busy ? ",pigmusic.get_busy()
if pigmusic.get_busy():
print " music std, music is busy"
if currentType == 'std':
print " music std, currentType is std isn't it : ",currentType
if currentStdMusic.getfile()==self._file:
print " music std, same music don't do anything"
return
else:
print " music std, not the same we change, currentStdMusic=",self._file
currentStdMusic=self
else:
print " music std, current type is menu isn't it :", currentType ," so we change it to std\n"
#we change menu slide to standard slide
currentType='std'
else:
#print "music std, current type is menu isn't it :", currentType ," so we change it to std\n"
#we change menu slide to standard slide
currentType='std'
else:
#print "music std, music is not busy we start it"
currentType='std'
currentStdMusic=self
else:
#print "music menu type current is ",currentType
if not currentMenuMusic:
#print "music menu no currentMenuMusic, we create it with ",self._file
currentMenuMusic=self
if pigmusic.get_busy():
#print "music menu, music is busy"<|fim▁hole|> #return
pass
else:
#print "music menu, not the same we change, currentMenuMusic=",self._file
currentMenuMusic=self
if pigmusic.get_busy():
print " music menu, music is busy"
if currentType == 'menu':
print " music menu, currentType is menu isn't it : ",currentType
if currentMenuMusic.getfile()==self._file:
print " music menu, same music don't do anything"
return
else:
print " music menu, not the same we change, currentMenuMusic=",self._file
currentMenuMusic=self
else:
print " music menu, current type is std isn't it :", currentType ," so we change it to menu\n"
#we change standard slide to menu slide
currentType='menu'
else:
#print "music menu, current type is std isn't it :", currentType ," so we change it to menu\n"
#we change standard slide to menu slide
currentType='menu'
else:
#print "music menu ,music is not busy we start it"
currentType='menu'
currentMenuMusic=self
pigmusic.load(filemanager.find_music(self._file))
pigmusic.set_volume(self._vol)
pigmusic.play(loop)
def getfile(self):
return self._file
def getname(self):
return self._name
def stopmusic(self):
print "we stop music!!!!! ",self._file
global pigmusic
if not pigmusic:
return
if pigmusic.get_busy():
if self._type == 'std':
if currentStdMusic.getfile()==self._file:
pigmusic.stop()
else:
if currentMenuMusic.getfile()==self._file:
pigmusic.stop()<|fim▁end|> | if currentType == 'menu':
#print "music menu, currentType is menu isn't it : ",currentType
if currentMenuMusic.getfile()==self._file:
#print "music menu, same music don't do anything" |
<|file_name|>copy.cc<|end_file_name|><|fim▁begin|>// { dg-options "-std=gnu++11" }
// { dg-require-cstdint "" }
//
// 2010-02-16 Paolo Carlini <[email protected]>
//
// Copyright (C) 2010-2014 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this library; see the file COPYING3. If not see
// <http://www.gnu.org/licenses/>.
#include <random>
void
test01()
{
std::subtract_with_carry_engine<unsigned long, 24, 10, 24> e(1);
const auto f(e);
auto g(f);
g = g; // Suppress unused warning.
}<|fim▁hole|>int main()
{
test01();
return 0;
}<|fim▁end|> | |
<|file_name|>message.go<|end_file_name|><|fim▁begin|>// Copyright 2014 The go-burnout Authors
// This file is part of the go-burnout library.
//
// The go-burnout library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-burnout library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-burnout library. If not, see <http://www.gnu.org/licenses/>.
package p2p
import (
"bytes"
"errors"
"fmt"
"io"
"io/ioutil"
"net"
"sync"
"sync/atomic"
"time"
"github.com/burnoutcoin/go-burnout/event"
"github.com/burnoutcoin/go-burnout/p2p/discover"
"github.com/burnoutcoin/go-burnout/rlp"
)
// Msg defines the structure of a p2p message.
//
// Note that a Msg can only be sent once since the Payload reader is
// consumed during sending. It is not possible to create a Msg and
// send it any number of times. If you want to reuse an encoded
// structure, encode the payload into a byte array and create a
// separate Msg with a bytes.Reader as Payload for each send.
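// An illustrative sketch (not from the original source) of building such a
// reusable message from a pre-encoded payload:
//
//	payload, _ := rlp.EncodeToBytes([]uint{1, 2, 3})
//	msg := Msg{Code: 0x10, Size: uint32(len(payload)), Payload: bytes.NewReader(payload)}
//
// where the message code 0x10 is arbitrary.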
type Msg struct {
Code uint64
	Size       uint32 // size of the payload
Payload io.Reader
ReceivedAt time.Time<|fim▁hole|>//
// For the decoding rules, please see package rlp.
func (msg Msg) Decode(val interface{}) error {
s := rlp.NewStream(msg.Payload, uint64(msg.Size))
if err := s.Decode(val); err != nil {
return newPeerError(errInvalidMsg, "(code %x) (size %d) %v", msg.Code, msg.Size, err)
}
return nil
}
func (msg Msg) String() string {
return fmt.Sprintf("msg #%v (%v bytes)", msg.Code, msg.Size)
}
// Discard reads any remaining payload data into a black hole.
func (msg Msg) Discard() error {
_, err := io.Copy(ioutil.Discard, msg.Payload)
return err
}
type MsgReader interface {
ReadMsg() (Msg, error)
}
type MsgWriter interface {
// WriteMsg sends a message. It will block until the message's
// Payload has been consumed by the other end.
//
// Note that messages can be sent only once because their
// payload reader is drained.
WriteMsg(Msg) error
}
// MsgReadWriter provides reading and writing of encoded messages.
// Implementations should ensure that ReadMsg and WriteMsg can be
// called simultaneously from multiple goroutines.
type MsgReadWriter interface {
MsgReader
MsgWriter
}
// Send writes an RLP-encoded message with the given code.
// data should encode as an RLP list.
func Send(w MsgWriter, msgcode uint64, data interface{}) error {
size, r, err := rlp.EncodeToReader(data)
if err != nil {
return err
}
return w.WriteMsg(Msg{Code: msgcode, Size: uint32(size), Payload: r})
}
// SendItems writes an RLP with the given code and data elements.
// For a call such as:
//
// SendItems(w, code, e1, e2, e3)
//
// the message payload will be an RLP list containing the items:
//
// [e1, e2, e3]
//
func SendItems(w MsgWriter, msgcode uint64, elems ...interface{}) error {
return Send(w, msgcode, elems)
}
// netWrapper wraps a MsgReadWriter with locks around
// ReadMsg/WriteMsg and applies read/write deadlines.
type netWrapper struct {
rmu, wmu sync.Mutex
rtimeout, wtimeout time.Duration
conn net.Conn
wrapped MsgReadWriter
}
func (rw *netWrapper) ReadMsg() (Msg, error) {
rw.rmu.Lock()
defer rw.rmu.Unlock()
rw.conn.SetReadDeadline(time.Now().Add(rw.rtimeout))
return rw.wrapped.ReadMsg()
}
func (rw *netWrapper) WriteMsg(msg Msg) error {
rw.wmu.Lock()
defer rw.wmu.Unlock()
rw.conn.SetWriteDeadline(time.Now().Add(rw.wtimeout))
return rw.wrapped.WriteMsg(msg)
}
// eofSignal wraps a reader with eof signaling. the eof channel is
// closed when the wrapped reader returns an error or when count bytes
// have been read.
type eofSignal struct {
wrapped io.Reader
count uint32 // number of bytes left
eof chan<- struct{}
}
// note: when using eofSignal to detect whether a message payload
// has been read, Read might not be called for zero sized messages.
func (r *eofSignal) Read(buf []byte) (int, error) {
if r.count == 0 {
if r.eof != nil {
r.eof <- struct{}{}
r.eof = nil
}
return 0, io.EOF
}
max := len(buf)
if int(r.count) < len(buf) {
max = int(r.count)
}
n, err := r.wrapped.Read(buf[:max])
r.count -= uint32(n)
if (err != nil || r.count == 0) && r.eof != nil {
r.eof <- struct{}{} // tell Peer that msg has been consumed
r.eof = nil
}
return n, err
}
// MsgPipe creates a message pipe. Reads on one end are matched
// with writes on the other. The pipe is full-duplex, both ends
// implement MsgReadWriter.
func MsgPipe() (*MsgPipeRW, *MsgPipeRW) {
var (
c1, c2 = make(chan Msg), make(chan Msg)
closing = make(chan struct{})
closed = new(int32)
rw1 = &MsgPipeRW{c1, c2, closing, closed}
rw2 = &MsgPipeRW{c2, c1, closing, closed}
)
return rw1, rw2
}
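// A minimal usage sketch (illustrative): the two ends are typically driven
// from separate goroutines, for example:
//
//	rw1, rw2 := MsgPipe()
//	go func() { Send(rw1, 8, [][]byte{{0, 0}}) }()
//	msg, _ := rw2.ReadMsg()
//	msg.Discard()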
// ErrPipeClosed is returned from pipe operations after the
// pipe has been closed.
var ErrPipeClosed = errors.New("p2p: read or write on closed message pipe")
// MsgPipeRW is an endpoint of a MsgReadWriter pipe.
type MsgPipeRW struct {
w chan<- Msg
r <-chan Msg
closing chan struct{}
closed *int32
}
// WriteMsg sends a message on the pipe.
// It blocks until the receiver has consumed the message payload.
func (p *MsgPipeRW) WriteMsg(msg Msg) error {
if atomic.LoadInt32(p.closed) == 0 {
consumed := make(chan struct{}, 1)
msg.Payload = &eofSignal{msg.Payload, msg.Size, consumed}
select {
case p.w <- msg:
if msg.Size > 0 {
// wait for payload read or discard
select {
case <-consumed:
case <-p.closing:
}
}
return nil
case <-p.closing:
}
}
return ErrPipeClosed
}
// ReadMsg returns a message sent on the other end of the pipe.
func (p *MsgPipeRW) ReadMsg() (Msg, error) {
if atomic.LoadInt32(p.closed) == 0 {
select {
case msg := <-p.r:
return msg, nil
case <-p.closing:
}
}
return Msg{}, ErrPipeClosed
}
// Close unblocks any pending ReadMsg and WriteMsg calls on both ends
// of the pipe. They will return ErrPipeClosed. Close also
// interrupts any reads from a message payload.
func (p *MsgPipeRW) Close() error {
if atomic.AddInt32(p.closed, 1) != 1 {
// someone else is already closing
atomic.StoreInt32(p.closed, 1) // avoid overflow
return nil
}
close(p.closing)
return nil
}
// ExpectMsg reads a message from r and verifies that its
// code and encoded RLP content match the provided values.
// If content is nil, the payload is discarded and not verified.
func ExpectMsg(r MsgReader, code uint64, content interface{}) error {
msg, err := r.ReadMsg()
if err != nil {
return err
}
if msg.Code != code {
return fmt.Errorf("message code mismatch: got %d, expected %d", msg.Code, code)
}
if content == nil {
return msg.Discard()
} else {
contentEnc, err := rlp.EncodeToBytes(content)
if err != nil {
panic("content encode error: " + err.Error())
}
if int(msg.Size) != len(contentEnc) {
return fmt.Errorf("message size mismatch: got %d, want %d", msg.Size, len(contentEnc))
}
actualContent, err := ioutil.ReadAll(msg.Payload)
if err != nil {
return err
}
if !bytes.Equal(actualContent, contentEnc) {
return fmt.Errorf("message payload mismatch:\ngot: %x\nwant: %x", actualContent, contentEnc)
}
}
return nil
}
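// An illustrative sketch of how ExpectMsg is typically used in a protocol test
// (the reader, message code and payload below are placeholders):
//
//	if err := ExpectMsg(rw, 0x01, []uint{42}); err != nil {
//		t.Fatal(err)
//	}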
// msgEventer wraps a MsgReadWriter and sends events whenever a message is sent
// or received
type msgEventer struct {
MsgReadWriter
feed *event.Feed
peerID discover.NodeID
Protocol string
}
// newMsgEventer returns a msgEventer which sends message events to the given
// feed
func newMsgEventer(rw MsgReadWriter, feed *event.Feed, peerID discover.NodeID, proto string) *msgEventer {
return &msgEventer{
MsgReadWriter: rw,
feed: feed,
peerID: peerID,
Protocol: proto,
}
}
// ReadMsg reads a message from the underlying MsgReadWriter and emits a
// "message received" event
func (self *msgEventer) ReadMsg() (Msg, error) {
msg, err := self.MsgReadWriter.ReadMsg()
if err != nil {
return msg, err
}
self.feed.Send(&PeerEvent{
Type: PeerEventTypeMsgRecv,
Peer: self.peerID,
Protocol: self.Protocol,
MsgCode: &msg.Code,
MsgSize: &msg.Size,
})
return msg, nil
}
// WriteMsg writes a message to the underlying MsgReadWriter and emits a
// "message sent" event
func (self *msgEventer) WriteMsg(msg Msg) error {
err := self.MsgReadWriter.WriteMsg(msg)
if err != nil {
return err
}
self.feed.Send(&PeerEvent{
Type: PeerEventTypeMsgSend,
Peer: self.peerID,
Protocol: self.Protocol,
MsgCode: &msg.Code,
MsgSize: &msg.Size,
})
return nil
}
// Close closes the underlying MsgReadWriter if it implements the io.Closer
// interface
func (self *msgEventer) Close() error {
if v, ok := self.MsgReadWriter.(io.Closer); ok {
return v.Close()
}
return nil
}<|fim▁end|> | }
// Decode parses the RLP content of a message into
// the given value, which must be a pointer. |
<|file_name|>toolkit.js<|end_file_name|><|fim▁begin|>re.on = function(name, callback) {
return function(el) {
return el.addEventListener(name, callback);
};
};
(function() {
[ 'click', 'dblclick', 'wheel',
'keydown', 'keyup',
'input', 'focus', 'blur',
'drag', 'dragstart', 'dragover', 'dragstop', 'drop',
'mousedown', 'mouseup', 'mouseenter', 'mouseleave', 'mousemove', 'mouseout'
].forEach(function(eventName) {
re.on[eventName] = function(callback) {
return re.on(eventName, callback);
};
});
re.on.wheel.down = function wheelDown(callback) {
return re.on.wheel(function(ev) {
ev.preventDefault();
if (ev.wheelDelta < 0)
callback(ev);
});
};
re.on.wheel.up = function wheelUp(callback) {
return re.on.wheel(function(ev) {
ev.preventDefault();
if (ev.wheelDelta > 0)
callback(ev);
});
};
// keystroke sugar:
// re.on.keydown.g(ev => console.log('you pressed g!'));
// re.on.keydown.ctrl.s(functionThatSavesMyStuff)(document.body);
var chars;
var otherKeys;
function loadKeyNames(evName) {
if (!chars) {
chars = [];
for (var i=0 ; i<230 ; i++) {
var char = String.fromCharCode(i);
if (char.length != "")
chars.push(char);
}
}
if (!otherKeys)
otherKeys = {
shift:16, ctrl:17, alt:18,
backspace:8, tab:9, enter:13, pause:19, capsLock:20, escape:27,
pageUp:33, pageDown:34, end:35, home:36, left:37, up:38, right:39, down:40,
insert:45, delete:46,
leftWindow:91, rightWindow:92, select:93,
f1:112, f2:113, f3:114, f4:115, f5:116, f6:117, f7:118, f8:119, f9:120, f10:121, f11:122, f12:123,
numLock:144, scrollLock:145
};
var evSetup = re.on[evName];
Object.keys(otherKeys).forEach(function(keyName) {
evSetup[keyName] = function(callback) {
return evSetup(function(ev) {
if (otherKeys[keyName] === ev.which) {
ev.preventDefault();
callback(ev);
}
});
};
});
chars.forEach(function(char) {
evSetup[char] = function(callback) {
return evSetup(function(ev) {
if (String.fromCharCode(ev.which).toLowerCase() === char) {
ev.preventDefault();
callback(ev);
}
});
};
evSetup.ctrl[char] = function(callback) {
return evSetup(function(ev) {
if ((ev.ctrlKey || ev.metaKey) && String.fromCharCode(ev.which).toLowerCase() === char) {
ev.preventDefault();
callback(ev);
}
});
};
evSetup.shift[char] = function(callback) {
return evSetup(function(ev) {
if (ev.shiftKey && String.fromCharCode(ev.which).toLowerCase() === char) {
ev.preventDefault();
callback(ev);
}
});
};
evSetup.alt[char] = function(callback) {
return evSetup(function(ev) {
if (ev.altKey && String.fromCharCode(ev.which).toLowerCase() === char) {
ev.preventDefault();
callback(ev);
}
});
};
});
};
// performs an action once when a property is first used via a getter, then removes the function-based property
function setupProperty(obj, prop, loader) {
//if (!Object.defineProperty)
loader(prop);
/*else {
var holder = obj[prop];
Object.defineProperty(obj, prop, {
get: function() {
loader(prop);
var out = obj[prop];
Object.defineProperty(obj, prop, {
value: obj[prop],
enumerable: true
});
return out;
},
enumerable: true,
configurable: true
});
}*/
}
['keydown', 'keyup'].forEach(function(evName) {
setupProperty(re.on, evName, loadKeyNames);
});
})();
(function() {
var hashRouter;
re.getHashRouter = function getHashRouter() {
if (!hashRouter) {
hashRouter = {};
var gsLoc = re(hashRouter, 'location', function() {
return window.location;
});
['hash', 'search', 'pathname'].map(function(prop) {
return re(hashRouter, prop, function get() {
return hashRouter.location[prop];
}, function set(value) {
hashRouter.location[prop] = value;
});
});
var ops = window.onpopstate;
window.onpopstate = function(ev) {
re.invalidate(gsLoc);
if (ops)
ops(ev);
};
var priorLoc;
setInterval(function() {
var loc = window.location.toString();
if (priorLoc !== loc)
re.invalidate(gsLoc);
priorLoc = loc;
}, 200);
}
return hashRouter;
};
})();
(function() {
function eventTrigger(thiz) {
var counter = 1;
var out = function(callback) {
var id = counter++;
out.dependents[id] = callback;
out.remove = function() {
delete out.dependents[id];
};
};
out.trigger = function() {
var args = arguments;
Object.keys(out.dependents).forEach(function(key) {
var callback = out.dependents[key];
callback.apply(thiz, args);
});
};
out.dependents = {};
return out;
}
re.alertArray = function(arr) {
if (!arr.onRemove) {
arr.onRemove = eventTrigger(arr);
arr.onInsert = eventTrigger(arr);
var push = arr.push;
arr.push = function(val) {
var out = push.call(arr, val);
arr.onInsert.trigger(val, arr.length);
return out;
};
var pop = arr.pop;
arr.pop = function() {
var out = pop.apply(arr);
arr.onRemove.trigger(arr.length - 1);
return out;
};
var shift = arr.shift;
arr.shift = function() {
var out = shift.apply(arr);
arr.onRemove.trigger(0);
return out;
};
var unshift = arr.unshift;
arr.unshift = function(val) {
var out = unshift.call(arr, val);
                arr.onInsert.trigger(val, 0);
return out;
};
var splice = arr.splice;
arr.splice = function(pos, deleteCount) {
pos = pos.valueOf();
var out = splice.apply(arr, arguments);
while (deleteCount > 0) {
arr.onRemove.trigger(pos + deleteCount - 1);
<|fim▁hole|> deleteCount--;
}
for (var i=2; i<arguments.length; i++) {
var item = arguments[i];
arr.onInsert.trigger(item, pos + i - 2);
}
return out;
};
}
};
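    // Illustrative usage sketch (the array and handlers are hypothetical):
    //     var items = [1, 2, 3];
    //     re.alertArray(items);
    //     items.onInsert(function(val, pos) { console.log('added', val, 'at', pos); });
    //     items.onRemove(function(pos) { console.log('removed index', pos); });
    //     items.push(4);   // fires the onInsert callback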
re.bindMap = function(arr, transform) {
function posGetter(val) {
var out = function getPos() {
for (var i=0; i<arr.length; i++)
if (val === arr[i])
return i;
};
out.valueOf = out;
return out;
};
// map via for loop to prevent undesired change detection
var out = [];
for (var i=0; i<arr.length; i++) {
var item = arr[i];
var tItem = transform(item, posGetter(item), arr);
out.push(tItem);
}
re.alertArray(arr);
arr.onRemove(function(pos) {
return out.splice(pos, 1);
});
arr.onInsert(function(item, pos) {
var tItem = transform(item, posGetter(item), arr);
return out.splice(pos, 0, tItem);
});
return out;
};
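    // Illustrative usage sketch (item shape and transform are hypothetical):
    //     var labels = re.bindMap(people, function(person) { return person.name; });
    //     people.push({name: 'Ada'});      // 'Ada' is appended to labels
    //     people.splice(0, 1);             // the first label is removed as well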
re.arrInstall = function(arr) {
re.alertArray(arr);
var initialized = false;
var installations = [];
return function(el, loc) {
if (!initialized) {
initialized = true;
arr.onRemove(function(pos) {
arr;
var inst = installations[pos];
inst.remove();
installations.splice(pos, 1);
});
arr.onInsert(function(item, pos) {
arr;
var instPos = installations[pos];
var inst;
if (instPos)
inst = instPos.insertContent(item);
else
inst = loc.installChild(item, el);
installations.splice(pos, 0, inst);
});
arr.forEach(function(item) {
var inst = loc.installChild(item, el);
installations.push(inst);
});
}
};
};
re.mapInstall = function(arr, transform) {
return re.arrInstall(re.bindMap(arr, transform));
};
re.for = re.mapInstall;
})();<|fim▁end|> | |
<|file_name|>MicroLock.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <folly/MicroLock.h>
#include <thread>
#include <folly/portability/Asm.h>
namespace folly {
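// lockSlowPath: spins (pausing, then yielding) while the slot's held bit stays
// set; once the spin budget is exhausted it publishes the wait bit and blocks
// on the futex, and it finally acquires the lock by compare-exchanging the
// held bit into the word.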
void MicroLockCore::lockSlowPath(uint32_t oldWord,
detail::Futex<>* wordPtr,
uint32_t slotHeldBit,
unsigned maxSpins,
unsigned maxYields) {
uint32_t newWord;
unsigned spins = 0;
uint32_t slotWaitBit = slotHeldBit << 1;
retry:
if ((oldWord & slotHeldBit) != 0) {
++spins;
if (spins > maxSpins + maxYields) {
// Somebody appears to have the lock. Block waiting for the
        // holder to unlock the lock. We set waitbit(slot) so that the
// lock holder knows to FUTEX_WAKE us.
newWord = oldWord | slotWaitBit;
if (newWord != oldWord) {<|fim▁hole|> std::memory_order_relaxed,
std::memory_order_relaxed)) {
goto retry;
}
}
(void)wordPtr->futexWait(newWord, slotHeldBit);
} else if (spins > maxSpins) {
// sched_yield(), but more portable
std::this_thread::yield();
} else {
folly::asm_volatile_pause();
}
oldWord = wordPtr->load(std::memory_order_relaxed);
goto retry;
}
newWord = oldWord | slotHeldBit;
if (!wordPtr->compare_exchange_weak(oldWord,
newWord,
std::memory_order_acquire,
std::memory_order_relaxed)) {
goto retry;
}
}
} // namespace folly<|fim▁end|> | if (!wordPtr->compare_exchange_weak(oldWord,
newWord, |
<|file_name|>xchacha20poly1305_key_manager.rs<|end_file_name|><|fim▁begin|>// Copyright 2020 The Tink-Rust Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
//! Key manager for XChaCha20Poly1305 keys.
use crate::subtle;
use tink_core::{utils::wrap_err, TinkError};
use tink_proto::prost::Message;
/// Maximal version of XChaCha20Poly1305 keys.
pub const X_CHA_CHA20_POLY1305_KEY_VERSION: u32 = 0;
/// Type URL of XChaCha20Poly1305 keys that Tink supports.
pub const X_CHA_CHA20_POLY1305_TYPE_URL: &str =
"type.googleapis.com/google.crypto.tink.XChaCha20Poly1305Key";
/// [`XChaCha20Poly1305KeyManager`] is an implementation of the [`tink_core::registry::KeyManager`]
/// trait. It generates new [`XChaCha20Poly1305Key`](tink_proto::XChaCha20Poly1305Key) keys and
/// produces new instances of [`subtle::XChaCha20Poly1305`].
#[derive(Default)]
pub(crate) struct XChaCha20Poly1305KeyManager {}
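// Typical wiring (hedged summary, not shown in this file): an instance of this
// key manager is registered with the Tink registry during AEAD setup, and
// `primitive()` is then reached through keyset handles rather than being
// called directly by application code.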
impl tink_core::registry::KeyManager for XChaCha20Poly1305KeyManager {
/// Create a [`subtle::XChaCha20Poly1305`] for the given serialized
/// [`tink_proto::XChaCha20Poly1305Key`].
fn primitive(&self, serialized_key: &[u8]) -> Result<tink_core::Primitive, TinkError> {
if serialized_key.is_empty() {
return Err("XChaCha20Poly1305KeyManager: invalid key".into());
}
let key = tink_proto::XChaCha20Poly1305Key::decode(serialized_key)
.map_err(|e| wrap_err("XChaCha20Poly1305KeyManager: invalid key", e))?;
validate_key(&key)?;
match subtle::XChaCha20Poly1305::new(&key.key_value) {
Ok(p) => Ok(tink_core::Primitive::Aead(Box::new(p))),
Err(e) => Err(wrap_err(
"XChaCha20Poly1305KeyManager: cannot create new primitive",
e,
)),
}
}
/// Create a new key, ignoring the specification in the given serialized key format
/// because the key size and other params are fixed.
fn new_key(&self, _serialized_key_format: &[u8]) -> Result<Vec<u8>, TinkError> {
let key = new_x_cha_cha20_poly1305_key();
let mut sk = Vec::new();
key.encode(&mut sk)
.map_err(|e| wrap_err("XChaCha20Poly1305KeyManager: failed to encode new key", e))?;
Ok(sk)
}
fn type_url(&self) -> &'static str {
X_CHA_CHA20_POLY1305_TYPE_URL
}
fn key_material_type(&self) -> tink_proto::key_data::KeyMaterialType {
tink_proto::key_data::KeyMaterialType::Symmetric
}
}
fn new_x_cha_cha20_poly1305_key() -> tink_proto::XChaCha20Poly1305Key {
let key_value = tink_core::subtle::random::get_random_bytes(subtle::X_CHA_CHA20_KEY_SIZE);<|fim▁hole|>}
/// Validates the given [`tink_proto::XChaCha20Poly1305Key`].
fn validate_key(key: &tink_proto::XChaCha20Poly1305Key) -> Result<(), TinkError> {
tink_core::keyset::validate_key_version(key.version, X_CHA_CHA20_POLY1305_KEY_VERSION)
.map_err(|e| wrap_err("XChaCha20Poly1305KeyManager", e))?;
let key_size = key.key_value.len();
if key_size != subtle::X_CHA_CHA20_KEY_SIZE {
return Err(format!(
"XChaCha20Poly1305KeyManager: keySize != {}",
subtle::X_CHA_CHA20_KEY_SIZE
)
.into());
}
Ok(())
}<|fim▁end|> | tink_proto::XChaCha20Poly1305Key {
version: X_CHA_CHA20_POLY1305_KEY_VERSION,
key_value,
} |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import prosper.datareader.exceptions<|fim▁hole|><|fim▁end|> | import prosper.datareader._version |
<|file_name|>p037.py<|end_file_name|><|fim▁begin|><|fim▁hole|># The number 3797 has an interesting property. Being prime itself, it is possible to continuously remove digits from left to right, and remain prime at each stage: 3797, 797, 97, and 7. Similarly we can work from right to left: 3797, 379, 37, and 3.
# Find the sum of the only eleven primes that are both truncatable from left to right and right to left.
# NOTE: 2, 3, 5, and 7 are not considered to be truncatable primes.
from euler import *
if __name__ == '__main__':
primes = PrimesBelow(8e5)
# Remove terminal 1's and 9's
# Remove beginning 1's and 9's
maybeTruncatable = [prime for prime in primes if prime % 10 not in (1, 9)]
maybeTruncatable = [prime for prime in maybeTruncatable if str(prime)[0] not in ('1', '9')]
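    # Rationale: after repeated truncation only a single digit remains, and that
    # digit must itself be prime, so candidates cannot start or end with 1 or 9.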
truncatables = []
for prime in maybeTruncatable:
if IsTruncatable(prime, primes):
truncatables.append(prime)
if len(truncatables) > 11 + 3:
break
print(sum(prime for prime in truncatables if prime > 9))<|fim▁end|> | #! /usr/bin/python3
|
<|file_name|>glprogram.cpp<|end_file_name|><|fim▁begin|>#include "glprogram.h"
#include <gl/gl3w.h>
#include <cstdio>
bool compileStatus(GLuint shader) {
int ret;
glGetShaderiv(shader, GL_COMPILE_STATUS, &ret);
return ret;
}
bool linkStatus(GLuint program) {
int ret;
glGetProgramiv(program, GL_LINK_STATUS, &ret);
return ret;
}
bool compileShader(GLuint handle, GLenum stype, const char* src) {
int shader_len = strlen(src);
glShaderSource(handle, 1, &src, &shader_len);
glCompileShader(handle);
if (!compileStatus(handle)) {
char buff[2048];
int nwritten;
glGetShaderInfoLog(handle, 2048, &nwritten, buff);
const char* typelabel = stype == GL_VERTEX_SHADER ? "vertex" : (stype == GL_FRAGMENT_SHADER ? "fragment" : "unknown");
printf("Error in %s shader\n%s\n", typelabel, buff);
return false;
}
return true;
}
int compileShader(GLenum type, const char* src) {
GLuint handle = glCreateShader(type);
compileShader(handle, type, src);
return handle;
}
bool linkProgram(GLuint handle, GLuint vshader, GLuint fshader) {
glAttachShader(handle, vshader);
glAttachShader(handle, fshader);
glLinkProgram(handle);
if (!linkStatus(handle)) {
char buff[2048];
int nwritten;
glGetProgramInfoLog(handle, 2048, &nwritten, buff);
printf("Program link error:\n%s\n", buff);
return false;
}
return true;
}
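// Illustrative usage sketch (shader sources are hypothetical):
//   GLuint prog = linkProgram(vertexSrc, fragmentSrc);
//   if (prog == 0) { /* compile/link errors were already printed above */ }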
int linkProgram(const char* vshader_src, const char* fshader_src) {<|fim▁hole|> GLuint program = glCreateProgram();
GLuint vshader = compileShader(GL_VERTEX_SHADER, vshader_src);
GLuint fshader = compileShader(GL_FRAGMENT_SHADER, fshader_src);
if (!linkProgram(program, vshader, fshader)) {
glDeleteProgram(program);
program = 0;
}
glDeleteShader(vshader);
glDeleteShader(fshader);
return program;
}<|fim▁end|> | |
<|file_name|>database_sync.go<|end_file_name|><|fim▁begin|>// Copyright 2019 NDP Systèmes. All Rights Reserved.
// See LICENSE file for full licensing details.
package models
import (
"fmt"
"strings"
"github.com/hexya-erp/hexya/src/models/security"
)
// SyncDatabase creates or updates database tables with the data in the model registry
func SyncDatabase() {
log.Info("Updating database schema")
adapter := adapters[db.DriverName()]
dbTables := adapter.tables()
// Create or update sequences
updateDBSequences()
// Create or update existing tables
for tableName, model := range Registry.registryByTableName {
if model.IsMixin() || model.IsManual() {
continue
}
if _, ok := dbTables[tableName]; !ok {
createDBTable(model)
}
updateDBColumns(model)
updateDBIndexes(model)
}
// Setup constraints
for _, model := range Registry.registryByTableName {
if model.IsMixin() || model.IsManual() {
continue
}
buildSQLErrorSubstitutionMap(model)
updateDBForeignKeyConstraints(model)
updateDBConstraints(model)
}
// Run init method on each model
for _, model := range Registry.registryByTableName {
if model.IsMixin() {
continue
}
runInit(model)
}
// Drop DB tables that are not in the models
for dbTable := range adapter.tables() {
var modelExists bool
for tableName, model := range Registry.registryByTableName {
if dbTable != tableName || model.IsMixin() {
continue
}
modelExists = true
break
}
if !modelExists {
dropDBTable(dbTable)
}
}
}
// buildSQLErrorSubstitutionMap populates the sqlErrors map of the
// model with the appropriate error message substitution
func buildSQLErrorSubstitutionMap(model *Model) {
for sqlConstrName, sqlConstr := range model.sqlConstraints {
model.sqlErrors[sqlConstrName] = sqlConstr.errorString
}
for _, field := range model.fields.registryByJSON {
if field.unique {
cName := fmt.Sprintf("%s_%s_key", model.tableName, field.json)
model.sqlErrors[cName] = fmt.Sprintf("%s must be unique", field.name)
}
if field.fieldType.IsFKRelationType() {
cName := fmt.Sprintf("%s_%s_fkey", model.tableName, field.json)
model.sqlErrors[cName] = fmt.Sprintf("%s must reference an existing %s record", field.name, field.relatedModelName)
}
}
}
// updateDBSequences creates sequences in the DB from data in the registry.
func updateDBSequences() {
adapter := adapters[db.DriverName()]
// Create or alter boot sequences
for _, sequence := range Registry.sequences {
if !sequence.boot {
continue
}
exists := false
for _, dbSeq := range adapter.sequences("%_bootseq") {
if sequence.JSON == dbSeq.Name {
exists = true
}
}
if !exists {
adapter.createSequence(sequence.JSON, sequence.Increment, sequence.Start)
continue
}
adapter.alterSequence(sequence.JSON, sequence.Increment, sequence.Start)
}
// Drop unused boot sequences
for _, dbSeq := range adapter.sequences("%_bootseq") {
var sequenceExists bool
for _, sequence := range Registry.sequences {
if sequence.JSON == dbSeq.Name {
sequenceExists = true
break
}
}
if !sequenceExists {
adapter.dropSequence(dbSeq.Name)
}
}
}
// createDBTable creates a table in the database from the given Model
// It only creates the primary key. Call updateDBColumns to create columns.
func createDBTable(m *Model) {
adapter := adapters[db.DriverName()]
var columns []string
for colName, fi := range m.fields.registryByJSON {
if colName == "id" || !fi.isStored() {
continue
}
col := fmt.Sprintf("%s %s", colName, adapter.columnSQLDefinition(fi, false))
columns = append(columns, col)
}
query := fmt.Sprintf(`
CREATE TABLE %s (
id serial NOT NULL PRIMARY KEY`,
adapter.quoteTableName(m.tableName))
if len(columns) > 0 {
query += ",\n\t" + strings.Join(columns, ",\n\t")
}
query += "\n)"
dbExecuteNoTx(query)
}
// dropDBTable drops the given table in the database
func dropDBTable(tableName string) {
adapter := adapters[db.DriverName()]
query := fmt.Sprintf(`DROP TABLE %s`, adapter.quoteTableName(tableName))
dbExecuteNoTx(query)
}
// updateDBColumns synchronizes the colums of the database with the
// given Model.
func updateDBColumns(mi *Model) {
adapter := adapters[db.DriverName()]
dbColumns := adapter.columns(mi.tableName)
// create or update columns from registry data
for colName, fi := range mi.fields.registryByJSON {
if colName == "id" || !fi.isStored() {
continue
}
dbColData, ok := dbColumns[colName]
if !ok {
createDBColumn(fi)
continue
}
if dbColData.DataType != adapter.typeSQL(fi) {
updateDBColumnDataType(fi)
}
if (dbColData.IsNullable == "NO" && !adapter.fieldIsNotNull(fi)) ||
(dbColData.IsNullable == "YES" && adapter.fieldIsNotNull(fi)) {
updateDBColumnNullable(fi)
}
}
// drop columns that no longer exist
for colName := range dbColumns {
if _, ok := mi.fields.registryByJSON[colName]; !ok {
dropDBColumn(mi.tableName, colName)
}
}
}
// createDBColumn insert the column described by Field in the database
func createDBColumn(fi *Field) {
if !fi.isStored() {
log.Panic("createDBColumn should not be called on non stored fields", "model", fi.model.name, "field", fi.json)
}
adapter := adapters[db.DriverName()]
// Add column without not null
query := fmt.Sprintf(`
ALTER TABLE %s
ADD COLUMN %s %s
`, adapter.quoteTableName(fi.model.tableName), fi.json, adapter.columnSQLDefinition(fi, true))
dbExecuteNoTx(query)
// Set default value if defined
if fi.defaultFunc != nil {
updateQuery := fmt.Sprintf(`
UPDATE %s SET %s = ? WHERE %s IS NULL
`, adapter.quoteTableName(fi.model.tableName), fi.json, fi.json)
var defaultValue interface{}
SimulateInNewEnvironment(security.SuperUserID, func(env Environment) {
defaultValue = fi.defaultFunc(env)
})
dbExecuteNoTx(updateQuery, defaultValue)
}
// Add not null if required
updateDBColumnNullable(fi)
}
// updateDBColumnDataType updates the data type in database for the given Field
func updateDBColumnDataType(fi *Field) {
adapter := adapters[db.DriverName()]
query := fmt.Sprintf(`
ALTER TABLE %s
ALTER COLUMN %s SET DATA TYPE %s
`, adapter.quoteTableName(fi.model.tableName), fi.json, adapter.typeSQL(fi))
dbExecuteNoTx(query)
}
// updateDBColumnNullable updates the NULL/NOT NULL data in database for the given Field
func updateDBColumnNullable(fi *Field) {
adapter := adapters[db.DriverName()]
var verb string
if adapter.fieldIsNotNull(fi) {
verb = "SET"
} else {
verb = "DROP"
}
query := fmt.Sprintf(`
ALTER TABLE %s
ALTER COLUMN %s %s NOT NULL
`, adapter.quoteTableName(fi.model.tableName), fi.json, verb)
query, _ = sanitizeQuery(query)
_, err := db.Exec(query)
if err != nil {
log.Warn("unable to change NOT NULL constraint", "model", fi.model.name, "field", fi.name, "verb", verb)
}
}
<|fim▁hole|>// dropDBColumn drops the column colName from table tableName in database
func dropDBColumn(tableName, colName string) {
adapter := adapters[db.DriverName()]
query := fmt.Sprintf(`
ALTER TABLE %s
DROP COLUMN %s
`, adapter.quoteTableName(tableName), colName)
dbExecuteNoTx(query)
}
// updateDBForeignKeyConstraints creates or updates fk constraints
// based on the data of the given Model
func updateDBForeignKeyConstraints(m *Model) {
adapter := adapters[db.DriverName()]
for colName, fi := range m.fields.registryByJSON {
		fkConstraintInDB := adapter.constraintExists(fmt.Sprintf("%s_%s_fkey", m.tableName, colName))
		fieldIsFK := fi.fieldType.IsFKRelationType() && fi.isStored()
		switch {
		case fieldIsFK && !fkConstraintInDB:
			createFKConstraint(m.tableName, colName, fi.relatedModel.tableName, string(fi.onDelete))
		case !fieldIsFK && fkConstraintInDB:
dropFKConstraint(m.tableName, colName)
}
}
}
// updateDBConstraints creates or updates sql constraints
// based on the data of the given Model
func updateDBConstraints(m *Model) {
adapter := adapters[db.DriverName()]
for constraintName, constraint := range m.sqlConstraints {
if !adapter.constraintExists(constraintName) {
createConstraint(m.tableName, constraintName, constraint.sql)
}
}
dbConLoop:
for _, dbConstraintName := range adapter.constraints(fmt.Sprintf("%%_%s_mancon", m.tableName)) {
for constraintName := range m.sqlConstraints {
if constraintName == dbConstraintName {
continue dbConLoop
}
}
dropConstraint(m.tableName, dbConstraintName)
}
}
// createFKConstraint creates an FK constraint for the given column that references the given targetTable
func createFKConstraint(tableName, colName, targetTable, ondelete string) {
adapter := adapters[db.DriverName()]
constraint := fmt.Sprintf("FOREIGN KEY (%s) REFERENCES %s ON DELETE %s", colName, adapter.quoteTableName(targetTable), ondelete)
createConstraint(tableName, fmt.Sprintf("%s_%s_fkey", tableName, colName), constraint)
}
// dropFKConstraint drops an FK constraint for colName in the given table
func dropFKConstraint(tableName, colName string) {
dropConstraint(tableName, fmt.Sprintf("%s_%s_fkey", tableName, colName))
}
// createConstraint creates a constraint in the given table
func createConstraint(tableName, constraintName, sql string) {
adapter := adapters[db.DriverName()]
query := fmt.Sprintf(`
ALTER TABLE %s ADD CONSTRAINT %s %s
`, adapter.quoteTableName(tableName), constraintName, sql)
dbExecuteNoTx(query)
}
// dropConstraint drops a constraint with the given name
func dropConstraint(tableName, constraintName string) {
adapter := adapters[db.DriverName()]
query := fmt.Sprintf(`
ALTER TABLE %s DROP CONSTRAINT IF EXISTS %s
`, adapter.quoteTableName(tableName), constraintName)
dbExecuteNoTx(query)
}
// updateDBIndexes creates or updates indexes based on the data of
// the given Model
func updateDBIndexes(m *Model) {
adapter := adapters[db.DriverName()]
for colName, fi := range m.fields.registryByJSON {
indexInDB := adapter.indexExists(m.tableName, fmt.Sprintf("%s_%s_index", m.tableName, colName))
switch {
case fi.index && !indexInDB:
createColumnIndex(m.tableName, colName)
case indexInDB && !fi.index:
dropColumnIndex(m.tableName, colName)
}
}
}
// createColumnIndex creates an column index for colName in the given table
func createColumnIndex(tableName, colName string) {
adapter := adapters[db.DriverName()]
query := fmt.Sprintf(`
CREATE INDEX %s ON %s (%s)
`, fmt.Sprintf("%s_%s_index", tableName, colName), adapter.quoteTableName(tableName), colName)
dbExecuteNoTx(query)
}
// dropColumnIndex drops a column index for colName in the given table
func dropColumnIndex(tableName, colName string) {
query := fmt.Sprintf(`
DROP INDEX IF EXISTS %s
`, fmt.Sprintf("%s_%s_index", tableName, colName))
dbExecuteNoTx(query)
}
// runInit runs the Init function of the given model if it exists
func runInit(model *Model) {
if _, exists := model.methods.Get("Init"); exists {
err := ExecuteInNewEnvironment(security.SuperUserID, func(env Environment) {
env.Pool(model.name).Call("Init")
})
if err != nil {
log.Panic("Error while calling Init function", "model", model.name, "error", err)
}
}
}<|fim▁end|> | |
<|file_name|>logger.py<|end_file_name|><|fim▁begin|>import os
import inspect
import logging
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
TRACEBACK_INSPECTOR = inspect.currentframe
DEBUG = logging.DEBUG
INFO = logging.INFO
ERROR = logging.ERROR
WARNING = logging.WARN
CRITICAL = logging.CRITICAL
def debugCaller(traceback=None):
""" Function who return all the traceback of a call."""
tracebackLog = inspect.getouterframes(traceback)
moduleName = os.path.basename(
tracebackLog[1][1]).replace(".py", "").replace(
'<Script Block >', 'stdin')
methodName = tracebackLog[1][3]
return [moduleName, methodName]
class Logger(object):
"""
"""
level = INFO
criticalFunc = None
infoFunc = None
warningFunc = None
debugFunc = None
errorFunc = None
tracebackFunc = None
separatorFunc = None
spaceFunc = None
@classmethod
def onDebug(cls, func):<|fim▁hole|> """
cls.debugFunc = func
@classmethod
def onWarning(cls, func):
"""
"""
cls.warningFunc = func
@classmethod
def onCritical(cls, func):
"""
"""
cls.criticalFunc = func
@classmethod
def onError(cls, func):
"""
"""
cls.errorFunc = func
@classmethod
def onInfo(cls, func):
"""
"""
cls.infoFunc = func
@classmethod
def onTraceback(cls, func):
"""
"""
cls.tracebackFunc = func
@classmethod
def onSeparator(cls, func):
"""
"""
cls.separatorFunc = func
@classmethod
def onSpace(cls, func):
"""
"""
cls.spaceFunc = func
@classmethod
def warning(cls, msg):
"""
"""
if cls.level <= WARNING:
msgComplete = cls._buildString(
inspect.currentframe(), msg, WARNING)
if cls.warningFunc:
cls().warningFunc(msgComplete)
else:
logging.warning(msgComplete)
@classmethod
def info(cls, msg):
"""
"""
if cls.level <= INFO:
msgComplete = cls._buildString(inspect.currentframe(), msg, INFO)
if cls.infoFunc:
cls().infoFunc(msgComplete)
else:
logging.info(msgComplete)
@classmethod
def debug(cls, msg):
"""
"""
if cls.level <= DEBUG:
msgComplete = cls._buildString(inspect.currentframe(), msg, DEBUG)
if cls.debugFunc:
cls().debugFunc(msgComplete)
else:
logging.debug(msgComplete)
@classmethod
def error(cls, msg):
"""
"""
if cls.level <= ERROR:
msgComplete = cls._buildString(inspect.currentframe(), msg, ERROR)
if cls.errorFunc:
cls().errorFunc(msgComplete)
else:
logging.error(msgComplete)
@classmethod
def critical(cls, msg):
"""
"""
if cls.level <= CRITICAL:
msgComplete = cls._buildString(
inspect.currentframe(), msg, CRITICAL)
if cls.criticalFunc:
cls().criticalFunc(msgComplete)
else:
logging.critical(msgComplete)
    @classmethod
    def traceback(cls, msg):
"""
"""
if cls.tracebackFunc:
cls.tracebackFunc(msg)
else:
TracebackError(msg)
@classmethod
def _buildString(cls, input, msg, typeErr):
""" Build the display error string by the type of error """
debugAsString = debugCaller(input)
if typeErr in [INFO, WARNING]:
return "[%s] %s" % (debugAsString[0], msg)
return "[%s::%s] %s" % (debugAsString[0], debugAsString[1], msg)
@classmethod
def getLogger(cls, loggerName):
""" Return the given name of the logger """
logging.getLogger(loggerName)
@classmethod
def setLevel(cls, level):
""" set the level of debugging """
cls.level = level
@classmethod
def getLevel(cls):
""""""
return cls.level
@classmethod
def addSeparator(cls, separator="-", length=75):
"""
        Create a separator line to make the log output easier to read.
"""
if cls.separatorFunc:
cls().separatorFunc(separator * length)
else:
logging.info(separator * length)
@classmethod
def addSpace(cls):
if cls.spaceFunc:
cls().spaceFunc()
else:
logging.info("")
class TracebackError(object):
"""
Output the whole traceback instead of only the
last message and Log it as Critical
"""
def __init__(self, e):
"""TracebackError Constructor"""
super(TracebackError, self).__init__()
import StringIO
import traceback
fileHandler = StringIO.StringIO()
traceback.print_exc(file=fileHandler)
self.trace = fileHandler.getvalue()
Logger.critical(self.trace)
def asString(self):
""""""
return self.trace
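# Illustrative usage sketch (module names in the output depend on the caller):
#     Logger.setLevel(DEBUG)
#     Logger.info("loading scene")     # logged as "[<calling module>] loading scene"
#     Logger.addSeparator("-", 40)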
if __name__ == "__main__":
class Test(object):
def __init__(self):
pass
def runTest(self):
Logger.setLevel(DEBUG)
Logger.info("info")
Logger.critical("critical")
Logger.debug("debug")
Logger.warning("warning")
Logger.error("error")
aTest = Test()
aTest.runTest()<|fim▁end|> | """ |
<|file_name|>bdfix.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# see also http://habrahabr.ru/post/135863/
# "How to write a GIMP plug-in in Python" (Russian-language article)
# Import the required modules
from gimpfu import *
## fix
def bdfix(image, drawable, w0, c0, w1, c1):
# for Undo
pdb.gimp_context_push()
pdb.gimp_image_undo_group_start(image)
# border-0
pdb.gimp_image_resize(image, image.width + w0*2, image.height + w0*2, w0, w0)
cz = pdb.gimp_context_get_background()
pdb.gimp_context_set_background(c0)
pdb.gimp_image_flatten(image)
pdb.gimp_context_set_background(cz)
# border-1
pdb.gimp_image_resize(image, image.width + w1*2, image.height + w1*2, w1, w1)
cz = pdb.gimp_context_get_background()
pdb.gimp_context_set_background(c1)
pdb.gimp_image_flatten(image)
pdb.gimp_context_set_background(cz)
# Refresh
pdb.gimp_displays_flush()
# Undo
pdb.gimp_image_undo_group_end(image)
pdb.gimp_context_pop()
# Register the function in the PDB
register(
"python-fu-bdfix", # Имя регистрируемой функции
"Добавление рамки к изображению", # Информация о дополнении
"Помещает вокруг изображения рамку", # Короткое описание выполняемых скриптом действий
"Александр Лубягин", # Информация об авторе
"Александр Лубягин", # Информация о правах
"15.01.2015", # Дата изготовления
"Добавить рамку", # Название пункта меню, с помощью которого дополнение будет запускаться
"*", # Типы изображений, с которыми работает дополнение<|fim▁hole|> (PF_IMAGE, "image", "Исходное изображение", None), # Указатель на изображение
(PF_DRAWABLE, "drawable", "Исходный слой", None), # Указатель на слой
(PF_INT, "w0", "Ширина рамки, px", "9"), # Ширина рамки
(PF_COLOR, "c0", "Цвет рамки", (255,255,255)), # Цвет рамки
(PF_INT, "w1", "Ширина рамки, px", "1"), # Ширина рамки
(PF_COLOR, "c1", "Цвет рамки", (0,0,0)) # Цвет рамки
],
[], # Список переменных которые вернет дополнение
bdfix, menu="<Image>/ТЕСТ/") # Имя исходной функции, и меню, в которое будет помещён пункт
# Запускаем скрипт
main()<|fim▁end|> | [ |
<|file_name|>assoc-type.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or<|fim▁hole|>// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that we do not yet support elision in associated types, even
// when there is just one name we could take from the impl header.
#![allow(warnings)]
trait MyTrait {
type Output;
}
impl MyTrait for &i32 {
type Output = &i32;
//~^ ERROR missing lifetime specifier
}
impl MyTrait for &u32 {
type Output = &'_ i32;
//~^ ERROR missing lifetime specifier
}
// This is what you have to do:
impl<'a> MyTrait for &'a f32 {
type Output = &'a f32;
}
fn main() { }<|fim▁end|> | // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license |
<|file_name|>example02.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
A simple python example for multiprocess.
Created by yetship at 2017/4/18 08:57
"""
from random import randint
import multiprocessing
<|fim▁hole|>
def worker(lower, upper):
"""thread worker function"""
print("get a random int: {}".format(randint(lower, upper)))
if __name__ == '__main__':
for i in range(3):
p = multiprocessing.Process(target=worker, args=(i, i ** 2))
p.start()<|fim▁end|> | |
<|file_name|>bundles.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016, 2017 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""JS/CSS bundles for Records."""
from __future__ import absolute_import, print_function
from flask_assets import Bundle
from invenio_assets import NpmBundle
stats_js = NpmBundle(
"node_modules/invenio-charts-js/dist/lib.bundle.js",
"js/cds_records/stats.js",
output="gen/cds.records.stats.%(version)s.js",
npm={
"invenio-charts-js": "^0.2.2",
},
)
stats_css = Bundle(
Bundle(
"node_modules/invenio-charts-js/src/styles/styles.scss",
"scss/stats.scss",
filters="node-scss,cleancssurl",
),
output="gen/cds.stats.%(version)s.css",
)
js = NpmBundle(
Bundle(
"node_modules/cds/dist/cds.js",
"node_modules/angular-sanitize/angular-sanitize.js",
"node_modules/angular-strap/dist/angular-strap.js",
"node_modules/invenio-files-js/dist/invenio-files-js.js",
"node_modules/ngmodal/dist/ng-modal.js",
"js/cds_records/main.js",
"js/cds_records/user_actions_logger.js",
filters="jsmin",
),
depends=("node_modules/cds/dist/*.js",),
filters="jsmin",<|fim▁hole|> output="gen/cds.record.%(version)s.js",
npm={
"angular": "~1.4.10",
"angular-sanitize": "~1.4.10",
"angular-loading-bar": "~0.9.0",
"cds": "~0.2.0",
"ng-dialog": "~0.6.0",
"ngmodal": "~2.0.1",
},
)<|fim▁end|> | |
<|file_name|>0002_auto_20180924_1938.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-09-24 19:38
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('controlled_vocabularies', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='property',
name='label',
field=models.TextField(help_text=b'The value for the added property.'),
),
migrations.AlterField(
model_name='property',<|fim▁hole|> model_name='term',
name='label',
field=models.CharField(help_text=b'The human-readable name of the term.', max_length=255),
),
migrations.AlterField(
model_name='term',
name='name',
field=models.CharField(help_text=b'The name or key that uniquely identifies the term within the vocabulary.', max_length=50),
),
migrations.AlterField(
model_name='term',
name='order',
field=models.IntegerField(blank=True, help_text=b'The preferred order for viewing the term in the vocabulary.', null=True),
),
migrations.AlterField(
model_name='term',
name='vocab_list',
field=models.ForeignKey(help_text=b'The vocabulary that the term needs to be added to.', on_delete=django.db.models.deletion.CASCADE, to='controlled_vocabularies.Vocabulary', verbose_name=b'Vocabulary'),
),
migrations.AlterField(
model_name='vocabulary',
name='definition',
field=models.TextField(blank=True, help_text=b'A brief statement of the meaning of the vocabulary.'),
),
migrations.AlterField(
model_name='vocabulary',
name='label',
field=models.CharField(help_text=b'The human-readable name of the vocabulary.', max_length=255),
),
migrations.AlterField(
model_name='vocabulary',
name='maintainer',
field=models.CharField(help_text=b'The person responsible for creating and updating the vocabulary.', max_length=50),
),
migrations.AlterField(
model_name='vocabulary',
name='maintainerEmail',
field=models.CharField(help_text=b'E-mail address of maintainer.', max_length=50, verbose_name=b'Maintainer E-mail'),
),
migrations.AlterField(
model_name='vocabulary',
name='name',
field=models.CharField(help_text=b'The name or key that uniquely identifies the vocabulary.', max_length=50, unique=True),
),
migrations.AlterField(
model_name='vocabulary',
name='order',
field=models.CharField(choices=[(b'name', b'name'), (b'label', b'label'), (b'order', b'order')], help_text=b'The preferred order for viewing the UNTL list of controlled vocabularies.', max_length=10),
),
]<|fim▁end|> | name='property_name',
field=models.CharField(choices=[(b'definition', b'Definition'), (b'description', b'Description'), (b'note', b'Note'), (b'system', b'System')], help_text=b"The name of the added property; e.g., 'Description'.", max_length=50, verbose_name=b'Property Type'),
),
migrations.AlterField( |
<|file_name|>test.java<|end_file_name|><|fim▁begin|>package db;
import db.*;
<|fim▁hole|> // TODO Auto-generated method stub
Database.getInstance().register("111", "22");
// Database.getInstance().login("111", "22");
}
}<|fim▁end|> | public class test {
public static void main(String[] args) { |
<|file_name|>QmitkUSNavigationProcessWidget.cpp<|end_file_name|><|fim▁begin|>/*===================================================================
The Medical Imaging Interaction Toolkit (MITK)
Copyright (c) German Cancer Research Center,
Division of Medical and Biological Informatics.
All rights reserved.
This software is distributed WITHOUT ANY WARRANTY; without
even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE.
See LICENSE.txt or http://www.mitk.org for details.
===================================================================*/
#include "QmitkUSNavigationProcessWidget.h"
#include "ui_QmitkUSNavigationProcessWidget.h"
#include "../NavigationStepWidgets/QmitkUSAbstractNavigationStep.h"
#include "../SettingsWidgets/QmitkUSNavigationAbstractSettingsWidget.h"
#include "mitkDataNode.h"
#include "mitkNavigationDataToNavigationDataFilter.h"
#include <QTimer>
#include <QSignalMapper>
#include <QShortcut>
QmitkUSNavigationProcessWidget::QmitkUSNavigationProcessWidget(QWidget* parent) :
QWidget(parent),
m_SettingsWidget(0),
m_BaseNode(mitk::DataNode::New()), m_CurrentTabIndex(0), m_CurrentMaxStep(0),
m_ImageAlreadySetToNode(false),
m_ReadySignalMapper(new QSignalMapper(this)), m_NoLongerReadySignalMapper(new QSignalMapper(this)),
m_StdMultiWidget(0),
m_UsePlanningStepWidget(false),
ui(new Ui::QmitkUSNavigationProcessWidget)
{
m_Parent = parent;
ui->setupUi(this);
// remove the default page
ui->stepsToolBox->removeItem(0);
//set shortcuts
QShortcut *nextShortcut = new QShortcut(QKeySequence("F10"), parent);
QShortcut *prevShortcut = new QShortcut(QKeySequence("F11"), parent);
connect(nextShortcut, SIGNAL(activated()), this, SLOT(OnNextButtonClicked()));
connect(prevShortcut, SIGNAL(activated()), this, SLOT(OnPreviousButtonClicked()));
//connect other slots
connect( ui->restartStepButton, SIGNAL(clicked()), this, SLOT(OnRestartStepButtonClicked()) );
connect( ui->previousButton, SIGNAL(clicked()), this, SLOT(OnPreviousButtonClicked()) );
connect( ui->nextButton, SIGNAL(clicked()), this, SLOT(OnNextButtonClicked()) );
connect( ui->stepsToolBox, SIGNAL(currentChanged(int)), this, SLOT(OnTabChanged(int)) );
connect (ui->settingsButton, SIGNAL(clicked()), this, SLOT(OnSettingsButtonClicked()) );
connect( m_ReadySignalMapper, SIGNAL(mapped(int)), this, SLOT(OnStepReady(int)) );
connect( m_NoLongerReadySignalMapper, SIGNAL(mapped(int)), this, SLOT(OnStepNoLongerReady(int)) );
ui->settingsFrameWidget->setHidden(true);
}
QmitkUSNavigationProcessWidget::~QmitkUSNavigationProcessWidget()
{
ui->stepsToolBox->blockSignals(true);
for ( NavigationStepVector::iterator it = m_NavigationSteps.begin();
it != m_NavigationSteps.end(); ++it )
{
if ( (*it)->GetNavigationStepState() > QmitkUSAbstractNavigationStep::State_Stopped ) { (*it)->StopStep(); }
delete *it;
}
m_NavigationSteps.clear();
if ( m_SettingsNode.IsNotNull() && m_DataStorage.IsNotNull() )
{
m_DataStorage->Remove(m_SettingsNode);
}
delete ui;
}
void QmitkUSNavigationProcessWidget::EnableInteraction(bool enable)
{
if (enable)
{
ui->restartStepButton->setEnabled(true);
ui->previousButton->setEnabled(true);
ui->nextButton->setEnabled(true);
ui->stepsToolBox->setEnabled(true);
}
else
{
ui->restartStepButton->setEnabled(false);
ui->previousButton->setEnabled(false);
ui->nextButton->setEnabled(false);
ui->stepsToolBox->setEnabled(false);
}
}
void QmitkUSNavigationProcessWidget::SetDataStorage(itk::SmartPointer<mitk::DataStorage> dataStorage)
{
m_DataStorage = dataStorage;
if ( dataStorage.IsNull() )
{
mitkThrow() << "Data Storage must not be null for QmitkUSNavigationProcessWidget.";
}
// test if base node is already in the data storage and add it if not
m_BaseNode = dataStorage->GetNamedNode(QmitkUSAbstractNavigationStep::DATANAME_BASENODE);
if ( m_BaseNode.IsNull() )
{
m_BaseNode = mitk::DataNode::New();
m_BaseNode->SetName(QmitkUSAbstractNavigationStep::DATANAME_BASENODE);
dataStorage->Add(m_BaseNode);
}
// base node and image stream node may be the same node
if ( strcmp(QmitkUSAbstractNavigationStep::DATANAME_BASENODE, QmitkUSAbstractNavigationStep::DATANAME_IMAGESTREAM) != 0)
{
m_ImageStreamNode = dataStorage->GetNamedNode(QmitkUSAbstractNavigationStep::DATANAME_IMAGESTREAM);
if (m_ImageStreamNode.IsNull())
{
// Create Node for US Stream
m_ImageStreamNode = mitk::DataNode::New();
m_ImageStreamNode->SetName(QmitkUSAbstractNavigationStep::DATANAME_IMAGESTREAM);
dataStorage->Add(m_ImageStreamNode);
}
}
else
{
m_ImageStreamNode = m_BaseNode;
}
m_SettingsNode =
dataStorage->GetNamedDerivedNode(QmitkUSAbstractNavigationStep::DATANAME_SETTINGS, m_BaseNode);
if ( m_SettingsNode.IsNull() )
{
m_SettingsNode = mitk::DataNode::New();
m_SettingsNode->SetName(QmitkUSAbstractNavigationStep::DATANAME_SETTINGS);
dataStorage->Add(m_SettingsNode, m_BaseNode);
}
if (m_SettingsWidget) { m_SettingsWidget->SetSettingsNode(m_SettingsNode); }
}
void QmitkUSNavigationProcessWidget::SetSettingsWidget(QmitkUSNavigationAbstractSettingsWidget* settingsWidget)
{
// disconnect slots to settings widget if there was a widget before
if ( m_SettingsWidget )
{
disconnect( ui->settingsSaveButton, SIGNAL(clicked()), m_SettingsWidget, SLOT(OnSave()) );
disconnect( ui->settingsCancelButton, SIGNAL(clicked()), m_SettingsWidget, SLOT(OnCancel()) );
disconnect (m_SettingsWidget, SIGNAL(Saved()), this, SLOT(OnSettingsWidgetReturned()) );
disconnect (m_SettingsWidget, SIGNAL(Canceled()), this, SLOT(OnSettingsWidgetReturned()) );
disconnect (m_SettingsWidget, SIGNAL(SettingsChanged(itk::SmartPointer<mitk::DataNode>)), this, SLOT(OnSettingsChanged(itk::SmartPointer<mitk::DataNode>)) );
ui->settingsWidget->removeWidget(m_SettingsWidget);
}
m_SettingsWidget = settingsWidget;
if ( m_SettingsWidget )
{
m_SettingsWidget->LoadSettings();
connect( ui->settingsSaveButton, SIGNAL(clicked()), m_SettingsWidget, SLOT(OnSave()) );
connect( ui->settingsCancelButton, SIGNAL(clicked()), m_SettingsWidget, SLOT(OnCancel()) );
connect (m_SettingsWidget, SIGNAL(Saved()), this, SLOT(OnSettingsWidgetReturned()) );
connect (m_SettingsWidget, SIGNAL(Canceled()), this, SLOT(OnSettingsWidgetReturned()) );
connect (m_SettingsWidget, SIGNAL(SettingsChanged(itk::SmartPointer<mitk::DataNode>)), this, SLOT(OnSettingsChanged(itk::SmartPointer<mitk::DataNode>)) );
if ( m_SettingsNode.IsNotNull() ) { m_SettingsWidget->SetSettingsNode(m_SettingsNode, true); }
ui->settingsWidget->addWidget(m_SettingsWidget);
}
ui->settingsButton->setEnabled(m_SettingsWidget != 0);
}
void QmitkUSNavigationProcessWidget::SetNavigationSteps(NavigationStepVector navigationSteps)
{
disconnect( this, SLOT(OnTabChanged(int)) );
for ( int n = ui->stepsToolBox->count()-1; n >= 0; --n )
{
ui->stepsToolBox->removeItem(n);
}
connect( ui->stepsToolBox, SIGNAL(currentChanged(int)), this, SLOT(OnTabChanged(int)) );
m_NavigationSteps.clear();
m_NavigationSteps = navigationSteps;
this->InitializeNavigationStepWidgets();
// notify all navigation step widgets about the current settings
for (NavigationStepIterator it = m_NavigationSteps.begin(); it != m_NavigationSteps.end(); ++it)
{
(*it)->OnSettingsChanged(m_SettingsNode);
}
}
void QmitkUSNavigationProcessWidget::ResetNavigationProcess()
{
MITK_INFO("QmitkUSNavigationProcessWidget") << "Resetting navigation process.";
ui->stepsToolBox->blockSignals(true);
for ( int n = 0; n <= m_CurrentMaxStep; ++n )
{
m_NavigationSteps.at(n)->StopStep();
if ( n > 0 ) { ui->stepsToolBox->setItemEnabled(n, false); }
}
ui->stepsToolBox->blockSignals(false);
m_CurrentMaxStep = 0;
ui->stepsToolBox->setCurrentIndex(0);
if ( m_NavigationSteps.size() > 0 )
{
m_NavigationSteps.at(0)->ActivateStep();
}
this->UpdatePrevNextButtons();
}
void QmitkUSNavigationProcessWidget::UpdateNavigationProgress()
{
if ( m_CombinedModality.IsNotNull() && !m_CombinedModality->GetIsFreezed() )
{
m_CombinedModality->Modified();
m_CombinedModality->Update();
if ( m_LastNavigationDataFilter.IsNotNull() ) { m_LastNavigationDataFilter->Update(); }
mitk::Image::Pointer image = m_CombinedModality->GetOutput();
// make sure that always the current image is set to the data node
if ( image.IsNotNull() && m_ImageStreamNode->GetData() != image.GetPointer() && image->IsInitialized() )
{
m_ImageStreamNode->SetData(image);
m_ImageAlreadySetToNode = true;
}
}
if ( m_CurrentTabIndex > 0 && static_cast<unsigned int>(m_CurrentTabIndex) < m_NavigationSteps.size() )
{
m_NavigationSteps.at(m_CurrentTabIndex)->Update();
}
}
void QmitkUSNavigationProcessWidget::OnNextButtonClicked()
{
if (m_CombinedModality.IsNotNull() && m_CombinedModality->GetIsFreezed()) {return;} //no moving through steps when the modality is NULL or frozen
int currentIndex = ui->stepsToolBox->currentIndex();
if (currentIndex >= m_CurrentMaxStep)
{
MITK_WARN << "Next button clicked though no next tab widget is available.";
return;
}
ui->stepsToolBox->setCurrentIndex(++currentIndex);
this->UpdatePrevNextButtons();
}
void QmitkUSNavigationProcessWidget::OnPreviousButtonClicked()
{
if (m_CombinedModality.IsNotNull() && m_CombinedModality->GetIsFreezed()) {return;} //no moving through steps when the modality is NULL or frozen
int currentIndex = ui->stepsToolBox->currentIndex();
if (currentIndex <= 0)
{
MITK_WARN << "Previous button clicked though no previous tab widget is available.";
return;
}
ui->stepsToolBox->setCurrentIndex(--currentIndex);
this->UpdatePrevNextButtons();
}
void QmitkUSNavigationProcessWidget::OnRestartStepButtonClicked()
{
MITK_INFO("QmitkUSNavigationProcessWidget") << "Restarting step "
<< m_CurrentTabIndex << " (" << m_NavigationSteps.at(m_CurrentTabIndex)->GetTitle().toStdString() << ").";
m_NavigationSteps.at(ui->stepsToolBox->currentIndex())->RestartStep();
m_NavigationSteps.at(ui->stepsToolBox->currentIndex())->ActivateStep();
}
void QmitkUSNavigationProcessWidget::OnTabChanged(int index)
{
if ( index < 0 || index >= static_cast<int>(m_NavigationSteps.size()) )
{
return;
}
else if ( m_CurrentTabIndex == index )
{
    // just activate the step if it is the same step again
m_NavigationSteps.at(index)->ActivateStep();
return;
}
MITK_INFO("QmitkUSNavigationProcessWidget") << "Activating navigation step "
<< index << " (" << m_NavigationSteps.at(index)->GetTitle().toStdString() <<").";
if (index > m_CurrentTabIndex)
{
this->UpdateFilterPipeline();
// finish all previous steps to make sure that all data is valid
for (int n = m_CurrentTabIndex; n < index; ++n)
{
m_NavigationSteps.at(n)->FinishStep();
}
}
// deactivate the previously active step
if ( m_CurrentTabIndex > 0 && m_NavigationSteps.size() > static_cast<unsigned int>(m_CurrentTabIndex) )
{
m_NavigationSteps.at(m_CurrentTabIndex)->DeactivateStep();
}
// start step of the current tab if it wasn't started before
if ( m_NavigationSteps.at(index)->GetNavigationStepState() == QmitkUSAbstractNavigationStep::State_Stopped )
{
m_NavigationSteps.at(index)->StartStep();
}
m_NavigationSteps.at(index)->ActivateStep();
if (static_cast<unsigned int>(index) < m_NavigationSteps.size())
ui->restartStepButton->setEnabled(m_NavigationSteps.at(index)->GetIsRestartable());
this->UpdatePrevNextButtons();
m_CurrentTabIndex = index;
emit SignalActiveNavigationStepChanged(index);
}
void QmitkUSNavigationProcessWidget::OnSettingsButtonClicked()<|fim▁hole|>void QmitkUSNavigationProcessWidget::OnSettingsWidgetReturned()
{
this->SetSettingsWidgetVisible(false);
}
void QmitkUSNavigationProcessWidget::OnSettingsNodeChanged(itk::SmartPointer<mitk::DataNode> dataNode)
{
if ( m_SettingsWidget ) m_SettingsWidget->SetSettingsNode(dataNode);
}
void QmitkUSNavigationProcessWidget::OnStepReady(int index)
{
if (m_CurrentMaxStep <= index)
{
m_CurrentMaxStep = index + 1;
this->UpdatePrevNextButtons();
for (int n = 0; n <= m_CurrentMaxStep; ++n)
{
ui->stepsToolBox->setItemEnabled(n, true);
}
}
emit SignalNavigationStepFinished(index, true);
}
void QmitkUSNavigationProcessWidget::OnStepNoLongerReady(int index)
{
if (m_CurrentMaxStep > index)
{
m_CurrentMaxStep = index;
this->UpdatePrevNextButtons();
this->UpdateFilterPipeline();
for (int n = m_CurrentMaxStep+1; n < ui->stepsToolBox->count(); ++n)
{
ui->stepsToolBox->setItemEnabled(n, false);
m_NavigationSteps.at(n)->StopStep();
}
}
emit SignalNavigationStepFinished(index, false);
}
void QmitkUSNavigationProcessWidget::OnCombinedModalityChanged(itk::SmartPointer<mitk::USCombinedModality> combinedModality)
{
m_CombinedModality = combinedModality;
m_ImageAlreadySetToNode = false;
if ( combinedModality.IsNotNull() )
{
if ( combinedModality->GetNavigationDataSource().IsNull() )
{
MITK_WARN << "There is no navigation data source set for the given combined modality.";
return;
}
this->UpdateFilterPipeline();
}
for (NavigationStepIterator it = m_NavigationSteps.begin(); it != m_NavigationSteps.end(); ++it)
{
(*it)->SetCombinedModality(combinedModality);
}
emit SignalCombinedModalityChanged(combinedModality);
}
void QmitkUSNavigationProcessWidget::OnSettingsChanged(const mitk::DataNode::Pointer dataNode)
{
static bool methodEntered = false;
if ( methodEntered )
{
MITK_WARN("QmitkUSNavigationProcessWidget") << "Ignoring recursive call to 'OnSettingsChanged()'. "
<< "Make sure to no emit 'SignalSettingsNodeChanged' in an 'OnSettingsChanged()' method.";
return;
}
methodEntered = true;
std::string application;
if ( dataNode->GetStringProperty("settings.application", application) )
{
QString applicationQString = QString::fromStdString(application);
if ( applicationQString != ui->titleLabel->text() )
{
ui->titleLabel->setText(applicationQString);
}
}
// notify all navigation step widgets about the changed settings
for (NavigationStepIterator it = m_NavigationSteps.begin(); it != m_NavigationSteps.end(); ++it)
{
(*it)->OnSettingsChanged(dataNode);
}
emit SignalSettingsChanged(dataNode);
methodEntered = false;
}
void QmitkUSNavigationProcessWidget::InitializeNavigationStepWidgets()
{
// do not listen for steps tool box signal during insertion of items into tool box
disconnect( ui->stepsToolBox, SIGNAL(currentChanged(int)), this, SLOT(OnTabChanged(int)) );
m_CurrentMaxStep = 0;
mitk::DataStorage::Pointer dataStorage = m_DataStorage;
for (unsigned int n = 0; n < m_NavigationSteps.size(); ++n)
{
QmitkUSAbstractNavigationStep* curNavigationStep = m_NavigationSteps.at(n);
curNavigationStep->SetDataStorage(dataStorage);
connect( curNavigationStep, SIGNAL(SignalReadyForNextStep()), m_ReadySignalMapper, SLOT(map()));
connect( curNavigationStep, SIGNAL(SignalNoLongerReadyForNextStep()), m_NoLongerReadySignalMapper, SLOT(map()) );
connect( curNavigationStep, SIGNAL(SignalCombinedModalityChanged(itk::SmartPointer<mitk::USCombinedModality>)), this, SLOT(OnCombinedModalityChanged(itk::SmartPointer<mitk::USCombinedModality>)) );
connect( curNavigationStep, SIGNAL(SignalIntermediateResult(const itk::SmartPointer<mitk::DataNode>)), this, SIGNAL(SignalIntermediateResult(const itk::SmartPointer<mitk::DataNode>)) );
connect( curNavigationStep, SIGNAL(SignalSettingsNodeChanged(itk::SmartPointer<mitk::DataNode>)), this, SLOT(OnSettingsNodeChanged(itk::SmartPointer<mitk::DataNode>)) );
m_ReadySignalMapper->setMapping(curNavigationStep, n);
m_NoLongerReadySignalMapper->setMapping(curNavigationStep, n);
ui->stepsToolBox->insertItem(n, curNavigationStep, QString("Step ") + QString::number(n+1) + ": " + curNavigationStep->GetTitle());
if ( n > 0 ) { ui->stepsToolBox->setItemEnabled(n, false); }
}
ui->restartStepButton->setEnabled(m_NavigationSteps.at(0)->GetIsRestartable());
ui->stepsToolBox->setCurrentIndex(0);
// activate the first navigation step widgets
if ( ! m_NavigationSteps.empty() ) { m_NavigationSteps.at(0)->ActivateStep(); }
// after filling the steps tool box the signal is interesting again
connect( ui->stepsToolBox, SIGNAL(currentChanged(int)), this, SLOT(OnTabChanged(int)) );
this->UpdateFilterPipeline();
}
void QmitkUSNavigationProcessWidget::UpdatePrevNextButtons()
{
int currentIndex = ui->stepsToolBox->currentIndex();
ui->previousButton->setEnabled(currentIndex > 0);
ui->nextButton->setEnabled(currentIndex < m_CurrentMaxStep);
}
void QmitkUSNavigationProcessWidget::UpdateFilterPipeline()
{
if ( m_CombinedModality.IsNull() ) { return; }
std::vector<mitk::NavigationDataToNavigationDataFilter::Pointer> filterList;
mitk::NavigationDataSource::Pointer navigationDataSource = m_CombinedModality->GetNavigationDataSource();
for (unsigned int n = 0; n <= m_CurrentMaxStep && n < m_NavigationSteps.size(); ++n)
{
QmitkUSAbstractNavigationStep::FilterVector filter = m_NavigationSteps.at(n)->GetFilter();
if ( ! filter.empty() ) { filterList.insert(filterList.end(), filter.begin(), filter.end()); }
}
if ( ! filterList.empty() )
{
for (unsigned int n = 0; n < navigationDataSource->GetNumberOfOutputs(); ++n)
{
filterList.at(0)->SetInput(n, navigationDataSource->GetOutput(n));
}
for (std::vector<mitk::NavigationDataToNavigationDataFilter::Pointer>::iterator it = filterList.begin()+1;
it != filterList.end(); ++it)
{
std::vector<mitk::NavigationDataToNavigationDataFilter::Pointer>::iterator prevIt = it-1;
for (unsigned int n = 0; n < (*prevIt)->GetNumberOfOutputs(); ++n)
{
(*it)->SetInput(n, (*prevIt)->GetOutput(n));
}
}
m_LastNavigationDataFilter = filterList.at(filterList.size()-1);
}
else
{
m_LastNavigationDataFilter = navigationDataSource.GetPointer();
}
}
void QmitkUSNavigationProcessWidget::SetSettingsWidgetVisible(bool visible)
{
ui->settingsFrameWidget->setVisible(visible);
ui->stepsToolBox->setHidden(visible);
ui->settingsButton->setHidden(visible);
ui->restartStepButton->setHidden(visible);
ui->previousButton->setHidden(visible);
ui->nextButton->setHidden(visible);
}
void QmitkUSNavigationProcessWidget::FinishCurrentNavigationStep()
{
int currentIndex = ui->stepsToolBox->currentIndex();
QmitkUSAbstractNavigationStep* curNavigationStep = m_NavigationSteps.at(currentIndex);
curNavigationStep->FinishStep();
}<|fim▁end|> | {
this->SetSettingsWidgetVisible(true);
}
|
<|file_name|>export_test.go<|end_file_name|><|fim▁begin|>// -*- Mode: Go; indent-tabs-mode: t -*-
/*
* Copyright (C) 2016 Canonical Ltd
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 3 as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package store
import (
"io"
"net/http"
"net/url"
"github.com/juju/ratelimit"
"golang.org/x/net/context"
"gopkg.in/retry.v1"
"github.com/snapcore/snapd/overlord/auth"
"github.com/snapcore/snapd/progress"
"github.com/snapcore/snapd/snap"
"github.com/snapcore/snapd/testutil"
)
var (
HardLinkCount = hardLinkCount
ApiURL = apiURL
Download = download
UseDeltas = useDeltas
ApplyDelta = applyDelta
AuthLocation = authLocation
AuthURL = authURL
StoreURL = storeURL
StoreDeveloperURL = storeDeveloperURL
MustBuy = mustBuy
RequestStoreMacaroon = requestStoreMacaroon
DischargeAuthCaveat = dischargeAuthCaveat
RefreshDischargeMacaroon = refreshDischargeMacaroon
RequestStoreDeviceNonce = requestStoreDeviceNonce
RequestDeviceSession = requestDeviceSession
LoginCaveatID = loginCaveatID
JsonContentType = jsonContentType
SnapActionFields = snapActionFields
)
// MockDefaultRetryStrategy mocks the retry strategy used by several store requests
func MockDefaultRetryStrategy(t *testutil.BaseTest, strategy retry.Strategy) {
originalDefaultRetryStrategy := defaultRetryStrategy
defaultRetryStrategy = strategy
t.AddCleanup(func() {
defaultRetryStrategy = originalDefaultRetryStrategy
})
}
func MockConnCheckStrategy(t *testutil.BaseTest, strategy retry.Strategy) {
originalConnCheckStrategy := connCheckStrategy
connCheckStrategy = strategy
t.AddCleanup(func() {
connCheckStrategy = originalConnCheckStrategy
})
}
func (cm *CacheManager) CacheDir() string {
return cm.cacheDir
}
<|fim▁hole|> return cm.cleanup()
}
func (cm *CacheManager) Count() int {
return cm.count()
}
func MockOsRemove(f func(name string) error) func() {
oldOsRemove := osRemove
osRemove = f
return func() {
osRemove = oldOsRemove
}
}
func MockDownload(f func(ctx context.Context, name, sha3_384, downloadURL string, user *auth.UserState, s *Store, w io.ReadWriteSeeker, resume int64, pbar progress.Meter, dlOpts *DownloadOptions) error) (restore func()) {
origDownload := download
download = f
return func() {
download = origDownload
}
}
func MockApplyDelta(f func(name string, deltaPath string, deltaInfo *snap.DeltaInfo, targetPath string, targetSha3_384 string) error) (restore func()) {
origApplyDelta := applyDelta
applyDelta = f
return func() {
applyDelta = origApplyDelta
}
}
func (sto *Store) MockCacher(obs downloadCache) (restore func()) {
oldCacher := sto.cacher
sto.cacher = obs
return func() {
sto.cacher = oldCacher
}
}
func (sto *Store) SetDeltaFormat(dfmt string) {
sto.deltaFormat = dfmt
}
func (sto *Store) DownloadDelta(deltaName string, downloadInfo *snap.DownloadInfo, w io.ReadWriteSeeker, pbar progress.Meter, user *auth.UserState) error {
return sto.downloadDelta(deltaName, downloadInfo, w, pbar, user)
}
func (sto *Store) DoRequest(ctx context.Context, client *http.Client, reqOptions *requestOptions, user *auth.UserState) (*http.Response, error) {
return sto.doRequest(ctx, client, reqOptions, user)
}
func (sto *Store) Client() *http.Client {
return sto.client
}
func (sto *Store) DetailFields() []string {
return sto.detailFields
}
func (sto *Store) DecorateOrders(snaps []*snap.Info, user *auth.UserState) error {
return sto.decorateOrders(snaps, user)
}
func (cfg *Config) SetBaseURL(u *url.URL) error {
return cfg.setBaseURL(u)
}
func NewHashError(name, sha3_384, targetSha3_384 string) HashError {
return HashError{name, sha3_384, targetSha3_384}
}
func NewRequestOptions(mth string, url *url.URL) *requestOptions {
return &requestOptions{
Method: mth,
URL: url,
}
}
func MockRatelimitReader(f func(r io.Reader, bucket *ratelimit.Bucket) io.Reader) (restore func()) {
oldRatelimitReader := ratelimitReader
ratelimitReader = f
return func() {
ratelimitReader = oldRatelimitReader
}
}<|fim▁end|> | func (cm *CacheManager) Cleanup() error { |
<|file_name|>utils.js<|end_file_name|><|fim▁begin|>'use strict';
const path = require('path');
const os = require('os');
const fs = require('fs-extra');
const fieHome = require('fie-home');
const debug = require('debug')('core-report');
const fieUser = require('fie-user');
const execSync = require('child_process').execSync;
const spawn = require('cross-spawn');
const cache = require('fie-cache');
/**
* Getters for environment information (fie/node/npm/tnpm versions, user email, OS)
*/
const cacheEnvGetter = {
fieVersion() {
return (
process.env.FIE_VERSION ||
execSync('npm view fie version')
.toString()
.replace(/[\nv]/g, '')
);
},
email() {
return fieUser.getEmail();
},
nodeVersion() {
return execSync('node -v')
.toString()
.replace(/[\nv]/g, '');
},
npmVersion() {
try {
return execSync('npm -v')
.toString()
.replace('\n', '');
} catch (e) {
return null;
}
},
tnpmVersion() {
try {
return execSync('tnpm -v')
.toString()
.split('\n')[0]
.match(/\d+\.\d+\.\d+/)[0];
} catch (ex) {
// tnpm is not available outside the intranet
return null;
}
},
system() {
return `${os.platform()} ${os.release()}`;
},
};
/**
* Get the current git branch of a project
* @param cwd
* @returns {string}
*/
exports.getCurBranch = function(cwd) {
const headerFile = path.join(cwd, '.git/HEAD');
let version = '';
if (fs.existsSync(headerFile)) {
const gitVersion = fs.readFileSync(headerFile, { encoding: 'utf8' });
const arr = gitVersion.split(/refs[\\\/]heads[\\\/]/g);
if (arr && arr.length > 1) {
version = arr[1];
}
}
return version.trim();
};
/**
* Get the project's git remote URL
* @returns {*}
*/
exports.getProjectUrl = function() {
let url;
try {
url = (
spawn
.sync('git', ['config', '--get', 'remote.origin.url'], { silent: true })
.stdout.toString() || ''
).trim();
// Some git URLs start with http; normalize them to the git@ format so they can be handled uniformly
const match = url.match(/http:\/\/gitlab.alibaba-inc.com\/(.*)/);
if (match && match[1]) {
url = `[email protected]:${match[1]}`;
}
} catch (err) {
debug('git config error:', err.message);
}
return url;
};
/**
* Get project-related information (branch, package.json, fie config, repository URL)
*/
exports.getProjectInfo = function(cwd) {
const branch = exports.getCurBranch(cwd);
const pkgPath = path.join(cwd, 'package.json');
const CONFIG_FILE = process.env.FIE_CONFIG_FILE || 'fie.config.js';
const fiePath = path.join(cwd, CONFIG_FILE);
// The fieConfig package cannot be used here, it would create a circular dependency
let pkg;
let fie;
let repository = exports.getProjectUrl();
// Check whether package.json exists
if (fs.existsSync(pkgPath)) {
pkg = fs.readJsonSync(pkgPath, { throws: false });
}
// Check whether fie.config.js exists
if (fs.existsSync(fiePath)) {
delete require.cache[fiePath];
try {
fie = require(fiePath);
} catch (e) {
fie = null;
}
}
// If git did not provide a URL, try to read it from package.json
if (pkg && pkg.repository && pkg.repository.url) {
repository = pkg.repository.url;
}
return {
cwd,
branch,
pkg,
fie,
repository,
};
};
<|fim▁hole|> * Get the project's environment information
* @param force when true, re-collect live values (tnpm, node version, etc.) instead of reading the cache;
* normally only passed as true when an error is being reported
* @returns {*}
*/
exports.getProjectEnv = function(force) {
let cacheEnv = cache.get('reportEnvCache');
if (!cacheEnv || force) {
cacheEnv = {};
const cacheEnvKeys = Object.keys(cacheEnvGetter);
cacheEnvKeys.forEach(item => {
cacheEnv[item] = cacheEnvGetter[item]();
});
// Cache for three days
cache.set('reportEnvCache', cacheEnv, { expires: 259200000 });
}
return cacheEnv;
};
/**
* Get the command currently being executed, with interpreter and user paths stripped
*/
exports.getCommand = function(arg) {
let argv = arg || process.argv;
argv = argv.map(item => {
const match = item.match(/\\bin\\(((?!bin).)*)$|\/bin\/(.*)/);
// mac
if (match && match[3]) {
// For invocations like "node fie -v" there is no need to display "node"
return match[3] === 'node' ? '' : match[3];
} else if (match && match[1]) {
// For invocations like "node fie -v" there is no need to display "node"
return match[1] === 'node.exe' ? '' : match[1];
} else if (!match && item.indexOf('node.exe') !== -1) {
// Handle paths like C:\\node.exe that do not contain "bin"
// TODO: the regex above could be improved to also cover this case
return '';
}
return item;
});
return argv.join(' ').trim();
};
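// Illustrative example (hypothetical argv, not part of the original module):
// getCommand(['/usr/local/bin/node', '/usr/local/bin/fie', '-v']) strips the
// interpreter and the bin/ prefixes and returns 'fie -v'.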
/**
* Get the version of an installed fie module (read from its package.json)
*/
exports.getFieModuleVersion = function(mod) {
const modPkgPath = path.join(fieHome.getModulesPath(), mod, 'package.json');
let pkg = {};
if (fs.existsSync(modPkgPath)) {
pkg = fs.readJsonSync(modPkgPath, { throws: false }) || {};
}
return pkg.version;
};<|fim▁end|> | /** |
<|file_name|>ruby.py<|end_file_name|><|fim▁begin|>from .base_executor import ScriptExecutor
from judgeenv import env
<|fim▁hole|> ext = '.rb'
name = 'RUBY'
address_grace = 65536
fs = ['.*\.(?:so|rb$)', '/etc/localtime$', '/dev/urandom$', '/proc/self', '/usr/lib/ruby/gems/']
test_program = 'puts gets'
@classmethod
def get_command(cls):
return env['runtime'].get(cls.name.lower())<|fim▁end|> |
class RubyExecutor(ScriptExecutor): |
<|file_name|>constants.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#<|fim▁hole|># under the License.
# service type constants:
CORE = "CORE"
DUMMY = "DUMMY"
LOADBALANCER = "LOADBALANCER"
# TODO(salvatore-orlando): Move these (or derive them) from conf file
ALLOWED_SERVICES = [CORE, DUMMY, LOADBALANCER]
COMMON_PREFIXES = {
CORE: "",
DUMMY: "/dummy_svc",
LOADBALANCER: "/lb",
}
# Service operation status constants
ACTIVE = "ACTIVE"
PENDING_CREATE = "PENDING_CREATE"
PENDING_UPDATE = "PENDING_UPDATE"
PENDING_DELETE = "PENDING_DELETE"
INACTIVE = "INACTIVE"
ERROR = "ERROR"<|fim▁end|> | # Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | import search_duplicated_task |
<|file_name|>bip.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
#################################################################################################
#
# Script Name: bip.py
# Script Usage: This script is the menu system and runs everything else. Do not use other
# files unless you are comfortable with the code.
#
# It has the following:
# 1.
# 2.<|fim▁hole|># 1. Make sure the info in bip_config.py is correct.
# 2. Make sure GAM (Google Apps Manager) is installed and the path is correct.
# 3. Make sure the AD scripts in tools/ are present on the DC running run.ps1.
#
# Script Updates:
# 201709191243 - [email protected] - copied boilerplate.
#
#################################################################################################
import os # os.system for clearing screen and simple gam calls
import subprocess # subprocess.Popen is to capture gam output (needed for user info in particular)
import MySQLdb # MySQLdb is to get data from relevant tables
import csv # CSV is used to read output of drive commands that supply data in CSV form
import bip_config # declare installation specific variables
# setup for MySQLdb connection
varMySQLHost = bip_config.mysqlconfig['host']
varMySQLUser = bip_config.mysqlconfig['user']
varMySQLPassword = bip_config.mysqlconfig['password']
varMySQLDB = bip_config.mysqlconfig['db']
# setup to find GAM
varCommandGam = bip_config.gamconfig['fullpath']
#################################################################################################
#
#################################################################################################<|fim▁end|> | # 3.
# 4.
#
# You will probably want to do the following: |
<|file_name|>windowing.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Abstract windowing methods. The concrete implementations of these can be found in `platform/`.
use embedder_traits::EventLoopWaker;
use euclid::TypedScale;
#[cfg(feature = "gleam")]
use gleam::gl;
use keyboard_types::KeyboardEvent;
use msg::constellation_msg::{TopLevelBrowsingContextId, TraversalDirection};
use script_traits::{MouseButton, TouchEventType, TouchId};
use servo_geometry::{DeviceIndependentPixel, DeviceUintLength};
use servo_url::ServoUrl;
use std::fmt::{Debug, Error, Formatter};
#[cfg(feature = "gleam")]
use std::rc::Rc;
use style_traits::DevicePixel;
use webrender_api::{DeviceIntPoint, DevicePoint, DeviceUintSize, DeviceUintRect, ScrollLocation};
#[derive(Clone)]
pub enum MouseWindowEvent {
Click(MouseButton, DevicePoint),
MouseDown(MouseButton, DevicePoint),
MouseUp(MouseButton, DevicePoint),
}
/// Various debug and profiling flags that WebRender supports.
#[derive(Clone)]
pub enum WebRenderDebugOption {
Profiler,
TextureCacheDebug,
RenderTargetDebug,
}
/// Events that the windowing system sends to Servo.
#[derive(Clone)]
pub enum WindowEvent {
/// Sent when no message has arrived, but the event loop was kicked for some reason (perhaps
/// by another Servo subsystem).
///
/// FIXME(pcwalton): This is kind of ugly and may not work well with multiprocess Servo.
/// It's possible that this should be something like
/// `CompositorMessageWindowEvent(compositor_thread::Msg)` instead.
Idle,
/// Sent when part of the window is marked dirty and needs to be redrawn. Before sending this
/// message, the window must make the same GL context as in `PrepareRenderingEvent` current.
Refresh,
/// Sent when the window is resized.
Resize,
/// Sent when a new URL is to be loaded.
LoadUrl(TopLevelBrowsingContextId, ServoUrl),
/// Sent when a mouse hit test is to be performed.
MouseWindowEventClass(MouseWindowEvent),
/// Sent when the mouse moves.
MouseWindowMoveEventClass(DevicePoint),
/// Touch event: type, identifier, point
Touch(TouchEventType, TouchId, DevicePoint),
/// Sent when the user scrolls. The first point is the delta and the second point is the
/// origin.
Scroll(ScrollLocation, DeviceIntPoint, TouchEventType),
/// Sent when the user zooms.
Zoom(f32),
/// Simulated "pinch zoom" gesture for non-touch platforms (e.g. ctrl-scrollwheel).
PinchZoom(f32),
/// Sent when the user resets zoom to default.
ResetZoom,
/// Sent when the user uses chrome navigation (i.e. backspace or shift-backspace).
Navigation(TopLevelBrowsingContextId, TraversalDirection),
/// Sent when the user quits the application
Quit,
/// Sent when a key input state changes
Keyboard(KeyboardEvent),
/// Sent when Ctrl+R/Apple+R is called to reload the current page.
Reload(TopLevelBrowsingContextId),
/// Create a new top level browsing context
NewBrowser(ServoUrl, TopLevelBrowsingContextId),
/// Close a top level browsing context
CloseBrowser(TopLevelBrowsingContextId),
/// Panic a top level browsing context.
SendError(Option<TopLevelBrowsingContextId>, String),
/// Make a top level browsing context visible, hiding the previous
/// visible one.
SelectBrowser(TopLevelBrowsingContextId),
/// Toggles a debug flag in WebRender
ToggleWebRenderDebug(WebRenderDebugOption),
/// Capture current WebRender
CaptureWebRender,
}
impl Debug for WindowEvent {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
match *self {
WindowEvent::Idle => write!(f, "Idle"),
WindowEvent::Refresh => write!(f, "Refresh"),
WindowEvent::Resize => write!(f, "Resize"),
WindowEvent::Keyboard(..) => write!(f, "Keyboard"),
WindowEvent::LoadUrl(..) => write!(f, "LoadUrl"),
WindowEvent::MouseWindowEventClass(..) => write!(f, "Mouse"),
WindowEvent::MouseWindowMoveEventClass(..) => write!(f, "MouseMove"),
WindowEvent::Touch(..) => write!(f, "Touch"),
WindowEvent::Scroll(..) => write!(f, "Scroll"),
WindowEvent::Zoom(..) => write!(f, "Zoom"),
WindowEvent::PinchZoom(..) => write!(f, "PinchZoom"),
WindowEvent::ResetZoom => write!(f, "ResetZoom"),
WindowEvent::Navigation(..) => write!(f, "Navigation"),
WindowEvent::Quit => write!(f, "Quit"),<|fim▁hole|> WindowEvent::Reload(..) => write!(f, "Reload"),
WindowEvent::NewBrowser(..) => write!(f, "NewBrowser"),
WindowEvent::SendError(..) => write!(f, "SendError"),
WindowEvent::CloseBrowser(..) => write!(f, "CloseBrowser"),
WindowEvent::SelectBrowser(..) => write!(f, "SelectBrowser"),
WindowEvent::ToggleWebRenderDebug(..) => write!(f, "ToggleWebRenderDebug"),
WindowEvent::CaptureWebRender => write!(f, "CaptureWebRender"),
}
}
}
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum AnimationState {
Idle,
Animating,
}
pub trait WindowMethods {
/// Presents the window to the screen (perhaps by page flipping).
fn present(&self);
/// Requests that the window system prepare a composite. Typically this will involve making
/// some type of platform-specific graphics context current. Returns true if the composite may
/// proceed and false if it should not.
fn prepare_for_composite(&self, width: DeviceUintLength, height: DeviceUintLength) -> bool;
/// Return the GL function pointer trait.
#[cfg(feature = "gleam")]
fn gl(&self) -> Rc<gl::Gl>;
/// Returns a thread-safe object to wake up the window's event loop.
fn create_event_loop_waker(&self) -> Box<EventLoopWaker>;
/// Get the coordinates of the native window, the screen and the framebuffer.
fn get_coordinates(&self) -> EmbedderCoordinates;
/// Set whether the application is currently animating.
/// Typically, when animations are active, the window
/// will want to avoid blocking on UI events, and just
/// run the event loop at the vsync interval.
fn set_animation_state(&self, _state: AnimationState);
}
#[derive(Clone, Copy, Debug)]
pub struct EmbedderCoordinates {
/// The pixel density of the display.
pub hidpi_factor: TypedScale<f32, DeviceIndependentPixel, DevicePixel>,
/// Size of the screen.
pub screen: DeviceUintSize,
/// Size of the available screen space (screen without toolbars and docks).
pub screen_avail: DeviceUintSize,
/// Size of the native window.
pub window: (DeviceUintSize, DeviceIntPoint),
/// Size of the GL buffer in the window.
pub framebuffer: DeviceUintSize,
/// Coordinates of the document within the framebuffer.
pub viewport: DeviceUintRect,
}<|fim▁end|> | |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import './modal.less'
import { show } from './show'
import { alert } from './alert'
import { confirm } from './confirm'
import { clear } from './clear'
import { attachPropertiesToComponent } from '../../utils/attach-properties-to-component'
import { Modal } from './modal'
export type { ModalProps } from './modal'
export type { Action } from './modal-action-button'
export type { ModalShowProps, ModalShowRef } from './show'
export type { ModalAlertProps } from './alert'
export type { ModalConfirmProps } from './confirm'
<|fim▁hole|> confirm,
clear,
})<|fim▁end|> | export default attachPropertiesToComponent(Modal, {
show,
alert, |
<|file_name|>RabinKarp.java<|end_file_name|><|fim▁begin|>// Copyright (c) 2015 Elements of Programming Interviews. All rights reserved.
package com.epi;
import java.util.Random;
public class RabinKarp {
// @include
// Returns the index of the first character of the substring if found, -1
// otherwise.
public static int rabinKarp(String t, String s) {
if (s.length() > t.length()) {
return -1; // s is not a substring of t.
}
final int BASE = 26;
int tHash = 0, sHash = 0; // Hash codes for the substring of t and s.
int powerS = 1; // BASE^(|s| - 1), the weight of the leading character.
for (int i = 0; i < s.length(); i++) {
powerS = i > 0 ? powerS * BASE : 1;
tHash = tHash * BASE + t.charAt(i);
sHash = sHash * BASE + s.charAt(i);
}
for (int i = s.length(); i < t.length(); i++) {
// Checks the two substrings are actually equal or not, to protect<|fim▁hole|>
// Uses rolling hash to compute the new hash code.
tHash -= t.charAt(i - s.length()) * powerS;
tHash = tHash * BASE + t.charAt(i);
}
// Tries to match s and t.substring(t.length() - s.length()).
if (tHash == sHash && t.substring(t.length() - s.length()).equals(s)) {
return t.length() - s.length();
}
return -1; // s is not a substring of t.
}
// @exclude
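// Illustrative note (not part of the original EPI solution): with BASE = 26 the
// rolling update above is equivalent to
//   hash("BCD") = (hash("ABC") - 'A' * 26^2) * 26 + 'D'
// i.e. drop the leading character's weighted contribution, shift the remaining
// hash by one base power, then append the new trailing character. For example,
// the tests below rely on rabinKarp("GACGCCA", "CGC") == 2.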
private static int checkAnswer(String t, String s) {
for (int i = 0; i + s.length() - 1 < t.length(); ++i) {
boolean find = true;
for (int j = 0; j < s.length(); ++j) {
if (t.charAt(i + j) != s.charAt(j)) {
find = false;
break;
}
}
if (find) {
return i;
}
}
return -1; // No matching.
}
private static String randString(int len) {
Random r = new Random();
StringBuilder ret = new StringBuilder(len);
while (len-- > 0) {
ret.append((char)(r.nextInt(26) + 'a'));
}
return ret.toString();
}
private static void smallTest() {
assert(rabinKarp("GACGCCA", "CGC") == 2);
assert(rabinKarp("GATACCCATCGAGTCGGATCGAGT", "GAG") == 10);
assert(rabinKarp("FOOBARWIDGET", "WIDGETS") == -1);
assert(rabinKarp("A", "A") == 0);
assert(rabinKarp("A", "B") == -1);
assert(rabinKarp("A", "") == 0);
assert(rabinKarp("ADSADA", "") == 0);
assert(rabinKarp("", "A") == -1);
assert(rabinKarp("", "AAA") == -1);
assert(rabinKarp("A", "AAA") == -1);
assert(rabinKarp("AA", "AAA") == -1);
assert(rabinKarp("AAA", "AAA") == 0);
assert(rabinKarp("BAAAA", "AAA") == 1);
assert(rabinKarp("BAAABAAAA", "AAA") == 1);
assert(rabinKarp("BAABBAABAAABS", "AAA") == 8);
assert(rabinKarp("BAABBAABAAABS", "AAAA") == -1);
assert(rabinKarp("FOOBAR", "BAR") > 0);
}
public static void main(String args[]) {
smallTest();
if (args.length == 2) {
String t = args[0];
String s = args[1];
System.out.println("t = " + t);
System.out.println("s = " + s);
assert(checkAnswer(t, s) == rabinKarp(t, s));
} else {
Random r = new Random();
for (int times = 0; times < 10000; ++times) {
String t = randString(r.nextInt(1000) + 1);
String s = randString(r.nextInt(20) + 1);
System.out.println("t = " + t);
System.out.println("s = " + s);
assert(checkAnswer(t, s) == rabinKarp(t, s));
}
}
}
}<|fim▁end|> | // against hash collision.
if (tHash == sHash && t.substring(i - s.length(), i).equals(s)) {
return i - s.length(); // Found a match.
} |
<|file_name|>hero-list.component.ts<|end_file_name|><|fim▁begin|>// #docregion
import {Component, OnInit} from 'angular2/core';
import {Hero} from './hero';
import {HeroService} from './hero.service';
@Component({
selector: 'hero-list',
template: `
<h3>Heroes:</h3>
<ul>
<li *ngFor="#hero of heroes">
{{ hero.name }}
</li>
</ul>
New Hero:
<input #newHero />
<button (click)="addHero(newHero.value); newHero.value=''">
Add Hero
</button>
<div class="error" *ngIf="errorMessage">{{errorMessage}}</div>
`,
styles: ['.error {color:red;}']
})
// #docregion component
export class HeroListComponent implements OnInit {
constructor (private _heroService: HeroService) {}
<|fim▁hole|> heroes:Hero[];
ngOnInit() { this.getHeroes(); }
// #docregion methods
// #docregion getHeroes
getHeroes() {
this._heroService.getHeroes()
.subscribe(
heroes => this.heroes = heroes,
error => this.errorMessage = <any>error);
}
// #enddocregion getHeroes
// #docregion addHero
addHero (name: string) {
if (!name) {return;}
this._heroService.addHero(name)
.subscribe(
hero => this.heroes.push(hero),
error => this.errorMessage = <any>error);
}
// #enddocregion addHero
// #enddocregion methods
}
// #enddocregion component<|fim▁end|> | errorMessage: string; |
<|file_name|>ide-fetcher.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Author: Thomas Goodwin <[email protected]>
import urllib2, json, os, sys, re
def download_asset(path, url):
asset_path = None
try:
file_name = os.path.basename(url)
asset_path = os.path.join(path, file_name)
if os.path.exists(asset_path):
# Skip downloading
asset_path = None
else:
if not os.path.exists(path):
os.makedirs(path)
f = urllib2.urlopen(url)
with open(asset_path, "wb") as local_file:
local_file.write(f.read())
except Exception as e:
sys.exit('Failed to fetch IDE. Error: {0}'.format(e))
finally:
return asset_path<|fim▁hole|> sys.exit('Failed to find the IDE asset')
elif len(assets) > 1:
sys.exit('Found too many IDE assets matching that description...?')
return download_asset('downloads', assets[0]['browser_download_url'])
def run(pv):
RELEASES_URL = 'http://api.github.com/repos/RedhawkSDR/redhawk/releases'
ide_asset = ''
try:
releases = json.loads(urllib2.urlopen(RELEASES_URL).read())
releases = [r for r in releases if r['tag_name'] == pv]
if releases:
ide_asset = handle_release_assets(releases[0]['assets'])
else:
sys.exit('Failed to find the release: {0}'.format(pv))
finally:
return ide_asset
if __name__ == '__main__':
# First argument is the version
asset = run(sys.argv[1])
print asset<|fim▁end|> |
def handle_release_assets(assets):
assets = [ asset for asset in assets if re.match(r'redhawk-ide.+?(?=x86_64)', asset['name'])]
if not assets: |
<|file_name|>hbFontaineListController.js<|end_file_name|><|fim▁begin|>(function() {
angular.module('hb5').controller('HbFontaineListController', ['$attrs', '$scope', 'GeoxmlService', '$routeParams', '$log', '$filter', '$timeout', 'hbAlertMessages', 'hbUtil', function($attrs, $scope, GeoxmlService, $routeParams, $log, $filter, $timeout, hbAlertMessages, hbUtil) {
$log.debug(" >>>> HbFontaineListController called...");
// FONTAINE default order is by "Address" stored in NOM field
$scope.predicate = 'IDENTIFIANT.NOM';
$scope.reverse = false;
// Object holding user entered search (filter) criteria
$scope.search = {
"objectif" : "",
"nom" : "",
"alias" : "",
"remark" : "",
"text" : ""
};
// Initialise general search text with search request parameter if defined.
// This is only expected from Dashboard calls.
if ($routeParams.search) {
$scope.search.text = $routeParams.search;
}
/**
* Apply fontaine specific filters and sorting.
*/
var filterSortElfins = function(elfins_p, search_p, predicate_p, reverse_p) {
// Apply prestationListFilter
var filteredSortedElfins = $filter('fontaineListFilter')(elfins_p, search_p);
filteredSortedElfins = $filter('fontaineListAnyFilter')(filteredSortedElfins, search_p.text);
// Apply predicate, reverse sorting
filteredSortedElfins = $filter('orderBy')(filteredSortedElfins, predicate_p, reverse_p);
return filteredSortedElfins;
};
/**
* Update filtered collection when search or sorting criteria are modified.
*/
$scope.$watch('[search,predicate,reverse]', function(newSearch, oldSearch) {
//$log.debug(">>>>> HbFontaineListController search, predicate or reverse UPDATED <<<<< \n" + angular.toJson(newSearch) );
if ($scope.elfins!=null) {
$scope.filteredElfins = filterSortElfins($scope.elfins, $scope.search, $scope.predicate, $scope.reverse);
}
}, true);
/**
* elfins can result from queries taking possibly seconds to tens of seconds to complete.
* This requires watching for elfins result availability before computing filteredElfins.length.
*/
$scope.$watch('elfins', function() {
if ($scope.elfins!=null) {
$scope.filteredElfins = filterSortElfins($scope.elfins, $scope.search, $scope.predicate, $scope.reverse); <|fim▁hole|> }
});
/**
* Set focus on the list global search field
*/
var focusOnSearchField = function() {
$('#globalSearchField').focus();
};
$timeout(focusOnSearchField, 250, false);
}]);
})();<|fim▁end|> | } else {
//$log.debug(">>>>> HbFontaineListController elfins NOT YET LOADED <<<<<"); |
<|file_name|>child.js<|end_file_name|><|fim▁begin|>var lightstep = require("../../../..");
var FileTransport = require("../../../util/file_transport");
var path = require('path');
var reportFilename = path.join(__dirname, "../../../results/on_exit_child.json");<|fim▁hole|>Tracer = new lightstep.Tracer({
access_token : "{your_access_token}",
component_name : "lightstep-tracer/unit-test/on_exit",
override_transport : new FileTransport(reportFilename),
});
for (var i = 0; i < 10; i++) {
var span = Tracer.startSpan("test_span_" + i);
span.log({"log_index" : i});
span.finish();
}<|fim▁end|> | |
<|file_name|>sim_plot.py<|end_file_name|><|fim▁begin|>import sys
from ocelot.adaptors.genesis import *
from ocelot.cpbd.elements import Element, Quadrupole, RBend, Drift, Undulator
from ocelot import MagneticLattice
from ocelot.cpbd.beam import Beam
from ocelot.cpbd.optics import *
import numpy.fft as fft
from sim_info import SimInfo, RunInfo
<|fim▁hole|>#rc('text', usetex=True) # required to have greek fonts on redhat
import argparse
h = 4.135667516e-15
c = 299792458.0
parser = argparse.ArgumentParser(description='FEL simulation postprocessor')
#parser.add_argument('--submit', help='submit to main index file', action='store_true')
parser.add_argument('--path', help='path to the experiment', default='./')
parser.add_argument('--stage', help='undulator/seeding stages 1 through 5', default='1')
parser.add_argument('--range', help='range of runs in the form i1:i2')
parser.add_argument('--field_file', help='read in field file', action='store_true')
args = parser.parse_args()
run_start, run_end = [int(i) for i in args.range.split(':') ]
fig1 = plt.figure()
ax1 = fig1.add_subplot(111)
ax1.set_xlabel('Time [fs]')
ax1.set_ylabel('Power [W]')
fig2 = plt.figure()
ax2 = fig2.add_subplot(111)
ax2.set_xlabel('Photon Energy [eV]')
ax2.set_ylabel('Spectrum [arb. units]')
ax2.get_xaxis().get_major_formatter().set_useOffset(False)
ax3 = ax2.twiny()
ax3.set_xlabel('Wavelength [nm]')
power_av = None
spec_av = None
runs = xrange(run_start, run_end+1)
for run_id in runs:
run_dir = args.path + '/run_' + str(run_id)
if args.stage in ['1','3','5']:
run_file = run_dir + '/run.' + str(run_id) + '.s' + str(args.stage) + '.gout'
if args.stage == '5' : run_file = run_dir + '/run.' + str(run_id) + '.gout'
print 'reading', run_file
g = readGenesisOutput(run_file)
field_file = run_file + '.dfl'
if args.field_file:
slices = readRadiationFile(fileName=field_file, npoints=g('ncar'))
P = np.zeros_like(slices[:,0,0])
for i in xrange(len(P)):
P[i] = sum( np.abs(np.multiply(slices[i,:,:], slices[i,:,:].conjugate())) )
t = np.linspace(g.t[0], g.t[-1], len(P))
else:
P = g.power_int
t = g.t
w_l_m = g('xlamds')
w_l_ev = h * c / g('xlamds')
x = np.roll(g.freq_ev, len(g.freq_ev)/2)+ w_l_ev
y = np.roll( np.abs(g.spec)**2, len(g.freq_ev)/2)
else:
run_file = run_dir + '/run.' + str(run_id) + '.s' + str( int(args.stage) - 1) + '.gout'
field_file = 'tmp' + str(args.stage) + '.dfl'
print 'reading', run_file, 'and', field_file
g = readGenesisOutput(run_file)
slices = readRadiationFile(fileName=run_dir + '/' + field_file, npoints=g('ncar'))
P = np.zeros_like(slices[:,0,0])
spec = np.zeros_like(slices[:,0,0])
for i in xrange(len(P)):
P[i] = sum( np.abs(np.multiply(slices[i,:,:], slices[i,:,:].conjugate())) )
t = np.linspace(g.t[0], g.t[-1], len(P))
w_l_m = g('xlamds')
w_l_ev = h * c / g('xlamds')
#x = np.roll(g.freq_ev, len(g.freq_ev)/2)+ w_l_ev
spec = fft.fft(slices[:,int( g('ncar')/2),int( g('ncar')/2)])
y = np.abs(spec)**2
x = h * fftfreq(len(spec), d=g('zsep') * g('xlamds') / c) + w_l_ev
if power_av is None:
power_av = P / len(runs)
else:
power_av += P / len(runs)
p1, = ax1.plot(t, P, color='black',alpha=0.4)
if spec_av is None:
spec_av = y / len(runs)
else:
spec_av += y / len(runs)
p2, = ax2.plot(x, y, color='black', alpha = 0.4)
ax2.set_xlim(x[0],x[-1])
ax3.set_xlim(x[0],x[-1])
x_ticks = ax2.get_xticks()[1:]
x2 = h*c/(x_ticks) * 1.e9 # coordinates in nm
ax3.set_xticks(x_ticks)
ax3.set_xticklabels(["%.4f" % z for z in x2])
ax1.plot(t, power_av, 'b')
ax2.plot(x, spec_av, 'b')
plt.show()<|fim▁end|> | #params = {'backend': 'ps', 'axes.labelsize': 18, 'text.fontsize': 18, 'legend.fontsize': 18, 'xtick.labelsize': 18, 'ytick.labelsize': 18, 'text.usetex': True}
#rcParams.update(params) |
<|file_name|>boss_shadow_hunter_voshgajin.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2011-2021 Project SkyFire <https://www.projectskyfire.org/>
* Copyright (C) 2008-2021 TrinityCore <http://www.trinitycore.org/>
* Copyright (C) 2005-2021 MaNGOS <https://www.getmangos.eu/>
* Copyright (C) 2006-2014 ScriptDev2 <https://github.com/scriptdev2/scriptdev2/>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 3 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "ScriptMgr.h"
#include "ScriptedCreature.h"
#include "blackrock_spire.h"
enum Spells
{
SPELL_CURSEOFBLOOD = 24673,
SPELL_HEX = 16708,
SPELL_CLEAVE = 20691,
};
enum Events
{
EVENT_CURSE_OF_BLOOD = 1,
EVENT_HEX = 2,
EVENT_CLEAVE = 3,
};
class boss_shadow_hunter_voshgajin : public CreatureScript
{
public:
boss_shadow_hunter_voshgajin() : CreatureScript("boss_shadow_hunter_voshgajin") { }
CreatureAI* GetAI(Creature* creature) const OVERRIDE
{
return new boss_shadowvoshAI(creature);
}
struct boss_shadowvoshAI : public BossAI
{
boss_shadowvoshAI(Creature* creature) : BossAI(creature, DATA_SHADOW_HUNTER_VOSHGAJIN) { }
void Reset() OVERRIDE
{
_Reset();
//DoCast(me, SPELL_ICEARMOR, true);
}
void EnterCombat(Unit* /*who*/) OVERRIDE
{
_EnterCombat();
events.ScheduleEvent(EVENT_CURSE_OF_BLOOD, 2 * IN_MILLISECONDS);
events.ScheduleEvent(EVENT_HEX, 8 * IN_MILLISECONDS);
events.ScheduleEvent(EVENT_CLEAVE, 14 * IN_MILLISECONDS);
}
void JustDied(Unit* /*killer*/) OVERRIDE
{
_JustDied();
}
void UpdateAI(uint32 diff) OVERRIDE
{
if (!UpdateVictim())
return;
events.Update(diff);
if (me->HasUnitState(UNIT_STATE_CASTING))
return;
while (uint32 eventId = events.ExecuteEvent())
{
switch (eventId)
{
case EVENT_CURSE_OF_BLOOD:
DoCastVictim(SPELL_CURSEOFBLOOD);
events.ScheduleEvent(EVENT_CURSE_OF_BLOOD, 45 * IN_MILLISECONDS);
break;
case EVENT_HEX:
if (Unit* target = SelectTarget(SELECT_TARGET_RANDOM, 0, 100, true))
DoCast(target, SPELL_HEX);
events.ScheduleEvent(EVENT_HEX, 15 * IN_MILLISECONDS);
break;
case EVENT_CLEAVE:
DoCastVictim(SPELL_CLEAVE);
events.ScheduleEvent(EVENT_CLEAVE, 7 * IN_MILLISECONDS);
break;
}
}
DoMeleeAttackIfReady();
}
};
};<|fim▁hole|>
void AddSC_boss_shadowvosh()
{
new boss_shadow_hunter_voshgajin();
}<|fim▁end|> | |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>"""Utilities functions which assist in the generation of commonly required data
structures from the products of placement, allocation and routing.
"""
from collections import defaultdict
from six import iteritems, itervalues
import warnings
from rig.place_and_route.machine import Machine, Cores, SDRAM, SRAM
from rig.place_and_route.constraints import ReserveResourceConstraint
from rig.machine_control.consts import AppState
def build_machine(system_info,
core_resource=Cores,
sdram_resource=SDRAM,
sram_resource=SRAM):
"""Build a :py:class:`~rig.place_and_route.Machine` object from a
:py:class:`~rig.machine_control.machine_controller.SystemInfo` object.
.. note::
Links are tested by sending a 'PEEK' command down the link which
checks to see if the remote device responds correctly. If the link
is dead, no response will be received and the link will be assumed
dead. Since peripherals do not generally respond to 'PEEK'
commands, working links attached to peripherals will also be marked
as dead.
.. note::
The returned object does not report how much memory is free, nor
how many cores are idle but rather the total number of working cores
and the size of the heap. See :py:func:`.build_resource_constraints`
for a function which can generate a set of
:py:class:`~rig.place_and_route.constraints` which prevent the use of
already in-use cores and memory.
.. note::
This method replaces the deprecated
:py:meth:`rig.machine_control.MachineController.get_machine` method.
Its functionality may be recreated using
:py:meth:`rig.machine_control.MachineController.get_system_info` along
with this function like so::
>> sys_info = mc.get_system_info()
>> machine = build_machine(sys_info)
Parameters
----------
system_info : :py:class:`rig.machine_control.machine_controller.SystemInfo`
The resource availability information for a SpiNNaker machine,
typically produced by
:py:meth:`rig.machine_control.MachineController.get_system_info`.
core_resource : resource (default: :py:class:`rig.place_and_route.Cores`)
The resource type to use to represent the number of working cores on a
chip, including the monitor, those already in use and all idle cores.
sdram_resource : resource (default: :py:class:`rig.place_and_route.SDRAM`)
The resource type to use to represent SDRAM on a chip. This resource
will be set to the number of bytes in the largest free block in the
SDRAM heap. This gives a conservative estimate of the amount of free
SDRAM on the chip which will be an underestimate in the presence of
memory fragmentation.
sram_resource : resource (default: :py:class:`rig.place_and_route.SRAM`)
The resource type to use to represent SRAM (a.k.a. system RAM) on a
chip. This resource will be set to the number of bytes in the largest
free block in the SRAM heap. This gives a conservative estimate of the
amount of free SRAM on the chip which will be an underestimate in the
presence of memory fragmentation.
Returns
-------
:py:class:`rig.place_and_route.Machine`
A :py:class:`~rig.place_and_route.Machine` object representing the
resources available within a SpiNNaker machine in the form used by the
place-and-route infrastructure.
"""
try:
max_cores = max(c.num_cores for c in itervalues(system_info))
except ValueError:
max_cores = 0
try:
max_sdram = max(c.largest_free_sdram_block
for c in itervalues(system_info))
except ValueError:
max_sdram = 0
try:
max_sram = max(c.largest_free_sram_block
for c in itervalues(system_info))
except ValueError:
max_sram = 0
return Machine(width=system_info.width,
height=system_info.height,
chip_resources={
core_resource: max_cores,
sdram_resource: max_sdram,
sram_resource: max_sram,
},
chip_resource_exceptions={
chip: {
core_resource: info.num_cores,
sdram_resource: info.largest_free_sdram_block,
sram_resource: info.largest_free_sram_block,
}
for chip, info in iteritems(system_info)
if (info.num_cores != max_cores or
info.largest_free_sdram_block != max_sdram or
info.largest_free_sram_block != max_sram)
},
dead_chips=set(system_info.dead_chips()),
dead_links=set(system_info.dead_links()))
def _get_minimal_core_reservations(core_resource, cores, chip=None):
"""Yield a minimal set of
:py:class:`~rig.place_and_route.constraints.ReserveResourceConstraint`
objects which reserve the specified set of cores.
Parameters
----------
core_resource : resource type
The type of resource representing cores.
cores : [int, ...]
The core numbers to reserve *in ascending order*.
chip : None or (x, y)
Which chip the constraints should be applied to or None for a global
constraint.
Yields
------
:py:class:`~rig.place_and_route.constraints.ReserveResourceConstraint`<|fim▁hole|> for core in cores:
if reservation is None:
reservation = slice(core, core + 1)
elif reservation.stop == core:
reservation = slice(reservation.start, core + 1)
else:
yield ReserveResourceConstraint(
core_resource, reservation, chip)
reservation = slice(core, core + 1)
if reservation is not None:
yield ReserveResourceConstraint(core_resource, reservation, chip)
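# Illustrative example (hypothetical values): _get_minimal_core_reservations(Cores, [1, 2, 3, 7])
# yields one ReserveResourceConstraint covering slice(1, 4) for the contiguous run of cores
# and a second constraint covering slice(7, 8) for the isolated core.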
def build_core_constraints(system_info, core_resource=Cores):
"""Return a set of place-and-route
:py:class:`~rig.place_and_route.constraints.ReserveResourceConstraint`
which reserve any cores that that are already in use.
The returned list of
:py:class:`~rig.place_and_route.constraints.ReserveResourceConstraint`\ s
reserves all cores not in an Idle state (i.e. not a monitor and not already
running an application).
.. note::
Historically, every application was required to add a
:py:class:~rig.place_and_route.constraints.ReserveResourceConstraint to
reserve the monitor processor on each chip. This method improves upon
this approach by automatically generating constraints which reserve not
just the monitor core but also any other cores which are already in
use.
Parameters
----------
system_info : :py:class:`rig.machine_control.machine_controller.SystemInfo`
The resource availability information for a SpiNNaker machine,
typically produced by
:py:meth:`rig.machine_control.MachineController.get_system_info`.
core_resource : resource (Default: :py:data:`~rig.place_and_route.Cores`)
The resource identifier used for cores.
Returns
-------
[:py:class:`rig.place_and_route.constraints.ReserveResourceConstraint`, \
...]
A set of place-and-route constraints which reserves all non-idle cores.
The resource type given in the ``core_resource`` argument will be
reserved accordingly.
"""
constraints = []
# Find the set of cores which are universally reserved
globally_reserved = None
for chip_info in itervalues(system_info):
reserved = sum(1 << c for c, state in enumerate(chip_info.core_states)
if state != AppState.idle)
if globally_reserved is None:
globally_reserved = reserved
else:
globally_reserved &= reserved
if globally_reserved is None:
globally_reserved = 0
constraints.extend(_get_minimal_core_reservations(
core_resource,
[core for core in range(18) if (1 << core) & globally_reserved]))
# Create chip-specific resource reservations for any special cases
for chip, chip_info in iteritems(system_info):
constraints.extend(_get_minimal_core_reservations(
core_resource,
[core for core, state in enumerate(chip_info.core_states)
if state != AppState.idle and
not globally_reserved & (1 << core)],
chip))
return constraints
def build_application_map(vertices_applications, placements, allocations,
core_resource=Cores):
"""Build a mapping from application to a list of cores where the
application is used.
This utility function assumes that each vertex is associated with a
specific application.
Parameters
----------
vertices_applications : {vertex: application, ...}
Applications are represented by the path of their APLX file.
placements : {vertex: (x, y), ...}
allocations : {vertex: {resource: slice, ...}, ...}
One of these resources should match the `core_resource` argument.
core_resource : object
The resource identifier which represents cores.
Returns
-------
{application: {(x, y) : set([c, ...]), ...}, ...}
For each application, for each used chip a set of core numbers onto
which the application should be loaded.
"""
application_map = defaultdict(lambda: defaultdict(set))
for vertex, application in iteritems(vertices_applications):
chip_cores = application_map[application][placements[vertex]]
core_slice = allocations[vertex].get(core_resource, slice(0, 0))
chip_cores.update(range(core_slice.start, core_slice.stop))
return application_map
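# Example of the returned structure (hypothetical input): a vertex running "app.aplx",
# placed on chip (0, 0) with an allocation of {Cores: slice(1, 3)}, produces
# {"app.aplx": {(0, 0): {1, 2}}}.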
def build_routing_tables(routes, net_keys, omit_default_routes=True):
"""**DEPRECATED** Convert a set of RoutingTrees into a per-chip set of
routing tables.
.. warning::
This method has been deprecated in favour of
:py:meth:`rig.routing_table.routing_tree_to_tables` and
:py:meth:`rig.routing_table.minimise`.
E.g. most applications should use something like::
from rig.routing_table import routing_tree_to_tables, minimise
tables = minimise(routing_tree_to_tables(routes, net_keys),
target_lengths)
Where target_length gives the number of available routing entries on
the chips in your SpiNNaker system (see
:py:func:~rig.routing_table.utils.build_routing_table_target_lengths)
This command produces routing tables with entries optionally omitted when
the route does not change direction (i.e. when default routing can be
used).
.. warning::
A :py:exc:`rig.routing_table.MultisourceRouteError` will
be raised if entries with identical keys and masks but with differing
routes are generated. This is not a perfect test, entries which would
otherwise collide are not spotted.
.. warning::
The routing trees provided are assumed to be correct and continuous
(not missing any hops). If this is not the case, the output is
undefined.
.. note::
If a routing tree has a terminating vertex whose route is set to None,
that vertex is ignored.
Parameters
----------
routes : {net: :py:class:`~rig.place_and_route.routing_tree.RoutingTree`, \
...}
The complete set of RoutingTrees representing all routes in the system.
(Note: this is the same datastructure produced by routers in the
`place_and_route` module.)
net_keys : {net: (key, mask), ...}
The key and mask associated with each net.
omit_default_routes : bool
Do not create routing entries for routes which do not change direction
(i.e. use default routing).
Returns
-------
{(x, y): [:py:class:`~rig.routing_table.RoutingTableEntry`, ...]
"""
from rig.routing_table import routing_tree_to_tables, remove_default_routes
warnings.warn(
"build_routing_tables() is deprecated, see "
"rig.routing_table.routing_tree_to_tables()"
"and rig.routing_table.minimise()", DeprecationWarning
)
# Build full routing tables and then remove default entries from them
tables = dict()
for chip, table in iteritems(routing_tree_to_tables(routes, net_keys)):
if omit_default_routes:
table = remove_default_routes.minimise(table, target_length=None)
# If the table is empty don't add it to the dictionary of tables.
if table:
tables[chip] = table
return tables<|fim▁end|> | """
reservation = None
# Cores is in ascending order |
<|file_name|>page_sets.py<|end_file_name|><|fim▁begin|># Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import py_utils
from telemetry import story as story_module
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class LeakDetectionSharedState(shared_page_state.SharedDesktopPageState):
def ShouldReuseBrowserForAllStoryRuns(self):
return True
class LeakDetectionPage(page_module.Page):
def __init__(self, url, page_set, name=''):
super(LeakDetectionPage, self).__init__(
url=url, page_set=page_set, name=name,
shared_page_state_class=LeakDetectionSharedState)
def RunNavigateSteps(self, action_runner):
tabs = action_runner.tab.browser.tabs
new_tab = tabs.New()
new_tab.action_runner.Navigate('about:blank')
new_tab.action_runner.PrepareForLeakDetection()
new_tab.action_runner.MeasureMemory()
new_tab.action_runner.Navigate(self.url)
self._WaitForPageLoadToComplete(new_tab.action_runner)
new_tab.action_runner.Navigate('about:blank')
new_tab.action_runner.PrepareForLeakDetection()
new_tab.action_runner.MeasureMemory()
new_tab.Close()
def _WaitForPageLoadToComplete(self, action_runner):
py_utils.WaitFor(action_runner.tab.HasReachedQuiescence, timeout=30)
# Some websites have a script that loads resources continuously, in which cases
# HasReachedQuiescence would not be reached. This class waits for document ready
# state to be complete to avoid timeout for those pages.
class ResourceLoadingLeakDetectionPage(LeakDetectionPage):
def _WaitForPageLoadToComplete(self, action_runner):
action_runner.tab.WaitForDocumentReadyStateToBeComplete()
class LeakDetectionStorySet(story_module.StorySet):
def __init__(self):
super(LeakDetectionStorySet, self).__init__(
archive_data_file='data/leak_detection.json',
cloud_storage_bucket=story_module.PARTNER_BUCKET)
urls_list = [
# Alexa top websites
'https://www.google.com',
'https://www.youtube.com',
'https://www.facebook.com',
'https://www.baidu.com',
'https://www.wikipedia.org',
'https://world.taobao.com/',
'https://www.tmall.com/',
'http://www.amazon.com',
'http://www.twitter.com',
'https://www.instagram.com/',
'http://www.jd.com/',
'https://vk.com/',
'https://outlook.live.com',
'https://www.reddit.com/',
'https://weibo.com/',
'https://www.sina.com.cn/',
'https://www.360.cn/',
'https://yandex.ru/',
'https://www.blogger.com/',
'https://www.netflix.com/',
'https://www.pornhub.com/',
'https://www.linkedin.com/',
'https://www.yahoo.co.jp/',
'https://www.csdn.net/',
'https://www.alipay.com/',
'https://www.twitch.tv/',
# TODO(keishi): Memory dump fails flakily crbug.com/963273
#'https://www.ebay.com/',
# TODO(keishi): Memory dump fails flakily crbug.com/963273
#'https://www.microsoft.com/',
# TODO(keishi): Memory dump fails flakily crbug.com/963273
#'https://www.xvideos.com/',
'https://mail.ru/',
'https://www.bing.com/',
'http://www.wikia.com/',
'https://www.office.com/',
'https://www.imdb.com/',
'https://www.aliexpress.com/',
'https://www.msn.com/',
'https://news.google.com/',
'https://www.theguardian.com/',
'https://www.indiatimes.com/',
# TODO(keishi): Memory dump fails flakily crbug.com/963273
#'http://www.foxnews.com/',
'https://weather.com/',
'https://www.shutterstock.com/',
'https://docs.google.com/',
'https://wordpress.com/',
# TODO(yuzus): This test crashes.
# 'https://www.apple.com/',
'https://play.google.com/store',
'https://www.dropbox.com/',
'https://soundcloud.com/',
'https://vimeo.com/',
'https://www.slideshare.net/',
'https://www.mediafire.com/',
'https://www.etsy.com/',
'https://www.ikea.com/',
'https://www.bestbuy.com/',
'https://www.homedepot.com/',
# TODO(keishi): Memory dump fails flakily crbug.com/963273
#'https://www.target.com/',
'https://www.booking.com/',
'https://www.tripadvisor.com/',
'https://9gag.com/',
'https://www.expedia.com/',
'https://www.roblox.com/',
'https://www.gamespot.com/',
'https://www.blizzard.com',
# TODO(keishi): Memory dump fails flakily crbug.com/963273
#'https://ign.com/',
'https://www.yelp.com/',
# Times out waiting for HasReachedQuiescence - crbug.com/927427
# 'https://gizmodo.com/',
'https://www.gsmarena.com/',
'https://www.theverge.com/',
'https://www.nlm.nih.gov/',
'https://archive.org/',
'https://www.udemy.com/',
'https://answers.yahoo.com/',
# TODO(crbug.com/985552): Memory dump fails flakily.
# 'https://www.goodreads.com/',
'https://www.cricbuzz.com/',
'http://www.goal.com/',
'http://siteadvisor.com/',
'https://www.patreon.com/',
'https://www.jw.org/',
'http://europa.eu/',
'https://translate.google.com/',
'https://www.epicgames.com/',
'http://www.reverso.net/',
'https://play.na.leagueoflegends.com/',
'https://www.thesaurus.com/',
'https://www.weebly.com/',
'https://www.deviantart.com/',
'https://www.scribd.com/',
'https://www.hulu.com/',
'https://www.xfinity.com/',
# India Alexa top websites
'https://porn555.com/',
'https://www.onlinesbi.com/',
'https://www.flipkart.com/',
'https://www.hotstar.com/',
'https://www.incometaxindiaefiling.gov.in/',
'https://stackoverflow.com/',
# TODO(crbug.com/1005035) Memory dump fails flakily.
# 'https://www.irctc.co.in/nget/',
'https://www.hdfcbank.com/',
'https://www.whatsapp.com/',
'https://uidai.gov.in/',
'https://billdesk.com/',
'https://www.icicibank.com/',
# US Alexa top websites
'https://imgur.com/',
'https://www.craigslist.org/',
'https://www.chase.com/',
# TODO(892352): tumblr started timing out due to a catapult roll. See
# https://crbug.com/892352
# 'https://www.tumblr.com/',
'https://www.paypal.com/',
# TODO(yuzus): espn.com is flaky. https://crbug.com/959796
#'http://www.espn.com/',
'https://edition.cnn.com/',
'https://www.pinterest.com/',
# TODO(keishi): Memory dump fails flakily crbug.com/963273
#'https://www.nytimes.com/',
'https://github.com/',
'https://www.salesforce.com/',
# Japan Alexa top websites
'https://www.rakuten.co.jp/',
'http://www.nicovideo.jp/',
'https://fc2.com/',
'https://ameblo.jp/',
'http://kakaku.com/',
'https://www.goo.ne.jp/',
'https://www.pixiv.net/',
# websites which were found to be leaking in the past
'https://www.prezi.com',
# TODO(keishi): Memory dump fails flakily crbug.com/963273
#'http://www.time.com',
'http://www.cheapoair.com',
'http://www.onlinedown.net',
'http://www.dailypost.ng',
'http://www.aljazeera.net',
'http://www.googleapps.com',
'http://www.airbnb.ch',
'http://www.livedoor.jp',
'http://www.blu-ray.com',
# TODO(953195): Test times out.
# 'http://www.block.io',
'http://www.hockeybuzz.com',
'http://www.silverpop.com',
'http://www.ansa.it',
'http://www.gulfair.com',
'http://www.nusatrip.com',
'http://www.samsung-fun.ru',
'http://www.opentable.com',
'http://www.magnetmail.net',
'http://zzz.com.ua',
'http://a-rakumo.appspot.com',
'http://www.sakurafile.com',
'http://www.psiexams.com',
'http://www.contentful.com',
'http://www.estibot.com',
'http://www.mbs.de',
'http://www.zhengjie.com',
'http://www.sjp.pl',
'http://www.mastodon.social',
'http://www.horairetrain.net',
'http://www.torrentzeu.to',
'http://www.inbank.it',
'http://www.gradpoint.com',
'http://www.mail.bg',
'http://www.aaannunci.it',
'http://www.leandomainsearch.com',
'http://www.wpjam.com',
'http://www.nigma.ru',
'http://www.do-search.com',
'http://www.omniboxes.com',
'http://whu.edu.cn',
'http://support.wordpress.com',
'http://www.webwebweb.com',
'http://www.sick.com',
'http://www.iowacconline.com',
'http://hdu.edu.cn',
'http://www.register.com',
'http://www.careesma.in',
'http://www.bestdic.ir',
'http://www.privacyassistant.net',
'http://www.sklavenzentrale.com',
'http://www.podbay.fm',
'http://www.coco.fr',
'http://www.skipaas.com',
'http://www.chatword.org',
'http://www.ezcardinfo.com',
'http://www.daydao.com',
'http://www.expediapartnercentral.com',
'http://www.22find.com',
'http://www.e-shop.gr',
'http://www.indeed.com',
'http://www.highwaybus.com',
'http://www.pingpang.info',
'http://www.besgold.com',
'http://www.arabam.com',
'http://makfax.com.mk',
'http://game.co.za',
'http://www.savaari.com',
'http://www.railsguides.jp',
]
resource_loading_urls_list = [
'https://www.hotels.com/',
'https://www.livejournal.com/',
# TODO(keishi): Memory dump fails flakily crbug.com/963273
#'https://www.yahoo.com',
'http://www.quora.com',
'https://www.macys.com',
'http://infomoney.com.br',
'http://www.listindiario.com',
'https://www.engadget.com/',
'https://www.sohu.com/',
'http://www.qq.com',
'http://www.benzworld.org',
'http://www.520mojing.com',
]<|fim▁hole|> for url in resource_loading_urls_list:
self.AddStory(ResourceLoadingLeakDetectionPage(url, self, url))<|fim▁end|> | for url in urls_list:
self.AddStory(LeakDetectionPage(url, self, url)) |
<|file_name|>showdesktop_da_DK.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="da_DK">
<context>
<name>ShowDesktop</name>
<message><|fim▁hole|> <translation type="unfinished"></translation>
</message>
<message>
<location filename="../showdesktop.cpp" line="58"/>
<source>Show Desktop: Global shortcut '%1' cannot be registered</source>
<translation>Vis Skrivebord: Global genvej '%1' kan ikke registreres</translation>
</message>
<message>
<location filename="../showdesktop.cpp" line="63"/>
<source>Show Desktop</source>
<translation>Vis Skrivebord</translation>
</message>
</context>
</TS><|fim▁end|> | <location filename="../showdesktop.cpp" line="48"/>
<source>Show desktop</source> |
<|file_name|>network_environment.py<|end_file_name|><|fim▁begin|># -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2009,2010,2011,2012,2013,2014,2017 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Network environment formatter."""<|fim▁hole|>
class NetworkEnvironmentFormatter(ObjectFormatter):
def format_raw(self, netenv, indent="", embedded=True,
indirect_attrs=True):
details = [indent + "{0:c}: {0.name}".format(netenv)]
details.append(self.redirect_raw(netenv.dns_environment, indent + " "))
if netenv.location:
details.append(self.redirect_raw(netenv.location, indent + " "))
if netenv.comments:
details.append(indent + " Comments: %s" % netenv.comments)
return "\n".join(details)
def fill_proto(self, netenv, skeleton, embedded=True, indirect_attrs=True):
skeleton.name = netenv.name
self.redirect_proto(netenv.dns_environment, skeleton.dns_environment)
if netenv.location is not None:
self.redirect_proto(netenv.location, skeleton.location)
ObjectFormatter.handlers[NetworkEnvironment] = NetworkEnvironmentFormatter()<|fim▁end|> |
from aquilon.aqdb.model import NetworkEnvironment
from aquilon.worker.formats.formatters import ObjectFormatter |
<|file_name|>plotter.py<|end_file_name|><|fim▁begin|># coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import division, unicode_literals, print_function
import logging
from collections import OrderedDict
import numpy as np
from monty.json import jsanitize
from pymatgen.phonon.bandstructure import PhononBandStructureSymmLine
from pymatgen.util.plotting import pretty_plot
from pymatgen.electronic_structure.plotter import plot_brillouin_zone
"""
This module implements plotter for DOS and band structure.
"""
logger = logging.getLogger(__name__)
class PhononDosPlotter(object):
"""
Class for plotting phonon DOSs. Note that the interface is extremely flexible
given that there are many different ways in which people want to view
DOS. The typical usage is::
# Initializes plotter with some optional args. Defaults are usually
# fine,
plotter = PhononDosPlotter()
# Adds a DOS with a label.
plotter.add_dos("Total DOS", dos)
# Alternatively, you can add a dict of DOSs. This is the typical
# form returned by CompletePhononDos.get_element_dos().
Args:
stack: Whether to plot the DOS as a stacked area graph
key_sort_func: function used to sort the dos_dict keys.
sigma: A float specifying a standard deviation for Gaussian smearing
the DOS for nicer looking plots. Defaults to None for no
smearing.
"""
def __init__(self, stack=False, sigma=None):
self.stack = stack
self.sigma = sigma
self._doses = OrderedDict()
def add_dos(self, label, dos):
"""
Adds a dos for plotting.
Args:
label:
label for the DOS. Must be unique.
dos:
PhononDos object
"""
densities = dos.get_smeared_densities(self.sigma) if self.sigma \
else dos.densities
self._doses[label] = {'frequencies': dos.frequencies, 'densities': densities}
def add_dos_dict(self, dos_dict, key_sort_func=None):
"""
Add a dictionary of doses, with an optional sorting function for the
keys.
Args:
dos_dict: dict of {label: Dos}
key_sort_func: function used to sort the dos_dict keys.
"""
if key_sort_func:
keys = sorted(dos_dict.keys(), key=key_sort_func)
else:
keys = dos_dict.keys()
for label in keys:
self.add_dos(label, dos_dict[label])
def get_dos_dict(self):
"""
Returns the added doses as a json-serializable dict. Note that if you
have specified smearing for the DOS plot, the densities returned will
be the smeared densities, not the original densities.
Returns:
Dict of dos data. Generally of the form, {label: {'frequencies':..,
'densities': ...}}
"""
return jsanitize(self._doses)
def get_plot(self, xlim=None, ylim=None):
"""
Get a matplotlib plot showing the DOS.
Args:
xlim: Specifies the x-axis limits. Set to None for automatic
determination.
ylim: Specifies the y-axis limits.
"""
import prettyplotlib as ppl
from prettyplotlib import brewer2mpl
ncolors = max(3, len(self._doses))
ncolors = min(9, ncolors)
colors = brewer2mpl.get_map('Set1', 'qualitative', ncolors).mpl_colors
y = None
alldensities = []
allfrequencies = []
plt = pretty_plot(12, 8)
# Note that this complicated processing of frequencies is to allow for
# stacked plots in matplotlib.
for key, dos in self._doses.items():
frequencies = dos['frequencies']
densities = dos['densities']
if y is None:
y = np.zeros(frequencies.shape)
if self.stack:
y += densities
newdens = y.copy()
else:
newdens = densities
allfrequencies.append(frequencies)
alldensities.append(newdens)
keys = list(self._doses.keys())
keys.reverse()
alldensities.reverse()
allfrequencies.reverse()
allpts = []
for i, (key, frequencies, densities) in enumerate(zip(keys, allfrequencies, alldensities)):
allpts.extend(list(zip(frequencies, densities)))
if self.stack:
plt.fill(frequencies, densities, color=colors[i % ncolors],
label=str(key))
else:
ppl.plot(frequencies, densities, color=colors[i % ncolors],
label=str(key), linewidth=3)
if xlim:
plt.xlim(xlim)
if ylim:
plt.ylim(ylim)
else:
xlim = plt.xlim()
relevanty = [p[1] for p in allpts
if xlim[0] < p[0] < xlim[1]]
plt.ylim((min(relevanty), max(relevanty)))
ylim = plt.ylim()
plt.plot([0, 0], ylim, 'k--', linewidth=2)
plt.xlabel('Frequencies (THz)')
plt.ylabel('Density of states')
plt.legend()
leg = plt.gca().get_legend()
ltext = leg.get_texts() # all the text.Text instance in the legend
plt.setp(ltext, fontsize=30)
plt.tight_layout()
return plt
def save_plot(self, filename, img_format="eps", xlim=None, ylim=None):
"""
Save matplotlib plot to a file.
Args:
filename: Filename to write to.
img_format: Image format to use. Defaults to EPS.
xlim: Specifies the x-axis limits. Set to None for automatic
determination.
ylim: Specifies the y-axis limits.
"""
plt = self.get_plot(xlim, ylim)
plt.savefig(filename, format=img_format)
def show(self, xlim=None, ylim=None):
"""
Show the plot using matplotlib.
Args:
xlim: Specifies the x-axis limits. Set to None for automatic
determination.
ylim: Specifies the y-axis limits.
"""
plt = self.get_plot(xlim, ylim)
plt.show()
class PhononBSPlotter(object):
"""
Class to plot or get data to facilitate the plot of band structure objects.
Args:
bs: A BandStructureSymmLine object.
"""
def __init__(self, bs):
if not isinstance(bs, PhononBandStructureSymmLine):
raise ValueError(
"PhononBSPlotter only works with PhononBandStructureSymmLine objects. "
"A PhononBandStructure object (on a uniform grid for instance and "
"not along symmetry lines won't work)")
self._bs = bs
self._nb_bands = self._bs.nb_bands
def _maketicks(self, plt):
"""
utility private method to add ticks to a band structure
"""
ticks = self.get_ticks()
# Sanitize only plot the uniq values
uniq_d = []
uniq_l = []
temp_ticks = list(zip(ticks['distance'], ticks['label']))
for i in range(len(temp_ticks)):
if i == 0:
uniq_d.append(temp_ticks[i][0])
uniq_l.append(temp_ticks[i][1])
logger.debug("Adding label {l} at {d}".format(
l=temp_ticks[i][0], d=temp_ticks[i][1]))
else:
if temp_ticks[i][1] == temp_ticks[i - 1][1]:
logger.debug("Skipping label {i}".format(
i=temp_ticks[i][1]))
else:
logger.debug("Adding label {l} at {d}".format(
l=temp_ticks[i][0], d=temp_ticks[i][1]))
uniq_d.append(temp_ticks[i][0])<|fim▁hole|> plt.gca().set_xticklabels(uniq_l)
for i in range(len(ticks['label'])):
if ticks['label'][i] is not None:
# don't print the same label twice
if i != 0:
if ticks['label'][i] == ticks['label'][i - 1]:
logger.debug("already print label... "
"skipping label {i}".format(
i=ticks['label'][i]))
else:
logger.debug("Adding a line at {d}"
" for label {l}".format(
d=ticks['distance'][i], l=ticks['label'][i]))
plt.axvline(ticks['distance'][i], color='k')
else:
logger.debug("Adding a line at {d} for label {l}".format(
d=ticks['distance'][i], l=ticks['label'][i]))
plt.axvline(ticks['distance'][i], color='k')
return plt
def bs_plot_data(self):
"""
Get the data nicely formatted for a plot
Returns:
A dict of the following format:
ticks: A dict with the 'distances' at which there is a qpoint (the
x axis) and the labels (None if no label)
frequencies: A list (one element for each branch) of frequencies for
each qpoint: [branch][qpoint][mode]. The data is
stored by branch to facilitate the plotting
lattice: The reciprocal lattice.
"""
distance = []
frequency = []
ticks = self.get_ticks()
for b in self._bs.branches:
frequency.append([])
distance.append([self._bs.distance[j]
for j in range(b['start_index'],
b['end_index'] + 1)])
for i in range(self._nb_bands):
frequency[-1].append(
[self._bs.bands[i][j]
for j in range(b['start_index'], b['end_index'] + 1)])
return {'ticks': ticks, 'distances': distance, 'frequency': frequency,
'lattice': self._bs.lattice_rec.as_dict()}
def get_plot(self, ylim=None):
"""
Get a matplotlib object for the bandstructure plot.
Args:
ylim: Specify the y-axis (frequency) limits; by default None let
the code choose.
"""
plt = pretty_plot(12, 8)
from matplotlib import rc
import scipy.interpolate as scint
try:
rc('text', usetex=True)
except:
# Fall back on non Tex if errored.
rc('text', usetex=False)
band_linewidth = 1
data = self.bs_plot_data()
for d in range(len(data['distances'])):
for i in range(self._nb_bands):
plt.plot(data['distances'][d],
[data['frequency'][d][i][j]
for j in range(len(data['distances'][d]))], 'b-',
linewidth=band_linewidth)
self._maketicks(plt)
# plot y=0 line
plt.axhline(0, linewidth=1, color='k')
# Main X and Y Labels
plt.xlabel(r'$\mathrm{Wave\ Vector}$', fontsize=30)
ylabel = r'$\mathrm{Frequency\ (THz)}$'
plt.ylabel(ylabel, fontsize=30)
# X range (K)
# last distance point
x_max = data['distances'][-1][-1]
plt.xlim(0, x_max)
if ylim is not None:
plt.ylim(ylim)
plt.tight_layout()
return plt
def show(self, ylim=None):
"""
Show the plot using matplotlib.
Args:
ylim: Specify the y-axis (frequency) limits; by default None let
the code choose.
"""
plt = self.get_plot(ylim)
plt.show()
def save_plot(self, filename, img_format="eps", ylim=None):
"""
Save matplotlib plot to a file.
Args:
filename: Filename to write to.
img_format: Image format to use. Defaults to EPS.
ylim: Specifies the y-axis limits.
"""
plt = self.get_plot(ylim=ylim)
plt.savefig(filename, format=img_format)
plt.close()
def get_ticks(self):
"""
Get all ticks and labels for a band structure plot.
Returns:
A dict with 'distance': a list of distance at which ticks should
be set and 'label': a list of label for each of those ticks.
"""
tick_distance = []
tick_labels = []
previous_label = self._bs.qpoints[0].label
previous_branch = self._bs.branches[0]['name']
for i, c in enumerate(self._bs.qpoints):
if c.label is not None:
tick_distance.append(self._bs.distance[i])
this_branch = None
for b in self._bs.branches:
if b['start_index'] <= i <= b['end_index']:
this_branch = b['name']
break
if c.label != previous_label \
and previous_branch != this_branch:
label1 = c.label
if label1.startswith("\\") or label1.find("_") != -1:
label1 = "$" + label1 + "$"
label0 = previous_label
if label0.startswith("\\") or label0.find("_") != -1:
label0 = "$" + label0 + "$"
tick_labels.pop()
tick_distance.pop()
tick_labels.append(label0 + "$\\mid$" + label1)
else:
if c.label.startswith("\\") or c.label.find("_") != -1:
tick_labels.append("$" + c.label + "$")
else:
tick_labels.append(c.label)
previous_label = c.label
previous_branch = this_branch
return {'distance': tick_distance, 'label': tick_labels}
def plot_compare(self, other_plotter):
"""
plot two band structure for comparison. One is in red the other in blue.
The two band structures need to be defined on the same symmetry lines!
and the distance between symmetry lines is
the one of the band structure used to build the PhononBSPlotter
Args:
another PhononBSPlotter object defined along the same symmetry lines
Returns:
a matplotlib object with both band structures
"""
data_orig = self.bs_plot_data()
data = other_plotter.bs_plot_data()
if len(data_orig['distances']) != len(data['distances']):
raise ValueError('The two objects are not compatible.')
plt = self.get_plot()
band_linewidth = 1
for i in range(other_plotter._nb_bands):
for d in range(len(data_orig['distances'])):
plt.plot(data_orig['distances'][d],
[e[i] for e in data['frequency']][d],
'r-', linewidth=band_linewidth)
return plt
def plot_brillouin(self):
"""
plot the Brillouin zone
"""
# get labels and lines
labels = {}
for q in self._bs.qpoints:
if q.label:
labels[q.label] = q.frac_coords
lines = []
for b in self._bs.branches:
lines.append([self._bs.qpoints[b['start_index']].frac_coords,
self._bs.qpoints[b['end_index']].frac_coords])
plot_brillouin_zone(self._bs.lattice_rec, lines=lines, labels=labels)<|fim▁end|> | uniq_l.append(temp_ticks[i][1])
logger.debug("Unique labels are %s" % list(zip(uniq_d, uniq_l)))
plt.gca().set_xticks(uniq_d) |
<|file_name|>OperatorStats.java<|end_file_name|><|fim▁begin|>/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.operator;
import com.facebook.presto.sql.planner.plan.PlanNodeId;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableList;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
import java.util.Optional;
import static com.google.common.base.Preconditions.checkArgument;
import static io.airlift.units.DataSize.succinctBytes;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
@Immutable
public class OperatorStats
{
private final int operatorId;
private final PlanNodeId planNodeId;
private final String operatorType;
private final long totalDrivers;
private final long addInputCalls;
private final Duration addInputWall;
private final Duration addInputCpu;
private final Duration addInputUser;
private final DataSize inputDataSize;
private final long inputPositions;
private final double sumSquaredInputPositions;
private final long getOutputCalls;
private final Duration getOutputWall;
private final Duration getOutputCpu;
private final Duration getOutputUser;
private final DataSize outputDataSize;
private final long outputPositions;
private final Duration blockedWall;
private final long finishCalls;
private final Duration finishWall;
private final Duration finishCpu;
private final Duration finishUser;
private final DataSize memoryReservation;
private final DataSize systemMemoryReservation;
private final Optional<BlockedReason> blockedReason;
private final OperatorInfo info;
@JsonCreator
public OperatorStats(
@JsonProperty("operatorId") int operatorId,
@JsonProperty("planNodeId") PlanNodeId planNodeId,
@JsonProperty("operatorType") String operatorType,
@JsonProperty("totalDrivers") long totalDrivers,
@JsonProperty("addInputCalls") long addInputCalls,
@JsonProperty("addInputWall") Duration addInputWall,
@JsonProperty("addInputCpu") Duration addInputCpu,
@JsonProperty("addInputUser") Duration addInputUser,
@JsonProperty("inputDataSize") DataSize inputDataSize,
@JsonProperty("inputPositions") long inputPositions,
@JsonProperty("sumSquaredInputPositions") double sumSquaredInputPositions,
@JsonProperty("getOutputCalls") long getOutputCalls,
@JsonProperty("getOutputWall") Duration getOutputWall,
@JsonProperty("getOutputCpu") Duration getOutputCpu,
@JsonProperty("getOutputUser") Duration getOutputUser,
@JsonProperty("outputDataSize") DataSize outputDataSize,
@JsonProperty("outputPositions") long outputPositions,
@JsonProperty("blockedWall") Duration blockedWall,
@JsonProperty("finishCalls") long finishCalls,
@JsonProperty("finishWall") Duration finishWall,
@JsonProperty("finishCpu") Duration finishCpu,
@JsonProperty("finishUser") Duration finishUser,
@JsonProperty("memoryReservation") DataSize memoryReservation,
@JsonProperty("systemMemoryReservation") DataSize systemMemoryReservation,
@JsonProperty("blockedReason") Optional<BlockedReason> blockedReason,
@JsonProperty("info") OperatorInfo info)
{
checkArgument(operatorId >= 0, "operatorId is negative");
this.operatorId = operatorId;
this.planNodeId = requireNonNull(planNodeId, "planNodeId is null");
this.operatorType = requireNonNull(operatorType, "operatorType is null");
this.totalDrivers = totalDrivers;
this.addInputCalls = addInputCalls;
this.addInputWall = requireNonNull(addInputWall, "addInputWall is null");
this.addInputCpu = requireNonNull(addInputCpu, "addInputCpu is null");
this.addInputUser = requireNonNull(addInputUser, "addInputUser is null");
this.inputDataSize = requireNonNull(inputDataSize, "inputDataSize is null");
checkArgument(inputPositions >= 0, "inputPositions is negative");
this.inputPositions = inputPositions;
this.sumSquaredInputPositions = sumSquaredInputPositions;
this.getOutputCalls = getOutputCalls;
this.getOutputWall = requireNonNull(getOutputWall, "getOutputWall is null");
this.getOutputCpu = requireNonNull(getOutputCpu, "getOutputCpu is null");
this.getOutputUser = requireNonNull(getOutputUser, "getOutputUser is null");
this.outputDataSize = requireNonNull(outputDataSize, "outputDataSize is null");
checkArgument(outputPositions >= 0, "outputPositions is negative");
this.outputPositions = outputPositions;
this.blockedWall = requireNonNull(blockedWall, "blockedWall is null");
this.finishCalls = finishCalls;
this.finishWall = requireNonNull(finishWall, "finishWall is null");
this.finishCpu = requireNonNull(finishCpu, "finishCpu is null");
this.finishUser = requireNonNull(finishUser, "finishUser is null");
this.memoryReservation = requireNonNull(memoryReservation, "memoryReservation is null");
this.systemMemoryReservation = requireNonNull(systemMemoryReservation, "systemMemoryReservation is null");
this.blockedReason = blockedReason;
this.info = info;
}
@JsonProperty
public int getOperatorId()
{
return operatorId;
}
@JsonProperty
public PlanNodeId getPlanNodeId()
{
return planNodeId;
}
@JsonProperty
public String getOperatorType()
{
return operatorType;
}
@JsonProperty
public long getTotalDrivers()
{
return totalDrivers;
}
@JsonProperty
public long getAddInputCalls()
{
return addInputCalls;
}
@JsonProperty
public Duration getAddInputWall()
{
return addInputWall;
}
@JsonProperty
public Duration getAddInputCpu()
{
return addInputCpu;
}
@JsonProperty
public Duration getAddInputUser()
{
return addInputUser;
}
@JsonProperty
public DataSize getInputDataSize()
{
return inputDataSize;
}
@JsonProperty
public long getInputPositions()
{
return inputPositions;
}
@JsonProperty
public double getSumSquaredInputPositions()
{
return sumSquaredInputPositions;
}
@JsonProperty
public long getGetOutputCalls()
{
return getOutputCalls;
}
@JsonProperty
public Duration getGetOutputWall()
{
return getOutputWall;
}
@JsonProperty
public Duration getGetOutputCpu()
{
return getOutputCpu;
}
@JsonProperty
public Duration getGetOutputUser()
{
return getOutputUser;
}
@JsonProperty
public DataSize getOutputDataSize()
{
return outputDataSize;
}
@JsonProperty
public long getOutputPositions()
{
return outputPositions;
}
@JsonProperty
public Duration getBlockedWall()
{
return blockedWall;
}
@JsonProperty
public long getFinishCalls()
{
return finishCalls;
}
@JsonProperty
public Duration getFinishWall()
{
return finishWall;
}
@JsonProperty
public Duration getFinishCpu()
{
return finishCpu;
}
@JsonProperty
public Duration getFinishUser()
{
return finishUser;
}
@JsonProperty
public DataSize getMemoryReservation()
{
return memoryReservation;
}
@JsonProperty
public DataSize getSystemMemoryReservation()
{
return systemMemoryReservation;
}
@JsonProperty
public Optional<BlockedReason> getBlockedReason()
{
return blockedReason;
}
@Nullable
@JsonProperty
public OperatorInfo getInfo()
{
return info;
}
public OperatorStats add(OperatorStats... operators)
{
return add(ImmutableList.copyOf(operators));
}
public OperatorStats add(Iterable<OperatorStats> operators)
{
long totalDrivers = this.totalDrivers;
long addInputCalls = this.addInputCalls;
long addInputWall = this.addInputWall.roundTo(NANOSECONDS);
long addInputCpu = this.addInputCpu.roundTo(NANOSECONDS);
long addInputUser = this.addInputUser.roundTo(NANOSECONDS);
long inputDataSize = this.inputDataSize.toBytes();
long inputPositions = this.inputPositions;
double sumSquaredInputPositions = this.sumSquaredInputPositions;
long getOutputCalls = this.getOutputCalls;
long getOutputWall = this.getOutputWall.roundTo(NANOSECONDS);
long getOutputCpu = this.getOutputCpu.roundTo(NANOSECONDS);
long getOutputUser = this.getOutputUser.roundTo(NANOSECONDS);
long outputDataSize = this.outputDataSize.toBytes();
long outputPositions = this.outputPositions;
long blockedWall = this.blockedWall.roundTo(NANOSECONDS);
long finishCalls = this.finishCalls;
long finishWall = this.finishWall.roundTo(NANOSECONDS);
long finishCpu = this.finishCpu.roundTo(NANOSECONDS);
long finishUser = this.finishUser.roundTo(NANOSECONDS);
long memoryReservation = this.memoryReservation.toBytes();
long systemMemoryReservation = this.systemMemoryReservation.toBytes();
Optional<BlockedReason> blockedReason = this.blockedReason;
Mergeable<OperatorInfo> base = getMergeableInfoOrNull(info);
for (OperatorStats operator : operators) {
checkArgument(operator.getOperatorId() == operatorId, "Expected operatorId to be %s but was %s", operatorId, operator.getOperatorId());
totalDrivers += operator.totalDrivers;
addInputCalls += operator.getAddInputCalls();
addInputWall += operator.getAddInputWall().roundTo(NANOSECONDS);
addInputCpu += operator.getAddInputCpu().roundTo(NANOSECONDS);
addInputUser += operator.getAddInputUser().roundTo(NANOSECONDS);
inputDataSize += operator.getInputDataSize().toBytes();
inputPositions += operator.getInputPositions();
sumSquaredInputPositions += operator.getSumSquaredInputPositions();
getOutputCalls += operator.getGetOutputCalls();
getOutputWall += operator.getGetOutputWall().roundTo(NANOSECONDS);
getOutputCpu += operator.getGetOutputCpu().roundTo(NANOSECONDS);
getOutputUser += operator.getGetOutputUser().roundTo(NANOSECONDS);
outputDataSize += operator.getOutputDataSize().toBytes();
outputPositions += operator.getOutputPositions();
finishCalls += operator.getFinishCalls();
finishWall += operator.getFinishWall().roundTo(NANOSECONDS);
finishCpu += operator.getFinishCpu().roundTo(NANOSECONDS);
finishUser += operator.getFinishUser().roundTo(NANOSECONDS);
blockedWall += operator.getBlockedWall().roundTo(NANOSECONDS);
memoryReservation += operator.getMemoryReservation().toBytes();
systemMemoryReservation += operator.getSystemMemoryReservation().toBytes();
if (operator.getBlockedReason().isPresent()) {
blockedReason = operator.getBlockedReason();
}
OperatorInfo info = operator.getInfo();
if (base != null && info != null && base.getClass() == info.getClass()) {
base = mergeInfo(base, info);
}
}
return new OperatorStats(
operatorId,
planNodeId,
operatorType,
totalDrivers,
addInputCalls,
new Duration(addInputWall, NANOSECONDS).convertToMostSuccinctTimeUnit(),<|fim▁hole|> new Duration(addInputUser, NANOSECONDS).convertToMostSuccinctTimeUnit(),
succinctBytes(inputDataSize),
inputPositions,
sumSquaredInputPositions,
getOutputCalls,
new Duration(getOutputWall, NANOSECONDS).convertToMostSuccinctTimeUnit(),
new Duration(getOutputCpu, NANOSECONDS).convertToMostSuccinctTimeUnit(),
new Duration(getOutputUser, NANOSECONDS).convertToMostSuccinctTimeUnit(),
succinctBytes(outputDataSize),
outputPositions,
new Duration(blockedWall, NANOSECONDS).convertToMostSuccinctTimeUnit(),
finishCalls,
new Duration(finishWall, NANOSECONDS).convertToMostSuccinctTimeUnit(),
new Duration(finishCpu, NANOSECONDS).convertToMostSuccinctTimeUnit(),
new Duration(finishUser, NANOSECONDS).convertToMostSuccinctTimeUnit(),
succinctBytes(memoryReservation),
succinctBytes(systemMemoryReservation),
blockedReason,
(OperatorInfo) base);
}
@SuppressWarnings("unchecked")
private static Mergeable<OperatorInfo> getMergeableInfoOrNull(OperatorInfo info)
{
Mergeable<OperatorInfo> base = null;
if (info instanceof Mergeable) {
base = (Mergeable<OperatorInfo>) info;
}
return base;
}
@SuppressWarnings("unchecked")
private static <T> Mergeable<T> mergeInfo(Mergeable<T> base, T other)
{
return (Mergeable<T>) base.mergeWith(other);
}
public OperatorStats summarize()
{
return new OperatorStats(
operatorId,
planNodeId,
operatorType,
totalDrivers,
addInputCalls,
addInputWall,
addInputCpu,
addInputUser,
inputDataSize,
inputPositions,
sumSquaredInputPositions,
getOutputCalls,
getOutputWall,
getOutputCpu,
getOutputUser,
outputDataSize,
outputPositions,
blockedWall,
finishCalls,
finishWall,
finishCpu,
finishUser,
memoryReservation,
systemMemoryReservation,
blockedReason,
(info != null && info.isFinal()) ? info : null);
}
}<|fim▁end|> | new Duration(addInputCpu, NANOSECONDS).convertToMostSuccinctTimeUnit(), |
<|file_name|>AgentProfileManager.py<|end_file_name|><|fim▁begin|>import json
import datetime
from django.core.files.base import ContentFile
from django.core.exceptions import ValidationError
from django.utils.timezone import utc
from ..models import AgentProfile
from ..exceptions import IDNotFoundError, ParamError
from ..utils import etag
class AgentProfileManager():
def __init__(self, agent):
self.Agent = agent
def save_non_json_profile(self, p, profile, request_dict):
p.content_type = request_dict['headers']['CONTENT_TYPE']
p.etag = etag.create_tag(profile.read())
if 'updated' in request_dict['headers'] and request_dict['headers']['updated']:
p.updated = request_dict['headers']['updated']
else:
p.updated = datetime.datetime.utcnow().replace(tzinfo=utc)
# Go to beginning of file
profile.seek(0)
fn = "%s_%s" % (p.agent_id, request_dict.get('filename', p.id))
p.profile.save(fn, profile)
p.save()
def post_profile(self, request_dict):
# get/create profile
p, created = AgentProfile.objects.get_or_create(
profile_id=request_dict['params']['profileId'], agent=self.Agent)
post_profile = request_dict['profile']
# If incoming profile is application/json and if a profile didn't
# already exist with the same agent and profileId
if created:
p.json_profile = post_profile
p.content_type = "application/json"
p.etag = etag.create_tag(post_profile)
# If incoming profile is application/json and if a profile already
# existed with the same agent and profileId
else:
orig_prof = json.loads(p.json_profile)
post_profile = json.loads(post_profile)
merged = json.dumps(
dict(list(orig_prof.items()) + list(post_profile.items())))
p.json_profile = merged
p.etag = etag.create_tag(merged)
# Set updated
if 'updated' in request_dict['headers'] and request_dict['headers']['updated']:
p.updated = request_dict['headers']['updated']
else:
p.updated = datetime.datetime.utcnow().replace(tzinfo=utc)
p.save()
def put_profile(self, request_dict):
# get/create profile
p, created = AgentProfile.objects.get_or_create(
profile_id=request_dict['params']['profileId'], agent=self.Agent)
# Profile being PUT is not json
if "application/json" not in request_dict['headers']['CONTENT_TYPE']:
try:
profile = ContentFile(request_dict['profile'].read())
except:
try:
profile = ContentFile(request_dict['profile'])
except:
profile = ContentFile(str(request_dict['profile']))
etag.check_preconditions(request_dict, p, created)
# If it already exists delete it
if p.profile:
try:
p.profile.delete()
except OSError:
# probably was json before
p.json_profile = {}
self.save_non_json_profile(p, profile, request_dict)
# Profile being PUT is json
else:
# (overwrite existing profile data)
etag.check_preconditions(request_dict, p, created)
the_profile = request_dict['profile']
p.json_profile = the_profile
p.content_type = request_dict['headers']['CONTENT_TYPE']
p.etag = etag.create_tag(the_profile)
# Set updated
if 'updated' in request_dict['headers'] and request_dict['headers']['updated']:
p.updated = request_dict['headers']['updated']
else:
p.updated = datetime.datetime.utcnow().replace(tzinfo=utc)
p.save()
def get_profile(self, profile_id):
try:
return self.Agent.agentprofile_set.get(profile_id=profile_id)
except:
err_msg = 'There is no agent profile associated with the id: %s' % profile_id
raise IDNotFoundError(err_msg)
def get_profile_ids(self, since=None):
ids = []
if since:
try:
# this expects iso6801 date/time format
# "2013-02-15T12:00:00+00:00"
profs = self.Agent.agentprofile_set.filter(updated__gt=since)
except ValidationError:
err_msg = 'Since field is not in correct format for retrieval of agent profiles'
raise ParamError(err_msg)
ids = [p.profile_id for p in profs]
else:
ids = self.Agent.agentprofile_set.values_list(
'profile_id', flat=True)
return ids
def delete_profile(self, profile_id):
try:
self.get_profile(profile_id).delete()
# we don't want it anyway
except AgentProfile.DoesNotExist:
pass<|fim▁hole|><|fim▁end|> | except IDNotFoundError:
pass |