// ContactPhone.java

package ru.stqa.pft.addressbook.tests;

import org.hamcrest.CoreMatchers;
import org.hamcrest.MatcherAssert;
import org.testng.annotations.Test;
import ru.stqa.pft.addressbook.model.ContactData;
import ru.stqa.pft.addressbook.tests.TestBase;
import java.util.Arrays;
import java.util.stream.Collectors;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
/**
* Created by mocius on 2017-04-16.
*/
public class ContactPhone extends TestBase {
@Test
public void testContactPhones(){
app.goTo().homePage();
ContactData contact = app.contactHelper().all().iterator().next();
ContactData contactInfoFromEditForm = app.contactHelper().infoFromEditForm(contact);
assertThat(contact.getAllPhones(), equalTo(mergePhones(contactInfoFromEditForm)));
}
private String mergePhones(ContactData contact) {
return Arrays.asList(contact.getHomePhone(), contact.getMobilePhone(), contact.getWorkPhone())
.stream()
.filter(s -> !s.equals(""))
.map(ContactPhone::cleaned)
.collect(Collectors.joining("\n"));
}
public static String cleaned(String phone){
return phone.replaceAll("\\s", "").replaceAll("[-()]", "");
}
}
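For reference, a small hypothetical driver (the `CleanedDemo` class is not part of the suite and assumes it lives in the same package as `ContactPhone`) showing what the `cleaned` normalizer does to a formatted number:

```java
// Hypothetical demo; only ContactPhone.cleaned() comes from the class above.
public class CleanedDemo {
    public static void main(String[] args) {
        // cleaned() strips whitespace first, then dashes and parentheses.
        System.out.println(ContactPhone.cleaned("+7 (926) 555-44-33")); // +79265554433
    }
}
```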
// NaN.js

var test = require('tape');
var equal = require('../');
test('NaN and 0 values', function (t) {
t.ok(equal(NaN, NaN));
t.notOk(equal(0, NaN));
t.ok(equal(0, 0));
t.notOk(equal(0, 1));
t.end();
});
test('nested NaN values', function (t) {
t.ok(equal([ NaN, 1, NaN ], [ NaN, 1, NaN ]));
t.end();
});
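The reflexivity question this test pins down is language-agnostic; as a hedged aside, the JVM draws the same distinction between primitive and boxed NaN comparison:

```java
// NaN comparison semantics on the JVM, for contrast with the tape test above.
public class NaNSemantics {
    public static void main(String[] args) {
        System.out.println(Double.NaN == Double.NaN);                      // false: IEEE 754 primitive compare
        System.out.println(Double.valueOf(Double.NaN).equals(Double.NaN)); // true: equals() compares bit patterns
        System.out.println(Double.compare(0.0, Double.NaN) < 0);           // true: NaN sorts after every number
    }
}
```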
// GradebookExternalAssessmentService.java

/**********************************************************************************
*
* $Id$
*
***********************************************************************************
*
* Copyright (c) 2007, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.service.gradebook.shared;
import java.util.Date;
import java.util.Collection;
import java.util.List;
import java.util.Map;
/**
* This service is designed for use by external assessment engines. These use
* the Gradebook as a passive mirror of their own assignments and scores,
* letting Gradebook users see those assignments alongside Gradebook-managed
* assignments, and combine them when calculating a course grade. The Gradebook
* application itself will not modify externally-managed assignments and scores.
*
* <b>WARNING</b>: Because the Gradebook project team is not responsible for
* defining the external clients' requirements, the Gradebook service does not
* attempt to guess at their authorization needs. Our administrative and
* external-assessment methods simply follow orders and assume that the caller
* has taken the responsibility of "doing the right thing." DO NOT wrap these
* methods in an open web service!
*/
public interface GradebookExternalAssessmentService {
/**
* @deprecated Replaced by
* {@link #addExternalAssessment(String, String, String, String, Double, Date, String, Boolean)}
*/
public void addExternalAssessment(String gradebookUid, String externalId, String externalUrl,
String title, double points, Date dueDate, String externalServiceDescription)
throws GradebookNotFoundException, ConflictingAssignmentNameException,
ConflictingExternalIdException, AssignmentHasIllegalPointsException;
/**
* Add an externally-managed assessment to a gradebook to be treated as a
* read-only assignment. The gradebook application will not modify the
* assessment properties or create any scores for the assessment.
* Since each assignment in a given gradebook must have a unique name,
* conflicts are possible.
*
* @param gradebookUid
* @param externalId
* some unique identifier which Samigo uses for the assessment.
* The externalId is globally namespaced within the gradebook, so
* if other apps decide to put assessments into the gradebook,
* they should prefix their externalIds with a well known (and
* unique within sakai) string.
* @param externalUrl
* a link to go to if the instructor or student wants to look at the assessment
* in Samigo; if null, no direct link will be provided in the
* gradebook, and the user will have to navigate to the assessment
* within the other application
* @param title
* @param points
* this is the total amount of points available and must be greater than zero.
* it could be null if it's an ungraded item.
* @param dueDate
* @param externalServiceDescription
* what to display as the source of the assignment (e.g., "from Samigo")
* @param ungraded
*
*/
public void addExternalAssessment(String gradebookUid, String externalId, String externalUrl,
String title, Double points, Date dueDate, String externalServiceDescription, Boolean ungraded)
throws GradebookNotFoundException, ConflictingAssignmentNameException,
ConflictingExternalIdException, AssignmentHasIllegalPointsException;
/**
* This method is identical to {@link #addExternalAssessment(String, String, String, String, Double, Date, String, Boolean)} but
* allows you to also specify the associated Category for this assignment. If the gradebook is set up for categories and
* categoryId is null, assignment category will be unassigned
* @param gradebookUid
* @param externalId
* @param externalUrl
* @param title
* @param points
* @param dueDate
* @param externalServiceDescription
* @param ungraded
* @param categoryId
* @throws GradebookNotFoundException
* @throws ConflictingAssignmentNameException
* @throws ConflictingExternalIdException
* @throws AssignmentHasIllegalPointsException
* @throws InvalidCategoryException
*/
public void addExternalAssessment(String gradebookUid, String externalId, String externalUrl,
String title, Double points, Date dueDate, String externalServiceDescription, Boolean ungraded, Long categoryId)
throws GradebookNotFoundException, ConflictingAssignmentNameException,
ConflictingExternalIdException, AssignmentHasIllegalPointsException, InvalidCategoryException;
/**
* @deprecated Replaced by
* {@link #updateExternalAssessment(String, String, String, String, Double, Date, Boolean)}
*/
public void updateExternalAssessment(String gradebookUid, String externalId, String externalUrl,
String title, double points, Date dueDate)
throws GradebookNotFoundException, AssessmentNotFoundException,
ConflictingAssignmentNameException, AssignmentHasIllegalPointsException;
/**
* Update an external assessment
* @param gradebookUid
* @param externalId
* @param externalUrl
* @param title
* @param points
* @param dueDate
* @param ungraded
* @throws GradebookNotFoundException
* @throws AssessmentNotFoundException
* @throws ConflictingAssignmentNameException
* @throws AssignmentHasIllegalPointsException
*/
public void updateExternalAssessment(String gradebookUid, String externalId, String externalUrl,
String title, Double points, Date dueDate, Boolean ungraded)
throws GradebookNotFoundException, AssessmentNotFoundException,
ConflictingAssignmentNameException, AssignmentHasIllegalPointsException;
/**
* Remove the assessment reference from the gradebook. Although Samigo
* doesn't currently delete assessments, an instructor can retract an
* assessment to keep it from students. Since such an assessment would
* presumably no longer be used to calculate final grades, Samigo should
* also remove that assessment from the gradebook.
*
* @param externalId
* the UID of the assessment
*/
public void removeExternalAssessment(String gradebookUid, String externalId)
throws GradebookNotFoundException, AssessmentNotFoundException;
/**
* Updates an external score for an external assignment in the gradebook.
*
* @param gradebookUid
* The Uid of the gradebook
* @param externalId
* The external ID of the assignment/assessment
* @param studentUid
* The unique id of the student
* @param points
* The number of points earned on this assessment, or null if a score
* should be removed
*/
public void updateExternalAssessmentScore(String gradebookUid, String externalId,
String studentUid, String points)
throws GradebookNotFoundException, AssessmentNotFoundException;
/**
*
* @param gradebookUid
* @param externalId
* @param studentUidsToScores
* @throws GradebookNotFoundException
* @throws AssessmentNotFoundException
*
* @deprecated Replaced by
* {@link #updateExternalAssessmentScoresString(String, String, Map)}
*/
public void updateExternalAssessmentScores(String gradebookUid,
String externalId, Map<String, Double> studentUidsToScores)
throws GradebookNotFoundException, AssessmentNotFoundException;
/**
* Updates a set of external scores for an external assignment in the gradebook.
*
* @param gradebookUid
* The Uid of the gradebook
* @param externalId
* The external ID of the assignment/assessment
* @param studentUidsToScores
* A map whose String keys are the unique ID strings of the students and whose
* String values are points earned on this assessment or null if the score
* should be removed.
*/
public void updateExternalAssessmentScoresString(String gradebookUid,
String externalId, Map<String, String> studentUidsToScores)
throws GradebookNotFoundException, AssessmentNotFoundException;
/**
* Updates an external comment for an external assignment in the gradebook.
*
* @param gradebookUid
* The Uid of the gradebook
* @param externalId
* The external ID of the assignment/assessment
* @param studentUid
* The unique id of the student
* @param comment
* The comment to be added to this grade, or null if a comment
* should be removed
*/
public void updateExternalAssessmentComment(String gradebookUid,
String externalId, String studentUid, String comment )
throws GradebookNotFoundException, AssessmentNotFoundException;
/**
* Updates a set of external comments for an external assignment in the gradebook.
*
* @param gradebookUid
* The Uid of the gradebook
* @param externalId
* The external ID of the assignment/assessment
* @param studentUidsToComments
* A map whose String keys are the unique ID strings of the students and whose
* String values are comments or null if the comments
* should be removed.
*/
public void updateExternalAssessmentComments(String gradebookUid,
String externalId, Map<String, String> studentUidsToComments)
throws GradebookNotFoundException, AssessmentNotFoundException;
/**
* Check to see if an assignment with the given name already exists
* in the given gradebook. This will give external assessment systems
* a chance to avoid the ConflictingAssignmentNameException.
*/
public boolean isAssignmentDefined(String gradebookUid, String assignmentTitle)
throws GradebookNotFoundException;
/**
* Check to see if an assignment with the given external id already exists
* in the given gradebook. This will give external assessment systems
* a chance to avoid the ConflictingExternalIdException.
*
* @param gradebookUid The gradebook's unique identifier
* @param externalId The external assessment's external identifier
*/
public boolean isExternalAssignmentDefined(String gradebookUid, String externalId)
throws GradebookNotFoundException;
/**
* Check with the appropriate external service if a specific assignment is
* available only to groups.
*
* @param gradebookUid The gradebook's unique identifier
* @param externalId The external assessment's external identifier
*/
public boolean isExternalAssignmentGrouped(String gradebookUid, String externalId)
throws GradebookNotFoundException;
/**
* Check with the appropriate external service if a specific assignment is
* available to a specific user (i.e., the user is in an appropriate group).
* Note that this method will return true if the assignment exists in the
* gradebook and is marked as externally maintained while no provider
* recognizes it; this is to maintain a safer default (no change from the
* 2.8 release) for tools that have not implemented a provider.
*
* @param gradebookUid The gradebook's unique identifier
* @param externalId The external assessment's external identifier
* @param userId The user ID to check
*/
public boolean isExternalAssignmentVisible(String gradebookUid, String externalId, String userId)
throws GradebookNotFoundException;

/**
* Retrieve all assignments for a gradebook that are marked as externally
* maintained and are visible to the current user. Assignments may be included
* with a null providerAppKey, indicating that the gradebook references the
* assignment, but no provider claims responsibility for it.
*
* @param gradebookUid The gradebook's unique identifier
* @return A map from the externalId of each activity to the providerAppKey
*/
public Map<String, String> getExternalAssignmentsForCurrentUser(String gradebookUid)
throws GradebookNotFoundException;
/**
* Retrieve a list of all visible, external assignments for a set of users.
*
* @param gradebookUid The gradebook's unique identifier
* @param studentIds The collection of student IDs for which to retrieve assignments
* @return A map from the student ID to all visible, external activity IDs
*/
public Map<String, List<String>> getVisibleExternalAssignments(String gradebookUid, Collection<String> studentIds)
throws GradebookNotFoundException;
/**
* Register a new ExternalAssignmentProvider for handling the integration of external
* assessment sources with the Sakai gradebook.
* Registering more than once will overwrite the currently registered provider with the new one.
*
* @param provider the provider implementation object
*/
public void registerExternalAssignmentProvider(ExternalAssignmentProvider provider);
/**
* Remove/unregister any ExternalAssignmentProvider which is currently registered;
* does nothing if the provider does not exist.
*
* @param providerAppKey the unique app key for a provider
*/
public void unregisterExternalAssignmentProvider(String providerAppKey);
/**
* Checks to see whether a gradebook with the given uid exists.
*
* @param gradebookUid
* The gradebook UID to check
* @return Whether the gradebook exists
*/
public boolean isGradebookDefined(String gradebookUid);
/**
* Break the connection between an external assessment engine and an assessment which
* it created, giving it up to the Gradebook application to control from now on.
*
* @param gradebookUid
* @param externalId
*/
public void setExternalAssessmentToGradebookAssignment(String gradebookUid, String externalId);
/**
* Get the category id of the external assessment with the given externalId
*
* @param gradebookUId
* @param externalId
* @return the category id of the external assessment
*/
public Long getExternalAssessmentCategoryId(String gradebookUId, String externalId);
}
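To make the contract concrete, here is a minimal, hedged sketch of how an external tool might drive this service. The `SampleExternalTool` class, its wiring, and the chosen IDs are illustrative assumptions; the service calls and signatures come from the interface above:

```java
// Hypothetical caller; only the GradebookExternalAssessmentService methods are real.
public class SampleExternalTool {
    private final GradebookExternalAssessmentService gradebook; // obtained from Sakai's component manager (assumed)

    public SampleExternalTool(GradebookExternalAssessmentService gradebook) {
        this.gradebook = gradebook;
    }

    public void publishAndScore(String gradebookUid, String studentUid)
            throws GradebookNotFoundException, ConflictingAssignmentNameException,
                   ConflictingExternalIdException, AssignmentHasIllegalPointsException,
                   AssessmentNotFoundException {
        // Prefix the externalId so it stays unique within Sakai, as the javadoc advises.
        String externalId = "sampletool:quiz-1";
        if (!gradebook.isExternalAssignmentDefined(gradebookUid, externalId)) {
            gradebook.addExternalAssessment(gradebookUid, externalId,
                    "https://example.org/quiz/1", "Quiz 1", Double.valueOf(10.0),
                    new java.util.Date(), "from Sample Tool", Boolean.FALSE);
        }
        // Scores are passed as strings; a null score would remove the entry.
        gradebook.updateExternalAssessmentScore(gradebookUid, externalId, studentUid, "8.5");
    }
}
```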
// TransformerStepPlugin.java

/*
* Copyright (c) Mirth Corporation. All rights reserved.
*
* http://www.mirthcorp.com
*
* The software in this package is published under the terms of the MPL license a copy of which has
* been included with this distribution in the LICENSE.txt file.
*/
package com.mirth.connect.plugins;
import com.mirth.connect.client.ui.editors.transformer.TransformerPane;
public abstract class TransformerStepPlugin extends MirthEditorPanePlugin {
public TransformerStepPlugin(String name) {
super(name);
}

public abstract void initialize(TransformerPane pane);
}
# __init__.py

import os
from pkg_resources import resource_filename

__all__ = [
'get_filepath',
]
def get_filepath(name='sherpa_wz.hepmc'):
return resource_filename('deepjets', os.path.join('testdata', name))
// video_mode.rs

// The MIT License (MIT)
//
// Copyright (c) 2014 Jeremy Letang
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! Mode to create a window
/// Mode to create a window
#[deriving(Clone, Show, PartialEq, PartialOrd, Eq, Ord, Hash)]
pub struct VideoMode {
pub width: i32,
pub height: i32
}
impl VideoMode {
pub fn new() -> VideoMode {
VideoMode {
width: 640i32,
height: 480i32
}
}
}
# model.py

from keras.models import Sequential, model_from_json
from keras.layers import Dense, Dropout, Activation, Flatten, Convolution2D, MaxPooling2D, Lambda, ELU
from keras.layers.normalization import BatchNormalization
from keras.optimizers import Adam
import cv2
import csv
import numpy as np
import os
from random import random
from sklearn.model_selection import train_test_split
DATA_PATH = './data/t1/'
def trans_image(image, steer, trans_range):
#
# Translate image
# Ref: https://chatbotslife.com/using-augmentation-to-mimic-human-driving-496b569760a9#.s1pwczi3q
#
rows, cols, _ = image.shape
tr_x = trans_range*np.random.uniform()-trans_range/2
steer_ang = steer + tr_x/trans_range*2*.2
tr_y = 40*np.random.uniform()-40/2
Trans_M = np.float32([[1,0,tr_x],[0,1,tr_y]])
image_tr = cv2.warpAffine(image,Trans_M,(cols,rows))
return image_tr, steer_ang
def gen_data(X, y, batch_size=128, validation=False):
#
# Generate data for fit_generator
#
gen_start = 0
while True:
features = []
labels = []
if gen_start >= len(y):
gen_start = 0
ending = min(gen_start+batch_size, len(y))
for idx, row in enumerate(y[gen_start:ending]):
center_img = cv2.imread(DATA_PATH + X[gen_start+idx][0].strip())
center_img = cv2.cvtColor(center_img, cv2.COLOR_BGR2HSV)
center_label = float(row[0])
# Augmentation 1: Jitter image
center_img, center_label = trans_image(center_img, center_label, 100)
# Augmentation 2: Occasionally flip straight
if random() > 0.5 and abs(center_label) > 0.1:
center_img = cv2.flip(center_img, 1)
labels.append(-center_label)
else:
labels.append(center_label)
# Augmentation 3: Random brightness
random_bright = .25 + np.random.uniform()
center_img[:,:,2] = center_img[:,:,2]*random_bright
features.append(center_img)
if not validation:
# Augmentation 4: +0.15 to Left Image
left_img = cv2.imread(DATA_PATH + X[gen_start+idx][1].strip())
features.append(left_img)
labels.append(float(row[0]) + 0.15)
# Augmentation 5: -0.15 to Right Image
right_img = cv2.imread(DATA_PATH + X[gen_start+idx][2].strip())
features.append(right_img)
labels.append(float(row[0]) - 0.15)
gen_start += batch_size
features = np.array(features)
labels = np.array(labels)
yield features, labels
def nvidia_model(row=66, col=200, ch=3, dropout=0.3, lr=0.0001):
#
# NVIDIA CNN model
# Ref: https://arxiv.org/abs/1604.07316
#
input_shape = (row, col, ch)
model = Sequential()
model.add(BatchNormalization(axis=1, input_shape=input_shape))
model.add(Convolution2D(24, 5, 5, border_mode='valid',
subsample=(2, 2), activation='elu'))
model.add(Dropout(dropout))
model.add(Convolution2D(36, 5, 5, border_mode='valid',
subsample=(2, 2), activation='elu'))
model.add(Dropout(dropout))
model.add(Convolution2D(48, 5, 5, border_mode='valid',
subsample=(2, 2), activation='elu'))
model.add(Dropout(dropout))
model.add(Convolution2D(64, 3, 3, border_mode='valid',
subsample=(1, 1), activation='elu'))
model.add(Dropout(dropout))
model.add(Convolution2D(64, 3, 3, border_mode='valid',
subsample=(1, 1), activation='elu'))
model.add(Dropout(dropout))
model.add(Flatten())
model.add(Dense(100))
model.add(Activation('elu'))
model.add(Dropout(dropout))
model.add(Dense(50))
model.add(Activation('elu'))
model.add(Dropout(dropout))
model.add(Dense(10))
model.add(Activation('elu'))
model.add(Dropout(dropout))
model.add(Dense(1))
model.add(Activation('elu'))
model.compile(optimizer=Adam(lr=lr), loss='mse', metrics=['accuracy'])
print(model.summary())
return model
def nvidialite_model(row=33, col=100, ch=3, dropout=0.3, lr=0.0001):
#
# Modified version of the NVIDIA CNN model (dysfunctional)
#
input_shape = (row, col, ch)
model = Sequential()
model.add(BatchNormalization(axis=1, input_shape=input_shape))
model.add(Convolution2D(24, 5, 5, border_mode='valid',
subsample=(2, 2), activation='elu'))
model.add(Convolution2D(36, 5, 5, border_mode='valid',
subsample=(2, 2), activation='elu'))
model.add(Convolution2D(48, 3, 3, border_mode='valid',
subsample=(1, 1), activation='elu'))
model.add(Flatten())
model.add(Dense(100))
model.add(Activation('elu'))
model.add(Dropout(dropout))
model.add(Dense(50))
model.add(Activation('elu'))
model.add(Dropout(dropout))
model.add(Dense(10))
model.add(Activation('elu'))
model.add(Dropout(dropout))
model.add(Dense(1))
model.add(Activation('elu'))
model.compile(optimizer=Adam(lr=lr), loss='mse', metrics=['accuracy'])
print(model.summary())
return model
def load_data(filter=True):
#
# Load and split data
# CSV: center,left,right,steering,throttle,brake,speed
#
with open(DATA_PATH + 'driving_log.csv', 'r') as f:
reader = csv.reader(f)
data = [row for row in reader]
data = np.array(data)
total = len(data)
X = data[:,[0,1,2]]
y = data[:,[3]]
print('Total samples:', total)
print('Total samples (after filter):', len(X))
return train_test_split(X, y, test_size=0.2, random_state=42)
def load_model(lr=0.001):
#
# Load the existing model and weight
#
with open('model.json', 'r') as jfile:
model = model_from_json(jfile.read())
model.compile(optimizer=Adam(lr=lr), loss='mse', metrics=['accuracy'])
model.load_weights('model.h5')
return model
def main():
# Load data
X_train, X_val, y_train, y_val = load_data()
print('X_train shape:', X_train.shape)
print('X_val shape:', X_val.shape)
# Build model
if 'model.json' in os.listdir():
model = load_model()
else:
model = nvidia_model()
model.fit_generator(gen_data(X_train, y_train),
samples_per_epoch=len(X_train)*3, nb_epoch=8,
validation_data=gen_data(X_val, y_val, validation=True),
nb_val_samples=len(X_val))
# Save model
json = model.to_json()
model.save_weights('model.h5')
with open('model.json', 'w') as f:
f.write(json)
if __name__ == "__main__": main()
# models.py

from __future__ import absolute_import
import requests
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.simplejson import dumps, loads
from common.models import Singleton
from lock_manager import Lock, LockError
from .literals import FORM_SUBMIT_URL, FORM_KEY, FORM_RECEIVER_FIELD, TIMEOUT
from .exceptions import AlreadyRegistered
class RegistrationSingleton(Singleton):
_cached_name = None
_registered = None
registered = models.BooleanField(default=False, verbose_name=_('registered'))
registration_data = models.TextField(verbose_name=_(u'registration data'), blank=True)
@classmethod
def registration_state(cls):
if cls._registered:
return cls._registered
else:
instance = cls.objects.get()
if instance.is_registered:
cls._registered = instance.is_registered
return instance.is_registered
@classmethod
def registered_name(cls):
if cls._cached_name:
return cls._cached_name
else:
instance = cls.objects.get()
try:
dictionary = loads(instance.registration_data)
except ValueError:
dictionary = {}
name_value = dictionary.get('company') or dictionary.get('name')
if name_value:
cls._cached_name = name_value
return name_value or _(u'No name')
@property
def is_registered(self):
return self.registered
def register(self, form):
from installation.models import Installation
if self.is_registered:
raise AlreadyRegistered
installation = Installation.objects.get()
dictionary = {}
dictionary.update(form.cleaned_data)
dictionary.update({
'uuid': installation.uuid
})
self.registration_data = dumps(dictionary)
self.save()
self.submit()
def submit(self):
try:
lock = Lock.acquire_lock('upload_registration')
except LockError:
pass
else:
try:
requests.post(FORM_SUBMIT_URL, data={'formkey': FORM_KEY, FORM_RECEIVER_FIELD: self.registration_data}, timeout=TIMEOUT)
except (requests.exceptions.Timeout, requests.exceptions.ConnectionError):
pass
else:
self.registered = True
self.save()
finally:
lock.release()
class Meta:
verbose_name = verbose_name_plural = _(u'registration properties')
# backuptool.py

import tarfile
import time
import os
import json
class BackupTool(object):
"""Simple backup utility."""
def __init__(self):
pass
@staticmethod
def backup(openbazaar_installation_path,
backup_folder_path,
on_success_callback=None,
on_error_callback=None):
"""
Creates an 'openbazaar-YYYY-Mon-DD-HH-MM-SS.tar.gz' file
inside the html/backups/ folder.
@param openbazaar_installation_path: str
The path to OpenBazaar's installation folder,
where the db/ folder lives.
@param backup_folder_path: str
The folder where the backup file will reside.
Optional callback functions can be passed:
@param on_success_callback(backupFilePath: str)
@param on_error_callback(errorMessage: str)
"""<|fim▁hole|> output_file_path = os.path.join(
backup_folder_path,
"openbazaar-%s.tar.gz" % date_time
)
# Create the folder for the backup, if it doesn't exist.
try:
os.makedirs(backup_folder_path)
except os.error:
pass
db_folder = os.path.join(openbazaar_installation_path, "db")
try:
with tarfile.open(output_file_path, "w:gz") as tar:
tar.add(db_folder, arcname=os.path.basename(db_folder))
except tarfile.TarError as exc:
# TODO: Install proper error logging.
print "Error while backing up to:", output_file_path
if on_error_callback is not None:
on_error_callback(exc)
return
if on_success_callback is not None:
on_success_callback(output_file_path)
@staticmethod
def restore(backup_tar_filepath):
raise NotImplementedError
@staticmethod
def get_installation_path():
"""Return the Project Root path."""
file_abs_path = os.path.abspath(__file__)
real_file_abs_path = os.path.realpath(file_abs_path)
return real_file_abs_path[:real_file_abs_path.find('/node')]
@classmethod
def get_backup_path(cls):
"""Return the backup path."""
# TODO: Make backup path configurable on server settings.
return os.path.join(
cls.get_installation_path(), 'html', 'backups'
)
class Backup(json.JSONEncoder):
"""
A (meant to be immutable) POPO to represent a backup.
So that we can tell our Web client about the backups available.
"""
def __init__(self,
file_name=None,
full_file_path=None,
created_timestamp_millis=None,
size_in_bytes=None):
super(Backup, self).__init__()
self.file_name = file_name
self.full_file_path = full_file_path
self.created_timestamp_millis = created_timestamp_millis
self.size_in_bytes = size_in_bytes
def to_dict(self):
"""Return a dictionary with attributes of self."""
return {
"file_name": self.file_name,
"full_file_path": self.full_file_path,
"created_timestamp_millis": self.created_timestamp_millis,
"size_in_bytes": self.size_in_bytes
}
def __repr__(self):
return repr(self.to_dict())
@classmethod
def get_backups(cls, backup_folder_path=None):
"""
Return a list of Backup objects found in the backup folder path given.
"""
if backup_folder_path is None or not os.path.isdir(backup_folder_path):
return []
result_gen = (
cls.get_backup(os.path.join(backup_folder_path, x))
for x in os.listdir(backup_folder_path)
)
result = [backup for backup in result_gen if backup is not None]
result.reverse()
return result
@classmethod
def get_backup(cls, backup_file_path):
"""
Create and return a Backup object from a backup path.
Return None if the path was invalid.
"""
try:
file_stat = os.stat(backup_file_path)
file_name = os.path.basename(backup_file_path)
except os.error:
print "Invalid backup path:", backup_file_path
return None
created_timestamp_millis = file_stat.st_ctime
size_in_bytes = file_stat.st_size
return cls(
file_name=file_name,
full_file_path=backup_file_path,
created_timestamp_millis=created_timestamp_millis,
size_in_bytes=size_in_bytes
)
class BackupJSONEncoder(json.JSONEncoder):
# pylint: disable=method-hidden
def default(self, o):
if isinstance(o, Backup):
return o.to_dict()
// PokemonAdapter.java

package org.foi.androidworkshop.adapters;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import org.foi.androidworkshop.R;
import org.foi.androidworkshop.models.Pokemon;
import java.util.List;
public class PokemonAdapter extends RecyclerView.Adapter<PokemonAdapter.ViewHolder> {
private List<Pokemon> pokemons;
public PokemonAdapter(List<Pokemon> pokemons) {
this.pokemons = pokemons;
}

@Override
public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
return new ViewHolder(LayoutInflater.from(parent.getContext()).inflate(R.layout.pokemon_item, parent, false));
}
@Override
public void onBindViewHolder(ViewHolder holder, int position) {
holder.ivPokemonThumbnail.setImageResource(R.mipmap.ic_launcher);
holder.tvPokeName.setText(pokemons.get(position).getName());
holder.tvPokeUrl.setText(pokemons.get(position).getUrl());
}
@Override
public int getItemCount() {
return pokemons.size();
}
public class ViewHolder extends RecyclerView.ViewHolder {
private ImageView ivPokemonThumbnail;
private TextView tvPokeName;
private TextView tvPokeUrl;
public ViewHolder(View itemView) {
super(itemView);
ivPokemonThumbnail = (ImageView) itemView.findViewById(R.id.ivPokeThumbnail);
tvPokeName = (TextView) itemView.findViewById(R.id.tvPokeName);
tvPokeUrl = (TextView) itemView.findViewById(R.id.tvPokeUrl);
}
}
}
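A short, hypothetical wiring sketch follows; the activity, layout IDs, and the two-argument `Pokemon` constructor are assumptions (the model class is only known to expose `getName()` and `getUrl()`):

```java
// Hypothetical host activity for the adapter above.
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import java.util.ArrayList;
import java.util.List;

public class PokemonListActivity extends AppCompatActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_pokemon_list); // assumed layout

        List<Pokemon> pokemons = new ArrayList<>();
        pokemons.add(new Pokemon("bulbasaur", "https://pokeapi.co/api/v2/pokemon/1/")); // assumed ctor

        RecyclerView rv = (RecyclerView) findViewById(R.id.rvPokemons); // assumed id
        rv.setLayoutManager(new LinearLayoutManager(this)); // vertical list
        rv.setAdapter(new PokemonAdapter(pokemons));
    }
}
```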
// OutsideGroupDependentConfigsTest.java

/**
* JBoss, Home of Professional Open Source.
* Copyright 2014-2022 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.coordinator.test;
import org.jboss.pnc.common.json.ConfigurationParseException;
import org.jboss.pnc.mock.repository.BuildConfigurationRepositoryMock;
import org.jboss.pnc.model.BuildConfiguration;
import org.jboss.pnc.model.BuildConfigurationSet;
import org.jboss.pnc.enums.RebuildMode;
import org.jboss.pnc.spi.datastore.DatastoreException;
import org.jboss.pnc.spi.exception.CoreException;
import org.junit.Before;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeoutException;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
/**
* Group consists of configA, configB and configC. <br/>
* configC is independent, configB depends on configA. <br/>
*
*
* config1 is an "outside" dependency of configA
*
* <p>
* Author: Michal Szynkiewicz, [email protected] Date: 9/14/16 Time: 12:09 PM
* </p>
*/
public class OutsideGroupDependentConfigsTest extends AbstractDependentBuildTest {
private BuildConfiguration config1;
private BuildConfiguration configA;
private BuildConfiguration configB;
private BuildConfigurationSet configSet;
@Before
public void initialize() throws DatastoreException, ConfigurationParseException {
config1 = buildConfig("1");
configA = buildConfig("A", config1);
configB = buildConfig("B", configA);
BuildConfiguration configC = buildConfig("C");
configSet = configSet(configA, configB, configC);
buildConfigurationRepository = spy(new BuildConfigurationRepositoryMock());
when(buildConfigurationRepository.queryWithPredicates(any()))
.thenReturn(new ArrayList<>(configSet.getBuildConfigurations()));
super.initialize();
saveConfig(config1);
configSet.getBuildConfigurations().forEach(this::saveConfig);
insertNewBuildRecords(config1, configA, configB, configC);
makeResult(configA).dependOn(config1);
}
@Test
public void shouldNotRebuildIfDependencyIsNotRebuilt()
throws CoreException, TimeoutException, InterruptedException {
build(configSet, RebuildMode.IMPLICIT_DEPENDENCY_CHECK);
waitForEmptyBuildQueue();
List<BuildConfiguration> configsWithTasks = getBuiltConfigs();
assertThat(configsWithTasks).isEmpty();
}
@Test
public void shouldRebuildOnlyDependent() throws CoreException, TimeoutException, InterruptedException {
insertNewBuildRecords(config1);
build(configSet, RebuildMode.IMPLICIT_DEPENDENCY_CHECK);
waitForEmptyBuildQueue();
List<BuildConfiguration> configsWithTasks = getBuiltConfigs();
assertThat(configsWithTasks).hasSameElementsAs(Arrays.asList(configA, configB));
}
}
// StreamHandler.java

/* StreamHandler.java --
A class for publishing log messages to instances of java.io.OutputStream
Copyright (C) 2002 Free Software Foundation, Inc.
This file is part of GNU Classpath.
GNU Classpath is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
GNU Classpath is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with GNU Classpath; see the file COPYING. If not, write to the
Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
02111-1307 USA.
Linking this library statically or dynamically with other modules is
making a combined work based on this library. Thus, the terms and
conditions of the GNU General Public License cover the whole
combination.
As a special exception, the copyright holders of this library give you
permission to link this library with independent modules to produce an
executable, regardless of the license terms of these independent
modules, and to copy and distribute the resulting executable under
terms of your choice, provided that you also meet, for each linked
independent module, the terms and conditions of the license of that
module. An independent module is a module which is not derived from
or based on this library. If you modify this library, you may extend
this exception to your version of the library, but you are not
obligated to do so. If you do not wish to do so, delete this
exception statement from your version. */
package java.util.logging;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.io.Writer;
/**
* A <code>StreamHandler</code> publishes <code>LogRecords</code> to
* a instances of <code>java.io.OutputStream</code>.
*
* @author Sascha Brawer ([email protected])
*/
public class StreamHandler
extends Handler
{
private OutputStream out;
private Writer writer;
/**
* Indicates the current state of this StreamHandler. The value
* should be one of STATE_FRESH, STATE_PUBLISHED, or STATE_CLOSED.
*/
private int streamState = STATE_FRESH;
/**
* streamState having this value indicates that the StreamHandler
* has been created, but the publish(LogRecord) method has not been
* called yet. If the StreamHandler has been constructed without an
* OutputStream, writer will be null, otherwise it is set to a
* freshly created OutputStreamWriter.
*/
private static final int STATE_FRESH = 0;
/**
* streamState having this value indicates that the publish(LogRecord)
* method has been called at least once.
*/
private static final int STATE_PUBLISHED = 1;
/**
* streamState having this value indicates that the close() method
* has been called.
*/
private static final int STATE_CLOSED = 2;
/**
* Creates a <code>StreamHandler</code> without an output stream.
* Subclasses can later use {@link
* #setOutputStream(java.io.OutputStream)} to associate an output
* stream with this StreamHandler.
*/
public StreamHandler()
{
this(null, null);
}
/**
* Creates a <code>StreamHandler</code> that formats log messages
* with the specified Formatter and publishes them to the specified
* output stream.
*
* @param out the output stream to which the formatted log messages
* are published.
*
* @param formatter the <code>Formatter</code> that will be used
* to format log messages.
*/
public StreamHandler(OutputStream out, Formatter formatter)
{
this(out, "java.util.logging.StreamHandler", Level.INFO,
formatter, SimpleFormatter.class);
}
StreamHandler(
OutputStream out,
String propertyPrefix,
Level defaultLevel,
Formatter formatter, Class defaultFormatterClass)
{
this.level = LogManager.getLevelProperty(propertyPrefix + ".level",
defaultLevel);
this.filter = (Filter) LogManager.getInstanceProperty(
propertyPrefix + ".filter",
/* must be instance of */ Filter.class,
/* default: new instance of */ null);
if (formatter != null)
this.formatter = formatter;
else
this.formatter = (Formatter) LogManager.getInstanceProperty(
propertyPrefix + ".formatter",
/* must be instance of */ Formatter.class,
/* default: new instance of */ defaultFormatterClass);
try
{
String enc = LogManager.getLogManager().getProperty(propertyPrefix
+ ".encoding");
/* make sure enc actually is a valid encoding */
if ((enc != null) && (enc.length() > 0))
new String(new byte[0], enc);
this.encoding = enc;
}
catch (Exception _)
{
}
if (out != null)
{
try
{
changeWriter(out, getEncoding());
}
catch (UnsupportedEncodingException uex)
{
/* This should never happen, since the validity of the encoding
* name has been checked above.
*/
throw new RuntimeException(uex.getMessage());
}
}
}
private void checkOpen()
{
if (streamState == STATE_CLOSED)
throw new IllegalStateException(this.toString() + " has been closed");
}
private void checkFresh()
{
checkOpen();
if (streamState != STATE_FRESH)
throw new IllegalStateException("some log records have been published to " + this);
}
private void changeWriter(OutputStream out, String encoding)
throws UnsupportedEncodingException
{
OutputStreamWriter writer;
/* The logging API says that a null encoding means the default
* platform encoding. However, java.io.OutputStreamWriter needs
* another constructor for the default platform encoding,
* passing null would throw an exception.
*/
if (encoding == null)
writer = new OutputStreamWriter(out);
else
writer = new OutputStreamWriter(out, encoding);
/* Closing the stream has side effects -- do this only after
* creating a new writer has been successful.
*/
if ((streamState != STATE_FRESH) || (this.writer != null))
close();
this.writer = writer;
this.out = out;
this.encoding = encoding;
streamState = STATE_FRESH;
}
/**
* Sets the character encoding which this handler uses for publishing
* log records. The encoding of a <code>StreamHandler</code> must be
* set before any log records have been published.
*
* @param encoding the name of a character encoding, or <code>null</code>
* for the default encoding.
*
* @throws SecurityException if a security manager exists and
* the caller is not granted the permission to control the
* the logging infrastructure.
*
* @exception IllegalStateException if any log records have been
* published to this <code>StreamHandler</code> before. Please
* be aware that this is a peculiarity of the GNU implementation.
* While the API specification indicates that it is an error
* if the encoding is set after records have been published,
* it does not mandate any specific behavior for that case.
*/
public void setEncoding(String encoding)
throws SecurityException, UnsupportedEncodingException
{
/* The inherited implementation first checks whether the invoking
* code indeed has the permission to control the logging infra-
* structure, and throws a SecurityException if this was not the
* case.
*
* Next, it verifies that the encoding is supported and throws
* an UnsupportedEncodingException otherwise. Finally, it remembers
* the name of the encoding.
*/
super.setEncoding(encoding);
checkFresh();
/* If out is null, setEncoding is being called before an output
* stream has been set. In that case, we need to check that the
* encoding is valid, and remember it if this is the case. Since
* this is exactly what the inherited implementation of
* Handler.setEncoding does, we can delegate.
*/
if (out != null)
{
/* The logging API says that a null encoding means the default
* platform encoding. However, java.io.OutputStreamWriter needs
* another constructor for the default platform encoding, passing
* null would throw an exception.
*/
if (encoding == null)
writer = new OutputStreamWriter(out);
else
writer = new OutputStreamWriter(out, encoding);
}
}
/**
* Changes the output stream to which this handler publishes
* logging records.
*
* @throws SecurityException if a security manager exists and
* the caller is not granted the permission to control
* the logging infrastructure.
*
* @throws NullPointerException if <code>out</code>
* is <code>null</code>.
*/
protected void setOutputStream(OutputStream out)
throws SecurityException
{
LogManager.getLogManager().checkAccess();
/* Throw a NullPointerException if out is null. */
out.getClass();
try
{
changeWriter(out, getEncoding());
}
catch (UnsupportedEncodingException ex)
{
/* This seems quite unlikely to happen, unless the underlying
* implementation of java.io.OutputStreamWriter changes its
* mind (at runtime) about the set of supported character
* encodings.
*/
throw new RuntimeException(ex.getMessage());
}
}
/**
* Publishes a <code>LogRecord</code> to the associated output
* stream, provided the record passes all tests for being loggable.
* The <code>StreamHandler</code> will localize the message of the
* log record and substitute any message parameters.
*
* <p>Most applications do not need to call this method directly.
* Instead, they will use a {@link Logger}, which will create
* LogRecords and distribute them to registered handlers.
*
* <p>In case of an I/O failure, the <code>ErrorManager</code>
* of this <code>Handler</code> will be informed, but the caller
* of this method will not receive an exception.
* <p>If a log record is being published to a
* <code>StreamHandler</code> that has been closed earlier, the Sun
* J2SE 1.4 reference can be observed to silently ignore the
* call. The GNU implementation, however, intentionally behaves
* differently by informing the <code>ErrorManager</code> associated
* with this <code>StreamHandler</code>. Since the condition
* indicates a programming error, the programmer should be
* informed. It also seems extremely unlikely that any application
* would depend on the exact behavior in this rather obscure,
* erroneous case -- especially since the API specification does not
* prescribe what is supposed to happen.
*
* @param record the log event to be published.
*/
public void publish(LogRecord record)
{
String formattedMessage;
if (!isLoggable(record))
return;
if (streamState == STATE_FRESH)
{
try
{
writer.write(formatter.getHead(this));
}
catch (java.io.IOException ex)
{
reportError(null, ex, ErrorManager.WRITE_FAILURE);
return;
}
catch (Exception ex)
{
reportError(null, ex, ErrorManager.GENERIC_FAILURE);
return;
}
streamState = STATE_PUBLISHED;
}
try
{
formattedMessage = formatter.format(record);
}
catch (Exception ex)
{
reportError(null, ex, ErrorManager.FORMAT_FAILURE);
return;
}
try
{
writer.write(formattedMessage);
}
catch (Exception ex)
{
reportError(null, ex, ErrorManager.WRITE_FAILURE);
}
}
/**
* Checks whether or not a <code>LogRecord</code> would be logged
* if it was passed to this <code>StreamHandler</code> for publication.
*
* <p>The <code>StreamHandler</code> implementation first checks
* whether a writer is present and the handler's level is greater
* than or equal to the severity level threshold. In a second step,
* if a {@link Filter} has been installed, its {@link
* Filter#isLoggable(LogRecord) isLoggable} method is
* invoked. Subclasses of <code>StreamHandler</code> can override
* this method to impose their own constraints.
*
* @param record the <code>LogRecord</code> to be checked.
*
* @return <code>true</code> if <code>record</code> would
* be published by {@link #publish(LogRecord) publish},
* <code>false</code> if it would be discarded.
*
* @see #setLevel(Level)
* @see #setFilter(Filter)
* @see Filter#isLoggable(LogRecord)
*
* @throws NullPointerException if <code>record</code> is
* <code>null</code>. */
public boolean isLoggable(LogRecord record)
{
return (writer != null) && super.isLoggable(record);
}
/**
* Forces any data that may have been buffered to the underlying
* output device.
*
* <p>In case of an I/O failure, the <code>ErrorManager</code>
* of this <code>Handler</code> will be informed, but the caller
* of this method will not receive an exception.
*
* <p>If a <code>StreamHandler</code> that has been closed earlier
* is closed a second time, the Sun J2SE 1.4 reference can be
* observed to silently ignore the call. The GNU implementation,
* however, intentionally behaves differently by informing the
* <code>ErrorManager</code> associated with this
* <code>StreamHandler</code>. Since the condition indicates a
* programming error, the programmer should be informed. It also
* seems extremely unlikely that any application would depend on the
* exact behavior in this rather obscure, erroneous case --
* especially since the API specification does not prescribe what is
* supposed to happen.
*/
public void flush()
{
try
{
checkOpen();
if (writer != null)
writer.flush();
}
catch (Exception ex)
{
reportError(null, ex, ErrorManager.FLUSH_FAILURE);
}
}
/**
* Closes this <code>StreamHandler</code> after having forced any
* data that may have been buffered to the underlying output
* device.
*
* <p>As soon as <code>close</code> has been called,
* a <code>Handler</code> should not be used anymore. Attempts
* to publish log records, to flush buffers, or to modify the
* <code>Handler</code> in any other way may throw runtime
* exceptions after calling <code>close</code>.</p>
*
* <p>In case of an I/O failure, the <code>ErrorManager</code>
* of this <code>Handler</code> will be informed, but the caller
* of this method will not receive an exception.</p>
*
* <p>If a <code>StreamHandler</code> that has been closed earlier
* is closed a second time, the Sun J2SE 1.4 reference can be
* observed to silently ignore the call. The GNU implementation,
* however, intentionally behaves differently by informing the
* <code>ErrorManager</code> associated with this
* <code>StreamHandler</code>. Since the condition indicates a
* programming error, the programmer should be informed. It also
* seems extremely unlikely that any application would depend on the
* exact behavior in this rather obscure, erroneous case --
* especially since the API specification does not prescribe what is
* supposed to happen.
*
* @throws SecurityException if a security manager exists and
* the caller is not granted the permission to control
* the logging infrastructure.
*/
public void close()
throws SecurityException
{
LogManager.getLogManager().checkAccess();
try
{
/* Although flush also calls checkOpen, it catches
* any exceptions and reports them to the ErrorManager
* as flush failures. However, we want to report
* a closed stream as a close failure, not as a
* flush failure here. Therefore, we call checkOpen()
* before flush().
*/
checkOpen();
flush();
if (writer != null)
{
if (formatter != null)
{
/* Even if the StreamHandler has never published a record,
* it emits head and tail upon closing. An earlier version
* of the GNU Classpath implementation did not emitted
* anything. However, this had caused XML log files to be
* entirely empty instead of containing no log records.
*/
if (streamState == STATE_FRESH)
writer.write(formatter.getHead(this));
if (streamState != STATE_CLOSED)
writer.write(formatter.getTail(this));
}
streamState = STATE_CLOSED;
writer.close();
}
}
catch (Exception ex)
{
reportError(null, ex, ErrorManager.CLOSE_FAILURE);
}
}
}
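As a quick orientation, a minimal sketch (logger name and output stream chosen arbitrarily) of publishing through a `StreamHandler`:

```java
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;
import java.util.logging.StreamHandler;

public class StreamHandlerDemo {
    public static void main(String[] args) {
        Logger logger = Logger.getLogger("demo");
        logger.setUseParentHandlers(false); // avoid duplicate output via the root handler

        // Publish formatted records to System.err through a StreamHandler.
        StreamHandler handler = new StreamHandler(System.err, new SimpleFormatter());
        logger.addHandler(handler);

        logger.info("hello from StreamHandler");
        handler.flush(); // StreamHandler may buffer; flush forces the write
    }
}
```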
// offscreencanvasrenderingcontext2d.rs

/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::CanvasRenderingContext2DBinding::CanvasFillRule;
use crate::dom::bindings::codegen::Bindings::CanvasRenderingContext2DBinding::CanvasImageSource;
use crate::dom::bindings::codegen::Bindings::CanvasRenderingContext2DBinding::CanvasLineCap;
use crate::dom::bindings::codegen::Bindings::CanvasRenderingContext2DBinding::CanvasLineJoin;
use crate::dom::bindings::codegen::Bindings::OffscreenCanvasRenderingContext2DBinding;
use crate::dom::bindings::codegen::Bindings::OffscreenCanvasRenderingContext2DBinding::OffscreenCanvasRenderingContext2DMethods;
use crate::dom::bindings::codegen::UnionTypes::StringOrCanvasGradientOrCanvasPattern;
use crate::dom::bindings::error::ErrorResult;
use crate::dom::bindings::error::Fallible;
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject, Reflector};
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::bindings::str::DOMString;
use crate::dom::canvasgradient::CanvasGradient;
use crate::dom::canvaspattern::CanvasPattern;
use crate::dom::canvasrenderingcontext2d::CanvasState;
use crate::dom::globalscope::GlobalScope;
use crate::dom::htmlcanvaselement::HTMLCanvasElement;
use crate::dom::imagedata::ImageData;
use crate::dom::offscreencanvas::OffscreenCanvas;
use dom_struct::dom_struct;
use euclid::default::Size2D;
#[dom_struct]
pub struct OffscreenCanvasRenderingContext2D {
reflector_: Reflector,
canvas: Option<Dom<OffscreenCanvas>>,
canvas_state: DomRefCell<CanvasState>,
htmlcanvas: Option<Dom<HTMLCanvasElement>>,
}
impl OffscreenCanvasRenderingContext2D {
fn new_inherited(
global: &GlobalScope,
canvas: Option<&OffscreenCanvas>,
size: Size2D<u64>,
htmlcanvas: Option<&HTMLCanvasElement>,
) -> OffscreenCanvasRenderingContext2D {
OffscreenCanvasRenderingContext2D {
reflector_: Reflector::new(),
canvas: canvas.map(Dom::from_ref),
htmlcanvas: htmlcanvas.map(Dom::from_ref),
canvas_state: DomRefCell::new(CanvasState::new(
global,
Size2D::new(size.width as u64, size.height as u64),
)),
}
}
pub fn new(
global: &GlobalScope,
canvas: &OffscreenCanvas,
size: Size2D<u64>,
htmlcanvas: Option<&HTMLCanvasElement>,
) -> DomRoot<OffscreenCanvasRenderingContext2D> {
let boxed = Box::new(OffscreenCanvasRenderingContext2D::new_inherited(
global,
Some(canvas),
size,
htmlcanvas,
));
reflect_dom_object(
boxed,
global,
OffscreenCanvasRenderingContext2DBinding::Wrap,
)
}
}
impl OffscreenCanvasRenderingContext2DMethods for OffscreenCanvasRenderingContext2D {
// https://html.spec.whatwg.org/multipage/offscreencontext2d-canvas
fn Canvas(&self) -> DomRoot<OffscreenCanvas> {
DomRoot::from_ref(self.canvas.as_ref().expect("No canvas."))
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-fillrect
fn FillRect(&self, x: f64, y: f64, width: f64, height: f64) {
self.canvas_state.borrow().FillRect(x, y, width, height);
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-clearrect
fn ClearRect(&self, x: f64, y: f64, width: f64, height: f64) {
self.canvas_state.borrow().ClearRect(x, y, width, height);
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-strokerect
fn StrokeRect(&self, x: f64, y: f64, width: f64, height: f64) {
self.canvas_state.borrow().StrokeRect(x, y, width, height);
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-shadowoffsetx
fn ShadowOffsetX(&self) -> f64 {
self.canvas_state.borrow().ShadowOffsetX()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-shadowoffsetx
fn SetShadowOffsetX(&self, value: f64) {
self.canvas_state.borrow().SetShadowOffsetX(value)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-shadowoffsety
fn ShadowOffsetY(&self) -> f64 {
self.canvas_state.borrow().ShadowOffsetY()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-shadowoffsety
fn SetShadowOffsetY(&self, value: f64) {
self.canvas_state.borrow().SetShadowOffsetY(value)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-shadowblur
fn ShadowBlur(&self) -> f64 {
self.canvas_state.borrow().ShadowBlur()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-shadowblur
fn SetShadowBlur(&self, value: f64) {
self.canvas_state.borrow().SetShadowBlur(value)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-shadowcolor
fn ShadowColor(&self) -> DOMString {
self.canvas_state.borrow().ShadowColor()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-shadowcolor
fn SetShadowColor(&self, value: DOMString) {
self.canvas_state.borrow().SetShadowColor(value)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-strokestyle
fn StrokeStyle(&self) -> StringOrCanvasGradientOrCanvasPattern {
self.canvas_state.borrow().StrokeStyle()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-strokestyle
fn SetStrokeStyle(&self, value: StringOrCanvasGradientOrCanvasPattern) {
self.canvas_state
.borrow()
.SetStrokeStyle(self.htmlcanvas.as_ref().map(|c| &**c), value)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-fillstyle
fn FillStyle(&self) -> StringOrCanvasGradientOrCanvasPattern {
self.canvas_state.borrow().FillStyle()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-fillstyle
fn SetFillStyle(&self, value: StringOrCanvasGradientOrCanvasPattern) {
self.canvas_state
.borrow()
.SetFillStyle(self.htmlcanvas.as_ref().map(|c| &**c), value)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-createlineargradient
fn CreateLinearGradient(
&self,
x0: Finite<f64>,
y0: Finite<f64>,
x1: Finite<f64>,
y1: Finite<f64>,
) -> DomRoot<CanvasGradient> {
self.canvas_state
.borrow()
.CreateLinearGradient(&self.global(), x0, y0, x1, y1)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-createradialgradient
fn CreateRadialGradient(
&self,
x0: Finite<f64>,
y0: Finite<f64>,
r0: Finite<f64>,
x1: Finite<f64>,
y1: Finite<f64>,
r1: Finite<f64>,
) -> Fallible<DomRoot<CanvasGradient>> {
self.canvas_state
.borrow()
.CreateRadialGradient(&self.global(), x0, y0, r0, x1, y1, r1)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-createpattern
fn CreatePattern(
&self,
image: CanvasImageSource,
repetition: DOMString,
) -> Fallible<DomRoot<CanvasPattern>> {
self.canvas_state
.borrow()
.CreatePattern(&self.global(), image, repetition)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-save
fn Save(&self) {
self.canvas_state.borrow().Save()
}
#[allow(unrooted_must_root)]
// https://html.spec.whatwg.org/multipage/#dom-context-2d-restore
fn Restore(&self) {
self.canvas_state.borrow().Restore()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-globalalpha
fn GlobalAlpha(&self) -> f64 {
self.canvas_state.borrow().GlobalAlpha()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-globalalpha
fn SetGlobalAlpha(&self, alpha: f64) {
self.canvas_state.borrow().SetGlobalAlpha(alpha)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-globalcompositeoperation
fn GlobalCompositeOperation(&self) -> DOMString {
self.canvas_state.borrow().GlobalCompositeOperation()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-globalcompositeoperation
fn SetGlobalCompositeOperation(&self, op_str: DOMString) {
self.canvas_state
.borrow()
.SetGlobalCompositeOperation(op_str)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-imagesmoothingenabled
fn ImageSmoothingEnabled(&self) -> bool {
self.canvas_state.borrow().ImageSmoothingEnabled()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-imagesmoothingenabled
fn SetImageSmoothingEnabled(&self, value: bool) {
self.canvas_state.borrow().SetImageSmoothingEnabled(value)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-filltext
fn FillText(&self, text: DOMString, x: f64, y: f64, max_width: Option<f64>) {
self.canvas_state.borrow().FillText(text, x, y, max_width)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-linewidth
fn LineWidth(&self) -> f64 {
self.canvas_state.borrow().LineWidth()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-linewidth
fn SetLineWidth(&self, width: f64) {
self.canvas_state.borrow().SetLineWidth(width)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-linecap
fn LineCap(&self) -> CanvasLineCap {
self.canvas_state.borrow().LineCap()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-linecap
fn SetLineCap(&self, cap: CanvasLineCap) {
self.canvas_state.borrow().SetLineCap(cap)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-linejoin
fn LineJoin(&self) -> CanvasLineJoin {
self.canvas_state.borrow().LineJoin()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-linejoin
fn SetLineJoin(&self, join: CanvasLineJoin) {
self.canvas_state.borrow().SetLineJoin(join)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-miterlimit
fn MiterLimit(&self) -> f64 {
self.canvas_state.borrow().MiterLimit()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-miterlimit
fn SetMiterLimit(&self, limit: f64) {
self.canvas_state.borrow().SetMiterLimit(limit)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-createimagedata
fn CreateImageData(&self, sw: i32, sh: i32) -> Fallible<DomRoot<ImageData>> {
self.canvas_state
.borrow()
.CreateImageData(&self.global(), sw, sh)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-createimagedata
fn CreateImageData_(&self, imagedata: &ImageData) -> Fallible<DomRoot<ImageData>> {
self.canvas_state
.borrow()
.CreateImageData_(&self.global(), imagedata)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-getimagedata
fn GetImageData(&self, sx: i32, sy: i32, sw: i32, sh: i32) -> Fallible<DomRoot<ImageData>> {
self.canvas_state.borrow().GetImageData(
self.htmlcanvas.as_ref().map(|c| &**c),
&self.global(),
sx,
sy,
sw,
sh,
)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-putimagedata
fn PutImageData(&self, imagedata: &ImageData, dx: i32, dy: i32) {
self.canvas_state.borrow().PutImageData(
self.htmlcanvas.as_ref().map(|c| &**c),
imagedata,
dx,
dy,
)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-putimagedata
#[allow(unsafe_code)]
fn PutImageData_(
&self,
imagedata: &ImageData,
dx: i32,
dy: i32,
dirty_x: i32,
dirty_y: i32,
dirty_width: i32,
dirty_height: i32,
) {
self.canvas_state.borrow().PutImageData_(
self.htmlcanvas.as_ref().map(|c| &**c),
imagedata,
dx,
dy,
dirty_x,
dirty_y,
dirty_width,
dirty_height,
)
}
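// The seven-argument overload above writes back only the dirty_* sub-rectangle
// of `imagedata`, matching the spec's putImageData(data, dx, dy, dirtyX, dirtyY, dirtyW, dirtyH).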
// https://html.spec.whatwg.org/multipage/#dom-context-2d-drawimage
fn DrawImage(&self, image: CanvasImageSource, dx: f64, dy: f64) -> ErrorResult {<|fim▁hole|>
// https://html.spec.whatwg.org/multipage/#dom-context-2d-drawimage
fn DrawImage_(
&self,
image: CanvasImageSource,
dx: f64,
dy: f64,
dw: f64,
dh: f64,
) -> ErrorResult {
self.canvas_state.borrow().DrawImage_(
self.htmlcanvas.as_ref().map(|c| &**c),
image,
dx,
dy,
dw,
dh,
)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-drawimage
fn DrawImage__(
&self,
image: CanvasImageSource,
sx: f64,
sy: f64,
sw: f64,
sh: f64,
dx: f64,
dy: f64,
dw: f64,
dh: f64,
) -> ErrorResult {
self.canvas_state.borrow().DrawImage__(
self.htmlcanvas.as_ref().map(|c| &**c),
image,
sx,
sy,
sw,
sh,
dx,
dy,
dw,
dh,
)
}
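// The three DrawImage* methods above map to the spec's three drawImage
// overloads: destination point (dx, dy), destination rectangle (dx, dy, dw, dh),
// and source rectangle plus destination rectangle.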
// https://html.spec.whatwg.org/multipage/#dom-context-2d-beginpath
fn BeginPath(&self) {
self.canvas_state.borrow().BeginPath()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-fill
fn Fill(&self, fill_rule: CanvasFillRule) {
self.canvas_state.borrow().Fill(fill_rule)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-stroke
fn Stroke(&self) {
self.canvas_state.borrow().Stroke()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-clip
fn Clip(&self, fill_rule: CanvasFillRule) {
self.canvas_state.borrow().Clip(fill_rule)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-ispointinpath
fn IsPointInPath(&self, x: f64, y: f64, fill_rule: CanvasFillRule) -> bool {
self.canvas_state
.borrow()
.IsPointInPath(&self.global(), x, y, fill_rule)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-scale
fn Scale(&self, x: f64, y: f64) {
self.canvas_state.borrow().Scale(x, y)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-rotate
fn Rotate(&self, angle: f64) {
self.canvas_state.borrow().Rotate(angle)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-translate
fn Translate(&self, x: f64, y: f64) {
self.canvas_state.borrow().Translate(x, y)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-transform
fn Transform(&self, a: f64, b: f64, c: f64, d: f64, e: f64, f: f64) {
self.canvas_state.borrow().Transform(a, b, c, d, e, f)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-settransform
fn SetTransform(&self, a: f64, b: f64, c: f64, d: f64, e: f64, f: f64) {
self.canvas_state.borrow().SetTransform(a, b, c, d, e, f)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-resettransform
fn ResetTransform(&self) {
self.canvas_state.borrow().ResetTransform()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-closepath
fn ClosePath(&self) {
self.canvas_state.borrow().ClosePath()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-moveto
fn MoveTo(&self, x: f64, y: f64) {
self.canvas_state.borrow().MoveTo(x, y)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-lineto
fn LineTo(&self, x: f64, y: f64) {
self.canvas_state.borrow().LineTo(x, y)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-rect
fn Rect(&self, x: f64, y: f64, width: f64, height: f64) {
self.canvas_state.borrow().Rect(x, y, width, height)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-quadraticcurveto
fn QuadraticCurveTo(&self, cpx: f64, cpy: f64, x: f64, y: f64) {
self.canvas_state.borrow().QuadraticCurveTo(cpx, cpy, x, y)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-beziercurveto
fn BezierCurveTo(&self, cp1x: f64, cp1y: f64, cp2x: f64, cp2y: f64, x: f64, y: f64) {
self.canvas_state
.borrow()
.BezierCurveTo(cp1x, cp1y, cp2x, cp2y, x, y)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-arc
fn Arc(&self, x: f64, y: f64, r: f64, start: f64, end: f64, ccw: bool) -> ErrorResult {
self.canvas_state.borrow().Arc(x, y, r, start, end, ccw)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-arcto
fn ArcTo(&self, cp1x: f64, cp1y: f64, cp2x: f64, cp2y: f64, r: f64) -> ErrorResult {
self.canvas_state.borrow().ArcTo(cp1x, cp1y, cp2x, cp2y, r)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-ellipse
fn Ellipse(
&self,
x: f64,
y: f64,
rx: f64,
ry: f64,
rotation: f64,
start: f64,
end: f64,
ccw: bool,
) -> ErrorResult {
self.canvas_state
.borrow()
.Ellipse(x, y, rx, ry, rotation, start, end, ccw)
}
}<|fim▁end|> | self.canvas_state
.borrow()
.DrawImage(self.htmlcanvas.as_ref().map(|c| &**c), image, dx, dy)
} |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use std::collections::HashMap;
use std::error::Error;
use mediawiki::{
api_sync::ApiSync,
};
const CATEGORY_NAMESPACE: &str = "14";
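// Note: `all` on an empty iterator returns true, so the check below also
// accepts titles with fewer than four characters when all of them are digits.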
fn ends_in_four_digits(s: &str) -> bool {
s.chars().rev().take(4).all(|c: char| c.is_ascii_digit())
}
fn make_map(params: &[(&str, &str)]) -> HashMap<String, String> {
params.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect()
}
fn make_nonnegative(i: i64) -> u64 {
if i < 0 {
0
} else {
i as u64
}
}
/// recursively get total category size given title and categoryinfo
fn size(api: &ApiSync, category_title: &str, categoryinfo: &serde_json::Value, never_recurse: bool) -> Result<u64, Box<dyn Error>> {
// parse these counts as i64: the API sometimes returns negative numbers for these values
let mut total =
make_nonnegative(categoryinfo["pages"].as_i64().ok_or(format!(
"categoryinfo.pages '{}' '{:?}' '{:?}'",
category_title, categoryinfo["pages"], categoryinfo
))?) + make_nonnegative(categoryinfo["files"].as_i64().ok_or("categoryinfo.files")?);
let num_subcats = make_nonnegative(categoryinfo["subcats"].as_i64().ok_or("categoryinfo.subcats")?);
if num_subcats > 0 {
if never_recurse {
total += num_subcats;
} else {
for subcats_page in api.get_query_api_json_limit_iter(&make_map(&[
("action", "query"),
("generator", "categorymembers"),
("gcmtitle", category_title),
("gcmnamespace", CATEGORY_NAMESPACE),
("gcmtype", "subcat"),
("prop", "categoryinfo"),
("formatversion", "2"),
("format", "json"),
]), /* limit */ None) {
let x = format!("{:?}", subcats_page);
for subcat in subcats_page?["query"]["pages"].as_array().ok_or(format!("subcats_page.query.pages: {} {} {:?}", &category_title, &categoryinfo, x))? {
total += size(
api,
subcat["title"].as_str().ok_or("subcat.title")?,
&subcat["categoryinfo"],
false,
)?;
}
}
}
}
Ok(total)
}
fn main() -> Result<(), Box<dyn Error>> {
let mut config = config::Config::default();
config
.merge(config::File::with_name("settings"))?
.merge(config::Environment::with_prefix("APP"))?;
let username = config.get_str("username")?;
let password = config.get_str("password")?;
let api_url = config.get_str("api_url")?;
let mut api = ApiSync::new(&api_url)?;
api.login(username, password)?;
api.set_user_agent(format!("EnterpriseyBot/{}/{} (https://en.wikipedia.org/wiki/User:EnterpriseyBot; [email protected])", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION")));
let cat_track_template_name = config.get_str("cat_track_template_name")?;
let never_recurse_list = config.get_array("never_recurse_list")?.into_iter().map(|v| v.into_str().unwrap()).collect::<Vec<String>>();
let tracked_categories = api.get_query_api_json_limit_iter(&make_map(&[
("action", "query"),
("generator", "embeddedin"),
("geititle", &cat_track_template_name),
("geinamespace", CATEGORY_NAMESPACE),
("prop", "categoryinfo"),
("formatversion", "2"),
("format", "json"),
]), /* limit */ None);
let mut counts: HashMap<String, u64> = HashMap::new();
for tracked_cat_result_page in tracked_categories {
let tracked_cat_result_page = tracked_cat_result_page?;
let tracked_cat_results = tracked_cat_result_page["query"]["pages"].as_array().ok_or("tracked_cat_results")?;
for tracked_cat_result in tracked_cat_results {
let tracked_cat_title = tracked_cat_result["title"].as_str().ok_or("tracked_cat_result.title")?;
if ends_in_four_digits(tracked_cat_title) {
// date subcategory, skip because there are too many of these
continue;
}
let never_recurse = never_recurse_list.iter().any(|s| s == tracked_cat_title);
match size(&api, tracked_cat_title, &tracked_cat_result["categoryinfo"], never_recurse) {
Ok(num) => { counts.insert(tracked_cat_title["Category:".len()..].to_string(), num); },
Err(e) => println!("size err: inner {}, res pg {:?}", e, tracked_cat_result_page),
}
}
}
let mut output_filename = std::path::PathBuf::from(config.get_str("output_directory")?);<|fim▁hole|> Ok(())
}<|fim▁end|> | output_filename.push(chrono::Utc::now().format("%d %B %Y").to_string());
output_filename.set_extension("json");
std::fs::write(output_filename, serde_json::to_string(&counts)?)?; |
<|file_name|>apps.py<|end_file_name|><|fim▁begin|><|fim▁hole|>"""
from __future__ import absolute_import, print_function, unicode_literals
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework_jwt',
'django_extensions',
'project',
)<|fim▁end|> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" |
<|file_name|>test_plugin.py<|end_file_name|><|fim▁begin|>"""Tests for plugin.py."""
import ckanext.dictionary.plugin as plugin
def test_plugin():<|fim▁hole|><|fim▁end|> | pass |
<|file_name|>i686_unknown_linux_gnu.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use target::Target;
pub fn target() -> Target {
let mut base = super::linux_base::opts();
base.pre_link_args.push("-m32".to_string());
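// `-m32` makes the (typically 64-bit) host toolchain emit and link 32-bit objects.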
Target {<|fim▁hole|> target_pointer_width: "32".to_string(),
arch: "x86".to_string(),
target_os: "linux".to_string(),
options: base,
}
}<|fim▁end|> | data_layout: "e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_string(),
llvm_target: "i686-unknown-linux-gnu".to_string(),
target_endian: "little".to_string(), |
<|file_name|>website_settings.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import get_request_site_address, encode
from frappe.model.document import Document
from six.moves.urllib.parse import quote
from frappe.website.router import resolve_route
from frappe.website.doctype.website_theme.website_theme import add_website_theme
class WebsiteSettings(Document):
def validate(self):
self.validate_top_bar_items()
self.validate_footer_items()
self.validate_home_page()
def validate_home_page(self):
if frappe.flags.in_install:
return
if self.home_page and not resolve_route(self.home_page):
frappe.msgprint(_("Invalid Home Page") + " (Standard pages - index, login, products, blog, about, contact)")
self.home_page = ''
def validate_top_bar_items(self):
"""validate url in top bar items"""
for top_bar_item in self.get("top_bar_items"):
if top_bar_item.parent_label:
parent_label_item = self.get("top_bar_items", {"label": top_bar_item.parent_label})
if not parent_label_item:
# invalid item
frappe.throw(_("{0} does not exist in row {1}").format(top_bar_item.parent_label, top_bar_item.idx))
elif not parent_label_item[0] or parent_label_item[0].url:
# parent cannot have url
frappe.throw(_("{0} in row {1} cannot have both URL and child items").format(top_bar_item.parent_label,
top_bar_item.idx))
def validate_footer_items(self):
"""validate url in top bar items"""
for footer_item in self.get("footer_items"):
if footer_item.parent_label:
parent_label_item = self.get("footer_items", {"label": footer_item.parent_label})
if not parent_label_item:
# invalid item
frappe.throw(_("{0} does not exist in row {1}").format(footer_item.parent_label, footer_item.idx))
elif not parent_label_item[0] or parent_label_item[0].url:
# parent cannot have url
frappe.throw(_("{0} in row {1} cannot have both URL and child items").format(footer_item.parent_label,
footer_item.idx))<|fim▁hole|>
def on_update(self):
self.clear_cache()
def clear_cache(self):
# make js and css
# clear web cache (for menus!)
frappe.clear_cache(user = 'Guest')
from frappe.website.render import clear_cache
clear_cache()
# clears role based home pages
frappe.clear_cache()
def get_website_settings():
hooks = frappe.get_hooks()
context = frappe._dict({
'top_bar_items': get_items('top_bar_items'),
'footer_items': get_items('footer_items'),
"post_login": [
{"label": _("My Account"), "url": "/me"},
# {"class": "divider"},
{"label": _("Logout"), "url": "/?cmd=web_logout"}
]
})
settings = frappe.get_single("Website Settings")
for k in ["banner_html", "brand_html", "copyright", "twitter_share_via",
"facebook_share", "google_plus_one", "twitter_share", "linked_in_share",
"disable_signup", "hide_footer_signup", "head_html", "title_prefix",
"navbar_search"]:
if hasattr(settings, k):
context[k] = settings.get(k)
if settings.address:
context["footer_address"] = settings.address
for k in ["facebook_share", "google_plus_one", "twitter_share", "linked_in_share",
"disable_signup"]:
context[k] = int(context.get(k) or 0)
if frappe.request:
context.url = quote(str(get_request_site_address(full_address=True)), safe="/:")
context.encoded_title = quote(encode(context.title or ""), str(""))
for update_website_context in hooks.update_website_context or []:
frappe.get_attr(update_website_context)(context)
context.web_include_js = hooks.web_include_js or []
context.web_include_css = hooks.web_include_css or []
via_hooks = frappe.get_hooks("website_context")
for key in via_hooks:
context[key] = via_hooks[key]
if key not in ("top_bar_items", "footer_items", "post_login") \
and isinstance(context[key], (list, tuple)):
context[key] = context[key][-1]
add_website_theme(context)
if not context.get("favicon"):
context["favicon"] = "/assets/frappe/images/favicon.png"
if settings.favicon and settings.favicon != "attach_files:":
context["favicon"] = settings.favicon
return context
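# Illustrative hooks.py entries consumed above (keys from the code, values hypothetical):
#   web_include_js = ["assets/js/custom.js"]
#   website_context = {"title_prefix": "My Site"}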
def get_items(parentfield):
all_top_items = frappe.db.sql("""\
select * from `tabTop Bar Item`
where parent='Website Settings' and parentfield= %s
order by idx asc""", parentfield, as_dict=1)
top_items = [d for d in all_top_items if not d['parent_label']]
# attach child items to top bar
for d in all_top_items:
if d['parent_label']:
for t in top_items:
if t['label']==d['parent_label']:
if not 'child_items' in t:
t['child_items'] = []
t['child_items'].append(d)
break
return top_items<|fim▁end|> | |
<|file_name|>attributes.py<|end_file_name|><|fim▁begin|>import json
import requests
# TODO: Complete methods.
class AttributeType():
"""Attribute Type Client."""
# Service Setup
config = {
'schema': 'http',
'host': 'localhost',
'port': '9903',
'endpoint': 'api/v1/attribute_types'
}
@classmethod
def base_url(cls):
"""Form the base url for the service."""
return "{schema}://{host}:{port}/{endpoint}".format(**cls.config)
@classmethod
def configure(cls, options={}):
cls.config.update(options)
@classmethod
def get_all(cls):
"""Return all attribute types."""
r = requests.get(cls.base_url())
if r.status_code == 200:
return r.json()
else:
return None
@classmethod
def get(cls, code):
"""Return an attribute type."""
r = requests.get(cls.base_url() + '/' + code)
if r.status_code == 200:
return r.json()
else:
return None
@classmethod
def create(cls, attrs):
"""Create an attribute type with the attributes
passed in attrs dict."""
r = requests.post(cls.base_url(), data=json.dumps(attrs))
if r.status_code == 200:
return r.json()
else:
return None
@classmethod
def update(cls, code, attrs):
"""Update the attribute type identified by code with attrs dict."""
r = requests.put(cls.base_url() + '/' + code, data=json.dumps(attrs))
if r.status_code == 200:
return r.json()
else:
return None
@classmethod
def delete(cls, code):
"""Delete the attribute type identified by code."""
r = requests.delete(cls.base_url() + '/' + code)
return r.status_code == 204
@classmethod
def delete_all(cls):
"""Delete all attribute types (for all kind of resources)."""
r = requests.delete(cls.base_url())
return r.status_code == 204
@classmethod
def bulk_load(cls, json_string):
"""Bulk loads an array of attribute types."""
h = {
'Content-Type': 'application/json'
}
return requests.post(cls.base_url(), data=json_string, headers=h)
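# Illustrative usage (the attrs fields are assumptions, not a documented schema):
#   AttributeType.configure({'host': 'attr-service.example.org'})
#   AttributeType.create({'code': 'color', 'name': 'Color'})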
class Attribute():<|fim▁hole|> # Service Setup
config = {
'schema': 'http',
'host': 'localhost',
'port': '9903',
'endpoint': 'api/v1/attributes'
}
@classmethod
def base_url(cls):
"""Form the base url for the service."""
return "{schema}://{host}:{port}/{endpoint}".format(**cls.config)
@classmethod
def configure(cls, options={}):
cls.config.update(options)
@classmethod
def get_all(cls):
"""Return all attributes defined."""
r = requests.get(cls.base_url())
if r.status_code == 200:
return r.json()
else:
return None
@classmethod
def get_all_for(cls, resource_type):
"""Return all attribute for the resource type."""
r = requests.get(cls.base_url() + '/for/' + resource_type)
if r.status_code == 200:
return r.json()
else:
return None
# @classmethod
# def create(cls, attrs):
# """Create an classroom with the attributes passed in attrs dict."""
# r = requests.post(cls.base_url(), data=json.dumps(attrs))
# if r.status_code == 200:
# return r.json()
# else:
# return None
# @classmethod
# def delete(cls, code):
# """Delete the classroom identified by code."""
# r = requests.delete(cls.base_url() + '/' + code)
# return r.status_code == 204
@classmethod
def delete_all(cls):
"""Delete all instructors."""
r = requests.delete(cls.base_url())
return r.status_code == 204
@classmethod
def bulk_load(cls, json_string):
"""Bulk loads an array of attributes."""
h = {
'Content-Type': 'application/json'
}
return requests.post(cls.base_url(), data=json_string, headers=h)<|fim▁end|> | """Attribute Client."""
|
<|file_name|>solution.py<|end_file_name|><|fim▁begin|>def is_palindrome(data):
if isinstance(data, list):
data = ''.join(c.lower() for c in ''.join(data) if c.isalpha())
if isinstance(data, str):
return "Palindrome" if data == data[::-1] else "Not a palindrome"
else:
return "Invalid input"
if __name__ == "__main__":
with open("input/input4.txt", "r") as file:<|fim▁hole|><|fim▁end|> | num, *lines = file.read().splitlines()
print(is_palindrome(lines)) |
<|file_name|>test_consul.py<|end_file_name|><|fim▁begin|>import random
from tests.checks.common import AgentCheckTest, load_check
from utils.containers import hash_mutable
MOCK_CONFIG = {
'init_config': {},
'instances' : [{
'url': 'http://localhost:8500',
'catalog_checks': True,
}]
}
MOCK_CONFIG_SERVICE_WHITELIST = {
'init_config': {},
'instances' : [{
'url': 'http://localhost:8500',
'catalog_checks': True,
'service_whitelist': ['service_{0}'.format(k) for k in range(70)]
}]
}
MOCK_CONFIG_LEADER_CHECK = {
'init_config': {},
'instances' : [{
'url': 'http://localhost:8500',
'catalog_checks': True,
'new_leader_checks': True
}]
}
MOCK_CONFIG_SELF_LEADER_CHECK = {
'init_config': {},
'instances' : [{
'url': 'http://localhost:8500',
'catalog_checks': True,
'self_leader_check': True
}]
}
MOCK_CONFIG_NETWORK_LATENCY_CHECKS = {
'init_config': {},
'instances' : [{
'url': 'http://localhost:8500',
'catalog_checks': True,
'network_latency_checks': True
}]
}
MOCK_BAD_CONFIG = {
'init_config': {},
'instances' : [{ # Multiple instances should cause it to fail
'url': 'http://localhost:8500',
'catalog_checks': True,
'new_leader_checks': True
}, {
'url': 'http://localhost:8501',
'catalog_checks': True,
'new_leader_checks': True,
'self_leader_check': True
}]
}
def _get_random_ip():
rand_int = int(15 * random.random()) + 10
return "10.0.2.{0}".format(rand_int)
class TestCheckConsul(AgentCheckTest):
CHECK_NAME = 'consul'
def mock_get_peers_in_cluster(self, instance):
return [
"10.0.2.14:8300",
"10.0.2.15:8300",
"10.0.2.16:8300"
]
def mock_get_services_in_cluster(self, instance):
return {
"service-1": [
"az-us-east-1a"
],
"service-2": [
"az-us-east-1a"
],
"service-3": [
"az-us-east-1a"
],
"service-4": [
"az-us-east-1a"
],
"service-5": [
"az-us-east-1a"
],
"service-6": [
"az-us-east-1a"
]
}
def mock_get_n_services_in_cluster(self, n):
dct = {}
for i in range(n):
k = "service_{0}".format(i)
dct[k] = []
return dct
def mock_get_local_config(self, instance, instance_state):
return {
"Config": {
"AdvertiseAddr": "10.0.2.15",
"Datacenter": "dc1",
"Ports": {
"DNS": 8600,
"HTTP": 8500,
"HTTPS": -1,
"RPC": 8400,
"SerfLan": 8301,
"SerfWan": 8302,
"Server": 8300
},
}
}
def mock_get_nodes_in_cluster(self, instance):
return [
{
"Address": "10.0.2.15",
"Node": "node-1"
},
{
"Address": "10.0.2.25",
"Node": "node-2"
},
{
"Address": "10.0.2.35",
"Node": "node-2"
},
]
def mock_get_nodes_with_service(self, instance, service):
return [
{
"Checks": [
{
"CheckID": "serfHealth",
"Name": "Serf Health Status",
"Node": "node-1",
"Notes": "",
"Output": "Agent alive and reachable",
"ServiceID": "",
"ServiceName": "",
"Status": "passing"
},
{
"CheckID": "service:{0}".format(service),
"Name": "service check {0}".format(service),
"Node": "node-1",
"Notes": "",
"Output": "Service {0} alive".format(service),
"ServiceID": service,
"ServiceName": "",
"Status": "passing"
}
],
"Node": {
"Address": _get_random_ip(),
"Node": "node-1"
},
"Service": {
"Address": "",
"ID": service,
"Port": 80,
"Service": service,
"Tags": [
"az-us-east-1a"
]
}
}
]
def mock_get_nodes_with_service_warning(self, instance, service):
return [
{
"Checks": [
{
"CheckID": "serfHealth",
"Name": "Serf Health Status",<|fim▁hole|> "Notes": "",
"Output": "Agent alive and reachable",
"ServiceID": "",
"ServiceName": "",
"Status": "passing"
},
{
"CheckID": "service:{0}".format(service),
"Name": "service check {0}".format(service),
"Node": "node-1",
"Notes": "",
"Output": "Service {0} alive".format(service),
"ServiceID": service,
"ServiceName": "",
"Status": "warning"
}
],
"Node": {
"Address": _get_random_ip(),
"Node": "node-1"
},
"Service": {
"Address": "",
"ID": service,
"Port": 80,
"Service": service,
"Tags": [
"az-us-east-1a"
]
}
}
]
def mock_get_nodes_with_service_critical(self, instance, service):
return [
{
"Checks": [
{
"CheckID": "serfHealth",
"Name": "Serf Health Status",
"Node": "node-1",
"Notes": "",
"Output": "Agent alive and reachable",
"ServiceID": "",
"ServiceName": "",
"Status": "passing"
},
{
"CheckID": "service:{0}".format(service),
"Name": "service check {0}".format(service),
"Node": "node-1",
"Notes": "",
"Output": "Service {0} alive".format(service),
"ServiceID": service,
"ServiceName": "",
"Status": "warning"
},
{
"CheckID": "service:{0}".format(service),
"Name": "service check {0}".format(service),
"Node": "node-1",
"Notes": "",
"Output": "Service {0} alive".format(service),
"ServiceID": service,
"ServiceName": "",
"Status": "critical"
}
],
"Node": {
"Address": _get_random_ip(),
"Node": "node-1"
},
"Service": {
"Address": "",
"ID": service,
"Port": 80,
"Service": service,
"Tags": [
"az-us-east-1a"
]
}
}
]
def mock_get_coord_datacenters(self, instance):
return [{
"Datacenter": "dc1",
"Coordinates": [
{
"Node": "host-1",
"Coord": {
"Vec": [
0.036520147625677804,
-0.00453289164613373,
-0.020523210880196232,
-0.02699760529719879,
-0.02689207977655939,
-0.01993826834797845,
-0.013022029942846501,
-0.002101656069659926
],
"Error": 0.11137306578107628,
"Adjustment": -0.00021065907491393056,
"Height": 1.1109163532378512e-05
}
}]
}, {
"Datacenter": "dc2",
"Coordinates": [
{
"Node": "host-2",
"Coord": {
"Vec": [
0.03548568620505946,
-0.0038202417296129025,
-0.01987440114252717,
-0.026223108843980016,
-0.026581965209197853,
-0.01891384862245717,
-0.013677323575279184,
-0.0014257906933581217
],
"Error": 0.06388569381495224,
"Adjustment": -0.00036731776343708724,
"Height": 8.962823816793629e-05
}
}]
}]
def mock_get_coord_nodes(self, instance):
return [{
"Node": "host-1",
"Coord": {
"Vec": [
0.007682993877165208,
0.002411059340215172,
0.0016420746641640123,
0.0037411046929292906,
0.004541946058965728,
0.0032195622863890523,
-0.0039447666794166095,
-0.0021767019427297815
],
"Error": 0.28019529748212335,
"Adjustment": -9.966407036439966e-05,
"Height": 0.00011777098790169723
}
}, {
"Node": "host-2",
"Coord": {
"Vec": [
0.007725239390196322,
0.0025160987581685982,
0.0017412811939227935,
0.003740935739394932,
0.004628794642643524,
0.003190871896051593,
-0.004058197296573195,
-0.002108437352702053
],
"Error": 0.31518043241386984,
"Adjustment": -0.00012274366490350246,
"Height": 0.00015006836008626717
}
}]
def mock_get_cluster_leader_A(self, instance):
return '10.0.2.15:8300'
def mock_get_cluster_leader_B(self, instance):
return 'My New Leader'
def _get_consul_mocks(self):
return {
'get_services_in_cluster': self.mock_get_services_in_cluster,
'get_nodes_with_service': self.mock_get_nodes_with_service,
'get_peers_in_cluster': self.mock_get_peers_in_cluster,
'_get_local_config': self.mock_get_local_config,
'_get_cluster_leader': self.mock_get_cluster_leader_A,
'_get_coord_datacenters': self.mock_get_coord_datacenters,
'_get_coord_nodes': self.mock_get_coord_nodes,
}
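# The keys above name ConsulCheck methods that run_check() swaps for these
# mocks, so the tests never contact a live Consul agent.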
def test_get_nodes_with_service(self):
self.run_check(MOCK_CONFIG, mocks=self._get_consul_mocks())
self.assertMetric('consul.catalog.nodes_up', value=1, tags=['consul_datacenter:dc1', 'consul_service_id:service-1'])
self.assertMetric('consul.catalog.nodes_passing', value=1, tags=['consul_datacenter:dc1', 'consul_service_id:service-1'])
self.assertMetric('consul.catalog.nodes_warning', value=0, tags=['consul_datacenter:dc1', 'consul_service_id:service-1'])
self.assertMetric('consul.catalog.nodes_critical', value=0, tags=['consul_datacenter:dc1', 'consul_service_id:service-1'])
self.assertMetric('consul.catalog.services_up', value=6, tags=['consul_datacenter:dc1', 'consul_node_id:node-1'])
self.assertMetric('consul.catalog.services_passing', value=6, tags=['consul_datacenter:dc1', 'consul_node_id:node-1'])
self.assertMetric('consul.catalog.services_warning', value=0, tags=['consul_datacenter:dc1', 'consul_node_id:node-1'])
self.assertMetric('consul.catalog.services_critical', value=0, tags=['consul_datacenter:dc1', 'consul_node_id:node-1'])
def test_get_nodes_with_service_warning(self):
my_mocks = self._get_consul_mocks()
my_mocks['get_nodes_with_service'] = self.mock_get_nodes_with_service_warning
self.run_check(MOCK_CONFIG, mocks=my_mocks)
self.assertMetric('consul.catalog.nodes_up', value=1, tags=['consul_datacenter:dc1', 'consul_service_id:service-1'])
self.assertMetric('consul.catalog.nodes_passing', value=0, tags=['consul_datacenter:dc1', 'consul_service_id:service-1'])
self.assertMetric('consul.catalog.nodes_warning', value=1, tags=['consul_datacenter:dc1', 'consul_service_id:service-1'])
self.assertMetric('consul.catalog.nodes_critical', value=0, tags=['consul_datacenter:dc1', 'consul_service_id:service-1'])
self.assertMetric('consul.catalog.services_up', value=6, tags=['consul_datacenter:dc1', 'consul_node_id:node-1'])
self.assertMetric('consul.catalog.services_passing', value=0, tags=['consul_datacenter:dc1', 'consul_node_id:node-1'])
self.assertMetric('consul.catalog.services_warning', value=6, tags=['consul_datacenter:dc1', 'consul_node_id:node-1'])
self.assertMetric('consul.catalog.services_critical', value=0, tags=['consul_datacenter:dc1', 'consul_node_id:node-1'])
def test_get_nodes_with_service_critical(self):
my_mocks = self._get_consul_mocks()
my_mocks['get_nodes_with_service'] = self.mock_get_nodes_with_service_critical
self.run_check(MOCK_CONFIG, mocks=my_mocks)
self.assertMetric('consul.catalog.nodes_up', value=1, tags=['consul_datacenter:dc1', 'consul_service_id:service-1'])
self.assertMetric('consul.catalog.nodes_passing', value=0, tags=['consul_datacenter:dc1', 'consul_service_id:service-1'])
self.assertMetric('consul.catalog.nodes_warning', value=0, tags=['consul_datacenter:dc1', 'consul_service_id:service-1'])
self.assertMetric('consul.catalog.nodes_critical', value=1, tags=['consul_datacenter:dc1', 'consul_service_id:service-1'])
self.assertMetric('consul.catalog.services_up', value=6, tags=['consul_datacenter:dc1', 'consul_node_id:node-1'])
self.assertMetric('consul.catalog.services_passing', value=0, tags=['consul_datacenter:dc1', 'consul_node_id:node-1'])
self.assertMetric('consul.catalog.services_warning', value=0, tags=['consul_datacenter:dc1', 'consul_node_id:node-1'])
self.assertMetric('consul.catalog.services_critical', value=6, tags=['consul_datacenter:dc1', 'consul_node_id:node-1'])
def test_get_peers_in_cluster(self):
mocks = self._get_consul_mocks()
# When node is leader
self.run_check(MOCK_CONFIG, mocks=mocks)
self.assertMetric('consul.peers', value=3, tags=['consul_datacenter:dc1', 'mode:leader'])
mocks['_get_cluster_leader'] = self.mock_get_cluster_leader_B
# When node is follower
self.run_check(MOCK_CONFIG, mocks=mocks)
self.assertMetric('consul.peers', value=3, tags=['consul_datacenter:dc1', 'mode:follower'])
def test_cull_services_list(self):
self.check = load_check(self.CHECK_NAME, MOCK_CONFIG_LEADER_CHECK, self.DEFAULT_AGENT_CONFIG)
# Pad num_services to kick in truncation logic
num_services = self.check.MAX_SERVICES + 20
# Big whitelist
services = self.mock_get_n_services_in_cluster(num_services)
whitelist = ['service_{0}'.format(k) for k in range(num_services)]
self.assertEqual(len(self.check._cull_services_list(services, whitelist)), self.check.MAX_SERVICES)
# Whitelist < MAX_SERVICES should spit out the whitelist
services = self.mock_get_n_services_in_cluster(num_services)
whitelist = ['service_{0}'.format(k) for k in range(self.check.MAX_SERVICES-1)]
self.assertEqual(set(self.check._cull_services_list(services, whitelist)), set(whitelist))
# No whitelist, still triggers truncation
whitelist = []
self.assertEqual(len(self.check._cull_services_list(services, whitelist)), self.check.MAX_SERVICES)
# Num. services < MAX_SERVICES should be no-op in absence of whitelist
num_services = self.check.MAX_SERVICES - 1
services = self.mock_get_n_services_in_cluster(num_services)
self.assertEqual(len(self.check._cull_services_list(services, whitelist)), num_services)
# Num. services < MAX_SERVICES should spit out only the whitelist when one is defined
num_services = self.check.MAX_SERVICES - 1
whitelist = ['service_1', 'service_2', 'service_3']
services = self.mock_get_n_services_in_cluster(num_services)
self.assertEqual(set(self.check._cull_services_list(services, whitelist)), set(whitelist))
def test_new_leader_event(self):
self.check = load_check(self.CHECK_NAME, MOCK_CONFIG_LEADER_CHECK, self.DEFAULT_AGENT_CONFIG)
instance_hash = hash_mutable(MOCK_CONFIG_LEADER_CHECK['instances'][0])
self.check._instance_states[instance_hash].last_known_leader = 'My Old Leader'
mocks = self._get_consul_mocks()
mocks['_get_cluster_leader'] = self.mock_get_cluster_leader_B
self.run_check(MOCK_CONFIG_LEADER_CHECK, mocks=mocks)
self.assertEqual(len(self.events), 1)
event = self.events[0]
self.assertEqual(event['event_type'], 'consul.new_leader')
self.assertIn('prev_consul_leader:My Old Leader', event['tags'])
self.assertIn('curr_consul_leader:My New Leader', event['tags'])
def test_self_leader_event(self):
self.check = load_check(self.CHECK_NAME, MOCK_CONFIG_SELF_LEADER_CHECK, self.DEFAULT_AGENT_CONFIG)
instance_hash = hash_mutable(MOCK_CONFIG_SELF_LEADER_CHECK['instances'][0])
self.check._instance_states[instance_hash].last_known_leader = 'My Old Leader'
mocks = self._get_consul_mocks()
our_url = self.mock_get_cluster_leader_A(None)
other_url = self.mock_get_cluster_leader_B(None)
# We become the leader
mocks['_get_cluster_leader'] = self.mock_get_cluster_leader_A
self.run_check(MOCK_CONFIG_SELF_LEADER_CHECK, mocks=mocks)
self.assertEqual(len(self.events), 1)
self.assertEqual(our_url, self.check._instance_states[instance_hash].last_known_leader)
event = self.events[0]
self.assertEqual(event['event_type'], 'consul.new_leader')
self.assertIn('prev_consul_leader:My Old Leader', event['tags'])
self.assertIn('curr_consul_leader:%s' % our_url, event['tags'])
# We are already the leader, no new events
self.run_check(MOCK_CONFIG_SELF_LEADER_CHECK, mocks=mocks)
self.assertEqual(len(self.events), 0)
# We lose the leader, no new events
mocks['_get_cluster_leader'] = self.mock_get_cluster_leader_B
self.run_check(MOCK_CONFIG_SELF_LEADER_CHECK, mocks=mocks)
self.assertEqual(len(self.events), 0)
self.assertEqual(other_url, self.check._instance_states[instance_hash].last_known_leader)
# We regain the leadership
mocks['_get_cluster_leader'] = self.mock_get_cluster_leader_A
self.run_check(MOCK_CONFIG_SELF_LEADER_CHECK, mocks=mocks)
self.assertEqual(len(self.events), 1)
self.assertEqual(our_url, self.check._instance_states[instance_hash].last_known_leader)
event = self.events[0]
self.assertEqual(event['event_type'], 'consul.new_leader')
self.assertIn('prev_consul_leader:%s' % other_url, event['tags'])
self.assertIn('curr_consul_leader:%s' % our_url, event['tags'])
def test_network_latency_checks(self):
self.check = load_check(self.CHECK_NAME, MOCK_CONFIG_NETWORK_LATENCY_CHECKS,
self.DEFAULT_AGENT_CONFIG)
mocks = self._get_consul_mocks()
# We start out as the leader, and stay that way
instance_hash = hash_mutable(MOCK_CONFIG_NETWORK_LATENCY_CHECKS['instances'][0])
self.check._instance_states[instance_hash].last_known_leader = self.mock_get_cluster_leader_A(None)
self.run_check(MOCK_CONFIG_NETWORK_LATENCY_CHECKS, mocks=mocks)
latency = [m for m in self.metrics if m[0].startswith('consul.net.')]
latency.sort()
# Make sure we have the expected number of metrics
self.assertEquals(19, len(latency))
# Only 3 dc-latency metrics since we only do source = self
dc = [m for m in latency if '.dc.latency.' in m[0]]
self.assertEquals(3, len(dc))
self.assertEquals(1.6746410750238774, dc[0][2])
# 16 latency metrics, 2 nodes * 8 metrics each
node = [m for m in latency if '.node.latency.' in m[0]]
self.assertEquals(16, len(node))
self.assertEquals(0.26577747932995816, node[0][2])<|fim▁end|> | "Node": "node-1", |
<|file_name|>vs.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
pygments.styles.vs
~~~~~~~~~~~~~~~~~~
Simple style with MS Visual Studio colors.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""<|fim▁hole|> Operator, Generic
class VisualStudioStyle(Style):
background_color = "#ffffff"
default_style = ""
styles = {
Comment: "#008000",
Comment.Preproc: "#0000ff",
Keyword: "#0000ff",
Operator.Word: "#0000ff",
Keyword.Type: "#2b91af",
Name.Class: "#2b91af",
String: "#a31515",
Generic.Heading: "bold",
Generic.Subheading: "bold",
Generic.Emph: "italic",
Generic.Strong: "bold",
Generic.Prompt: "bold",
Error: "border:#FF0000"
}<|fim▁end|> |
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \ |
<|file_name|>resource_sked.hh<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2010-2011 The Regents of The University of Michigan
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met: redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer;
* redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution;
* neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Authors: Korey Sewell
*
*/
#ifndef __CPU_INORDER_RESOURCE_SKED_HH__
#define __CPU_INORDER_RESOURCE_SKED_HH__
#include <cstdlib>
#include <list>
#include <vector>
/** ScheduleEntry class represents a single function that an instruction
wants to do at any pipeline stage. For example, if an instruction
needs to be decoded and do a branch prediction all in one stage
then each of those tasks would need its own ScheduleEntry.
Each schedule entry corresponds to some resource that the instruction
wants to interact with.
The file pipeline_traits.cc shows how a typical instruction schedule is
made up of these schedule entries.
*/
class ScheduleEntry {
public:
ScheduleEntry(int stage_num, int _priority, int res_num, int _cmd = 0,
int _idx = 0) :
stageNum(stage_num), resNum(res_num), cmd(_cmd),
idx(_idx), priority(_priority)
{ }
/** Stage number to perform this service. */
int stageNum;
/** Resource ID to access */
int resNum;
/** See specific resource for meaning */
unsigned cmd;
/** See specific resource for meaning */
unsigned idx;
/** Some Resources May Need Priority */
int priority;
};
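// Illustrative: ScheduleEntry(1, 2, 3) requests resource 3 in stage 1 with
// priority 2, leaving cmd and idx at their default of 0.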
/** The ResourceSked maintains the complete schedule
for an instruction. That schedule includes what
resources an instruction wants to acquire at each
pipeline stage and is represented by a collection
of ScheduleEntry objects (described above) that
must be executed in-order.
In every pipeline stage, the InOrder model will
process all entries on the resource schedule for
that stage and then send the instruction to the next
stage if and only if the instruction successfully
completed each ScheduleEntry.
*/
class ResourceSked {
public:
typedef std::list<ScheduleEntry*>::iterator SkedIt;
typedef std::vector<std::list<ScheduleEntry*> > StageList;
ResourceSked();
/** Initialize the current entry pointer to
pipeline stage 0 and the 1st schedule entry
*/
void init();
/** Goes through the remaining stages on the schedule
and sums all the remaining entries left to be
processed
*/
int size();
/** Is the schedule empty? */
bool empty();
/** Beginning Entry of this schedule */
SkedIt begin();
/** Ending Entry of this schedule */
SkedIt end();
/** What is the next task for this instruction schedule? */
ScheduleEntry* top();
/** Top() Task is completed, remove it from schedule */
void pop();
/** Add To Schedule based on stage num and priority of
Schedule Entry
*/
void push(ScheduleEntry* sked_entry);
/** Add Schedule Entry to be in front of another Entry */
void pushBefore(ScheduleEntry* sked_entry, int sked_cmd, int sked_cmd_idx);
/** Print what's left on the instruction schedule */
void print();
StageList *getStages()
{
return &stages;
}
private:
/** Current Schedule Entry Pointer */
SkedIt curSkedEntry;
/** The Stage-by-Stage Resource Schedule:
Resized to Number of Stages in the constructor
*/
StageList stages;
/** Find a place to insert the instruction using the
schedule entries priority
*/
SkedIt findIterByPriority(ScheduleEntry *sked_entry, int stage_num);
/** Find a place to insert the instruction using a particular command
to look for.
*/
SkedIt findIterByCommand(ScheduleEntry *sked_entry, int stage_num,
int sked_cmd, int sked_cmd_idx = -1);
};
/** Wrapper class around the SkedIt iterator in the Resource Sked so that
we can use the ++ operator to automatically advance to the next available
resource schedule entry, but otherwise maintain the same functionality
as a normal iterator.
*/
class RSkedIt
{
public:
RSkedIt()
: curStage(0), numStages(0)
{ }
/** init() must be called before the use of any other member
in the RSkedIt class.
*/
void init(ResourceSked* rsked)
{
stages = rsked->getStages();
numStages = stages->size();
}
/* Update the encapsulated "myIt" iterator, but only
update curStage/curStage_end if the iterator is valid.
The iterator could be invalid in the case where
someone is saving the end of a list (i.e. std::list->end())
*/
RSkedIt operator=(ResourceSked::SkedIt const &rhs)
{
myIt = rhs;
if (myIt != (*stages)[numStages-1].end()) {
curStage = (*myIt)->stageNum;
curStage_end = (*stages)[curStage].end();
}
return *this;
}
/** Increment to the next entry in current stage.
If no more entries then find the next stage that has
resource schedule to complete.
If no more stages, then return the end() iterator from
the last stage to indicate we are done.
*/
RSkedIt &operator++(int unused)
{
if (++myIt == curStage_end) {
curStage++;
while (curStage < numStages) {
if ((*stages)[curStage].empty()) {
curStage++;
} else {
myIt = (*stages)[curStage].begin();
curStage_end = (*stages)[curStage].end();
return *this;
}
}
myIt = (*stages)[numStages - 1].end();
}
return *this;
}
/** The "pointer" operator can be used on a RSkedIt and it
will use the encapsulated iterator
*/
ScheduleEntry* operator->()
{
return *myIt;
}
/** Dereferencing a RSKedIt will access the encapsulated
iterator
*/
ScheduleEntry* operator*()
{
return *myIt;
}
/** Equality for RSkedIt only compares the "myIt" iterators,
as the other members are just ancillary
*/
bool operator==(RSkedIt const &rhs)
{
return this->myIt == rhs.myIt;
}
/** Inequality for RSkedIt only compares the "myIt" iterators,
as the other members are just ancillary
*/
bool operator!=(RSkedIt const &rhs)
{
return this->myIt != rhs.myIt;
}
/* The == and != operator overloads should usually be sufficient;
if direct access to the underlying schedule iterator is needed,
this accessor can be used */
ResourceSked::SkedIt getIt()
{
return myIt;
}
private:
/** Schedule Iterator that this class is encapsulating */
ResourceSked::SkedIt myIt;
/** Ptr to resource schedule that the 'myIt' iterator
belongs to
*/
ResourceSked::StageList *stages;
/** The last iterator in the current stage. */
ResourceSked::SkedIt curStage_end;
/** Current Stage that "myIt" refers to. */
int curStage;
/** Number of stages in the "*stages" object. */<|fim▁hole|>#endif //__CPU_INORDER_RESOURCE_SKED_HH__<|fim▁end|> | int numStages;
};
|
<|file_name|>format.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import argparse
import sys
import yaml
<|fim▁hole|>from pf_focus.markdown import output_markdown
from pf_focus.bbcode import output_bbcode
from pf_focus.parse import parse_pfsense
from pf_focus.pfsense import PfSenseDocument
from pf_focus.progress import Animation
def output_yaml(doc, stream):
yaml.safe_dump(doc.data, stream)
OUTPUT_FORMATS = {
'yaml': output_yaml,
'md': output_markdown,
'bbcode': output_bbcode,
}
def get_output_func(args):
return OUTPUT_FORMATS.get(args.output_format, output_yaml)
def get_progress_animation(args):
return Animation(args.quiet or args.output_path == '-')
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-q", dest="quiet", action="store_const", const=True, default=False, help="Hide progress messages")
parser.add_argument("-i", dest="input_path", help="XML input path", required=True)
parser.add_argument("-o", dest="output_path", help="Output path", default="-")
parser.add_argument("-f", dest="output_format", help="Output format", default="yaml", choices=OUTPUT_FORMATS.keys())
return parser.parse_args()
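# Example invocation (paths illustrative):
#   ./format.py -i pfsense-config.xml -o firewall.md -f md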
def step_parse(args, doc):
if not args.quiet:
print('\u268b Parsing "{}" ...'.format(args.input_path), file=sys.stderr)
with get_progress_animation(args):
parse_pfsense(args.input_path, doc)
if not args.quiet:
print('\u268d Successfully parsed pfSense config version {}.'.format(doc.pfsense.version), file=sys.stderr)
def step_stdout(args, doc, output_func):
if not args.quiet:
print('\u2631 Outputting to stdout ...', file=sys.stderr)
with get_progress_animation(args):
output_file = sys.stdout
output_func(doc, output_file)
if not args.quiet:
print('\u2630 Successfully outputted pfSense config as {}.'.format(args.output_format), file=sys.stderr)
def step_file(args, doc, output_func):
if not args.quiet:
print('\u2631 Outputting to "{}" ...'.format(args.output_path), file=sys.stderr)
with get_progress_animation(args):
with open(args.output_path, 'w+') as output_file:
output_func(doc, output_file)
if not args.quiet:
print('\u2630 Successfully outputted pfSense config as {}.'.format(args.output_format), file=sys.stderr)
def main():
args = parse_args()
doc = PfSenseDocument()
output_func = get_output_func(args)
step_parse(args, doc)
if args.output_path == '-':
step_stdout(args, doc, output_func)
else:
step_file(args, doc, output_func)
if __name__ == '__main__':
main()<|fim▁end|> | |
<|file_name|>File.js<|end_file_name|><|fim▁begin|>/*
* Moonshine - a Lua virtual machine.
*
* Email: [email protected]
* http://moonshinejs.org
*
* Copyright (c) 2013-2015 Gamesys Limited. All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,<|fim▁hole|> * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/**
* @fileOverview File class.
* @author <a href="mailto:[email protected]">Paul Cuthbertson</a>
*/
'use strict';
var shine = shine || {};
/**
* Represents a Luac data file.
* @constructor
* @extends shine.EventEmitter
* @param {String} url Url of the distilled JSON file.
*/
shine.File = function (url, data) {
this.url = url;
this.data = data;
};
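// Illustrative: var file = new shine.File('scripts/main.lua.json', jsonData);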
/**
* Dump memory associated with file.
*/
shine.File.prototype.dispose = function () {
delete this.url;
delete this.data;
};<|fim▁end|> | |
<|file_name|>basic.js<|end_file_name|><|fim▁begin|><|fim▁hole|>var parseTorrent = require('parse-torrent')
var test = require('tape')
var WebTorrent = require('../')
var DHT = require('bittorrent-dht/client')
var parallel = require('run-parallel')
var bufferEqual = require('buffer-equal')
var leaves = fs.readFileSync(__dirname + '/torrents/leaves.torrent')
var leavesTorrent = parseTorrent(leaves)
var leavesBook = fs.readFileSync(__dirname + '/content/Leaves of Grass by Walt Whitman.epub')
var leavesMagnetURI = 'magnet:?xt=urn:btih:d2474e86c95b19b8bcfdb92bc12c9d44667cfa36&dn=Leaves+of+Grass+by+Walt+Whitman.epub&tr=http%3A%2F%2Ftracker.thepiratebay.org%2Fannounce&tr=udp%3A%2F%2Ftracker.openbittorrent.com%3A80&tr=udp%3A%2F%2Ftracker.ccc.de%3A80&tr=udp%3A%2F%2Ftracker.publicbt.com%3A80&tr=udp%3A%2F%2Ffr33domtracker.h33t.com%3A3310%2Fannounce&tr=http%3A%2F%2Ftracker.bittorrent.am%2Fannounce'
test('client.add (magnet uri, torrent file, info hash, and parsed torrent)', function (t) {
// magnet uri (utf8 string)
var client1 = new WebTorrent({ dht: false, tracker: false })
var torrent1 = client1.add('magnet:?xt=urn:btih:' + leavesTorrent.infoHash)
t.equal(torrent1.infoHash, leavesTorrent.infoHash)
t.equal(torrent1.magnetURI, 'magnet:?xt=urn:btih:' + leavesTorrent.infoHash)
client1.destroy()
// torrent file (buffer)
var client2 = new WebTorrent({ dht: false, tracker: false })
var torrent2 = client2.add(leaves)
t.equal(torrent2.infoHash, leavesTorrent.infoHash)
t.equal(torrent2.magnetURI, leavesMagnetURI)
client2.destroy()
// info hash (hex string)
var client3 = new WebTorrent({ dht: false, tracker: false })
var torrent3 = client3.add(leavesTorrent.infoHash)
t.equal(torrent3.infoHash, leavesTorrent.infoHash)
t.equal(torrent3.magnetURI, 'magnet:?xt=urn:btih:' + leavesTorrent.infoHash)
client3.destroy()
// info hash (buffer)
var client4 = new WebTorrent({ dht: false, tracker: false })
var torrent4 = client4.add(new Buffer(leavesTorrent.infoHash, 'hex'))
t.equal(torrent4.infoHash, leavesTorrent.infoHash)
t.equal(torrent4.magnetURI, 'magnet:?xt=urn:btih:' + leavesTorrent.infoHash)
client4.destroy()
// parsed torrent (from parse-torrent)
var client5 = new WebTorrent({ dht: false, tracker: false })
var torrent5 = client5.add(leavesTorrent)
t.equal(torrent5.infoHash, leavesTorrent.infoHash)
t.equal(torrent5.magnetURI, leavesMagnetURI)
client5.destroy()
t.end()
})
test('client.seed (Buffer, Blob)', function (t) {
t.plan(typeof Blob !== 'undefined' ? 4 : 2)
var opts = {
name: 'Leaves of Grass by Walt Whitman.epub',
announce: [
'http://tracker.thepiratebay.org/announce',
'udp://tracker.openbittorrent.com:80',
'udp://tracker.ccc.de:80',
'udp://tracker.publicbt.com:80',
'udp://fr33domtracker.h33t.com:3310/announce',
'http://tracker.bittorrent.am/announce'
]
}
// torrent file (Buffer)
var client1 = new WebTorrent({ dht: false, tracker: false })
client1.seed(leavesBook, opts, function (torrent1) {
t.equal(torrent1.infoHash, leavesTorrent.infoHash)
t.equal(torrent1.magnetURI, leavesMagnetURI)
client1.destroy()
})
// Blob
if (typeof Blob !== 'undefined') {
var client2 = new WebTorrent({ dht: false, tracker: false })
client2.seed(new Blob([ leavesBook ]), opts, function (torrent2) {
t.equal(torrent2.infoHash, leavesTorrent.infoHash)
t.equal(torrent2.magnetURI, leavesMagnetURI)
client2.destroy()
})
} else {
console.log('Skipping Blob test because missing `Blob` constructor')
}
})
test('after client.destroy(), throw on client.add() or client.seed()', function (t) {
t.plan(3)
var client = new WebTorrent({ dht: false, tracker: false })
client.destroy(function () {
t.pass('client destroyed')
})
t.throws(function () {
client.add('magnet:?xt=urn:btih:' + leavesTorrent.infoHash)
})
t.throws(function () {
client.seed(new Buffer('sup'))
})
})
test('after client.destroy(), no "torrent" or "ready" events emitted', function (t) {
t.plan(1)
var client = new WebTorrent({ dht: false, tracker: false })
client.add(leaves, function () {
t.fail('unexpected "torrent" event (from add)')
})
client.seed(leavesBook, function () {
t.fail('unexpected "torrent" event (from seed)')
})
client.on('ready', function () {
t.fail('unexpected "ready" event')
})
client.destroy(function () {
t.pass('client destroyed')
})
})
test('download via DHT', function (t) {
t.plan(2)
var data = new Buffer('blah blah')
var dhts = []
// need 3 because nodes don't advertise themselves as peers
for (var i = 0; i < 3; i++) {
dhts.push(new DHT({ bootstrap: false }))
}
parallel(dhts.map(function (dht) {
return function (cb) {
dht.listen(function (port) {
cb(null, port)
})
}
}), function () {
for (var i = 0; i < dhts.length; i++) {
for (var j = 0; j < dhts.length; j++) {
if (i !== j) dhts[i].addNode('127.0.0.1:' + getDHTPort(dhts[j]), dhts[j].nodeId)
}
}
var client1 = new WebTorrent({ dht: dhts[0], tracker: false })
var client2 = new WebTorrent({ dht: dhts[1], tracker: false })
client1.seed(data, { name: 'blah' }, function (torrent1) {
client2.download(torrent1.infoHash, function (torrent2) {
t.equal(torrent2.infoHash, torrent1.infoHash)
torrent2.on('done', function () {
t.ok(bufferEqual(getFileData(torrent2), data))
dhts.forEach(function (d) {
d.destroy()
})
client1.destroy()
client2.destroy()
})
})
})
})
})
test('don\'t kill passed in DHT on destroy', function (t) {
t.plan(1)
var dht = new DHT({ bootstrap: false })
var destroy = dht.destroy
var okToDie
dht.destroy = function () {
t.equal(okToDie, true)
dht.destroy = destroy.bind(dht)
dht.destroy()
}
var client = new WebTorrent({ dht: dht, tracker: false })
client.destroy(function () {
okToDie = true
dht.destroy()
})
})
function getFileData (torrent) {
var pieces = torrent.files[0].pieces
return Buffer.concat(pieces.map(
function (piece) {
return piece.buffer
}
))
}
function getDHTPort (dht) {
return dht.address().port
}<|fim▁end|> | var fs = require('fs') |
<|file_name|>window.rs<|end_file_name|><|fim▁begin|>use error::Result;
use error::Error::Sdl as SdlError;
use sdl2::Sdl;
use sdl2::video::Window as SdlWindow;
use sdl2::video::{gl_attr, GLProfile, GLContext};
use libc::c_void;
use gl;
use sdl2;
const WINDOW_TITLE: &'static str = "Rusty Doom v0.0.7 - Toggle mouse with backtick key (`)";
const OPENGL_DEPTH_SIZE: u8 = 24;
pub struct Window {
window: SdlWindow,
width: u32,
height: u32,
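// Kept alive (never read) so the GL context isn't dropped while the window exists.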
_context: GLContext,
}
impl Window {
pub fn new(sdl: &Sdl, width: u32, height: u32) -> Result<Window> {
gl_attr::set_context_profile(GLProfile::Core);
gl_attr::set_context_major_version(gl::platform::GL_MAJOR_VERSION);
gl_attr::set_context_minor_version(gl::platform::GL_MINOR_VERSION);
gl_attr::set_depth_size(OPENGL_DEPTH_SIZE);
gl_attr::set_double_buffer(true);
let window = try!(sdl.window(WINDOW_TITLE, width as u32, height as u32)
.position_centered()
.opengl()
.build()
.map_err(SdlError));
let context = try!(window.gl_create_context().map_err(SdlError));
sdl2::clear_error();
gl::load_with(|name| {
sdl2::video::gl_get_proc_address(name) as *const c_void
});
let mut vao_id = 0;
check_gl_unsafe!(gl::GenVertexArrays(1, &mut vao_id));
check_gl_unsafe!(gl::BindVertexArray(vao_id));
check_gl_unsafe!(gl::ClearColor(0.06, 0.07, 0.09, 0.0));
check_gl_unsafe!(gl::Enable(gl::DEPTH_TEST));
check_gl_unsafe!(gl::DepthFunc(gl::LESS));
Ok(Window {
window: window,
width: width,
height: height,
_context: context,
})
}
pub fn aspect_ratio(&self) -> f32 {
self.width as f32 / self.height as f32
}
pub fn swap_buffers(&self) {
self.window.gl_swap_window();
}
<|fim▁hole|> }
}<|fim▁end|> | pub fn clear(&self) {
check_gl_unsafe!(gl::Clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT)); |
<|file_name|>remote_database.py<|end_file_name|><|fim▁begin|>__author__ = 'juan'
import json
from termcolor import colored
import mysql.connector
import time
config = {
'user': 'elec',
'password': 'elec',
'host': 'thor.deusto.es',
'database': 'eu_test2',
}
class database:
def __init__(self):
self.con = mysql.connector.connect(**config)
def insert(self,tweet):
try:
self.con = mysql.connector.connect(**config)
tweet = json.loads(tweet)
self.insert_users(tweet)
#self.insert_tweets(tweet)
self.insert_mention(tweet)
#self.insert_language_group(tweet)
#self.insert_language_candidate(tweet)
#self.insert_hash_country(tweet)
#self.insert_hash_group(tweet)
#self.insert_hash_candidate(tweet)
self.con.commit()
self.con.close()
except Exception, e:
print colored("Insertion error "+ e.message, "red")
def insert_users(self,tweet):
#id TEXT, screen_name TEXT, total_tweets INT
keys = [tweet['user']['id'], tweet['user']['screen_name'],1]
try:
cursor = self.con.cursor()
select = "SELECT id, total_tweets from twitter_users where id="+str(keys[0])
cursor.execute(select)
node = cursor.fetchone()
if node:
total = node[1]+1
update = "UPDATE twitter_users set total_tweets = "+str(total)+" where id = "+str(keys[0])
cursor.execute(update)
else:
insert = "INSERT INTO twitter_users(id, screen_name, total_tweets) VALUES (" + str(keys[0]) + ",'" + keys[1] + "', 1)"
cursor.execute(insert)
except Exception, e:
print "DB Error - insert_user: ", e
def insert_tweets(self,tweet):
# id TEXT, user_id TEXT, text TEXT, created_at DATE, lang TEXT, retweeted BOOL
date= time.strftime('%Y-%m-%d', time.strptime(tweet['created_at'],'%a %b %d %H:%M:%S +0000 %Y'))
keys = [tweet['id'], tweet['user']['id'], tweet['text'], date, tweet['lang'], tweet['retweeted']]
try:
cursor = self.con.cursor()
insert = "INSERT INTO tweets(id, user_id, id_str, text, created_at, lang, retweeted) VALUES ('"+str(keys[0])+"','"+str(keys[1])+"','"+str(keys[0])+"','"+keys[2]+"','"+keys[3]+"','"+keys[4]+"','"+str(bool(keys[5]))+"')"
cursor.execute(insert)
except Exception, e:
print "DB Error - insert_tweet: ", e
def insert_mention(self,tweet):
#user_id INT, target_id INT, day DATE, weight INT
replies = tweet['in_reply_to_user_id']
replies_screen_name = tweet['in_reply_to_screen_name']
date= time.strftime('%Y-%m-%d', time.strptime(tweet['created_at'],'%a %b %d %H:%M:%S +0000 %Y'))
if replies:
keys = [tweet['user']['id'], replies, date, 1]
try:
cursor = self.con.cursor()
cursor.execute("SELECT * from interactions where (user_id = '"+str(tweet['user']['id'])+"' AND target_id = '"+str(replies)+"' AND day = '"+str(date)+"')")
node = cursor.fetchone()
if node:
total = node[3]+1
cursor.execute("UPDATE interactions set weight = '"+str(total)+"' WHERE (user_id = '"+str(tweet['user']['id'])+"' AND target_id = '"+str(replies)+"' AND day = '"+str(date)+"')")
else:
insert = "INSERT INTO interactions(user_id, target_id, day, weight) VALUES ('"+str(keys[0])+"','"+str(keys[1])+"','"+str(keys[2])+"','"+str(keys[3])+"') "
cursor.execute(insert)
except Exception, e:
print "DB Error - insert_mention: ", e
try:
cursor = self.con.cursor()
select = "SELECT id from twitter_users WHERE id="+str(replies)+";"
print select
cursor.execute(select)
node = cursor.fetchone()
if node:
print node
else:
insert = "INSERT INTO twitter_users(id, screen_name, total_tweets) VALUES (" + str(replies) + ",'" + replies_screen_name + "', 1)"
cursor.execute(insert)
print "added"
################
except Exception, e:
print "DB Error - insert_mentionAA: ", e
def insert_language_group(self,tweet):
#lang TEXT, group_id TEXT, total INT
keys = [tweet['lang'], "ALDE", 1]
try:
cursor = self.con.cursor()
cursor.execute("SELECT total from language_group WHERE ( lang='"+tweet['lang']+"' AND group_id ='"+"ALDE"+"')")
node = cursor.fetchone()
if node:
total = node[0]+1
cursor.execute("UPDATE language_group set total = "+str(total)+" WHERE ( lang='"+tweet['lang']+"' AND group_id ='"+"ALDE"+"')")
else:
cursor.execute("INSERT INTO language_group(lang,group_id,total) VALUES ('"+keys[0]+"','"+keys[1]+"','"+str(keys[2])+"')")
except Exception, e:
print "DB Error - language_group: ", e
def insert_language_candidate(self,tweet):
#lang TEXT, candidate_id INT, total INT
keys = [tweet['lang'], 44101578, 1]
try:
cursor = self.con.cursor()
cursor.execute("SELECT total from language_candidate WHERE ( lang='"+tweet['lang']+"' AND candidate_id ='"+str(44101578)+"')")
node = cursor.fetchone()
if node:
total = node[0]+1
cursor.execute("UPDATE language_candidate set total = "+str(total)+" WHERE ( lang='"+tweet['lang']+"' AND candidate_id ='"+str(44101578)+"')")
else:
cursor.execute("INSERT INTO language_candidate(lang,candidate_id,total) VALUES ('"+keys[0]+"','"+str(keys[1])+"','"+str(keys[2])+"')")
except Exception, e:
print "DB Error - language_candidate: ", e
def insert_hash_country(self,tweet):
#text TEXT, country_id TEXT, day DATE, total INT
hashtags = tweet['entities']['hashtags']
date= time.strftime('%Y-%m-%d', time.strptime(tweet['created_at'],'%a %b %d %H:%M:%S +0000 %Y'))
for h in hashtags:
hashtag = h['text']
keys = [hashtag, tweet['lang'], date, 1]
try:
cursor = self.con.cursor()
cursor.execute("SELECT text, total from hash_country WHERE ( text='"+hashtag+"' AND country_id = '"+tweet['lang']+"' AND day = '"+str(date)+"')")
node = cursor.fetchone()
if node:
total = node[1]+1
cursor.execute("UPDATE hash_country set total = "+str(total)+" WHERE ( text='"+hashtag+"' AND country_id = '"+tweet['lang']+"' AND day = '"+str(date)+"')")
else:
insert = "INSERT INTO hash_country(text, country_id, day, total) VALUES ('"+hashtag+"','"+tweet['lang']+"','"+str(date)+"','"+str(1)+"' )"
cursor.execute(insert)
except Exception, e:
print "DB Error - insert_hash_country: ", e
def insert_hash_group(self,tweet):
#text TEXT, group_id TEXT, day DATE, total INT
hashtags = tweet['entities']['hashtags']
date= time.strftime('%Y-%m-%d', time.strptime(tweet['created_at'],'%a %b %d %H:%M:%S +0000 %Y'))
for h in hashtags:
hashtag = h['text']<|fim▁hole|> node = cursor.fetchone()
if node:
total = node[1]+1
cursor.execute("UPDATE hash_group set total = "+str(total)+" WHERE ( text='"+hashtag+"' AND group_id = 'ALDE' AND day = '"+str(date)+"')")
else:
insert = "INSERT INTO hash_group(text, group_id, day, total) VALUES ('"+hashtag+"','"+"ALDE"+"','"+str(date)+"','"+str(1)+"' )"
cursor.execute(insert)
except Exception, e:
print "DB Error - insert_hash_group: ", e
def insert_hash_candidate(self,tweet):
#text TEXT, candidate_id INT, day DATE, total INT
hashtags = tweet['entities']['hashtags']
date= time.strftime('%Y-%m-%d', time.strptime(tweet['created_at'],'%a %b %d %H:%M:%S +0000 %Y'))
for h in hashtags:
hashtag = h['text']
try:
cursor = self.con.cursor()
cursor.execute("SELECT text, total from hash_candidate WHERE ( text='"+hashtag+"' AND candidate_id = "+str(44101578)+" AND day = '"+str(date)+"')")
node = cursor.fetchone()
if node:
total = node[1]+1
cursor.execute("UPDATE hash_candidate set total = "+str(total)+" WHERE ( text='"+hashtag+"' AND candidate_id = "+str(44101578)+" AND day = '"+str(date)+"')")
else:
insert = "INSERT INTO hash_candidate(text, candidate_id, day, total) VALUES ('"+hashtag+"','"+str(44101578)+"','"+str(date)+"','"+str(1)+"' )"
cursor.execute(insert)
except Exception, e:
print "DB Error - insert_hash_group: ", e<|fim▁end|> | try:
cursor = self.con.cursor()
cursor.execute("SELECT text, total from hash_group WHERE ( text='"+hashtag+"' AND group_id = 'ALDE' AND day = '"+str(date)+"')") |
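# --- Illustrative usage (not part of the original module) ---
# Minimal sketch of how this `database` class might be driven, assuming a
# reachable MySQL server with the schema implied by the queries above
# (twitter_users, interactions, ...). The sample tweet below is hypothetical
# and only carries the keys the insert_* methods actually read.
if __name__ == '__main__':
    sample_tweet = json.dumps({
        'id': 1,
        'user': {'id': 42, 'screen_name': 'example_user'},
        'text': 'hello world',
        'created_at': 'Mon May 19 20:00:00 +0000 2014',
        'lang': 'en',
        'retweeted': False,
        'in_reply_to_user_id': None,
        'in_reply_to_screen_name': None,
        'entities': {'hashtags': []},
    })
    db = database()
    db.insert(sample_tweet)  # parses the JSON and updates the tables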
<|file_name|>dde-session-ui_gl_ES.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="gl_ES" version="2.1">
<context>
<name>ContentWidget</name>
<message>
<location filename="../dde-shutdown/view/contentwidget.cpp" line="244"/>
<location filename="../dde-shutdown/view/contentwidget.cpp" line="290"/>
<location filename="../dde-shutdown/view/contentwidget.cpp" line="378"/>
<source>Shut down</source>
<translation>Apagar</translation>
</message>
<message>
<location filename="../dde-shutdown/view/contentwidget.cpp" line="246"/>
<location filename="../dde-shutdown/view/contentwidget.cpp" line="295"/>
<location filename="../dde-shutdown/view/contentwidget.cpp" line="381"/>
<source>Reboot</source>
<translation>Reiniciar</translation>
</message>
<message>
<location filename="../dde-shutdown/view/contentwidget.cpp" line="291"/>
<source>Are you sure to shut down?</source>
<translation>Tes a certeza de querer apagar?</translation>
</message>
<message>
<location filename="../dde-shutdown/view/contentwidget.cpp" line="296"/>
<source>Are you sure to reboot?</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../dde-shutdown/view/contentwidget.cpp" line="300"/>
<location filename="../dde-shutdown/view/contentwidget.cpp" line="390"/>
<source>Log out</source>
<translation>Pechar a sesión</translation>
</message>
<message>
<location filename="../dde-shutdown/view/contentwidget.cpp" line="301"/>
<source>Are you sure to log out?</source>
<translation>Tes a certeza de querer pechar a sesión?</translation>
</message>
<message>
<location filename="../dde-shutdown/view/contentwidget.cpp" line="384"/>
<source>Suspend</source>
<translation>Suspender</translation>
</message>
<message>
<location filename="../dde-shutdown/view/contentwidget.cpp" line="387"/>
<source>Lock</source>
<translation>Bloquear</translation>
</message>
<message>
<location filename="../dde-shutdown/view/contentwidget.cpp" line="394"/>
<source>Switch user</source>
<translation>Cambiar de usuario</translation>
</message>
</context>
<context>
<name>DisplayModeProvider</name>
<message>
<location filename="../dde-osd/displaymodeprovider.cpp" line="158"/>
<source>Duplicate</source>
<translation>Duplicar</translation>
</message>
<message>
<location filename="../dde-osd/displaymodeprovider.cpp" line="160"/>
<source>Extend</source>
<translation>Extender</translation>
</message>
</context>
<context>
<name>InhibitWarnView</name>
<message>
<location filename="../dde-shutdown/view/inhibitwarnview.cpp" line="50"/>
<source>Cancel</source>
<translation>Cancelar</translation>
</message>
</context>
<context>
<name>KBLayoutIndicator</name>
<message>
<location filename="../dde-osd/kblayoutindicator.cpp" line="152"/>
<source>Add keyboard layout</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>LockManager</name>
<message>
<location filename="../dde-lock/lockmanager.cpp" line="154"/>
<source>Login</source>
<translation>Inicio de sesión</translation>
</message>
<message>
<location filename="../dde-lock/lockmanager.cpp" line="405"/>
<source>Please enter your password manually if fingerprint password timed out</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../dde-lock/lockmanager.cpp" line="420"/>
<source>Wrong Password</source>
<translation>Contrasinal non válido</translation>
</message>
<message>
<location filename="../dde-lock/lockmanager.cpp" line="550"/>
<source>Enter your password to reboot</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../dde-lock/lockmanager.cpp" line="555"/>
<source>Enter your password to shutdown</source>
<translation>Insire o teu contrasinal para apagar</translation>
</message>
</context>
<context>
<name>LoginManager</name>
<message>
<location filename="../lightdm-deepin-greeter/app/loginmanager.cpp" line="297"/>
<source>Login</source>
<translation>Inicio de sesión</translation>
</message>
<message>
<location filename="../lightdm-deepin-greeter/app/loginmanager.cpp" line="473"/>
<source>Please enter your password manually if fingerprint password timed out</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../lightdm-deepin-greeter/app/loginmanager.cpp" line="527"/>
<source>Wrong Password</source>
<translation>Contrasinal non válido</translation>
</message>
</context>
<context>
<name>MultiUsersWarningView</name>
<message>
<location filename="../dde-shutdown/view/multiuserswarningview.cpp" line="124"/>
<source>The above users still keep logged in and the data will be lost due to shutdown, are you sure to shut down? </source>
<translation>Estes usuarios aínda están conectados, ao apagar perderanse todos os seus datos, tes a certeza de querer continuar?</translation>
</message>
<message>
<location filename="../dde-shutdown/view/multiuserswarningview.cpp" line="130"/>
<source>The above users still keep logged in and the data will be lost due to reboot, are you sure to reboot? </source>
<translation>Estes usuarios aínda están conectados, ao reiniciar perderanse todos os seus datos, tes a certeza de querer continuar?</translation>
</message>
</context>
<context>
<name>ShutdownWidget</name>
<message>
<location filename="../widgets/shutdownwidget.cpp" line="45"/>
<source>Shut down</source>
<translation>Apagar</translation>
</message>
<message>
<location filename="../widgets/shutdownwidget.cpp" line="49"/>
<source>Reboot</source>
<translation>Reiniciar</translation>
</message>
<message>
<location filename="../widgets/shutdownwidget.cpp" line="53"/>
<source>Suspend</source>
<translation>Suspender</translation>
</message>
</context>
<context>
<name>SuspendDialog</name>
<message>
<location filename="../dde-suspend-dialog/suspenddialog.cpp" line="34"/>
<source>External monitor detected, suspend?</source>
<translation>Monitor externo detectado, suspender?</translation>
</message>
<message>
<location filename="../dde-suspend-dialog/suspenddialog.cpp" line="34"/>
<location filename="../dde-suspend-dialog/suspenddialog.cpp" line="55"/>
<source>%1s</source>
<translation>%1s</translation>
</message>
<message>
<location filename="../dde-suspend-dialog/suspenddialog.cpp" line="45"/>
<source>Cancel</source>
<translation>Cancelar</translation>
</message>
<message>
<location filename="../dde-suspend-dialog/suspenddialog.cpp" line="45"/>
<source>Suspend</source>
<translation>Suspender</translation>
</message>
</context>
<context>
<name>SystemMonitor</name>
<message>
<location filename="../dde-shutdown/view/systemmonitor.cpp" line="47"/>
<source>Start system monitor</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TimeWidget</name>
<message>
<location filename="../dde-lock/timewidget.cpp" line="76"/>
<source>hh:mm</source>
<translation> hh:mm </translation>
</message>
<message>
<location filename="../dde-lock/timewidget.cpp" line="77"/>
<source>yyyy-MM-dd dddd</source>
<translation>dddd dd-MM-yyyy</translation>
</message>
</context>
<context>
<name>UpdateContent</name>
<message>
<location filename="../dde-welcome/updatecontent.cpp" line="41"/>
<source>Welcome, system updated successfully</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../dde-welcome/updatecontent.cpp" line="42"/>
<source>Current Edition:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../dde-welcome/updatecontent.cpp" line="43"/>
<source>Enter</source>
<translation type="unfinished"/>
</message><|fim▁hole|> <name>WMChooser</name>
<message>
<location filename="../dde-wm-chooser/wmchooser.cpp" line="56"/>
<source>Effect Mode</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../dde-wm-chooser/wmchooser.cpp" line="59"/>
<source>Common Mode</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../dde-wm-chooser/wmchooser.cpp" line="63"/>
<source>Friendly Reminder</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../dde-wm-chooser/wmchooser.cpp" line="67"/>
<source>System has detected that you are using a virtual machine, which will affect the system performance and operation experience, for a smooth experience, it is recommended to select Common Mode</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../dde-wm-chooser/wmchooser.cpp" line="70"/>
<source>Effect Mode: You can smoothly and gorgeously experience. Common Mode: You can extremely rapidly experience</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WMStateProvider</name>
<message>
<location filename="../dde-osd/wmstateprovider.cpp" line="43"/>
<source>Window effect enabled</source>
<translation>Efecto da xanela activado</translation>
</message>
<message>
<location filename="../dde-osd/wmstateprovider.cpp" line="45"/>
<source>Window effect disabled</source>
<translation>Efecto da xanela desactivado</translation>
</message>
<message>
<location filename="../dde-osd/wmstateprovider.cpp" line="47"/>
<source>Failed to enable window effects</source>
<translation>Non foi posible activar os efectos das xanelas</translation>
</message>
</context>
<context>
<name>WarningDialog</name>
<message>
<location filename="../dde-warning-dialog/warningdialog.cpp" line="38"/>
<source>Kindly Reminder</source>
<translation>Recordatorio</translation>
</message>
<message>
<location filename="../dde-warning-dialog/warningdialog.cpp" line="39"/>
<source>This application can not run without window effect</source>
<translation>Esta aplicación non pode correr sen efecto de xanela</translation>
</message>
<message>
<location filename="../dde-warning-dialog/warningdialog.cpp" line="43"/>
<source>OK</source>
<translation>Aceptar</translation>
</message>
</context>
<context>
<name>Window</name>
<message>
<location filename="../dde-lowpower/window.cpp" line="39"/>
<source>Low battery, please plug in</source>
<translation>Batería baixa, por favor conecta á corrente</translation>
</message>
</context>
</TS><|fim▁end|> | </context>
<context> |
<|file_name|>test_cli20_rule.py<|end_file_name|><|fim▁begin|># Copyright 2015 Huawei Technologies India Pvt Ltd.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import sys
from neutronclient.neutron.v2_0.qos import rule as qos_rule
from neutronclient.tests.unit import test_cli20
class CLITestV20QoSRuleJSON(test_cli20.CLITestV20Base):
non_admin_status_resources = ['bandwidth_limit_rule', 'dscp_marking_rule']
def setUp(self):
super(CLITestV20QoSRuleJSON, self).setUp()
def test_list_qos_rule_types(self):
# qos_rule_types.
resources = 'rule_types'
cmd_resources = 'qos_rule_types'
        response_contents = [{'type': 'bandwidth_limit'},
                             {'type': 'dscp_marking'}]
cmd = qos_rule.ListQoSRuleTypes(test_cli20.MyApp(sys.stdout),<|fim▁hole|><|fim▁end|> | None)
self._test_list_resources(resources, cmd, True,
response_contents=response_contents,
cmd_resources=cmd_resources) |
<|file_name|>model.external.py<|end_file_name|><|fim▁begin|># The content of this file was generated using the Python profile of libCellML 0.2.0.
from enum import Enum
from math import *
__version__ = "0.3.0"
LIBCELLML_VERSION = "0.2.0"
STATE_COUNT = 3
VARIABLE_COUNT = 19
class VariableType(Enum):
VARIABLE_OF_INTEGRATION = 1
STATE = 2
CONSTANT = 3
COMPUTED_CONSTANT = 4
ALGEBRAIC = 5
EXTERNAL = 6
VOI_INFO = {"name": "time", "units": "millisecond", "component": "environment", "type": VariableType.VARIABLE_OF_INTEGRATION}
STATE_INFO = [
{"name": "m", "units": "dimensionless", "component": "sodium_channel_m_gate", "type": VariableType.STATE},
{"name": "h", "units": "dimensionless", "component": "sodium_channel_h_gate", "type": VariableType.STATE},
{"name": "n", "units": "dimensionless", "component": "potassium_channel_n_gate", "type": VariableType.STATE}
]
VARIABLE_INFO = [
{"name": "V", "units": "millivolt", "component": "membrane", "type": VariableType.EXTERNAL},
{"name": "g_L", "units": "milliS_per_cm2", "component": "leakage_current", "type": VariableType.CONSTANT},
{"name": "Cm", "units": "microF_per_cm2", "component": "membrane", "type": VariableType.CONSTANT},
{"name": "E_R", "units": "millivolt", "component": "membrane", "type": VariableType.CONSTANT},
{"name": "g_K", "units": "milliS_per_cm2", "component": "potassium_channel", "type": VariableType.CONSTANT},
{"name": "g_Na", "units": "milliS_per_cm2", "component": "sodium_channel", "type": VariableType.CONSTANT},
{"name": "i_Stim", "units": "microA_per_cm2", "component": "membrane", "type": VariableType.ALGEBRAIC},
{"name": "E_L", "units": "millivolt", "component": "leakage_current", "type": VariableType.COMPUTED_CONSTANT},
{"name": "i_L", "units": "microA_per_cm2", "component": "leakage_current", "type": VariableType.ALGEBRAIC},
{"name": "E_Na", "units": "millivolt", "component": "sodium_channel", "type": VariableType.COMPUTED_CONSTANT},
{"name": "i_Na", "units": "microA_per_cm2", "component": "sodium_channel", "type": VariableType.EXTERNAL},
{"name": "alpha_m", "units": "per_millisecond", "component": "sodium_channel_m_gate", "type": VariableType.ALGEBRAIC},
{"name": "beta_m", "units": "per_millisecond", "component": "sodium_channel_m_gate", "type": VariableType.ALGEBRAIC},
{"name": "alpha_h", "units": "per_millisecond", "component": "sodium_channel_h_gate", "type": VariableType.ALGEBRAIC},
{"name": "beta_h", "units": "per_millisecond", "component": "sodium_channel_h_gate", "type": VariableType.ALGEBRAIC},
{"name": "E_K", "units": "millivolt", "component": "potassium_channel", "type": VariableType.COMPUTED_CONSTANT},
{"name": "i_K", "units": "microA_per_cm2", "component": "potassium_channel", "type": VariableType.ALGEBRAIC},
{"name": "alpha_n", "units": "per_millisecond", "component": "potassium_channel_n_gate", "type": VariableType.EXTERNAL},
{"name": "beta_n", "units": "per_millisecond", "component": "potassium_channel_n_gate", "type": VariableType.ALGEBRAIC}
]
def leq_func(x, y):
return 1.0 if x <= y else 0.0
def geq_func(x, y):
return 1.0 if x >= y else 0.0
def and_func(x, y):
return 1.0 if bool(x) & bool(y) else 0.0
def create_states_array():
return [nan]*STATE_COUNT
def create_variables_array():
return [nan]*VARIABLE_COUNT
def initialise_variables(voi, states, variables, external_variable):
variables[1] = 0.3
variables[2] = 1.0
variables[3] = 0.0
variables[4] = 36.0
variables[5] = 120.0
states[0] = 0.05
states[1] = 0.6
states[2] = 0.325
variables[0] = external_variable(voi, states, variables, 0)
variables[17] = external_variable(voi, states, variables, 17)
variables[10] = external_variable(voi, states, variables, 10)
def compute_computed_constants(variables):
variables[7] = variables[3]-10.613
variables[9] = variables[3]-115.0
variables[15] = variables[3]+12.0
def compute_rates(voi, states, rates, variables, external_variable):
variables[0] = external_variable(voi, states, variables, 0)<|fim▁hole|> variables[11] = 0.1*(variables[0]+25.0)/(exp((variables[0]+25.0)/10.0)-1.0)
variables[12] = 4.0*exp(variables[0]/18.0)
rates[0] = variables[11]*(1.0-states[0])-variables[12]*states[0]
variables[13] = 0.07*exp(variables[0]/20.0)
variables[14] = 1.0/(exp((variables[0]+30.0)/10.0)+1.0)
rates[1] = variables[13]*(1.0-states[1])-variables[14]*states[1]
variables[17] = external_variable(voi, states, variables, 17)
variables[18] = 0.125*exp(variables[0]/80.0)
rates[2] = variables[17]*(1.0-states[2])-variables[18]*states[2]
def compute_variables(voi, states, rates, variables, external_variable):
variables[0] = external_variable(voi, states, variables, 0)
variables[6] = -20.0 if and_func(geq_func(voi, 10.0), leq_func(voi, 10.5)) else 0.0
variables[8] = variables[1]*(variables[0]-variables[7])
variables[17] = external_variable(voi, states, variables, 17)
variables[10] = external_variable(voi, states, variables, 10)
variables[16] = variables[4]*pow(states[2], 4.0)*(variables[0]-variables[15])<|fim▁end|> | |
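# --- Illustrative driver (not part of the generated module) ---
# Minimal sketch of how this generated API might be exercised with a simple
# forward-Euler step. The example_external_variable callback is a hypothetical
# stand-in that returns fixed values for the three EXTERNAL variables
# (indices 0, 10 and 17 in VARIABLE_INFO).
def example_external_variable(voi, states, variables, index):
    if index == 0:
        return -75.0  # membrane voltage V (assumed clamped)
    if index == 10:
        return 0.0    # sodium current i_Na
    return 0.3        # alpha_n
def run_example(step=0.01, n_steps=10):
    voi = 0.0
    states = create_states_array()
    rates = create_states_array()
    variables = create_variables_array()
    initialise_variables(voi, states, variables, example_external_variable)
    compute_computed_constants(variables)
    for _ in range(n_steps):
        compute_rates(voi, states, rates, variables, example_external_variable)
        for i in range(STATE_COUNT):
            states[i] += step*rates[i]
        voi += step
    compute_variables(voi, states, rates, variables, example_external_variable)
    return states, variables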
<|file_name|>DateArrayType.js<|end_file_name|><|fim▁begin|>/*!
* Module dependencies.
*/
var util = require('util'),
moment = require('moment'),
super_ = require('../Type');
/**
* Date FieldType Constructor
* @extends Field
* @api public
*/
function datearray(list, path, options) {
this._nativeType = [Date];
this._defaultSize = 'medium';
this._underscoreMethods = ['format'];
this._properties = ['formatString'];
this.parseFormatString = options.parseFormat || 'YYYY-MM-DD';
this.formatString = (options.format === false) ? false : (options.format || 'Do MMM YYYY');
if (this.formatString && 'string' !== typeof this.formatString) {
throw new Error('FieldType.Date: options.format must be a string.');
}
datearray.super_.call(this, list, path, options);
}
/*!
* Inherit from Field
*/
util.inherits(datearray, super_);
/**
* Formats the field value
*
* @api public
*/
datearray.prototype.format = function(item, format) {
if (format || this.formatString) {
return item.get(this.path) ? moment(item.get(this.path)).format(format || this.formatString) : '';
} else {
return item.get(this.path) || '';
}
};
/**
* Checks that a valid array of dates has been provided in a data object
*
* An empty value clears the stored value and is considered valid
*
* @api public
*/
datearray.prototype.inputIsValid = function(data, required, item) {
var value = this.getValueFromData(data);
var parseFormatString = this.parseFormatString;
if ('string' === typeof value) {
if (!moment(value, parseFormatString).isValid()) {
return false;
}
value = [value];
}
if (required) {
if (value === undefined && item && item.get(this.path) && item.get(this.path).length) {
return true;
}
if (value === undefined || !Array.isArray(value)) {
return false;
}
if (Array.isArray(value) && !value.length) {
return false;
}
}
if (Array.isArray(value)) {
// filter out empty fields
value = value.filter(function(date) {
return date.trim() !== '';
});
// if there are no values left, and requried is true, return false
if (required && !value.length) {
return false;
}
// if any date in the array is invalid, return false
if (value.some(function (dateValue) { return !moment(dateValue, parseFormatString).isValid(); })) {
return false;
}
}
return (value === undefined || Array.isArray(value));
};
/**
* Updates the value for this field in the item from a data object
*
* @api public
*/
datearray.prototype.updateItem = function(item, data, callback) {
<|fim▁hole|> var value = this.getValueFromData(data);
if (value !== undefined) {
if (Array.isArray(value)) {
// Only save valid dates
value = value.filter(function(date) {
return moment(date).isValid();
});
}
if (value === null) {
value = [];
}
if ('string' === typeof value) {
if (moment(value).isValid()) {
value = [value];
}
}
if (Array.isArray(value)) {
item.set(this.path, value);
}
} else item.set(this.path, []);
process.nextTick(callback);
};
/*!
* Export class
*/
module.exports = datearray;<|fim▁end|> | |
<|file_name|>train_eval_online.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# python3
# pylint: disable=line-too-long
r"""Run training loop.
"""
# pylint: enable=line-too-long
import os
import random
import time
from absl import app
from absl import flags
from absl import logging
import numpy as np
import tensorflow as tf
from tf_agents.policies import random_tf_policy
from tf_agents.replay_buffers import tf_uniform_replay_buffer
from tf_agents.specs.tensor_spec import TensorSpec
import tqdm
from representation_batch_rl.batch_rl import asac
from representation_batch_rl.batch_rl import awr
from representation_batch_rl.batch_rl import ddpg
from representation_batch_rl.batch_rl import evaluation
from representation_batch_rl.batch_rl import pcl
from representation_batch_rl.batch_rl import sac
from representation_batch_rl.batch_rl import sac_v1
from representation_batch_rl.batch_rl.image_utils import image_aug
from representation_batch_rl.twin_sac import utils
FLAGS = flags.FLAGS
<|fim▁hole|>flags.DEFINE_float('actor_lr', 3e-4, 'Actor learning rate.')
flags.DEFINE_float('alpha_lr', 3e-4, 'Temperature learning rate.')
flags.DEFINE_float('critic_lr', 3e-4, 'Critic learning rate.')
flags.DEFINE_integer('deployment_batch_size', 1, 'Batch size.')
flags.DEFINE_integer('sample_batch_size', 256, 'Batch size.')
flags.DEFINE_float('discount', 0.99, 'Discount used for returns.')
flags.DEFINE_float('tau', 0.005,
'Soft update coefficient for the target network.')
flags.DEFINE_integer('max_timesteps', 200_000, 'Max timesteps to train.')
flags.DEFINE_integer('max_length_replay_buffer', 100_000,
'Max replay buffer size (image observations use 100k).')
flags.DEFINE_integer('num_random_actions', 10_000,
'Fill replay buffer with N random actions.')
flags.DEFINE_integer('start_training_timesteps', 10_000,
'Start training when replay buffer contains N timesteps.')
flags.DEFINE_string('save_dir', '/tmp/save/', 'Directory to save results to.')
flags.DEFINE_integer('log_interval', 1_000, 'Log every N timesteps.')
flags.DEFINE_integer('eval_interval', 10_000, 'Evaluate every N timesteps.')
flags.DEFINE_integer('action_repeat', 8,
'(optional) action repeat used when instantiating env.')
flags.DEFINE_integer('frame_stack', 0,
'(optional) frame stack used when instantiating env.')
flags.DEFINE_enum('algo_name', 'sac', [
'ddpg',
'crossnorm_ddpg',
'sac',
'pc_sac',
'pcl',
'crossnorm_sac',
'crr',
'awr',
'sac_v1',
'asac',
], 'Algorithm.')
flags.DEFINE_boolean('eager', False, 'Execute functions eagerly.')
def main(_):
if FLAGS.eager:
tf.config.experimental_run_functions_eagerly(FLAGS.eager)
tf.random.set_seed(FLAGS.seed)
np.random.seed(FLAGS.seed)
random.seed(FLAGS.seed)
action_repeat = FLAGS.action_repeat
_, _, domain_name, _ = FLAGS.env_name.split('-')
if domain_name in ['cartpole']:
FLAGS.set_default('action_repeat', 8)
elif domain_name in ['reacher', 'cheetah', 'ball_in_cup', 'hopper']:
FLAGS.set_default('action_repeat', 4)
elif domain_name in ['finger', 'walker']:
FLAGS.set_default('action_repeat', 2)
FLAGS.set_default('max_timesteps', FLAGS.max_timesteps // FLAGS.action_repeat)
env = utils.load_env(
FLAGS.env_name, FLAGS.seed, action_repeat, FLAGS.frame_stack)
eval_env = utils.load_env(
FLAGS.env_name, FLAGS.seed, action_repeat, FLAGS.frame_stack)
is_image_obs = (isinstance(env.observation_spec(), TensorSpec) and
len(env.observation_spec().shape) == 3)
spec = (
env.observation_spec(),
env.action_spec(),
env.reward_spec(),
env.reward_spec(), # discount spec
env.observation_spec() # next observation spec
)
replay_buffer = tf_uniform_replay_buffer.TFUniformReplayBuffer(
spec, batch_size=1, max_length=FLAGS.max_length_replay_buffer)
@tf.function
def add_to_replay(state, action, reward, discount, next_states):
replay_buffer.add_batch((state, action, reward, discount, next_states))
hparam_str = utils.make_hparam_string(
FLAGS.xm_parameters, seed=FLAGS.seed, env_name=FLAGS.env_name,
algo_name=FLAGS.algo_name)
summary_writer = tf.summary.create_file_writer(
os.path.join(FLAGS.save_dir, 'tb', hparam_str))
results_writer = tf.summary.create_file_writer(
os.path.join(FLAGS.save_dir, 'results', hparam_str))
if 'ddpg' in FLAGS.algo_name:
model = ddpg.DDPG(
env.observation_spec(),
env.action_spec(),
cross_norm='crossnorm' in FLAGS.algo_name)
elif 'crr' in FLAGS.algo_name:
model = awr.AWR(
env.observation_spec(),
env.action_spec(), f='bin_max')
elif 'awr' in FLAGS.algo_name:
model = awr.AWR(
env.observation_spec(),
env.action_spec(), f='exp_mean')
elif 'sac_v1' in FLAGS.algo_name:
model = sac_v1.SAC(
env.observation_spec(),
env.action_spec(),
target_entropy=-env.action_spec().shape[0])
elif 'asac' in FLAGS.algo_name:
model = asac.ASAC(
env.observation_spec(),
env.action_spec(),
target_entropy=-env.action_spec().shape[0])
elif 'sac' in FLAGS.algo_name:
model = sac.SAC(
env.observation_spec(),
env.action_spec(),
target_entropy=-env.action_spec().shape[0],
cross_norm='crossnorm' in FLAGS.algo_name,
pcl_actor_update='pc' in FLAGS.algo_name)
elif 'pcl' in FLAGS.algo_name:
model = pcl.PCL(
env.observation_spec(),
env.action_spec(),
target_entropy=-env.action_spec().shape[0])
initial_collect_policy = random_tf_policy.RandomTFPolicy(
env.time_step_spec(), env.action_spec())
dataset = replay_buffer.as_dataset(
num_parallel_calls=tf.data.AUTOTUNE,
sample_batch_size=FLAGS.sample_batch_size)
if is_image_obs:
# Augment images as in DRQ.
dataset = dataset.map(image_aug,
num_parallel_calls=tf.data.AUTOTUNE,
deterministic=False).prefetch(3)
else:
dataset = dataset.prefetch(3)
def repack(*data):
return data[0]
dataset = dataset.map(repack)
replay_buffer_iter = iter(dataset)
previous_time = time.time()
timestep = env.reset()
episode_return = 0
episode_timesteps = 0
step_mult = 1 if action_repeat < 1 else action_repeat
for i in tqdm.tqdm(range(FLAGS.max_timesteps)):
if i % FLAGS.deployment_batch_size == 0:
for _ in range(FLAGS.deployment_batch_size):
if timestep.is_last():
if episode_timesteps > 0:
current_time = time.time()
with summary_writer.as_default():
tf.summary.scalar(
'train/returns',
episode_return,
step=(i + 1) * step_mult)
tf.summary.scalar(
'train/FPS',
episode_timesteps / (current_time - previous_time),
step=(i + 1) * step_mult)
timestep = env.reset()
episode_return = 0
episode_timesteps = 0
previous_time = time.time()
if (replay_buffer.num_frames() < FLAGS.num_random_actions or
replay_buffer.num_frames() < FLAGS.deployment_batch_size):
# Use policy only after the first deployment.
policy_step = initial_collect_policy.action(timestep)
action = policy_step.action
else:
action = model.actor(timestep.observation, sample=True)
next_timestep = env.step(action)
add_to_replay(timestep.observation, action, next_timestep.reward,
next_timestep.discount, next_timestep.observation)
episode_return += next_timestep.reward[0]
episode_timesteps += 1
timestep = next_timestep
if i + 1 >= FLAGS.start_training_timesteps:
with summary_writer.as_default():
info_dict = model.update_step(replay_buffer_iter)
if (i + 1) % FLAGS.log_interval == 0:
with summary_writer.as_default():
for k, v in info_dict.items():
tf.summary.scalar(f'training/{k}', v, step=(i + 1) * step_mult)
if (i + 1) % FLAGS.eval_interval == 0:
logging.info('Performing policy eval.')
average_returns, evaluation_timesteps = evaluation.evaluate(
eval_env, model)
with results_writer.as_default():
tf.summary.scalar(
'evaluation/returns', average_returns, step=(i + 1) * step_mult)
tf.summary.scalar(
'evaluation/length', evaluation_timesteps, step=(i+1) * step_mult)
logging.info('Eval at %d: ave returns=%f, ave episode length=%f',
(i + 1) * step_mult, average_returns, evaluation_timesteps)
if (i + 1) % FLAGS.eval_interval == 0:
model.save_weights(
os.path.join(FLAGS.save_dir, 'results',
FLAGS.env_name + '__' + str(i + 1)))
if __name__ == '__main__':
app.run(main)<|fim▁end|> | flags.DEFINE_string('env_name', 'pixels-dm-cartpole-swingup',
'Environment for training/evaluation.')
flags.DEFINE_integer('seed', 42, 'Fixed random seed for training.') |
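# --- Illustrative sketch (not part of the original script) ---
# Self-contained example of the TFUniformReplayBuffer round trip the training
# loop above relies on: add_batch() with a leading batch dimension of 1, then
# as_dataset() to sample minibatches. The specs here are made up for brevity.
def _replay_buffer_demo():
  import tensorflow as tf
  from tf_agents.replay_buffers import tf_uniform_replay_buffer
  from tf_agents.specs.tensor_spec import TensorSpec

  spec = (TensorSpec([3], tf.float32, 'observation'),
          TensorSpec([1], tf.float32, 'action'))
  buffer = tf_uniform_replay_buffer.TFUniformReplayBuffer(
      spec, batch_size=1, max_length=100)
  for _ in range(5):
    # Each added item needs a leading dimension equal to batch_size (1 here).
    buffer.add_batch((tf.random.normal([1, 3]), tf.random.normal([1, 1])))
  dataset = buffer.as_dataset(sample_batch_size=4)
  (observations, actions), _ = next(iter(dataset))
  return observations.shape, actions.shape  # (4, 3) and (4, 1)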
<|file_name|>initialConditions.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Created on Tue Aug 25 13:08:19 2015
@author: jgimenez
"""
from PyQt4 import QtGui, QtCore
from initialConditions_ui import Ui_initialConditionsUI
import os
from utils import *
from PyFoam.RunDictionary.BoundaryDict import BoundaryDict
from PyFoam.RunDictionary.ParsedParameterFile import ParsedParameterFile
from utils import types
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
unknowns = ['U','p','p_rgh','alpha','k','epsilon','omega','nut','nuTilda']
class initialConditionsUI(QtGui.QScrollArea, Ui_initialConditionsUI):
def __init__(self, parent=None, f=QtCore.Qt.WindowFlags()):
QtGui.QScrollArea.__init__(self, parent)
self.setupUi(self)
class initialConditionsWidget(initialConditionsUI):
def __init__(self,folder):
self.currentFolder = folder
initialConditionsUI.__init__(self)
[self.timedir,self.fields,currtime] = currentFields(self.currentFolder)
self.pushButton.setEnabled(False)
self.addTabs()
def addTabs(self,ipatch=None):
for itab in range(self.tabWidget.count()):
layout = self.tabWidget.widget(itab).findChildren(QtGui.QVBoxLayout)[0]
self.clearLayout(layout,0)
self.tabWidget.clear()
for ifield in self.fields:
if ifield not in unknowns:
continue
widget = QtGui.QWidget()
layout = QtGui.QVBoxLayout(widget)
layout2 = QtGui.QHBoxLayout()
cb = QtGui.QComboBox()
cb.addItems(['uniform','nonuniform'])
layout2.addWidget(cb)
if types[ifield]=='scalar':
ledit = QtGui.QLineEdit()
ledit.setValidator(QtGui.QDoubleValidator())
QtCore.QObject.connect(ledit, QtCore.SIGNAL(_fromUtf8("textEdited(QString)")), self.checkData)
layout2.addWidget(ledit)
else:
for j in range(3):
ledit = QtGui.QLineEdit()
ledit.setValidator(QtGui.QDoubleValidator())
layout2.addWidget(ledit)
QtCore.QObject.connect(ledit, QtCore.SIGNAL(_fromUtf8("textEdited(QString)")), self.checkData)
layout.addLayout(layout2)
if ifield=='U':
qbutton = QtGui.QCheckBox()
qbutton.setText('Initialize from potential flow')
layout.addWidget(qbutton)
QtCore.QObject.connect(qbutton, QtCore.SIGNAL(_fromUtf8("stateChanged(int)")), self.onPotentialFlow)
spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
layout.addItem(spacerItem)
self.tabWidget.addTab(widget, ifield)
self.tabWidget.setTabText(self.tabWidget.count(),ifield)
def onPotentialFlow(self):
for itab in range(self.tabWidget.count()):
ifield = self.tabWidget.tabText(itab)
if ifield=='U':
print ifield
layout = self.tabWidget.widget(itab).findChildren(QtGui.QVBoxLayout)[0]
cb = self.tabWidget.widget(itab).findChildren(QtGui.QCheckBox)[0]
layout2 = layout.itemAt(0).layout()
for i in range(layout2.count()):
if isinstance(layout2.itemAt(i), QtGui.QWidgetItem):
layout2.itemAt(i).widget().setEnabled(not cb.isChecked())
<|fim▁hole|>
if isinstance(item, QtGui.QWidgetItem):
item.widget().close()
item.widget().deleteLater()
# or
# item.widget().setParent(None)
elif isinstance(item, QtGui.QSpacerItem):
None
# no need to do extra stuff
else:
self.clearLayout(item.layout(),0)
# remove the item from layout
layout.removeItem(item)
def setConditions(self):
runPotentialFlow = 0
for itab in range(self.tabWidget.count()):
ifield = self.tabWidget.tabText(itab)
layout = self.tabWidget.widget(itab).findChildren(QtGui.QVBoxLayout)[0]
filename = '%s/%s'%(self.timedir,ifield)
parsedData = ParsedParameterFile(filename,createZipped=False)
layout2 = layout.itemAt(0).layout()
if layout2.count()==2:
parsedData['internalField'] = '%s %s'%(layout2.itemAt(0).widget().currentText(),layout2.itemAt(1).widget().text())
else:
if ifield == 'U' and self.tabWidget.widget(itab).findChildren(QtGui.QCheckBox)[0].isChecked():
runPotentialFlow = 1
parsedData['internalField'] = '%s (%s %s %s)'%('uniform',0,0,0)
else:
parsedData['internalField'] = '%s (%s %s %s)'%(layout2.itemAt(0).widget().currentText(),layout2.itemAt(1).widget().text(),layout2.itemAt(2).widget().text(),layout2.itemAt(3).widget().text())
parsedData.writeFile()
self.pushButton.setEnabled(False)
if runPotentialFlow:
QtGui.QMessageBox.about(self, "ERROR", 'Debe simularse con potentialFoam, hacer!!')
return
def checkData(self):
ready = True
for itab in range(self.tabWidget.count()):
edits = self.tabWidget.widget(itab).findChildren(QtGui.QLineEdit)
for E in edits:
if E.isEnabled():
if not E.text():
ready = False
if ready:
self.pushButton.setEnabled(True)
else:
self.pushButton.setEnabled(False)<|fim▁end|> | def clearLayout(self, layout, dejar):
for i in reversed(range(layout.count())):
if i>= dejar:
item = layout.itemAt(i) |
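# --- Illustrative sketch (not part of the original widget) ---
# setConditions() above rewrites OpenFOAM field files through PyFoam; this is
# a minimal standalone version of that round trip, assuming an existing case
# directory with a 0/U file on disk.
def example_set_uniform_velocity(case_dir, ux, uy, uz):
    field = ParsedParameterFile('%s/0/U' % case_dir, createZipped=False)
    field['internalField'] = 'uniform (%s %s %s)' % (ux, uy, uz)
    field.writeFile()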
<|file_name|>RegisterBankEmitter.cpp<|end_file_name|><|fim▁begin|>//===- RegisterBankEmitter.cpp - Generate a Register Bank Desc. -*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This tablegen backend is responsible for emitting a description of a target
// register bank for a code generator.
//
//===----------------------------------------------------------------------===//
#include "llvm/ADT/BitVector.h"
#include "llvm/Support/Debug.h"
#include "llvm/TableGen/Error.h"
#include "llvm/TableGen/Record.h"
#include "llvm/TableGen/TableGenBackend.h"
#include "CodeGenHwModes.h"
#include "CodeGenRegisters.h"
#include "CodeGenTarget.h"
#define DEBUG_TYPE "register-bank-emitter"
using namespace llvm;
namespace {
class RegisterBank {
/// A vector of register classes that are included in the register bank.
typedef std::vector<const CodeGenRegisterClass *> RegisterClassesTy;
private:
const Record &TheDef;
/// The register classes that are covered by the register bank.
RegisterClassesTy RCs;
/// The register class with the largest register size.
const CodeGenRegisterClass *RCWithLargestRegsSize;
public:
RegisterBank(const Record &TheDef)
: TheDef(TheDef), RCs(), RCWithLargestRegsSize(nullptr) {}
/// Get the human-readable name for the bank.
StringRef getName() const { return TheDef.getValueAsString("Name"); }
/// Get the name of the enumerator in the ID enumeration.
std::string getEnumeratorName() const { return (TheDef.getName() + "ID").str(); }
  /// Get the name of the array holding the register class coverage data.
std::string getCoverageArrayName() const {
return (TheDef.getName() + "CoverageData").str();
}
/// Get the name of the global instance variable.
StringRef getInstanceVarName() const { return TheDef.getName(); }
const Record &getDef() const { return TheDef; }
/// Get the register classes listed in the RegisterBank.RegisterClasses field.
std::vector<const CodeGenRegisterClass *>
getExplicitlySpecifiedRegisterClasses(
const CodeGenRegBank &RegisterClassHierarchy) const {
std::vector<const CodeGenRegisterClass *> RCs;
for (const auto *RCDef : getDef().getValueAsListOfDefs("RegisterClasses"))
RCs.push_back(RegisterClassHierarchy.getRegClass(RCDef));
return RCs;
}
/// Add a register class to the bank without duplicates.
void addRegisterClass(const CodeGenRegisterClass *RC) {
if (llvm::is_contained(RCs, RC))
return;
// FIXME? We really want the register size rather than the spill size
// since the spill size may be bigger on some targets with
// limited load/store instructions. However, we don't store the
// register size anywhere (we could sum the sizes of the subregisters
// but there may be additional bits too) and we can't derive it from
// the VT's reliably due to Untyped.
if (RCWithLargestRegsSize == nullptr)
RCWithLargestRegsSize = RC;
else if (RCWithLargestRegsSize->RSI.get(DefaultMode).SpillSize <
RC->RSI.get(DefaultMode).SpillSize)
RCWithLargestRegsSize = RC;
assert(RCWithLargestRegsSize && "RC was nullptr?");
RCs.emplace_back(RC);
}
const CodeGenRegisterClass *getRCWithLargestRegsSize() const {
return RCWithLargestRegsSize;
}
iterator_range<typename RegisterClassesTy::const_iterator>
register_classes() const {
return llvm::make_range(RCs.begin(), RCs.end());
}
};
class RegisterBankEmitter {
private:
CodeGenTarget Target;
RecordKeeper &Records;
void emitHeader(raw_ostream &OS, const StringRef TargetName,
const std::vector<RegisterBank> &Banks);
void emitBaseClassDefinition(raw_ostream &OS, const StringRef TargetName,
const std::vector<RegisterBank> &Banks);
void emitBaseClassImplementation(raw_ostream &OS, const StringRef TargetName,
std::vector<RegisterBank> &Banks);
public:
RegisterBankEmitter(RecordKeeper &R) : Target(R), Records(R) {}
void run(raw_ostream &OS);
};
} // end anonymous namespace
/// Emit code to declare the ID enumeration and external global instance
/// variables.
void RegisterBankEmitter::emitHeader(raw_ostream &OS,
const StringRef TargetName,
const std::vector<RegisterBank> &Banks) {
// <Target>RegisterBankInfo.h
OS << "namespace llvm {\n"
<< "namespace " << TargetName << " {\n"
<< "enum : unsigned {\n";
OS << " InvalidRegBankID = ~0u,\n";
unsigned ID = 0;
for (const auto &Bank : Banks)
OS << " " << Bank.getEnumeratorName() << " = " << ID++ << ",\n";
OS << " NumRegisterBanks,\n"
<< "};\n"
<< "} // end namespace " << TargetName << "\n"
<< "} // end namespace llvm\n";
}
/// Emit declarations of the <Target>GenRegisterBankInfo class.
void RegisterBankEmitter::emitBaseClassDefinition(
raw_ostream &OS, const StringRef TargetName,
const std::vector<RegisterBank> &Banks) {
OS << "private:\n"
<< " static RegisterBank *RegBanks[];\n\n"
<< "protected:\n"
<< " " << TargetName << "GenRegisterBankInfo();\n"
<< "\n";
}
/// Visit each register class belonging to the given register bank.
///
/// A class belongs to the bank iff any of these apply:
/// * It is explicitly specified
/// * It is a subclass of a class that is a member.
/// * It is a class containing subregisters of the registers of a class that
/// is a member. This is known as a subreg-class.
///
/// This function must be called for each explicitly specified register class.
///
/// \param RC The register class to search.
/// \param Kind A debug string containing the path the visitor took to reach RC.
/// \param VisitFn The action to take for each class visited. It may be called
/// multiple times for a given class if there are multiple paths
/// to the class.
static void visitRegisterBankClasses(
const CodeGenRegBank &RegisterClassHierarchy,
const CodeGenRegisterClass *RC, const Twine &Kind,
std::function<void(const CodeGenRegisterClass *, StringRef)> VisitFn,
SmallPtrSetImpl<const CodeGenRegisterClass *> &VisitedRCs) {
<|fim▁hole|> // Make sure we only visit each class once to avoid infinite loops.
if (VisitedRCs.count(RC))
return;
VisitedRCs.insert(RC);
// Visit each explicitly named class.
VisitFn(RC, Kind.str());
for (const auto &PossibleSubclass : RegisterClassHierarchy.getRegClasses()) {
std::string TmpKind =
(Kind + " (" + PossibleSubclass.getName() + ")").str();
// Visit each subclass of an explicitly named class.
if (RC != &PossibleSubclass && RC->hasSubClass(&PossibleSubclass))
visitRegisterBankClasses(RegisterClassHierarchy, &PossibleSubclass,
TmpKind + " " + RC->getName() + " subclass",
VisitFn, VisitedRCs);
// Visit each class that contains only subregisters of RC with a common
// subregister-index.
//
// More precisely, PossibleSubclass is a subreg-class iff Reg:SubIdx is in
// PossibleSubclass for all registers Reg from RC using any
// subregister-index SubReg
for (const auto &SubIdx : RegisterClassHierarchy.getSubRegIndices()) {
BitVector BV(RegisterClassHierarchy.getRegClasses().size());
PossibleSubclass.getSuperRegClasses(&SubIdx, BV);
if (BV.test(RC->EnumValue)) {
std::string TmpKind2 = (Twine(TmpKind) + " " + RC->getName() +
" class-with-subregs: " + RC->getName())
.str();
VisitFn(&PossibleSubclass, TmpKind2);
}
}
}
}
void RegisterBankEmitter::emitBaseClassImplementation(
raw_ostream &OS, StringRef TargetName,
std::vector<RegisterBank> &Banks) {
const CodeGenRegBank &RegisterClassHierarchy = Target.getRegBank();
OS << "namespace llvm {\n"
<< "namespace " << TargetName << " {\n";
for (const auto &Bank : Banks) {
std::vector<std::vector<const CodeGenRegisterClass *>> RCsGroupedByWord(
(RegisterClassHierarchy.getRegClasses().size() + 31) / 32);
for (const auto &RC : Bank.register_classes())
RCsGroupedByWord[RC->EnumValue / 32].push_back(RC);
OS << "const uint32_t " << Bank.getCoverageArrayName() << "[] = {\n";
unsigned LowestIdxInWord = 0;
for (const auto &RCs : RCsGroupedByWord) {
OS << " // " << LowestIdxInWord << "-" << (LowestIdxInWord + 31) << "\n";
for (const auto &RC : RCs) {
std::string QualifiedRegClassID =
(Twine(RC->Namespace) + "::" + RC->getName() + "RegClassID").str();
OS << " (1u << (" << QualifiedRegClassID << " - "
<< LowestIdxInWord << ")) |\n";
}
OS << " 0,\n";
LowestIdxInWord += 32;
}
OS << "};\n";
}
OS << "\n";
for (const auto &Bank : Banks) {
std::string QualifiedBankID =
(TargetName + "::" + Bank.getEnumeratorName()).str();
const CodeGenRegisterClass &RC = *Bank.getRCWithLargestRegsSize();
unsigned Size = RC.RSI.get(DefaultMode).SpillSize;
OS << "RegisterBank " << Bank.getInstanceVarName() << "(/* ID */ "
<< QualifiedBankID << ", /* Name */ \"" << Bank.getName()
<< "\", /* Size */ " << Size << ", "
<< "/* CoveredRegClasses */ " << Bank.getCoverageArrayName()
<< ", /* NumRegClasses */ "
<< RegisterClassHierarchy.getRegClasses().size() << ");\n";
}
OS << "} // end namespace " << TargetName << "\n"
<< "\n";
OS << "RegisterBank *" << TargetName
<< "GenRegisterBankInfo::RegBanks[] = {\n";
for (const auto &Bank : Banks)
OS << " &" << TargetName << "::" << Bank.getInstanceVarName() << ",\n";
OS << "};\n\n";
OS << TargetName << "GenRegisterBankInfo::" << TargetName
<< "GenRegisterBankInfo()\n"
<< " : RegisterBankInfo(RegBanks, " << TargetName
<< "::NumRegisterBanks) {\n"
<< " // Assert that RegBank indices match their ID's\n"
<< "#ifndef NDEBUG\n"
<< " unsigned Index = 0;\n"
<< " for (const auto &RB : RegBanks)\n"
<< " assert(Index++ == RB->getID() && \"Index != ID\");\n"
<< "#endif // NDEBUG\n"
<< "}\n"
<< "} // end namespace llvm\n";
}
void RegisterBankEmitter::run(raw_ostream &OS) {
StringRef TargetName = Target.getName();
const CodeGenRegBank &RegisterClassHierarchy = Target.getRegBank();
Records.startTimer("Analyze records");
std::vector<RegisterBank> Banks;
for (const auto &V : Records.getAllDerivedDefinitions("RegisterBank")) {
SmallPtrSet<const CodeGenRegisterClass *, 8> VisitedRCs;
RegisterBank Bank(*V);
for (const CodeGenRegisterClass *RC :
Bank.getExplicitlySpecifiedRegisterClasses(RegisterClassHierarchy)) {
visitRegisterBankClasses(
RegisterClassHierarchy, RC, "explicit",
[&Bank](const CodeGenRegisterClass *RC, StringRef Kind) {
LLVM_DEBUG(dbgs()
<< "Added " << RC->getName() << "(" << Kind << ")\n");
Bank.addRegisterClass(RC);
},
VisitedRCs);
}
Banks.push_back(Bank);
}
// Warn about ambiguous MIR caused by register bank/class name clashes.
Records.startTimer("Warn ambiguous");
for (const auto &Class : RegisterClassHierarchy.getRegClasses()) {
for (const auto &Bank : Banks) {
if (Bank.getName().lower() == StringRef(Class.getName()).lower()) {
PrintWarning(Bank.getDef().getLoc(), "Register bank names should be "
"distinct from register classes "
"to avoid ambiguous MIR");
PrintNote(Bank.getDef().getLoc(), "RegisterBank was declared here");
PrintNote(Class.getDef()->getLoc(), "RegisterClass was declared here");
}
}
}
Records.startTimer("Emit output");
emitSourceFileHeader("Register Bank Source Fragments", OS);
OS << "#ifdef GET_REGBANK_DECLARATIONS\n"
<< "#undef GET_REGBANK_DECLARATIONS\n";
emitHeader(OS, TargetName, Banks);
OS << "#endif // GET_REGBANK_DECLARATIONS\n\n"
<< "#ifdef GET_TARGET_REGBANK_CLASS\n"
<< "#undef GET_TARGET_REGBANK_CLASS\n";
emitBaseClassDefinition(OS, TargetName, Banks);
OS << "#endif // GET_TARGET_REGBANK_CLASS\n\n"
<< "#ifdef GET_TARGET_REGBANK_IMPL\n"
<< "#undef GET_TARGET_REGBANK_IMPL\n";
emitBaseClassImplementation(OS, TargetName, Banks);
OS << "#endif // GET_TARGET_REGBANK_IMPL\n";
}
namespace llvm {
void EmitRegisterBank(RecordKeeper &RK, raw_ostream &OS) {
RegisterBankEmitter(RK).run(OS);
}
} // end namespace llvm<|fim▁end|> | |
<|file_name|>IncludeTest.cpp<|end_file_name|><|fim▁begin|>/*
Copyright 2013 Statoil ASA.
This file is part of the Open Porous Media project (OPM).
OPM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OPM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#define BOOST_TEST_MODULE ParserIntegrationTests
#include <boost/test/unit_test.hpp>
#include <boost/test/test_tools.hpp>
#include <boost/filesystem.hpp>
#include <ostream>
#include <fstream>
#include <opm/parser/eclipse/Deck/Deck.hpp>
#include <opm/parser/eclipse/Parser/Parser.hpp>
#include <opm/parser/eclipse/Parser/ParserRecord.hpp>
#include <opm/parser/eclipse/Parser/ParserEnums.hpp>
using namespace Opm;
using namespace boost::filesystem;
static void
createDeckWithInclude(path& datafile, std::string addEndKeyword)
{
path tmpdir = temp_directory_path();
path root = tmpdir / unique_path("%%%%-%%%%");
path absoluteInclude = root / "absolute.include";
path includePath1 = root / "include";
path includePath2 = root / "include2";
path pathIncludeFile = "path.file";
create_directories(root);
create_directories(includePath1);
create_directories(includePath2);
{
datafile = root / "TEST.DATA";
std::ofstream of(datafile.string().c_str());
of << "PATHS" << std::endl;
of << "PATH1 '" << includePath1.string() << "' /" << std::endl;
of << "PATH2 '" << includePath2.string() << "' /" << std::endl;
of << "/" << std::endl;
of << "INCLUDE" << std::endl;
of << " \'relative.include\' /" << std::endl;
of << std::endl;
of << "INCLUDE" << std::endl;
of << " \'" << absoluteInclude.string() << "\' /" << std::endl;
of << std::endl;
of << "INCLUDE" << std::endl;
of << " \'include/nested.include\' /" << std::endl;
of << std::endl;
<|fim▁hole|>
of << std::endl;
of << "INCLUDE" << std::endl;
of << " \'$PATH1/" << pathIncludeFile.string() << "\' /" << std::endl;
of << std::endl;
of << "INCLUDE" << std::endl;
of << " \'$PATH2/" << pathIncludeFile.string() << "\' /" << std::endl;
of.close();
}
{
path relativeInclude = root / "relative.include";
std::ofstream of(relativeInclude.string().c_str());
of << "START" << std::endl;
of << " 10 'FEB' 2012 /" << std::endl;
of.close();
}
{
std::ofstream of(absoluteInclude.string().c_str());
if (addEndKeyword.length() > 0) {
of << addEndKeyword << std::endl;
}
of << "DIMENS" << std::endl;
of << " 10 20 30 /" << std::endl;
of.close();
}
{
path nestedInclude = includePath1 / "nested.include";
path gridInclude = includePath1 / "grid.include";
std::ofstream of(nestedInclude.string().c_str());
of << "INCLUDE" << std::endl;
of << " \'$PATH1/grid.include\' /" << std::endl;
of.close();
std::ofstream of2(gridInclude.string().c_str());
of2 << "GRIDUNIT" << std::endl;
of2 << "/" << std::endl;
of2.close();
}
{
path fullPathToPathIncludeFile1 = includePath1 / pathIncludeFile;
std::ofstream of1(fullPathToPathIncludeFile1.string().c_str());
of1 << "TITLE" << std::endl;
of1 << "This is the title /" << std::endl;
of1.close();
path fullPathToPathIncludeFile2 = includePath2 / pathIncludeFile;
std::ofstream of2(fullPathToPathIncludeFile2.string().c_str());
of2 << "BOX" << std::endl;
of2 << " 1 2 3 4 5 6 /" << std::endl;
of2.close();
}
std::cout << datafile << std::endl;
}
BOOST_AUTO_TEST_CASE(parse_fileWithIncludeKeyword_deckReturned) {
path datafile;
Parser parser;
createDeckWithInclude (datafile, "");
auto deck = parser.parseFile(datafile.string());
BOOST_CHECK( deck.hasKeyword("START"));
BOOST_CHECK( deck.hasKeyword("DIMENS"));
BOOST_CHECK( deck.hasKeyword("GRIDUNIT"));
}
BOOST_AUTO_TEST_CASE(parse_fileWithENDINCKeyword_deckReturned) {
path datafile;
Parser parser;
createDeckWithInclude (datafile, "ENDINC");
auto deck = parser.parseFile(datafile.string());
BOOST_CHECK( deck.hasKeyword("START"));
BOOST_CHECK( !deck.hasKeyword("DIMENS"));
BOOST_CHECK( deck.hasKeyword("GRIDUNIT"));
}
BOOST_AUTO_TEST_CASE(parse_fileWithENDKeyword_deckReturned) {
path datafile;
Parser parser;
createDeckWithInclude (datafile, "END");
auto deck = parser.parseFile(datafile.string());
BOOST_CHECK( deck.hasKeyword("START"));
BOOST_CHECK( !deck.hasKeyword("DIMENS"));
BOOST_CHECK( !deck.hasKeyword("GRIDUNIT"));
}
BOOST_AUTO_TEST_CASE(parse_fileWithPathsKeyword_IncludeExtendsPath) {
path datafile;
Parser parser;
createDeckWithInclude (datafile, "");
auto deck = parser.parseFile(datafile.string());
BOOST_CHECK( deck.hasKeyword("TITLE"));
BOOST_CHECK( deck.hasKeyword("BOX"));
}<|fim▁end|> | |
<|file_name|>app.ts<|end_file_name|><|fim▁begin|>import { PLATFORM } from 'aurelia-framework';
import { RouterConfiguration, Router } from 'aurelia-router';
export class App {
appDate: Date = new Date();
message = 'Hello, welcome to Aurelia!';
router: Router;
configureRouter(config: RouterConfiguration, router: Router) {
this.router = router;
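    // PLATFORM.moduleName() is what lets the aurelia-webpack-plugin trace
    // these route modules statically at build time; plain string module ids
    // would be lost during bundling.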
config
.map([
{
route: ['', '/home'],
name: 'home',
moduleId: PLATFORM.moduleName('./pages/home/home', 'home-page'),
nav: true,
settings: {
text: 'Home'
}
},
{
route: '/about',
name: 'about',<|fim▁hole|> moduleId: PLATFORM.moduleName('./pages/about/about', 'about-page'),
nav: true,
settings: {
text: 'About'
}
},
])
.mapUnknownRoutes({ redirect: 'home' })
}
}<|fim▁end|> | |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>module.exports = function(el, state, container) {
var ul = el.getElementsByTagName('ul')[0]
var lastFlags = []
var controlsTouch = -1
var containerTouch = {"id":-1, "x":-1, "y":-1}
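  // Track one active touch identifier per surface so that simultaneous
  // touches on the control pad and on the container don't interfere.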
el.addEventListener('touchstart', startTouchControls)
el.addEventListener('touchmove', handleTouchControls)
el.addEventListener('touchend', unTouchControls)
container.addEventListener('touchstart', startTouchContainer)
container.addEventListener('touchmove', handleTouchContainer)
container.addEventListener('touchend', unTouchContainer)
function startTouchControls(event) {
if (controlsTouch === -1) {
controlsTouch = event.targetTouches[0].identifier
}
handleTouchControls(event)
}
function handleTouchControls(event) {
event.preventDefault()
var touch = null
if (event.targetTouches.length > 1) {
      for (var t = 0; t < event.targetTouches.length; t++) {
if (event.targetTouches[t].identifier === controlsTouch) {
touch = event.targetTouches[t]
break
}
}
} else {
touch = event.targetTouches[0]
}
if (touch === null) return
var top=touch.clientY-el.offsetTop
var left=touch.clientX-el.offsetLeft
var flags=[]
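    // Interpret the touch position as a rough 3x3 D-pad of 50px cells:
    // top strip = forward, side columns = turn, bottom-centre = backward,
    // centre cell = jump; regions may overlap (e.g. forward + left).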
if (top < 50) flags.push('forward')
if (left < 50 && top < 100) flags.push('left')
if (left > 100 && top < 100) flags.push('right')
if (top > 100 && left > 50 && left < 100) flags.push('backward')
if (top > 50 && top < 100 && left > 50 && left < 100) flags.push('jump')
if (flags.indexOf('jump') === -1) {
      // Release the flags that are no longer active, then set the
      // currently active ones.
      var released = lastFlags.filter(function (flag) {
        return flags.indexOf(flag) === -1
      })
      setState(released, 0)
      setState(flags, 1)
      lastFlags = flags
} else if (lastFlags.indexOf('jump') === -1) {
      // Start jumping (in addition to any existing movement)
lastFlags.push('jump')
setState(['jump'], 1)
}
}
function unTouchControls() {
setState(lastFlags, 0)
lastFlags = []
controlsTouch = -1
}
function setState(states, value) {
var delta = {}
    for (var s = 0; s < states.length; s++) {
delta[states[s]] = value
}
state.write(delta)
}
function startTouchContainer(event) {
if (containerTouch.id === -1) {
containerTouch.id = event.targetTouches[0].identifier
containerTouch.x = event.targetTouches[0].clientX
containerTouch.y = event.targetTouches[0].clientY
}
handleTouchContainer(event)<|fim▁hole|> event.preventDefault()
    var touch = null, delta = {}
    for (var t = 0; t < event.targetTouches.length; t++) {
if (event.targetTouches[t].identifier === containerTouch.id) {
touch = event.targetTouches[t]
break
}
}
if (touch === null) return
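    // Convert the finger's movement since the last event into look-rotation
    // deltas: vertical drag pitches the view, horizontal drag turns it.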
    var dx = containerTouch.x - touch.clientX
    var dy = containerTouch.y - touch.clientY
delta.x_rotation_accum = dy * 2
delta.y_rotation_accum = dx * 8
state.write(delta)
containerTouch.x = touch.clientX
containerTouch.y = touch.clientY
}
function unTouchContainer(event) {
containerTouch = {"id":-1, "x":-1, "y":-1}
}
}<|fim▁end|> | }
function handleTouchContainer(event) { |
<|file_name|>login.module.spec.js<|end_file_name|><|fim▁begin|>'use strict';
describe('LoginController', function () {
// Load the parent app
beforeEach(module('demoSite'));
var $controller;
var $scope, controller, $window;
beforeEach(inject(function (_$controller_) {
$controller = _$controller_;
$scope = {};
$window = { location: {}, open: function () { } };
controller = $controller('LoginController', { $scope: $scope, $window: $window });
}));
describe('isTapIn variable', function () {
it('is true by default', function () {
expect($scope.isTapIn).toBe(true);
});
it('is true if that value is passed to initiateLogin', function () {
$scope.initiateLogin(true);
expect($scope.isTapIn).toBe(true);
});
it('is false if that value is passed to initiateLogin', function () {
$scope.initiateLogin(false);
expect($scope.isTapIn).toBe(false);
});
});
describe('popup creation', function () {
it('should pop up a new window when a new login is initiated', function () {
spyOn($window, 'open');
$scope.initiateLogin(true);
expect($window.open).toHaveBeenCalled();
})
})
describe('error message framework', function () {
it('should convert error codes to friendly messages', function () {
expect($scope.showError).toBe(false);
// Loop through each property in the errorMessages object and check that it is displayed properly.
for (var property in $scope.errorMessages) {
if ($scope.errorMessages.hasOwnProperty(property)) {
$scope.showErrorFromCode(property);
expect($scope.errorMessage).toBe($scope.errorMessages[property]);
expect($scope.showError).toBe(true);
}
}
});
it('should handle lack of connection to the server', function () {
expect($scope.showError).toBe(false);
$scope.handleGetURLError();
expect($scope.errorMessage).toBe($scope.errorMessages["no_connection"]);
expect($scope.showError).toBe(true);
});
it('should hide any errors when a new login is initiated', function () {
$scope.showError = true;
$scope.initiateLogin(true);
expect($scope.showError).toBe(false);
})
});
describe('polling framework', function () {
beforeEach(function () {
// Because the framework utilizes a popup, these variables are NOT inside the controller.
dataHasReturned = false;<|fim▁hole|> });
        it('should handle manual closing of the popup window', function () {
$scope.popupWindow = window.open();
$scope.popupWindow.close();
$scope.pollPopupForCompletedAuth();
expect(dataHasReturned).toBe(false);
});
it('should present an error if one comes back from the server', function () {
dataHasReturned = true;
returnedData.error = "access_denied";
expect($scope.showError).toBe(false);
$scope.pollPopupForCompletedAuth();
expect($scope.showError).toBe(true);
expect(dataHasReturned).toBe(false);
});
it('should redirect the user to the auth page when proper data has returned', function () {
dataHasReturned = true;
returnedData = {
subject: "1111-2222-3333-4444",
username: "Test User",
email: "[email protected]",
details: "Tech+Details"
};
$scope.pollPopupForCompletedAuth();
expect($window.location.href).toBe('/#/auth');
});
})
});<|fim▁end|> | returnedData = new Object(); |
<|file_name|>application.py<|end_file_name|><|fim▁begin|>from flask import Flask
from os.path import expanduser
<|fim▁hole|>def create_app():
app = Flask(__name__)
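    # Instance configuration is read from a dotfile in the user's home directory.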
app.config.from_pyfile(expanduser('~/.directory-tools.py'))
from directory_tools.frontend import frontend
app.register_blueprint(frontend)
return app<|fim▁end|> | |
<|file_name|>views.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
flaskbb.management.views
~~~~~~~~~~~~~~~~~~~~~~~~
This module handles the management views.
:copyright: (c) 2014 by the FlaskBB Team.
:license: BSD, see LICENSE for more details.
"""
import sys
from flask import (Blueprint, current_app, request, redirect, url_for, flash,
jsonify, __version__ as flask_version)
from flask_login import current_user, login_fresh
from flask_plugins import get_all_plugins, get_plugin, get_plugin_from_all
from flask_babelplus import gettext as _
from flask_allows import Permission, Not
from flaskbb import __version__ as flaskbb_version
from flaskbb._compat import iteritems
from flaskbb.forum.forms import UserSearchForm
from flaskbb.utils.settings import flaskbb_config
from flaskbb.utils.requirements import (IsAtleastModerator, IsAdmin,
CanBanUser, CanEditUser,
IsAtleastSuperModerator)
from flaskbb.extensions import db, allows
from flaskbb.utils.helpers import (render_template, time_diff, time_utcnow,
get_online_users)
from flaskbb.user.models import Guest, User, Group
from flaskbb.forum.models import Post, Topic, Forum, Category, Report
from flaskbb.management.models import Setting, SettingsGroup
from flaskbb.management.forms import (AddUserForm, EditUserForm, AddGroupForm,
EditGroupForm, EditForumForm,
AddForumForm, CategoryForm)
management = Blueprint("management", __name__)
@management.before_request
def check_fresh_login():
"""Checks if the login is fresh for the current user, otherwise the user
has to reauthenticate."""
if not login_fresh():
return current_app.login_manager.needs_refresh()
@management.route("/")
@allows.requires(IsAtleastModerator)
def overview():
# user and group stats
banned_users = User.query.filter(
Group.banned == True,
Group.id == User.primary_group_id
).count()
if not current_app.config["REDIS_ENABLED"]:
online_users = User.query.filter(User.lastseen >= time_diff()).count()
else:
online_users = len(get_online_users())
stats = {
# user stats
"all_users": User.query.count(),
"banned_users": banned_users,
"online_users": online_users,
"all_groups": Group.query.count(),
# forum stats
"report_count": Report.query.count(),
"topic_count": Topic.query.count(),
"post_count": Post.query.count(),
# misc stats
"plugins": get_all_plugins(),
"python_version": "%s.%s" % (sys.version_info[0], sys.version_info[1]),
"flask_version": flask_version,
"flaskbb_version": flaskbb_version
}
return render_template("management/overview.html", **stats)
@management.route("/settings", methods=["GET", "POST"])
@management.route("/settings/<path:slug>", methods=["GET", "POST"])
@allows.requires(IsAdmin)
def settings(slug=None):
slug = slug if slug else "general"
# get the currently active group
active_group = SettingsGroup.query.filter_by(key=slug).first_or_404()
# get all groups - used to build the navigation
all_groups = SettingsGroup.query.all()
SettingsForm = Setting.get_form(active_group)
old_settings = Setting.get_settings(active_group)
new_settings = {}
form = SettingsForm()
if form.validate_on_submit():
for key, values in iteritems(old_settings):
try:
# check if the value has changed
if values['value'] == form[key].data:
continue
else:
new_settings[key] = form[key].data
except KeyError:
pass
Setting.update(settings=new_settings, app=current_app)
flash(_("Settings saved."), "success")
else:
for key, values in iteritems(old_settings):
try:
form[key].data = values['value']
except (KeyError, ValueError):
pass
return render_template("management/settings.html", form=form,
all_groups=all_groups, active_group=active_group)
# Users
@management.route("/users", methods=['GET', 'POST'])
@allows.requires(IsAtleastModerator)
def users():
page = request.args.get("page", 1, type=int)
search_form = UserSearchForm()
if search_form.validate():
users = search_form.get_results().\
paginate(page, flaskbb_config['USERS_PER_PAGE'], False)
return render_template("management/users.html", users=users,
search_form=search_form)
users = User.query. \
order_by(User.id.asc()).\
paginate(page, flaskbb_config['USERS_PER_PAGE'], False)
return render_template("management/users.html", users=users,
search_form=search_form)
@management.route("/users/<int:user_id>/edit", methods=["GET", "POST"])
@allows.requires(IsAtleastModerator)
def edit_user(user_id):
user = User.query.filter_by(id=user_id).first_or_404()
if not Permission(CanEditUser, identity=current_user):
flash(_("You are not allowed to edit this user."), "danger")
return redirect(url_for("management.users"))
member_group = db.and_(*[db.not_(getattr(Group, p)) for p in
['admin', 'mod', 'super_mod', 'banned', 'guest']])
filt = db.or_(
Group.id.in_(g.id for g in current_user.groups), member_group
)
if Permission(IsAtleastSuperModerator, identity=current_user):
filt = db.or_(filt, Group.mod)
if Permission(IsAdmin, identity=current_user):
filt = db.or_(filt, Group.admin, Group.super_mod)
if Permission(CanBanUser, identity=current_user):
filt = db.or_(filt, Group.banned)
group_query = Group.query.filter(filt)
form = EditUserForm(user)
form.primary_group.query = group_query
form.secondary_groups.query = group_query
if form.validate_on_submit():
form.populate_obj(user)
user.primary_group_id = form.primary_group.data.id
# Don't override the password
if form.password.data:
user.password = form.password.data
user.save(groups=form.secondary_groups.data)
flash(_("User updated."), "success")
return redirect(url_for("management.edit_user", user_id=user.id))
return render_template("management/user_form.html", form=form,
title=_("Edit User"))
@management.route("/users/delete", methods=["POST"])
@management.route("/users/<int:user_id>/delete", methods=["POST"])
@allows.requires(IsAdmin)
def delete_user(user_id=None):
# ajax request
if request.is_xhr:
ids = request.get_json()["ids"]
data = []
for user in User.query.filter(User.id.in_(ids)).all():
# do not delete current user
if current_user.id == user.id:
continue
if user.delete():
data.append({
"id": user.id,
"type": "delete",
"reverse": False,
"reverse_name": None,
"reverse_url": None
})
return jsonify(
message="{} users deleted.".format(len(data)),
category="success",
data=data,
status=200
)
user = User.query.filter_by(id=user_id).first_or_404()
if current_user.id == user.id:
flash(_("You cannot delete yourself.", "danger"))
return redirect(url_for("management.users"))
user.delete()
flash(_("User deleted."), "success")
return redirect(url_for("management.users"))
@management.route("/users/add", methods=["GET", "POST"])
@allows.requires(IsAdmin)<|fim▁hole|> form = AddUserForm()
if form.validate_on_submit():
form.save()
flash(_("User added."), "success")
return redirect(url_for("management.users"))
return render_template("management/user_form.html", form=form,
title=_("Add User"))
@management.route("/users/banned", methods=["GET", "POST"])
@allows.requires(IsAtleastModerator)
def banned_users():
page = request.args.get("page", 1, type=int)
search_form = UserSearchForm()
users = User.query.filter(
Group.banned == True,
Group.id == User.primary_group_id
).paginate(page, flaskbb_config['USERS_PER_PAGE'], False)
if search_form.validate():
users = search_form.get_results().\
paginate(page, flaskbb_config['USERS_PER_PAGE'], False)
return render_template("management/banned_users.html", users=users,
search_form=search_form)
return render_template("management/banned_users.html", users=users,
search_form=search_form)
@management.route("/users/ban", methods=["POST"])
@management.route("/users/<int:user_id>/ban", methods=["POST"])
@allows.requires(IsAtleastModerator)
def ban_user(user_id=None):
if not Permission(CanBanUser, identity=current_user):
flash(_("You do not have the permissions to ban this user."), "danger")
return redirect(url_for("management.overview"))
# ajax request
if request.is_xhr:
ids = request.get_json()["ids"]
data = []
users = User.query.filter(User.id.in_(ids)).all()
for user in users:
            # don't let users ban themselves and do not allow a moderator to
            # ban an admin user
if (
current_user.id == user.id or
                (Permission(IsAdmin, identity=user) and
                 Permission(Not(IsAdmin), identity=current_user))
):
continue
elif user.ban():
data.append({
"id": user.id,
"type": "ban",
"reverse": "unban",
"reverse_name": _("Unban"),
"reverse_url": url_for("management.unban_user",
user_id=user.id)
})
return jsonify(
message="{} users banned.".format(len(data)),
category="success",
data=data,
status=200
)
user = User.query.filter_by(id=user_id).first_or_404()
# Do not allow moderators to ban admins
if Permission(IsAdmin, identity=user) and \
Permission(Not(IsAdmin), identity=current_user):
flash(_("A moderator cannot ban an admin user."), "danger")
return redirect(url_for("management.overview"))
if not current_user.id == user.id and user.ban():
flash(_("User is now banned."), "success")
else:
flash(_("Could not ban user."), "danger")
return redirect(url_for("management.banned_users"))
@management.route("/users/unban", methods=["POST"])
@management.route("/users/<int:user_id>/unban", methods=["POST"])
@allows.requires(IsAtleastModerator)
def unban_user(user_id=None):
if not Permission(CanBanUser, identity=current_user):
flash(_("You do not have the permissions to unban this user."),
"danger")
return redirect(url_for("management.overview"))
# ajax request
if request.is_xhr:
ids = request.get_json()["ids"]
data = []
for user in User.query.filter(User.id.in_(ids)).all():
if user.unban():
data.append({
"id": user.id,
"type": "unban",
"reverse": "ban",
"reverse_name": _("Ban"),
"reverse_url": url_for("management.ban_user",
user_id=user.id)
})
return jsonify(
message="{} users unbanned.".format(len(data)),
category="success",
data=data,
status=200
)
user = User.query.filter_by(id=user_id).first_or_404()
if user.unban():
flash(_("User is now unbanned."), "success")
else:
flash(_("Could not unban user."), "danger")
return redirect(url_for("management.banned_users"))
# Reports
@management.route("/reports")
@allows.requires(IsAtleastModerator)
def reports():
page = request.args.get("page", 1, type=int)
reports = Report.query.\
order_by(Report.id.asc()).\
paginate(page, flaskbb_config['USERS_PER_PAGE'], False)
return render_template("management/reports.html", reports=reports)
@management.route("/reports/unread")
@allows.requires(IsAtleastModerator)
def unread_reports():
page = request.args.get("page", 1, type=int)
reports = Report.query.\
filter(Report.zapped == None).\
order_by(Report.id.desc()).\
paginate(page, flaskbb_config['USERS_PER_PAGE'], False)
return render_template("management/unread_reports.html", reports=reports)
@management.route("/reports/<int:report_id>/markread", methods=["POST"])
@management.route("/reports/markread", methods=["POST"])
@allows.requires(IsAtleastModerator)
def report_markread(report_id=None):
# AJAX request
if request.is_xhr:
ids = request.get_json()["ids"]
data = []
for report in Report.query.filter(Report.id.in_(ids)).all():
report.zapped_by = current_user.id
report.zapped = time_utcnow()
report.save()
data.append({
"id": report.id,
"type": "read",
"reverse": False,
"reverse_name": None,
"reverse_url": None
})
return jsonify(
message="{} reports marked as read.".format(len(data)),
category="success",
data=data,
status=200
)
# mark single report as read
if report_id:
report = Report.query.filter_by(id=report_id).first_or_404()
if report.zapped:
flash(_("Report %(id)s is already marked as read.", id=report.id),
"success")
return redirect(url_for("management.reports"))
report.zapped_by = current_user.id
report.zapped = time_utcnow()
report.save()
flash(_("Report %(id)s marked as read.", id=report.id), "success")
return redirect(url_for("management.reports"))
# mark all as read
reports = Report.query.filter(Report.zapped == None).all()
report_list = []
for report in reports:
report.zapped_by = current_user.id
report.zapped = time_utcnow()
report_list.append(report)
db.session.add_all(report_list)
db.session.commit()
flash(_("All reports were marked as read."), "success")
return redirect(url_for("management.reports"))
# Groups
@management.route("/groups")
@allows.requires(IsAdmin)
def groups():
page = request.args.get("page", 1, type=int)
groups = Group.query.\
order_by(Group.id.asc()).\
paginate(page, flaskbb_config['USERS_PER_PAGE'], False)
return render_template("management/groups.html", groups=groups)
@management.route("/groups/<int:group_id>/edit", methods=["GET", "POST"])
@allows.requires(IsAdmin)
def edit_group(group_id):
group = Group.query.filter_by(id=group_id).first_or_404()
form = EditGroupForm(group)
if form.validate_on_submit():
form.populate_obj(group)
group.save()
if group.guest:
Guest.invalidate_cache()
flash(_("Group updated."), "success")
return redirect(url_for("management.groups", group_id=group.id))
return render_template("management/group_form.html", form=form,
title=_("Edit Group"))
@management.route("/groups/<int:group_id>/delete", methods=["POST"])
@management.route("/groups/delete", methods=["POST"])
@allows.requires(IsAdmin)
def delete_group(group_id=None):
if request.is_xhr:
ids = request.get_json()["ids"]
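        # group ids 1-5 are the built-in standard groups and must not be deleted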
if not (set(ids) & set(["1", "2", "3", "4", "5"])):
data = []
for group in Group.query.filter(Group.id.in_(ids)).all():
group.delete()
data.append({
"id": group.id,
"type": "delete",
"reverse": False,
"reverse_name": None,
"reverse_url": None
})
return jsonify(
message="{} groups deleted.".format(len(data)),
category="success",
data=data,
status=200
)
return jsonify(
message=_("You cannot delete one of the standard groups."),
category="danger",
data=None,
status=404
)
if group_id is not None:
if group_id <= 5: # there are 5 standard groups
flash(_("You cannot delete the standard groups. "
"Try renaming it instead.", "danger"))
return redirect(url_for("management.groups"))
group = Group.query.filter_by(id=group_id).first_or_404()
group.delete()
flash(_("Group deleted."), "success")
return redirect(url_for("management.groups"))
flash(_("No group chosen."), "danger")
return redirect(url_for("management.groups"))
@management.route("/groups/add", methods=["GET", "POST"])
@allows.requires(IsAdmin)
def add_group():
form = AddGroupForm()
if form.validate_on_submit():
form.save()
flash(_("Group added."), "success")
return redirect(url_for("management.groups"))
return render_template("management/group_form.html", form=form,
title=_("Add Group"))
# Forums and Categories
@management.route("/forums")
@allows.requires(IsAdmin)
def forums():
categories = Category.query.order_by(Category.position.asc()).all()
return render_template("management/forums.html", categories=categories)
@management.route("/forums/<int:forum_id>/edit", methods=["GET", "POST"])
@allows.requires(IsAdmin)
def edit_forum(forum_id):
forum = Forum.query.filter_by(id=forum_id).first_or_404()
form = EditForumForm(forum)
if form.validate_on_submit():
form.save()
flash(_("Forum updated."), "success")
return redirect(url_for("management.edit_forum", forum_id=forum.id))
else:
if forum.moderators:
form.moderators.data = ",".join([
user.username for user in forum.moderators
])
else:
form.moderators.data = None
return render_template("management/forum_form.html", form=form,
title=_("Edit Forum"))
@management.route("/forums/<int:forum_id>/delete", methods=["POST"])
@allows.requires(IsAdmin)
def delete_forum(forum_id):
forum = Forum.query.filter_by(id=forum_id).first_or_404()
involved_users = User.query.filter(Topic.forum_id == forum.id,
Post.user_id == User.id).all()
forum.delete(involved_users)
flash(_("Forum deleted."), "success")
return redirect(url_for("management.forums"))
@management.route("/forums/add", methods=["GET", "POST"])
@management.route("/forums/<int:category_id>/add", methods=["GET", "POST"])
@allows.requires(IsAdmin)
def add_forum(category_id=None):
form = AddForumForm()
if form.validate_on_submit():
form.save()
flash(_("Forum added."), "success")
return redirect(url_for("management.forums"))
else:
form.groups.data = Group.query.order_by(Group.id.asc()).all()
if category_id:
category = Category.query.filter_by(id=category_id).first()
form.category.data = category
return render_template("management/forum_form.html", form=form,
title=_("Add Forum"))
@management.route("/category/add", methods=["GET", "POST"])
@allows.requires(IsAdmin)
def add_category():
form = CategoryForm()
if form.validate_on_submit():
form.save()
flash(_("Category added."), "success")
return redirect(url_for("management.forums"))
return render_template("management/category_form.html", form=form,
title=_("Add Category"))
@management.route("/category/<int:category_id>/edit", methods=["GET", "POST"])
@allows.requires(IsAdmin)
def edit_category(category_id):
category = Category.query.filter_by(id=category_id).first_or_404()
form = CategoryForm(obj=category)
if form.validate_on_submit():
form.populate_obj(category)
flash(_("Category updated."), "success")
category.save()
return render_template("management/category_form.html", form=form,
title=_("Edit Category"))
@management.route("/category/<int:category_id>/delete", methods=["POST"])
@allows.requires(IsAdmin)
def delete_category(category_id):
category = Category.query.filter_by(id=category_id).first_or_404()
involved_users = User.query.filter(Forum.category_id == category.id,
Topic.forum_id == Forum.id,
Post.user_id == User.id).all()
category.delete(involved_users)
flash(_("Category with all associated forums deleted."), "success")
return redirect(url_for("management.forums"))
# Plugins
@management.route("/plugins")
@allows.requires(IsAdmin)
def plugins():
plugins = get_all_plugins()
return render_template("management/plugins.html", plugins=plugins)
@management.route("/plugins/<path:plugin>/enable", methods=["POST"])
@allows.requires(IsAdmin)
def enable_plugin(plugin):
plugin = get_plugin_from_all(plugin)
if plugin.enabled:
flash(_("Plugin %(plugin)s is already enabled.", plugin=plugin.name),
"info")
return redirect(url_for("management.plugins"))
try:
plugin.enable()
flash(_("Plugin %(plugin)s enabled. Please restart FlaskBB now.",
plugin=plugin.name), "success")
except OSError:
flash(_("It seems that FlaskBB does not have enough filesystem "
"permissions. Try removing the 'DISABLED' file by "
"yourself instead."), "danger")
return redirect(url_for("management.plugins"))
@management.route("/plugins/<path:plugin>/disable", methods=["POST"])
@allows.requires(IsAdmin)
def disable_plugin(plugin):
try:
plugin = get_plugin(plugin)
except KeyError:
flash(_("Plugin %(plugin)s not found.", plugin=plugin.name), "danger")
return redirect(url_for("management.plugins"))
try:
plugin.disable()
flash(_("Plugin %(plugin)s disabled. Please restart FlaskBB now.",
plugin=plugin.name), "success")
except OSError:
flash(_("It seems that FlaskBB does not have enough filesystem "
"permissions. Try creating the 'DISABLED' file by "
"yourself instead."), "danger")
return redirect(url_for("management.plugins"))
@management.route("/plugins/<path:plugin>/uninstall", methods=["POST"])
@allows.requires(IsAdmin)
def uninstall_plugin(plugin):
plugin = get_plugin_from_all(plugin)
if plugin.uninstallable:
plugin.uninstall()
Setting.invalidate_cache()
flash(_("Plugin has been uninstalled."), "success")
else:
flash(_("Cannot uninstall plugin."), "danger")
return redirect(url_for("management.plugins"))
@management.route("/plugins/<path:plugin>/install", methods=["POST"])
@allows.requires(IsAdmin)
def install_plugin(plugin):
plugin = get_plugin_from_all(plugin)
if plugin.installable and not plugin.uninstallable:
plugin.install()
Setting.invalidate_cache()
flash(_("Plugin has been installed."), "success")
else:
flash(_("Cannot install plugin."), "danger")
return redirect(url_for("management.plugins"))<|fim▁end|> | def add_user(): |
<|file_name|>context.js<|end_file_name|><|fim▁begin|>/*
* Copyright 2020 Google LLC<|fim▁hole|> * You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export { createContext, useContext } from 'use-context-selector';
export const identity = (state) => state;<|fim▁end|> | *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. |
<|file_name|>unique-panic.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or<|fim▁hole|>// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern: panic
#![allow(unknown_features)]
#![feature(box_syntax)]
fn main() { box panic!(); }<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*
# check.rs
Within the check phase of type check, we check each item one at a time
(bodies of function expressions are checked as part of the containing
function). Inference is used to supply types wherever they are
unknown.
By far the most complex case is checking the body of a function. This
can be broken down into several distinct phases:
- gather: creates type variables to represent the type of each local
variable and pattern binding.
- main: the main pass does the lion's share of the work: it
determines the types of all expressions, resolves
methods, checks for most invalid conditions, and so forth. In
some cases, where a type is unknown, it may create a type or region
variable and use that as the type of an expression.
In the process of checking, various constraints will be placed on
these type variables through the subtyping relationships requested
through the `demand` module. The `infer` module is in charge
of resolving those constraints.
- regionck: after main is complete, the regionck pass goes over all
types looking for regions and making sure that they did not escape
into places they are not in scope. This may also influence the
final assignments of the various region variables if there is some
flexibility.
- vtable: find and records the impls to use for each trait bound that
appears on a type parameter.
- writeback: writes the final types within a function body, replacing
type variables with their final inferred types. These final types
are written into the `tcx.node_types` table, which should *never* contain
any reference to a type variable.
## Intermediate types
While type checking a function, the intermediate types for the
expressions, blocks, and so forth contained within the function are
stored in `fcx.node_types` and `fcx.item_substs`. These types
may contain unresolved type variables. After type checking is
complete, the functions in the writeback module are used to take the
types from this table, resolve them, and then write them into their
permanent home in the type context `ccx.tcx`.
This means that during inferencing you should use `fcx.write_ty()`
and `fcx.expr_ty()` / `fcx.node_ty()` to write/obtain the types of
nodes within the function.
The types of top-level items, which never contain unbound type
variables, are stored directly into the `tcx` tables.
n.b.: A type variable is not the same thing as a type parameter. A
type variable is rather an "instance" of a type parameter: that is,
given a generic function `fn foo<T>(t: T)`: while checking the
function `foo`, the type `ty_param(0)` refers to the type `T`, which
is treated in abstract. When `foo()` is called, however, `T` will be
substituted for a fresh type variable `N`. This variable will
eventually be resolved to some concrete type (which might itself be
type parameter).
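
For example (an illustrative sketch, not actual compiler code):

    fn foo<T>(t: T) -> T { t }   // `T` is a type parameter
    let x = foo('a');            // checking this call instantiates `T`
                                 // with a fresh type variable, which is
                                 // later resolved to `char`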
*/
pub use self::LvaluePreference::*;
pub use self::Expectation::*;
pub use self::compare_method::{compare_impl_method, compare_const_impl};
use self::TupleArgumentsFlag::*;
use astconv::{self, ast_region_to_region, ast_ty_to_ty, AstConv, PathParamMode};
use check::_match::pat_ctxt;
use fmt_macros::{Parser, Piece, Position};
use middle::astconv_util::{check_path_args, NO_TPS, NO_REGIONS};
use middle::def;
use middle::infer;
use middle::mem_categorization as mc;
use middle::mem_categorization::McResult;
use middle::pat_util::{self, pat_id_map};
use middle::privacy::{AllPublic, LastMod};
use middle::region::{self, CodeExtent};
use middle::subst::{self, Subst, Substs, VecPerParamSpace, ParamSpace, TypeSpace};
use middle::traits::{self, report_fulfillment_errors};
use middle::ty::{FnSig, GenericPredicates, TypeScheme};
use middle::ty::{Disr, ParamTy, ParameterEnvironment};
use middle::ty::{self, HasProjectionTypes, RegionEscape, ToPolyTraitRef, Ty};
use middle::ty::liberate_late_bound_regions;
use middle::ty::{MethodCall, MethodCallee, MethodMap, ObjectCastMap};
use middle::ty_fold::{TypeFolder, TypeFoldable};
use rscope::RegionScope;
use session::Session;
use {CrateCtxt, lookup_full_def, require_same_types};
use TypeAndSubsts;
use lint;
use util::common::{block_query, ErrorReported, indenter, loop_query};
use util::ppaux::{self, Repr};
use util::nodemap::{DefIdMap, FnvHashMap, NodeMap};
use util::lev_distance::lev_distance;
use std::cell::{Cell, Ref, RefCell};
use std::mem::replace;
use std::iter::repeat;
use std::slice;
use syntax::{self, abi, attr};
use syntax::attr::AttrMetaMethods;
use syntax::ast::{self, DefId, Visibility};
use syntax::ast_util::{self, local_def};
use syntax::codemap::{self, Span};
use syntax::feature_gate;
use syntax::owned_slice::OwnedSlice;
use syntax::parse::token;
use syntax::print::pprust;
use syntax::ptr::P;
use syntax::visit::{self, Visitor};
mod assoc;
pub mod dropck;
pub mod _match;
pub mod writeback;
pub mod regionck;
pub mod coercion;
pub mod demand;
pub mod method;
mod upvar;
pub mod wf;
mod cast;
mod closure;
mod callee;
mod compare_method;
mod op;
/// Fields that are inherited by closures defined within the
/// function. For example:
///
/// fn foo() {
/// bar(move|| { ... })
/// }
///
/// Here, the function `foo()` and the closure passed to
/// `bar()` will each have their own `FnCtxt`, but they will
/// share the inherited fields.
pub struct Inherited<'a, 'tcx: 'a> {
infcx: infer::InferCtxt<'a, 'tcx>,
locals: RefCell<NodeMap<Ty<'tcx>>>,
param_env: ty::ParameterEnvironment<'a, 'tcx>,
// Temporary tables:
node_types: RefCell<NodeMap<Ty<'tcx>>>,
item_substs: RefCell<NodeMap<ty::ItemSubsts<'tcx>>>,
adjustments: RefCell<NodeMap<ty::AutoAdjustment<'tcx>>>,
method_map: MethodMap<'tcx>,
upvar_capture_map: RefCell<ty::UpvarCaptureMap>,
closure_tys: RefCell<DefIdMap<ty::ClosureTy<'tcx>>>,
closure_kinds: RefCell<DefIdMap<ty::ClosureKind>>,
object_cast_map: ObjectCastMap<'tcx>,
// A mapping from each fn's id to its signature, with all bound
// regions replaced with free ones. Unlike the other tables, this
// one is never copied into the tcx: it is only used by regionck.
fn_sig_map: RefCell<NodeMap<Vec<Ty<'tcx>>>>,
// Tracks trait obligations incurred during this function body.
fulfillment_cx: RefCell<traits::FulfillmentContext<'tcx>>,
// When we process a call like `c()` where `c` is a closure type,
// we may not have decided yet whether `c` is a `Fn`, `FnMut`, or
// `FnOnce` closure. In that case, we defer full resolution of the
// call until upvar inference can kick in and make the
// decision. We keep these deferred resolutions grouped by the
// def-id of the closure, so that once we decide, we can easily go
// back and process them.
deferred_call_resolutions: RefCell<DefIdMap<Vec<DeferredCallResolutionHandler<'tcx>>>>,
deferred_cast_checks: RefCell<Vec<cast::CastCheck<'tcx>>>,
}
trait DeferredCallResolution<'tcx> {
fn resolve<'a>(&mut self, fcx: &FnCtxt<'a,'tcx>);
}
type DeferredCallResolutionHandler<'tcx> = Box<DeferredCallResolution<'tcx>+'tcx>;
/// When type-checking an expression, we propagate downward
/// whatever type hint we are able to compute, in the form of an `Expectation`.
#[derive(Copy, Clone)]
pub enum Expectation<'tcx> {
/// We know nothing about what type this expression should have.
NoExpectation,
/// This expression should have the type given (or some subtype)
ExpectHasType(Ty<'tcx>),
/// This expression will be cast to the `Ty`
ExpectCastableToType(Ty<'tcx>),
/// This rvalue expression will be wrapped in `&` or `Box` and coerced
/// to `&Ty` or `Box<Ty>`, respectively. `Ty` is `[A]` or `Trait`.
ExpectRvalueLikeUnsized(Ty<'tcx>),
}
impl<'tcx> Expectation<'tcx> {
// Disregard "castable to" expectations because they
// can lead us astray. Consider for example `if cond
// {22} else {c} as u8` -- if we propagate the
// "castable to u8" constraint to 22, it will pick the
// type 22u8, which is overly constrained (c might not
// be a u8). In effect, the problem is that the
// "castable to" expectation is not the tightest thing
// we can say, so we want to drop it in this case.
// The tightest thing we can say is "must unify with
// else branch". Note that in the case of a "has type"
// constraint, this limitation does not hold.
// If the expected type is just a type variable, then don't use
// an expected type. Otherwise, we might write parts of the type
// when checking the 'then' block which are incompatible with the
// 'else' branch.
fn adjust_for_branches<'a>(&self, fcx: &FnCtxt<'a, 'tcx>) -> Expectation<'tcx> {
match *self {
ExpectHasType(ety) => {
let ety = fcx.infcx().shallow_resolve(ety);
if !ty::type_is_ty_var(ety) {
ExpectHasType(ety)
} else {
NoExpectation
}
}
ExpectRvalueLikeUnsized(ety) => {
ExpectRvalueLikeUnsized(ety)
}
_ => NoExpectation
}
}
}
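/// Tracks whether the code currently being checked sits inside an `unsafe`
/// context, and the node (an `unsafe fn` or an `unsafe` block) that context
/// came from.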
#[derive(Copy, Clone)]
pub struct UnsafetyState {
pub def: ast::NodeId,
pub unsafety: ast::Unsafety,
from_fn: bool
}
impl UnsafetyState {
pub fn function(unsafety: ast::Unsafety, def: ast::NodeId) -> UnsafetyState {
UnsafetyState { def: def, unsafety: unsafety, from_fn: true }
}
pub fn recurse(&mut self, blk: &ast::Block) -> UnsafetyState {
match self.unsafety {
// If this unsafe, then if the outer function was already marked as
// unsafe we shouldn't attribute the unsafe'ness to the block. This
// way the block can be warned about instead of ignoring this
// extraneous block (functions are never warned about).
ast::Unsafety::Unsafe if self.from_fn => *self,
unsafety => {
let (unsafety, def) = match blk.rules {
ast::UnsafeBlock(..) => (ast::Unsafety::Unsafe, blk.id),
ast::DefaultBlock => (unsafety, self.def),
};
UnsafetyState{ def: def,
unsafety: unsafety,
from_fn: false }
}
}
}
}
#[derive(Clone)]
pub struct FnCtxt<'a, 'tcx: 'a> {
body_id: ast::NodeId,
// This flag is set to true if, during the writeback phase, we encounter
// a type error in this function.
writeback_errors: Cell<bool>,
// Number of errors that had been reported when we started
// checking this function. On exit, if we find that *more* errors
// have been reported, we will skip regionck and other work that
// expects the types within the function to be consistent.
err_count_on_creation: usize,
ret_ty: ty::FnOutput<'tcx>,
ps: RefCell<UnsafetyState>,
inh: &'a Inherited<'a, 'tcx>,
ccx: &'a CrateCtxt<'a, 'tcx>,
}
impl<'a, 'tcx> mc::Typer<'tcx> for FnCtxt<'a, 'tcx> {
fn node_ty(&self, id: ast::NodeId) -> McResult<Ty<'tcx>> {
let ty = self.node_ty(id);
self.resolve_type_vars_or_error(&ty)
}
fn expr_ty_adjusted(&self, expr: &ast::Expr) -> McResult<Ty<'tcx>> {
let ty = self.adjust_expr_ty(expr, self.inh.adjustments.borrow().get(&expr.id));
self.resolve_type_vars_or_error(&ty)
}
fn type_moves_by_default(&self, span: Span, ty: Ty<'tcx>) -> bool {
let ty = self.infcx().resolve_type_vars_if_possible(&ty);
!traits::type_known_to_meet_builtin_bound(self.infcx(), self, ty, ty::BoundCopy, span)
}
fn node_method_ty(&self, method_call: ty::MethodCall)
-> Option<Ty<'tcx>> {
self.inh.method_map.borrow()
.get(&method_call)
.map(|method| method.ty)
.map(|ty| self.infcx().resolve_type_vars_if_possible(&ty))
}
fn node_method_origin(&self, method_call: ty::MethodCall)
-> Option<ty::MethodOrigin<'tcx>>
{
self.inh.method_map.borrow()
.get(&method_call)
.map(|method| method.origin.clone())
}
fn adjustments(&self) -> &RefCell<NodeMap<ty::AutoAdjustment<'tcx>>> {
&self.inh.adjustments
}
fn is_method_call(&self, id: ast::NodeId) -> bool {
self.inh.method_map.borrow().contains_key(&ty::MethodCall::expr(id))
}
fn temporary_scope(&self, rvalue_id: ast::NodeId) -> Option<CodeExtent> {
self.param_env().temporary_scope(rvalue_id)
}
fn upvar_capture(&self, upvar_id: ty::UpvarId) -> Option<ty::UpvarCapture> {
self.inh.upvar_capture_map.borrow().get(&upvar_id).cloned()
}
}
impl<'a, 'tcx> ty::ClosureTyper<'tcx> for FnCtxt<'a, 'tcx> {
fn param_env<'b>(&'b self) -> &'b ty::ParameterEnvironment<'b,'tcx> {
&self.inh.param_env
}
fn closure_kind(&self,
def_id: ast::DefId)
-> Option<ty::ClosureKind>
{
self.inh.closure_kinds.borrow().get(&def_id).cloned()
}
fn closure_type(&self,
def_id: ast::DefId,
substs: &subst::Substs<'tcx>)
-> ty::ClosureTy<'tcx>
{
self.inh.closure_tys.borrow().get(&def_id).unwrap().subst(self.tcx(), substs)
}
fn closure_upvars(&self,
def_id: ast::DefId,
substs: &Substs<'tcx>)
-> Option<Vec<ty::ClosureUpvar<'tcx>>>
{
ty::closure_upvars(self, def_id, substs)
}
}
impl<'a, 'tcx> Inherited<'a, 'tcx> {
fn new(tcx: &'a ty::ctxt<'tcx>,
param_env: ty::ParameterEnvironment<'a, 'tcx>)
-> Inherited<'a, 'tcx> {
Inherited {
infcx: infer::new_infer_ctxt(tcx),
locals: RefCell::new(NodeMap()),
param_env: param_env,
node_types: RefCell::new(NodeMap()),
item_substs: RefCell::new(NodeMap()),
adjustments: RefCell::new(NodeMap()),
method_map: RefCell::new(FnvHashMap()),
object_cast_map: RefCell::new(NodeMap()),
upvar_capture_map: RefCell::new(FnvHashMap()),
closure_tys: RefCell::new(DefIdMap()),
closure_kinds: RefCell::new(DefIdMap()),
fn_sig_map: RefCell::new(NodeMap()),
fulfillment_cx: RefCell::new(traits::FulfillmentContext::new()),
deferred_call_resolutions: RefCell::new(DefIdMap()),
deferred_cast_checks: RefCell::new(Vec::new()),
}
}
fn normalize_associated_types_in<T>(&self,
typer: &ty::ClosureTyper<'tcx>,
span: Span,
body_id: ast::NodeId,
value: &T)
-> T
where T : TypeFoldable<'tcx> + Clone + HasProjectionTypes + Repr<'tcx>
{
let mut fulfillment_cx = self.fulfillment_cx.borrow_mut();
assoc::normalize_associated_types_in(&self.infcx,
typer,
&mut *fulfillment_cx, span,
body_id,
value)
}
}
// Used by check_const and check_enum_variants
pub fn blank_fn_ctxt<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
inh: &'a Inherited<'a, 'tcx>,
rty: ty::FnOutput<'tcx>,
body_id: ast::NodeId)
-> FnCtxt<'a, 'tcx> {
FnCtxt {
body_id: body_id,
writeback_errors: Cell::new(false),
err_count_on_creation: ccx.tcx.sess.err_count(),
ret_ty: rty,
ps: RefCell::new(UnsafetyState::function(ast::Unsafety::Normal, 0)),
inh: inh,
ccx: ccx
}
}
fn static_inherited_fields<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>)
-> Inherited<'a, 'tcx> {
// It's kind of a kludge to manufacture a fake function context
// and statement context, but we might as well do write the code only once
let param_env = ty::empty_parameter_environment(ccx.tcx);
Inherited::new(ccx.tcx, param_env)
}
struct CheckItemTypesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
struct CheckItemBodiesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
impl<'a, 'tcx> Visitor<'tcx> for CheckItemTypesVisitor<'a, 'tcx> {
fn visit_item(&mut self, i: &'tcx ast::Item) {
check_item_type(self.ccx, i);
visit::walk_item(self, i);
}
fn visit_ty(&mut self, t: &'tcx ast::Ty) {
match t.node {
ast::TyFixedLengthVec(_, ref expr) => {
check_const_in_type(self.ccx, &**expr, self.ccx.tcx.types.usize);
}
_ => {}
}
visit::walk_ty(self, t);
}
}
impl<'a, 'tcx> Visitor<'tcx> for CheckItemBodiesVisitor<'a, 'tcx> {
fn visit_item(&mut self, i: &'tcx ast::Item) {
check_item_body(self.ccx, i);
visit::walk_item(self, i);
}
}
pub fn check_item_types(ccx: &CrateCtxt) {
let krate = ccx.tcx.map.krate();
let mut visit = wf::CheckTypeWellFormedVisitor::new(ccx);
visit::walk_crate(&mut visit, krate);
// If types are not well-formed, it leads to all manner of errors
// downstream, so stop reporting errors at this point.
ccx.tcx.sess.abort_if_errors();
let mut visit = CheckItemTypesVisitor { ccx: ccx };
visit::walk_crate(&mut visit, krate);
ccx.tcx.sess.abort_if_errors();
let mut visit = CheckItemBodiesVisitor { ccx: ccx };
visit::walk_crate(&mut visit, krate);
ccx.tcx.sess.abort_if_errors();
for drop_method_did in ccx.tcx.destructors.borrow().iter() {
if drop_method_did.krate == ast::LOCAL_CRATE {
let drop_impl_did = ccx.tcx.map.get_parent_did(drop_method_did.node);
match dropck::check_drop_impl(ccx.tcx, drop_impl_did) {
Ok(()) => {}
Err(()) => {
assert!(ccx.tcx.sess.has_errors());
}
}
}
}
ccx.tcx.sess.abort_if_errors();
}
fn check_bare_fn<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
decl: &'tcx ast::FnDecl,
body: &'tcx ast::Block,
fn_id: ast::NodeId,
fn_span: Span,
raw_fty: Ty<'tcx>,
param_env: ty::ParameterEnvironment<'a, 'tcx>)
{
match raw_fty.sty {
ty::ty_bare_fn(_, ref fn_ty) => {
let inh = Inherited::new(ccx.tcx, param_env);
// Compute the fty from point of view of inside fn.
let fn_sig =
fn_ty.sig.subst(ccx.tcx, &inh.param_env.free_substs);
let fn_sig =
liberate_late_bound_regions(ccx.tcx,
region::DestructionScopeData::new(body.id),
&fn_sig);
let fn_sig =
inh.normalize_associated_types_in(&inh.param_env, body.span, body.id, &fn_sig);
let fcx = check_fn(ccx, fn_ty.unsafety, fn_id, &fn_sig,
decl, fn_id, body, &inh);
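            // The ordering below matters: obligations are selected before
            // upvar inference runs, and selected again after `check_casts`,
            // since cast checking can introduce new obligations.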
fcx.select_all_obligations_and_apply_defaults();
upvar::closure_analyze_fn(&fcx, fn_id, decl, body);
fcx.select_all_obligations_or_error();
fcx.check_casts();
fcx.select_all_obligations_or_error(); // Casts can introduce new obligations.
regionck::regionck_fn(&fcx, fn_id, fn_span, decl, body);
writeback::resolve_type_vars_in_fn(&fcx, decl, body);
}
_ => ccx.tcx.sess.impossible_case(body.span,
"check_bare_fn: function type expected")
}
}
struct GatherLocalsVisitor<'a, 'tcx: 'a> {
fcx: &'a FnCtxt<'a, 'tcx>
}
impl<'a, 'tcx> GatherLocalsVisitor<'a, 'tcx> {
fn assign(&mut self, _span: Span, nid: ast::NodeId, ty_opt: Option<Ty<'tcx>>) -> Ty<'tcx> {
match ty_opt {
None => {
// infer the variable's type
let var_ty = self.fcx.infcx().next_ty_var();
self.fcx.inh.locals.borrow_mut().insert(nid, var_ty);
var_ty
}
Some(typ) => {
// take type that the user specified
self.fcx.inh.locals.borrow_mut().insert(nid, typ);
typ
}
}
}
}
impl<'a, 'tcx> Visitor<'tcx> for GatherLocalsVisitor<'a, 'tcx> {
// Add explicitly-declared locals.
fn visit_local(&mut self, local: &'tcx ast::Local) {
let o_ty = match local.ty {
Some(ref ty) => Some(self.fcx.to_ty(&**ty)),
None => None
};
self.assign(local.span, local.id, o_ty);
debug!("Local variable {} is assigned type {}",
self.fcx.pat_to_string(&*local.pat),
self.fcx.infcx().ty_to_string(
self.fcx.inh.locals.borrow().get(&local.id).unwrap().clone()));
visit::walk_local(self, local);
}
// Add pattern bindings.
fn visit_pat(&mut self, p: &'tcx ast::Pat) {
if let ast::PatIdent(_, ref path1, _) = p.node {
if pat_util::pat_is_binding(&self.fcx.ccx.tcx.def_map, p) {
let var_ty = self.assign(p.span, p.id, None);
self.fcx.require_type_is_sized(var_ty, p.span,
traits::VariableType(p.id));
debug!("Pattern binding {} is assigned to {} with type {}",
token::get_ident(path1.node),
self.fcx.infcx().ty_to_string(
self.fcx.inh.locals.borrow().get(&p.id).unwrap().clone()),
var_ty.repr(self.fcx.tcx()));
}
}
visit::walk_pat(self, p);
}
fn visit_block(&mut self, b: &'tcx ast::Block) {
// non-obvious: the `blk` variable maps to region lb, so
// we have to keep this up-to-date. This
// is... unfortunate. It'd be nice to not need this.
visit::walk_block(self, b);
}
// Since an expr occurs as part of the type fixed size arrays we
// need to record the type for that node
fn visit_ty(&mut self, t: &'tcx ast::Ty) {
match t.node {
ast::TyFixedLengthVec(ref ty, ref count_expr) => {
self.visit_ty(&**ty);
check_expr_with_hint(self.fcx, &**count_expr, self.fcx.tcx().types.usize);
}
_ => visit::walk_ty(self, t)
}
}
// Don't descend into fns and items
fn visit_fn(&mut self, _: visit::FnKind<'tcx>, _: &'tcx ast::FnDecl,
_: &'tcx ast::Block, _: Span, _: ast::NodeId) { }
fn visit_item(&mut self, _: &ast::Item) { }
}
/// Helper used by check_bare_fn and check_expr_fn. Does the grungy work of checking a function
/// body and returns the function context used for that purpose, since in the case of a fn item
/// there is still a bit more to do.
///
/// * ...
/// * inherited: other fields inherited from the enclosing fn (if any)
fn check_fn<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
unsafety: ast::Unsafety,
unsafety_id: ast::NodeId,
fn_sig: &ty::FnSig<'tcx>,
decl: &'tcx ast::FnDecl,
fn_id: ast::NodeId,
body: &'tcx ast::Block,
inherited: &'a Inherited<'a, 'tcx>)
-> FnCtxt<'a, 'tcx>
{
let tcx = ccx.tcx;
let err_count_on_creation = tcx.sess.err_count();
let arg_tys = &fn_sig.inputs;
let ret_ty = fn_sig.output;
debug!("check_fn(arg_tys={}, ret_ty={}, fn_id={})",
arg_tys.repr(tcx),
ret_ty.repr(tcx),
fn_id);
// Create the function context. This is either derived from scratch or,
// in the case of function expressions, based on the outer context.
let fcx = FnCtxt {
body_id: body.id,
writeback_errors: Cell::new(false),
err_count_on_creation: err_count_on_creation,
ret_ty: ret_ty,
ps: RefCell::new(UnsafetyState::function(unsafety, unsafety_id)),
inh: inherited,
ccx: ccx
};
// Remember return type so that regionck can access it later.
let mut fn_sig_tys: Vec<Ty> =
arg_tys.iter()
.cloned()
.collect();
if let ty::FnConverging(ret_ty) = ret_ty {
fcx.require_type_is_sized(ret_ty, decl.output.span(), traits::ReturnType);
fn_sig_tys.push(ret_ty);
}
debug!("fn-sig-map: fn_id={} fn_sig_tys={}",
fn_id,
fn_sig_tys.repr(tcx));
inherited.fn_sig_map.borrow_mut().insert(fn_id, fn_sig_tys);
{
let mut visit = GatherLocalsVisitor { fcx: &fcx, };
// Add formal parameters.
for (arg_ty, input) in arg_tys.iter().zip(decl.inputs.iter()) {
// Create type variables for each argument.
pat_util::pat_bindings(
&tcx.def_map,
&*input.pat,
|_bm, pat_id, sp, _path| {
let var_ty = visit.assign(sp, pat_id, None);
fcx.require_type_is_sized(var_ty, sp,
traits::VariableType(pat_id));
});
// Check the pattern.
let pcx = pat_ctxt {
fcx: &fcx,
map: pat_id_map(&tcx.def_map, &*input.pat),
};
_match::check_pat(&pcx, &*input.pat, *arg_ty);
}
visit.visit_block(body);
}
check_block_with_expected(&fcx, body, match ret_ty {
ty::FnConverging(result_type) => ExpectHasType(result_type),
ty::FnDiverging => NoExpectation
});
for (input, arg) in decl.inputs.iter().zip(arg_tys.iter()) {
fcx.write_ty(input.id, *arg);
}
fcx
}
pub fn check_struct(ccx: &CrateCtxt, id: ast::NodeId, span: Span) {
let tcx = ccx.tcx;
check_representable(tcx, span, id, "struct");
check_instantiable(tcx, span, id);
if ty::lookup_simd(tcx, local_def(id)) {
check_simd(tcx, span, id);
}
}
pub fn check_item_type<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx ast::Item) {
debug!("check_item_type(it.id={}, it.ident={})",
it.id,
ty::item_path_str(ccx.tcx, local_def(it.id)));
let _indenter = indenter();
match it.node {
// Consts can play a role in type-checking, so they are included here.
ast::ItemStatic(_, _, ref e) |
ast::ItemConst(_, ref e) => check_const(ccx, it.span, &**e, it.id),
ast::ItemEnum(ref enum_definition, _) => {
check_enum_variants(ccx,
it.span,
&enum_definition.variants,
it.id);
}
ast::ItemFn(_, _, _, _, _) => {} // entirely within check_item_body
ast::ItemImpl(_, _, _, _, _, ref impl_items) => {
debug!("ItemImpl {} with id {}", token::get_ident(it.ident), it.id);
match ty::impl_trait_ref(ccx.tcx, local_def(it.id)) {
Some(impl_trait_ref) => {
check_impl_items_against_trait(ccx,
it.span,
&impl_trait_ref,
impl_items);
}
None => { }
}
}
ast::ItemTrait(_, ref generics, _, _) => {
check_trait_on_unimplemented(ccx, generics, it);
}
ast::ItemStruct(..) => {
check_struct(ccx, it.id, it.span);
}
ast::ItemTy(ref t, ref generics) => {
let pty_ty = ty::node_id_to_type(ccx.tcx, it.id);
check_bounds_are_used(ccx, t.span, &generics.ty_params, pty_ty);
}
ast::ItemForeignMod(ref m) => {
if m.abi == abi::RustIntrinsic {
for item in &m.items {
check_intrinsic_type(ccx, &**item);
}
} else {
for item in &m.items {
let pty = ty::lookup_item_type(ccx.tcx, local_def(item.id));
if !pty.generics.types.is_empty() {
span_err!(ccx.tcx.sess, item.span, E0044,
"foreign items may not have type parameters");
}
if let ast::ForeignItemFn(ref fn_decl, _) = item.node {
if fn_decl.variadic && m.abi != abi::C {
span_err!(ccx.tcx.sess, item.span, E0045,
"variadic function must have C calling convention");
}
}
}
}
}
_ => {/* nothing to do */ }
}
}
pub fn check_item_body<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx ast::Item) {
debug!("check_item_body(it.id={}, it.ident={})",
it.id,
ty::item_path_str(ccx.tcx, local_def(it.id)));
let _indenter = indenter();
match it.node {
ast::ItemFn(ref decl, _, _, _, ref body) => {
let fn_pty = ty::lookup_item_type(ccx.tcx, ast_util::local_def(it.id));
let param_env = ParameterEnvironment::for_item(ccx.tcx, it.id);
check_bare_fn(ccx, &**decl, &**body, it.id, it.span, fn_pty.ty, param_env);
}
ast::ItemImpl(_, _, _, _, _, ref impl_items) => {
debug!("ItemImpl {} with id {}", token::get_ident(it.ident), it.id);
let impl_pty = ty::lookup_item_type(ccx.tcx, ast_util::local_def(it.id));
for impl_item in impl_items {
match impl_item.node {
ast::ConstImplItem(_, ref expr) => {
check_const(ccx, impl_item.span, &*expr, impl_item.id)
}
ast::MethodImplItem(ref sig, ref body) => {
check_method_body(ccx, &impl_pty.generics, sig, body,
impl_item.id, impl_item.span);
}
ast::TypeImplItem(_) |
ast::MacImplItem(_) => {
// Nothing to do here.
}
}
}
}
ast::ItemTrait(_, _, _, ref trait_items) => {
let trait_def = ty::lookup_trait_def(ccx.tcx, local_def(it.id));
for trait_item in trait_items {
match trait_item.node {
ast::ConstTraitItem(_, Some(ref expr)) => {
check_const(ccx, trait_item.span, &*expr, trait_item.id)
}
ast::MethodTraitItem(ref sig, Some(ref body)) => {
check_method_body(ccx, &trait_def.generics, sig, body,
trait_item.id, trait_item.span);
}
ast::ConstTraitItem(_, None) |
ast::MethodTraitItem(_, None) |
ast::TypeTraitItem(..) => {
// Nothing to do.
}
}
}
}
_ => {/* nothing to do */ }
}
}
fn check_trait_on_unimplemented<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
generics: &ast::Generics,
item: &ast::Item) {
if let Some(ref attr) = item.attrs.iter().find(|a| {
a.check_name("rustc_on_unimplemented")
}) {
if let Some(ref istring) = attr.value_str() {
let parser = Parser::new(&istring);
let types = &*generics.ty_params;
for token in parser {
match token {
Piece::String(_) => (), // Normal string, no need to check it
Piece::NextArgument(a) => match a.position {
// `{Self}` is allowed
Position::ArgumentNamed(s) if s == "Self" => (),
// So is `{A}` if A is a type parameter
Position::ArgumentNamed(s) => match types.iter().find(|t| {
t.ident.as_str() == s
}) {
Some(_) => (),
None => {
span_err!(ccx.tcx.sess, attr.span, E0230,
"there is no type parameter \
{} on trait {}",
s, item.ident.as_str());
}
},
// `{:1}` and `{}` are not to be used
Position::ArgumentIs(_) | Position::ArgumentNext => {
span_err!(ccx.tcx.sess, attr.span, E0231,
"only named substitution \
parameters are allowed");
}
}
}
}
} else {
span_err!(ccx.tcx.sess, attr.span, E0232,
"this attribute must have a value, \
eg `#[rustc_on_unimplemented = \"foo\"]`")
}
}
}
/// Type checks a method body.
///
/// # Parameters
///
/// * `item_generics`: generics defined on the impl/trait that contains
/// the method
/// * `self_bound`: bound for the `Self` type parameter, if any
/// * `method`: the method definition
fn check_method_body<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
item_generics: &ty::Generics<'tcx>,
sig: &'tcx ast::MethodSig,
body: &'tcx ast::Block,
id: ast::NodeId, span: Span) {
debug!("check_method_body(item_generics={}, id={})",
item_generics.repr(ccx.tcx), id);
let param_env = ParameterEnvironment::for_item(ccx.tcx, id);
let fty = ty::node_id_to_type(ccx.tcx, id);
debug!("check_method_body: fty={}", fty.repr(ccx.tcx));
check_bare_fn(ccx, &sig.decl, body, id, span, fty, param_env);
}
fn check_impl_items_against_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
impl_span: Span,
impl_trait_ref: &ty::TraitRef<'tcx>,
impl_items: &[P<ast::ImplItem>]) {
// Locate trait methods
let tcx = ccx.tcx;
let trait_items = ty::trait_items(tcx, impl_trait_ref.def_id);
// Check existing impl methods to see if they are both present in trait
// and compatible with trait signature
for impl_item in impl_items {
match impl_item.node {
ast::ConstImplItem(..) => {
let impl_const_def_id = local_def(impl_item.id);
let impl_const_ty = ty::impl_or_trait_item(ccx.tcx,
impl_const_def_id);
// Find associated const definition.
let opt_associated_const =
trait_items.iter()
.find(|ac| ac.name() == impl_const_ty.name());
match opt_associated_const {
Some(associated_const) => {
match (associated_const, &impl_const_ty) {
(&ty::ConstTraitItem(ref const_trait),
&ty::ConstTraitItem(ref const_impl)) => {
compare_const_impl(ccx.tcx,
&const_impl,
impl_item.span,
&const_trait,
&*impl_trait_ref);
}
_ => {
span_err!(tcx.sess, impl_item.span, E0323,
"item `{}` is an associated const, \
which doesn't match its trait `{}`",
token::get_name(impl_const_ty.name()),
impl_trait_ref.repr(tcx))
}
}
}
None => {
// This is `span_bug` as it should have already been
// caught in resolve.
tcx.sess.span_bug(
impl_item.span,
&format!(
"associated const `{}` is not a member of \
trait `{}`",
token::get_name(impl_const_ty.name()),
impl_trait_ref.repr(tcx)));
}
}
}
ast::MethodImplItem(_, ref body) => {
let impl_method_def_id = local_def(impl_item.id);
let impl_item_ty = ty::impl_or_trait_item(ccx.tcx,
impl_method_def_id);
// If this is an impl of a trait method, find the
// corresponding method definition in the trait.
let opt_trait_method_ty =
trait_items.iter()
.find(|ti| ti.name() == impl_item_ty.name());
match opt_trait_method_ty {
Some(trait_method_ty) => {
match (trait_method_ty, &impl_item_ty) {
(&ty::MethodTraitItem(ref trait_method_ty),
&ty::MethodTraitItem(ref impl_method_ty)) => {
compare_impl_method(ccx.tcx,
&**impl_method_ty,
impl_item.span,
body.id,
&**trait_method_ty,
&*impl_trait_ref);
}
_ => {
span_err!(tcx.sess, impl_item.span, E0324,
"item `{}` is an associated method, \
which doesn't match its trait `{}`",
token::get_name(impl_item_ty.name()),
impl_trait_ref.repr(tcx))
}
}
}
None => {
// This is `span_bug` as it should have already been
// caught in resolve.
tcx.sess.span_bug(
impl_item.span,
&format!("method `{}` is not a member of trait `{}`",
token::get_name(impl_item_ty.name()),
impl_trait_ref.repr(tcx)));
}
}
}
ast::TypeImplItem(_) => {
let typedef_def_id = local_def(impl_item.id);
let typedef_ty = ty::impl_or_trait_item(ccx.tcx,
typedef_def_id);
// If this is an impl of an associated type, find the
// corresponding type definition in the trait.
let opt_associated_type =
trait_items.iter()
.find(|ti| ti.name() == typedef_ty.name());
match opt_associated_type {
Some(associated_type) => {
match (associated_type, &typedef_ty) {
(&ty::TypeTraitItem(_), &ty::TypeTraitItem(_)) => {}
_ => {
span_err!(tcx.sess, impl_item.span, E0325,
"item `{}` is an associated type, \
which doesn't match its trait `{}`",
token::get_name(typedef_ty.name()),
impl_trait_ref.repr(tcx))
}
}
}
None => {
// This is `span_bug` as it should have already been
// caught in resolve.
tcx.sess.span_bug(
impl_item.span,
&format!(
"associated type `{}` is not a member of \
trait `{}`",
token::get_name(typedef_ty.name()),
impl_trait_ref.repr(tcx)));
}
}
}
ast::MacImplItem(_) => tcx.sess.span_bug(impl_item.span,
"unexpanded macro")
}
}
// Check for missing items from trait
let provided_methods = ty::provided_trait_methods(tcx, impl_trait_ref.def_id);
let associated_consts = ty::associated_consts(tcx, impl_trait_ref.def_id);
let mut missing_items = Vec::new();
for trait_item in &*trait_items {
match *trait_item {
ty::ConstTraitItem(ref associated_const) => {
let is_implemented = impl_items.iter().any(|ii| {
match ii.node {
ast::ConstImplItem(..) => {
ii.ident.name == associated_const.name
}
_ => false,
}
});
let is_provided =
associated_consts.iter().any(|ac| ac.default.is_some() &&
ac.name == associated_const.name);
if !is_implemented && !is_provided {
missing_items.push(format!("`{}`",
token::get_name(associated_const.name)));
}
}
ty::MethodTraitItem(ref trait_method) => {
let is_implemented =
impl_items.iter().any(|ii| {
match ii.node {
ast::MethodImplItem(..) => {
ii.ident.name == trait_method.name
}
_ => false,
}
});
let is_provided =
provided_methods.iter().any(|m| m.name == trait_method.name);
if !is_implemented && !is_provided {
missing_items.push(format!("`{}`", token::get_name(trait_method.name)));
}
}
ty::TypeTraitItem(ref associated_type) => {
let is_implemented = impl_items.iter().any(|ii| {
match ii.node {
ast::TypeImplItem(_) => {
ii.ident.name == associated_type.name
}
_ => false,
}
});
if !is_implemented {
missing_items.push(format!("`{}`", token::get_name(associated_type.name)));
}
}
}
}
if !missing_items.is_empty() {
span_err!(tcx.sess, impl_span, E0046,
"not all trait items implemented, missing: {}",
missing_items.connect(", "));
}
}
fn report_cast_to_unsized_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span: Span,
t_span: Span,
e_span: Span,
t_cast: Ty<'tcx>,
t_expr: Ty<'tcx>,
id: ast::NodeId) {
let tstr = fcx.infcx().ty_to_string(t_cast);
fcx.type_error_message(span, |actual| {
format!("cast to unsized type: `{}` as `{}`", actual, tstr)
}, t_expr, None);
match t_expr.sty {
ty::ty_rptr(_, ty::mt { mutbl: mt, .. }) => {
let mtstr = match mt {
ast::MutMutable => "mut ",
ast::MutImmutable => ""
};
if ty::type_is_trait(t_cast) {
match fcx.tcx().sess.codemap().span_to_snippet(t_span) {
Ok(s) => {
fcx.tcx().sess.span_suggestion(t_span,
"try casting to a reference instead:",
format!("&{}{}", mtstr, s));
},
Err(_) =>
span_help!(fcx.tcx().sess, t_span,
"did you mean `&{}{}`?", mtstr, tstr),
}
} else {
span_help!(fcx.tcx().sess, span,
"consider using an implicit coercion to `&{}{}` instead",
mtstr, tstr);
}
}
ty::ty_uniq(..) => {
match fcx.tcx().sess.codemap().span_to_snippet(t_span) {
Ok(s) => {
fcx.tcx().sess.span_suggestion(t_span,
"try casting to a `Box` instead:",
format!("Box<{}>", s));
},
Err(_) =>
span_help!(fcx.tcx().sess, t_span, "did you mean `Box<{}>`?", tstr),
}
}
_ => {
span_help!(fcx.tcx().sess, e_span,
"consider using a box or reference as appropriate");
}
}
fcx.write_error(id);
}
impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> {
fn tcx(&self) -> &ty::ctxt<'tcx> { self.ccx.tcx }
fn get_item_type_scheme(&self, _: Span, id: ast::DefId)
-> Result<ty::TypeScheme<'tcx>, ErrorReported>
{
Ok(ty::lookup_item_type(self.tcx(), id))
}
fn get_trait_def(&self, _: Span, id: ast::DefId)
-> Result<&'tcx ty::TraitDef<'tcx>, ErrorReported>
{
Ok(ty::lookup_trait_def(self.tcx(), id))
}
fn ensure_super_predicates(&self, _: Span, _: ast::DefId) -> Result<(), ErrorReported> {
// all super predicates are ensured during collect pass
Ok(())
}
fn get_free_substs(&self) -> Option<&Substs<'tcx>> {
Some(&self.inh.param_env.free_substs)
}
fn get_type_parameter_bounds(&self,
_: Span,
node_id: ast::NodeId)
-> Result<Vec<ty::PolyTraitRef<'tcx>>, ErrorReported>
{
let def = self.tcx().type_parameter_def(node_id);
let r = self.inh.param_env.caller_bounds
.iter()
.filter_map(|predicate| {
match *predicate {
ty::Predicate::Trait(ref data) => {
if data.0.self_ty().is_param(def.space, def.index) {
Some(data.to_poly_trait_ref())
} else {
None
}
}
_ => {
None
}
}
})
.collect();
Ok(r)
}
fn trait_defines_associated_type_named(&self,
trait_def_id: ast::DefId,
assoc_name: ast::Name)
-> bool
{
let trait_def = ty::lookup_trait_def(self.ccx.tcx, trait_def_id);
trait_def.associated_type_names.contains(&assoc_name)
}
fn ty_infer(&self, _span: Span) -> Ty<'tcx> {
self.infcx().next_ty_var()
}
fn projected_ty_from_poly_trait_ref(&self,
span: Span,
poly_trait_ref: ty::PolyTraitRef<'tcx>,
item_name: ast::Name)
-> Ty<'tcx>
{
let (trait_ref, _) =
self.infcx().replace_late_bound_regions_with_fresh_var(
span,
infer::LateBoundRegionConversionTime::AssocTypeProjection(item_name),
&poly_trait_ref);
self.normalize_associated_type(span, trait_ref, item_name)
}
fn projected_ty(&self,
span: Span,
trait_ref: ty::TraitRef<'tcx>,
item_name: ast::Name)
-> Ty<'tcx>
{
self.normalize_associated_type(span, trait_ref, item_name)
}
}
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
fn tcx(&self) -> &ty::ctxt<'tcx> { self.ccx.tcx }
pub fn infcx(&self) -> &infer::InferCtxt<'a,'tcx> {
&self.inh.infcx
}
pub fn param_env(&self) -> &ty::ParameterEnvironment<'a,'tcx> {
&self.inh.param_env
}
pub fn sess(&self) -> &Session {
&self.tcx().sess
}
pub fn err_count_since_creation(&self) -> usize {
self.ccx.tcx.sess.err_count() - self.err_count_on_creation
}
/// Resolves type variables in `ty` if possible. Unlike the infcx
/// version, this version will also select obligations if it seems
/// useful, in an effort to get more type information.
fn resolve_type_vars_if_possible(&self, mut ty: Ty<'tcx>) -> Ty<'tcx> {
debug!("resolve_type_vars_if_possible(ty={})", ty.repr(self.tcx()));
// No ty::infer()? Nothing needs doing.
if !ty::type_has_ty_infer(ty) {
debug!("resolve_type_vars_if_possible: ty={}", ty.repr(self.tcx()));
return ty;
}
// If `ty` is a type variable, see whether we already know what it is.
ty = self.infcx().resolve_type_vars_if_possible(&ty);
if !ty::type_has_ty_infer(ty) {
debug!("resolve_type_vars_if_possible: ty={}", ty.repr(self.tcx()));
return ty;
}
// If not, try resolving any new fcx obligations that have cropped up.
self.select_new_obligations();
ty = self.infcx().resolve_type_vars_if_possible(&ty);
if !ty::type_has_ty_infer(ty) {
debug!("resolve_type_vars_if_possible: ty={}", ty.repr(self.tcx()));
return ty;
}
// If not, try resolving *all* pending obligations as much as
// possible. This can help substantially when there are
// indirect dependencies that don't seem worth tracking
// precisely.
self.select_obligations_where_possible();
ty = self.infcx().resolve_type_vars_if_possible(&ty);
debug!("resolve_type_vars_if_possible: ty={}", ty.repr(self.tcx()));
ty
}
/// Resolves all type variables in `t` and then, if any were left
/// unresolved, substitutes an error type. This is used after the
/// main checking when doing a second pass before writeback. The
/// justification is that writeback will produce an error for
/// these unconstrained type variables.
fn resolve_type_vars_or_error(&self, t: &Ty<'tcx>) -> mc::McResult<Ty<'tcx>> {
let t = self.infcx().resolve_type_vars_if_possible(t);
if ty::type_has_ty_infer(t) || ty::type_is_error(t) { Err(()) } else { Ok(t) }
}
fn record_deferred_call_resolution(&self,
closure_def_id: ast::DefId,
r: DeferredCallResolutionHandler<'tcx>) {
let mut deferred_call_resolutions = self.inh.deferred_call_resolutions.borrow_mut();
deferred_call_resolutions.entry(closure_def_id).or_insert(vec![]).push(r);
}
fn remove_deferred_call_resolutions(&self,
closure_def_id: ast::DefId)
-> Vec<DeferredCallResolutionHandler<'tcx>>
{
let mut deferred_call_resolutions = self.inh.deferred_call_resolutions.borrow_mut();
deferred_call_resolutions.remove(&closure_def_id).unwrap_or(Vec::new())
}
pub fn tag(&self) -> String {
let self_ptr: *const FnCtxt = self;
format!("{:?}", self_ptr)
}
pub fn local_ty(&self, span: Span, nid: ast::NodeId) -> Ty<'tcx> {
match self.inh.locals.borrow().get(&nid) {
Some(&t) => t,
None => {
self.tcx().sess.span_err(
span,
&format!("no type for local variable {}", nid));
self.tcx().types.err
}
}
}
/// Apply "fallbacks" to some types
/// ! gets replaced with (), unconstrained ints with i32, and unconstrained floats with f64.
pub fn default_type_parameters(&self) {
use middle::ty::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat, Neither};
for (_, &mut ref ty) in &mut *self.inh.node_types.borrow_mut() {
let resolved = self.infcx().resolve_type_vars_if_possible(ty);
if self.infcx().type_var_diverges(resolved) {
demand::eqtype(self, codemap::DUMMY_SP, *ty, ty::mk_nil(self.tcx()));
} else {
match self.infcx().type_is_unconstrained_numeric(resolved) {
UnconstrainedInt => {
demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.i32)
},
UnconstrainedFloat => {
demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.f64)
}
Neither => { }
}
}
}
}
#[inline]
pub fn write_ty(&self, node_id: ast::NodeId, ty: Ty<'tcx>) {
debug!("write_ty({}, {}) in fcx {}",
node_id, ppaux::ty_to_string(self.tcx(), ty), self.tag());
self.inh.node_types.borrow_mut().insert(node_id, ty);
}
pub fn write_substs(&self, node_id: ast::NodeId, substs: ty::ItemSubsts<'tcx>) {
if !substs.substs.is_noop() {
debug!("write_substs({}, {}) in fcx {}",
node_id,
substs.repr(self.tcx()),
self.tag());
self.inh.item_substs.borrow_mut().insert(node_id, substs);
}
}
pub fn write_autoderef_adjustment(&self,
node_id: ast::NodeId,
derefs: usize) {
self.write_adjustment(
node_id,
ty::AdjustDerefRef(ty::AutoDerefRef {
autoderefs: derefs,
autoref: None,
unsize: None
})
);
}
pub fn write_adjustment(&self,
node_id: ast::NodeId,
adj: ty::AutoAdjustment<'tcx>) {
debug!("write_adjustment(node_id={}, adj={})", node_id, adj.repr(self.tcx()));
if adj.is_identity() {
return;
}
self.inh.adjustments.borrow_mut().insert(node_id, adj);
}
/// Basically whenever we are converting from a type scheme into
/// the fn body space, we always want to normalize associated
/// types as well. This function combines the two.
fn instantiate_type_scheme<T>(&self,
span: Span,
substs: &Substs<'tcx>,
value: &T)
-> T
where T : TypeFoldable<'tcx> + Clone + HasProjectionTypes + Repr<'tcx>
{
let value = value.subst(self.tcx(), substs);
let result = self.normalize_associated_types_in(span, &value);
debug!("instantiate_type_scheme(value={}, substs={}) = {}",
value.repr(self.tcx()),
substs.repr(self.tcx()),
result.repr(self.tcx()));
result
}
/// As `instantiate_type_scheme`, but for the bounds found in a
/// generic type scheme.
fn instantiate_bounds(&self,
span: Span,
substs: &Substs<'tcx>,
bounds: &ty::GenericPredicates<'tcx>)
-> ty::InstantiatedPredicates<'tcx>
{
ty::InstantiatedPredicates {
predicates: self.instantiate_type_scheme(span, substs, &bounds.predicates)
}
}
fn normalize_associated_types_in<T>(&self, span: Span, value: &T) -> T
where T : TypeFoldable<'tcx> + Clone + HasProjectionTypes + Repr<'tcx>
{
self.inh.normalize_associated_types_in(self, span, self.body_id, value)
}
fn normalize_associated_type(&self,
span: Span,
trait_ref: ty::TraitRef<'tcx>,
item_name: ast::Name)
-> Ty<'tcx>
{
let cause = traits::ObligationCause::new(span,
self.body_id,
traits::ObligationCauseCode::MiscObligation);
self.inh.fulfillment_cx
.borrow_mut()
.normalize_projection_type(self.infcx(),
self,
ty::ProjectionTy {
trait_ref: trait_ref,
item_name: item_name,
},
cause)
}
/// Returns the type of `def_id` with all generics replaced by by fresh type/region variables.
/// Also returns the substitution from the type parameters on `def_id` to the fresh variables.
/// Registers any trait obligations specified on `def_id` at the same time.
///
/// Note that function is only intended to be used with types (notably, not fns). This is
/// because it doesn't do any instantiation of late-bound regions.
pub fn instantiate_type(&self,
span: Span,
def_id: ast::DefId)
-> TypeAndSubsts<'tcx>
{
let type_scheme =
ty::lookup_item_type(self.tcx(), def_id);
let type_predicates =
ty::lookup_predicates(self.tcx(), def_id);
let substs =
self.infcx().fresh_substs_for_generics(
span,
&type_scheme.generics);
let bounds =
self.instantiate_bounds(span, &substs, &type_predicates);
self.add_obligations_for_parameters(
traits::ObligationCause::new(
span,
self.body_id,
traits::ItemObligation(def_id)),
&bounds);
let monotype =
self.instantiate_type_scheme(span, &substs, &type_scheme.ty);
TypeAndSubsts {
ty: monotype,
substs: substs
}
}
/// Returns the type that this AST path refers to. If the path has no type
/// parameters and the corresponding type has type parameters, fresh type
/// and/or region variables are substituted.
///
/// This is used when checking the constructor in struct literals.
fn instantiate_struct_literal_ty(&self,
did: ast::DefId,
path: &ast::Path)
-> TypeAndSubsts<'tcx>
{
let tcx = self.tcx();
let ty::TypeScheme { generics, ty: decl_ty } =
ty::lookup_item_type(tcx, did);
let substs = astconv::ast_path_substs_for_ty(self, self,
path.span,
PathParamMode::Optional,
&generics,
path.segments.last().unwrap());
let ty = self.instantiate_type_scheme(path.span, &substs, &decl_ty);
TypeAndSubsts { substs: substs, ty: ty }
}
pub fn write_nil(&self, node_id: ast::NodeId) {
self.write_ty(node_id, ty::mk_nil(self.tcx()));
}
pub fn write_error(&self, node_id: ast::NodeId) {
self.write_ty(node_id, self.tcx().types.err);
}
pub fn require_type_meets(&self,
ty: Ty<'tcx>,
span: Span,
code: traits::ObligationCauseCode<'tcx>,
bound: ty::BuiltinBound)
{
self.register_builtin_bound(
ty,
bound,
traits::ObligationCause::new(span, self.body_id, code));
}
pub fn require_type_is_sized(&self,
ty: Ty<'tcx>,
span: Span,
code: traits::ObligationCauseCode<'tcx>)
{
self.require_type_meets(ty, span, code, ty::BoundSized);
}
pub fn require_expr_have_sized_type(&self,
expr: &ast::Expr,
code: traits::ObligationCauseCode<'tcx>)
{
self.require_type_is_sized(self.expr_ty(expr), expr.span, code);
}
pub fn type_is_known_to_be_sized(&self,
ty: Ty<'tcx>,
span: Span)
-> bool
{
traits::type_known_to_meet_builtin_bound(self.infcx(),
self.param_env(),
ty,
ty::BoundSized,
span)
}
pub fn register_builtin_bound(&self,
ty: Ty<'tcx>,
builtin_bound: ty::BuiltinBound,
cause: traits::ObligationCause<'tcx>)
{
self.inh.fulfillment_cx.borrow_mut()
.register_builtin_bound(self.infcx(), ty, builtin_bound, cause);
}
pub fn register_predicate(&self,
obligation: traits::PredicateObligation<'tcx>)
{
debug!("register_predicate({})",
obligation.repr(self.tcx()));
self.inh.fulfillment_cx
.borrow_mut()
.register_predicate_obligation(self.infcx(), obligation);
}
pub fn to_ty(&self, ast_t: &ast::Ty) -> Ty<'tcx> {
let t = ast_ty_to_ty(self, self, ast_t);
let mut bounds_checker = wf::BoundsChecker::new(self,
ast_t.span,
self.body_id,
None);
bounds_checker.check_ty(t);
t
}
pub fn pat_to_string(&self, pat: &ast::Pat) -> String {
pat.repr(self.tcx())
}
pub fn expr_ty(&self, ex: &ast::Expr) -> Ty<'tcx> {
match self.inh.node_types.borrow().get(&ex.id) {
Some(&t) => t,
None => {
self.tcx().sess.bug(&format!("no type for expr in fcx {}",
self.tag()));
}
}
}
/// Apply `adjustment` to the type of `expr`
pub fn adjust_expr_ty(&self,
expr: &ast::Expr,
adjustment: Option<&ty::AutoAdjustment<'tcx>>)
-> Ty<'tcx>
{
let raw_ty = self.expr_ty(expr);
let raw_ty = self.infcx().shallow_resolve(raw_ty);
let resolve_ty = |ty: Ty<'tcx>| self.infcx().resolve_type_vars_if_possible(&ty);
ty::adjust_ty(self.tcx(),
expr.span,
expr.id,
raw_ty,
adjustment,
|method_call| self.inh.method_map.borrow()
.get(&method_call)
.map(|method| resolve_ty(method.ty)))
}
pub fn node_ty(&self, id: ast::NodeId) -> Ty<'tcx> {
match self.inh.node_types.borrow().get(&id) {
Some(&t) => t,
None if self.err_count_since_creation() != 0 => self.tcx().types.err,
None => {
self.tcx().sess.bug(
&format!("no type for node {}: {} in fcx {}",
id, self.tcx().map.node_to_string(id),
self.tag()));
}
}
}
pub fn item_substs(&self) -> Ref<NodeMap<ty::ItemSubsts<'tcx>>> {
self.inh.item_substs.borrow()
}
pub fn opt_node_ty_substs<F>(&self,
id: ast::NodeId,
f: F) where
F: FnOnce(&ty::ItemSubsts<'tcx>),
{
match self.inh.item_substs.borrow().get(&id) {
Some(s) => { f(s) }
None => { }
}
}
pub fn mk_subty(&self,
a_is_expected: bool,
origin: infer::TypeOrigin,
sub: Ty<'tcx>,
sup: Ty<'tcx>)
-> Result<(), ty::type_err<'tcx>> {
infer::mk_subty(self.infcx(), a_is_expected, origin, sub, sup)
}
pub fn mk_eqty(&self,
a_is_expected: bool,
origin: infer::TypeOrigin,
sub: Ty<'tcx>,
sup: Ty<'tcx>)
-> Result<(), ty::type_err<'tcx>> {
infer::mk_eqty(self.infcx(), a_is_expected, origin, sub, sup)
}
pub fn mk_subr(&self,
origin: infer::SubregionOrigin<'tcx>,
sub: ty::Region,
sup: ty::Region) {
infer::mk_subr(self.infcx(), origin, sub, sup)
}
pub fn type_error_message<M>(&self,
sp: Span,
mk_msg: M,
actual_ty: Ty<'tcx>,
err: Option<&ty::type_err<'tcx>>) where
M: FnOnce(String) -> String,
{
self.infcx().type_error_message(sp, mk_msg, actual_ty, err);
}
pub fn report_mismatched_types(&self,
sp: Span,
e: Ty<'tcx>,
a: Ty<'tcx>,
err: &ty::type_err<'tcx>) {
self.infcx().report_mismatched_types(sp, e, a, err)
}
/// Registers an obligation for checking later, during regionck, that the type `ty` must
/// outlive the region `r`.
pub fn register_region_obligation(&self,
ty: Ty<'tcx>,
region: ty::Region,
cause: traits::ObligationCause<'tcx>)
{
let mut fulfillment_cx = self.inh.fulfillment_cx.borrow_mut();
fulfillment_cx.register_region_obligation(self.infcx(), ty, region, cause);
}
pub fn add_default_region_param_bounds(&self,
substs: &Substs<'tcx>,
expr: &ast::Expr)
{
for &ty in substs.types.iter() {
let default_bound = ty::ReScope(CodeExtent::from_node_id(expr.id));
let cause = traits::ObligationCause::new(expr.span, self.body_id,
traits::MiscObligation);
self.register_region_obligation(ty, default_bound, cause);
}
}
/// Given a fully substituted set of bounds (`generic_bounds`), and the values with which each
/// type/region parameter was instantiated (`substs`), creates and registers suitable
/// trait/region obligations.
///
/// For example, if there is a function:
///
/// ```
/// fn foo<'a,T:'a>(...)
/// ```
///
/// and a reference:
///
/// ```
/// let f = foo;
/// ```
///
/// Then we will create a fresh region variable `'$0` and a fresh type variable `$1` for `'a`
/// and `T`. This routine will add a region obligation `$1:'$0` and register it locally.
pub fn add_obligations_for_parameters(&self,
cause: traits::ObligationCause<'tcx>,
predicates: &ty::InstantiatedPredicates<'tcx>)
{
assert!(!predicates.has_escaping_regions());
debug!("add_obligations_for_parameters(predicates={})",
predicates.repr(self.tcx()));
let obligations = traits::predicates_for_generics(self.tcx(),
cause,
predicates);
obligations.map_move(|o| self.register_predicate(o));
}
// Only for fields! Returns `None` for methods.
// Indifferent to privacy flags
pub fn lookup_field_ty(&self,
span: Span,
class_id: ast::DefId,
items: &[ty::field_ty],
fieldname: ast::Name,
substs: &subst::Substs<'tcx>)
-> Option<Ty<'tcx>>
{
let o_field = items.iter().find(|f| f.name == fieldname);
o_field.map(|f| ty::lookup_field_type(self.tcx(), class_id, f.id, substs))
.map(|t| self.normalize_associated_types_in(span, &t))
}
pub fn lookup_tup_field_ty(&self,
span: Span,
class_id: ast::DefId,
items: &[ty::field_ty],
idx: usize,
substs: &subst::Substs<'tcx>)
-> Option<Ty<'tcx>>
{
let o_field = if idx < items.len() { Some(&items[idx]) } else { None };
o_field.map(|f| ty::lookup_field_type(self.tcx(), class_id, f.id, substs))
.map(|t| self.normalize_associated_types_in(span, &t))
}
fn check_casts(&self) {
let mut deferred_cast_checks = self.inh.deferred_cast_checks.borrow_mut();
for cast in deferred_cast_checks.drain(..) {
cast.check(self);
}
}
fn select_all_obligations_and_apply_defaults(&self) {
debug!("select_all_obligations_and_apply_defaults");
self.select_obligations_where_possible();
self.default_type_parameters();
self.select_obligations_where_possible();
}
fn select_all_obligations_or_error(&self) {
debug!("select_all_obligations_or_error");
// upvar inference should have ensured that all deferred call
// resolutions are handled by now.
assert!(self.inh.deferred_call_resolutions.borrow().is_empty());
self.select_all_obligations_and_apply_defaults();
let mut fulfillment_cx = self.inh.fulfillment_cx.borrow_mut();
match fulfillment_cx.select_all_or_error(self.infcx(), self) {
Ok(()) => { }
Err(errors) => { report_fulfillment_errors(self.infcx(), &errors); }
}
}
/// Select as many obligations as we can at present.
fn select_obligations_where_possible(&self) {
match
self.inh.fulfillment_cx
.borrow_mut()
.select_where_possible(self.infcx(), self)
{
Ok(()) => { }
Err(errors) => { report_fulfillment_errors(self.infcx(), &errors); }
}
}
/// Try to select any fcx obligation that we haven't tried yet, in an effort
/// to improve inference. You could just call
/// `select_obligations_where_possible` except that it leads to repeated
/// work.
fn select_new_obligations(&self) {
match
self.inh.fulfillment_cx
.borrow_mut()
.select_new_obligations(self.infcx(), self)
{
Ok(()) => { }
Err(errors) => { report_fulfillment_errors(self.infcx(), &errors); }
}
}
}
impl<'a, 'tcx> RegionScope for FnCtxt<'a, 'tcx> {
fn object_lifetime_default(&self, span: Span) -> Option<ty::Region> {
// RFC #599 specifies that object lifetime defaults take
// precedence over other defaults. But within a fn body we
// don't have a *default* region, rather we use inference to
// find the *correct* region, which is strictly more general
// (and anyway, within a fn body the right region may not even
// be something the user can write explicitly, since it might
// be some expression).
Some(self.infcx().next_region_var(infer::MiscVariable(span)))
}
fn anon_regions(&self, span: Span, count: usize)
-> Result<Vec<ty::Region>, Option<Vec<(String, usize)>>> {
Ok((0..count).map(|_| {
self.infcx().next_region_var(infer::MiscVariable(span))
}).collect())
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum LvaluePreference {
PreferMutLvalue,
NoPreference
}
/// Whether `autoderef` requires types to resolve.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum UnresolvedTypeAction {
/// Produce an error and return `ty_err` whenever a type cannot
/// be resolved (i.e. it is `ty_infer`).
Error,
/// Go on without emitting any errors, and return the unresolved
/// type. Useful for probing, e.g. in coercions.
Ignore
}
/// Executes an autoderef loop for the type `t`. At each step, invokes `should_stop` to decide
/// whether to terminate the loop. Returns the final type and number of derefs that it performed.
///
/// Note: this method does not modify the adjustments table. The caller is responsible for
/// inserting an AutoAdjustment record into the `fcx` using one of the suitable methods.
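///
/// For example (an illustrative sketch): starting from
/// `base_ty = &&Vec<u8>`, successive iterations invoke `should_stop`
/// with:
///
/// ```
/// &&Vec<u8>   // autoderefs == 0
/// &Vec<u8>    // autoderefs == 1 (builtin deref)
/// Vec<u8>     // autoderefs == 2 (builtin deref)
/// [u8]        // autoderefs == 3 (overloaded `Deref` on `Vec`)
/// ```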
pub fn autoderef<'a, 'tcx, T, F>(fcx: &FnCtxt<'a, 'tcx>,
sp: Span,
base_ty: Ty<'tcx>,
opt_expr: Option<&ast::Expr>,
unresolved_type_action: UnresolvedTypeAction,
mut lvalue_pref: LvaluePreference,
mut should_stop: F)
-> (Ty<'tcx>, usize, Option<T>)
where F: FnMut(Ty<'tcx>, usize) -> Option<T>,
{
debug!("autoderef(base_ty={}, opt_expr={}, lvalue_pref={:?})",
base_ty.repr(fcx.tcx()),
opt_expr.repr(fcx.tcx()),
lvalue_pref);
let mut t = base_ty;
for autoderefs in 0..fcx.tcx().sess.recursion_limit.get() {
let resolved_t = match unresolved_type_action {
UnresolvedTypeAction::Error => {
structurally_resolved_type(fcx, sp, t)
}
UnresolvedTypeAction::Ignore => {
// We can continue even when the type cannot be resolved
// (i.e. it is an inference variable) because `ty::deref`
// and `try_overloaded_deref` both simply return `None`
// in such a case without producing spurious errors.
fcx.resolve_type_vars_if_possible(t)
}
};
if ty::type_is_error(resolved_t) {
return (resolved_t, autoderefs, None);
}
match should_stop(resolved_t, autoderefs) {
Some(x) => return (resolved_t, autoderefs, Some(x)),
None => {}
}
// Otherwise, deref if type is derefable:
let mt = match ty::deref(resolved_t, false) {
Some(mt) => Some(mt),
None => {
let method_call =
opt_expr.map(|expr| MethodCall::autoderef(expr.id, autoderefs as u32));
// Super subtle: it might seem as though we should
// pass `opt_expr` to `try_overloaded_deref`, so that
// the (implicit) autoref of using an overloaded deref
// would get added to the adjustment table. However we
// do not do that, because it's kind of a
// "meta-adjustment" -- instead, we just leave it
// unrecorded and know that there "will be" an
// autoref. regionck and other bits of the code base,
// when they encounter an overloaded autoderef, have
// to do some reconstructive surgery. This is a pretty
// complex mess that is begging for a proper MIR.
try_overloaded_deref(fcx, sp, method_call, None, resolved_t, lvalue_pref)
}
};
match mt {
Some(mt) => {
t = mt.ty;
if mt.mutbl == ast::MutImmutable {
lvalue_pref = NoPreference;
}
}
None => return (resolved_t, autoderefs, None)
}
}
// We've reached the recursion limit; error gracefully.
span_err!(fcx.tcx().sess, sp, E0055,
"reached the recursion limit while auto-dereferencing {}",
base_ty.repr(fcx.tcx()));
(fcx.tcx().types.err, 0, None)
}
fn try_overloaded_deref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span: Span,
method_call: Option<MethodCall>,
base_expr: Option<&ast::Expr>,
base_ty: Ty<'tcx>,
lvalue_pref: LvaluePreference)
-> Option<ty::mt<'tcx>>
{
// Try DerefMut first, if preferred.
let method = match (lvalue_pref, fcx.tcx().lang_items.deref_mut_trait()) {
(PreferMutLvalue, Some(trait_did)) => {
method::lookup_in_trait(fcx, span, base_expr,
token::intern("deref_mut"), trait_did,
base_ty, None)
}
_ => None
};
// Otherwise, fall back to Deref.
let method = match (method, fcx.tcx().lang_items.deref_trait()) {
(None, Some(trait_did)) => {
method::lookup_in_trait(fcx, span, base_expr,
token::intern("deref"), trait_did,
base_ty, None)
}
(method, _) => method
};
make_overloaded_lvalue_return_type(fcx, method_call, method)
}
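// An illustrative sketch of the lookup order above: for a mutable use
// such as `*x = 5` (with `PreferMutLvalue`), `deref_mut` from `DerefMut`
// is tried first; for a plain read such as `let y = *x`, only `deref`
// from `Deref` is consulted.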
/// For the overloaded lvalue expressions (`*x`, `x[3]`), the trait returns a type of `&T`, but the
/// actual type we assign to the *expression* is `T`. So this function just peels off the return
/// type by one layer to yield `T`. It also inserts the `method-callee` into the method map.
fn make_overloaded_lvalue_return_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
method_call: Option<MethodCall>,
method: Option<MethodCallee<'tcx>>)
-> Option<ty::mt<'tcx>>
{
match method {
Some(method) => {
// extract the method's return type, which will be `&T`;
// all late-bound regions should have been instantiated during method lookup
let ret_ty = ty::ty_fn_ret(method.ty);
let ret_ty = ty::no_late_bound_regions(fcx.tcx(), &ret_ty).unwrap().unwrap();
if let Some(method_call) = method_call {
fcx.inh.method_map.borrow_mut().insert(method_call, method);
}
// method returns &T, but the type as visible to user is T, so deref
ty::deref(ret_ty, true)
}
None => None,
}
}
fn lookup_indexing<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
expr: &ast::Expr,
base_expr: &'tcx ast::Expr,
base_ty: Ty<'tcx>,
idx_ty: Ty<'tcx>,
lvalue_pref: LvaluePreference)
-> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
{
// FIXME(#18741) -- this is almost but not quite the same as the
// autoderef that normal method probing does. They could likely be
// consolidated.
let (ty, autoderefs, final_mt) = autoderef(fcx,
base_expr.span,
base_ty,
Some(base_expr),
UnresolvedTypeAction::Error,
lvalue_pref,
|adj_ty, idx| {
try_index_step(fcx, MethodCall::expr(expr.id), expr, base_expr,
adj_ty, idx, false, lvalue_pref, idx_ty)
});
if final_mt.is_some() {
return final_mt;
}
// After we have fully autoderef'd, if the resulting type is [T, ..n], then
// do a final unsized coercion to yield [T].
if let ty::ty_vec(element_ty, Some(_)) = ty.sty {
let adjusted_ty = ty::mk_vec(fcx.tcx(), element_ty, None);
try_index_step(fcx, MethodCall::expr(expr.id), expr, base_expr,
adjusted_ty, autoderefs, true, lvalue_pref, idx_ty)
} else {
None
}
}
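// An illustrative sketch of the final unsizing step above: for
// `let a = [1, 2, 3]; &a[0..2]`, builtin indexing does not apply (the
// index is a range, not a `usize`), so once autoderef bottoms out at
// `[i32; 3]` we retry with the unsized type `[i32]`, whose overloaded
// `Index` impl for ranges can then succeed.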
/// To type-check `base_expr[index_expr]`, we progressively autoderef (and otherwise adjust)
/// `base_expr`, looking for a type which either supports builtin indexing or overloaded indexing.
/// This loop implements one step in that search; the autoderef loop is implemented by
/// `lookup_indexing`.
fn try_index_step<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
method_call: MethodCall,
expr: &ast::Expr,
base_expr: &'tcx ast::Expr,
adjusted_ty: Ty<'tcx>,
autoderefs: usize,
unsize: bool,
lvalue_pref: LvaluePreference,
index_ty: Ty<'tcx>)
-> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
{
let tcx = fcx.tcx();
debug!("try_index_step(expr={}, base_expr.id={}, adjusted_ty={}, \
autoderefs={}, unsize={}, index_ty={})",
expr.repr(tcx),
base_expr.repr(tcx),
adjusted_ty.repr(tcx),
autoderefs,
unsize,
index_ty.repr(tcx));
let input_ty = fcx.infcx().next_ty_var();
// First, try built-in indexing.
match (ty::index(adjusted_ty), &index_ty.sty) {
(Some(ty), &ty::ty_uint(ast::TyUs)) | (Some(ty), &ty::ty_infer(ty::IntVar(_))) => {
debug!("try_index_step: success, using built-in indexing");
// If we had `[T; N]`, we should've caught it before unsizing to `[T]`.
assert!(!unsize);
fcx.write_autoderef_adjustment(base_expr.id, autoderefs);
return Some((tcx.types.usize, ty));
}
_ => {}
}
// Try `IndexMut` first, if preferred.
let method = match (lvalue_pref, tcx.lang_items.index_mut_trait()) {
(PreferMutLvalue, Some(trait_did)) => {
method::lookup_in_trait_adjusted(fcx,
expr.span,
Some(&*base_expr),
token::intern("index_mut"),
trait_did,
autoderefs,
unsize,
adjusted_ty,
Some(vec![input_ty]))
}
_ => None,
};
// Otherwise, fall back to `Index`.
let method = match (method, tcx.lang_items.index_trait()) {
(None, Some(trait_did)) => {
method::lookup_in_trait_adjusted(fcx,
expr.span,
Some(&*base_expr),
token::intern("index"),
trait_did,
autoderefs,
unsize,
adjusted_ty,
Some(vec![input_ty]))
}
(method, _) => method,
};
// If some lookup succeeds, write callee into table and extract index/element
// type from the method signature.
method.and_then(|method| {
debug!("try_index_step: success, using overloaded indexing");
make_overloaded_lvalue_return_type(fcx, Some(method_call), Some(method)).
map(|ret| (input_ty, ret.ty))
})
}
fn check_method_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
sp: Span,
method_fn_ty: Ty<'tcx>,
callee_expr: &'tcx ast::Expr,
args_no_rcvr: &'tcx [P<ast::Expr>],
tuple_arguments: TupleArgumentsFlag,
expected: Expectation<'tcx>)
-> ty::FnOutput<'tcx> {
if ty::type_is_error(method_fn_ty) {
let err_inputs = err_args(fcx.tcx(), args_no_rcvr.len());
let err_inputs = match tuple_arguments {
DontTupleArguments => err_inputs,
TupleArguments => vec![ty::mk_tup(fcx.tcx(), err_inputs)],
};
check_argument_types(fcx,
sp,
&err_inputs[..],
&[],
args_no_rcvr,
false,
tuple_arguments);
ty::FnConverging(fcx.tcx().types.err)
} else {
match method_fn_ty.sty {
ty::ty_bare_fn(_, ref fty) => {
// HACK(eddyb) ignore self in the definition (see above).
let expected_arg_tys = expected_types_for_fn_args(fcx,
sp,
expected,
fty.sig.0.output,
&fty.sig.0.inputs[1..]);
check_argument_types(fcx,
sp,
&fty.sig.0.inputs[1..],
&expected_arg_tys[..],
args_no_rcvr,
fty.sig.0.variadic,
tuple_arguments);
fty.sig.0.output
}
_ => {
fcx.tcx().sess.span_bug(callee_expr.span,
"method without bare fn type");
}
}
}
}
/// Generic function that factors out common logic from function calls, method calls and overloaded
/// operators.
fn check_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
sp: Span,
fn_inputs: &[Ty<'tcx>],
expected_arg_tys: &[Ty<'tcx>],
args: &'tcx [P<ast::Expr>],
variadic: bool,
tuple_arguments: TupleArgumentsFlag) {
let tcx = fcx.ccx.tcx;
// Grab the argument types, supplying fresh type variables
// if the wrong number of arguments were supplied
let supplied_arg_count = if tuple_arguments == DontTupleArguments {
args.len()
} else {
1
};
let mut expected_arg_tys = expected_arg_tys;
let expected_arg_count = fn_inputs.len();
let formal_tys = if tuple_arguments == TupleArguments {
let tuple_type = structurally_resolved_type(fcx, sp, fn_inputs[0]);
match tuple_type.sty {
ty::ty_tup(ref arg_types) => {
if arg_types.len() != args.len() {
span_err!(tcx.sess, sp, E0057,
"this function takes {} parameter{} but {} parameter{} supplied",
arg_types.len(),
if arg_types.len() == 1 {""} else {"s"},
args.len(),
if args.len() == 1 {" was"} else {"s were"});
expected_arg_tys = &[];
err_args(fcx.tcx(), args.len())
} else {
expected_arg_tys = match expected_arg_tys.get(0) {
Some(&ty) => match ty.sty {
ty::ty_tup(ref tys) => &**tys,
_ => &[]
},
None => &[]
};
(*arg_types).clone()
}
}
_ => {
span_err!(tcx.sess, sp, E0059,
"cannot use call notation; the first type parameter \
for the function trait is neither a tuple nor unit");
expected_arg_tys = &[];
err_args(fcx.tcx(), args.len())
}
}
} else if expected_arg_count == supplied_arg_count {
fn_inputs.to_vec()
} else if variadic {
if supplied_arg_count >= expected_arg_count {
fn_inputs.to_vec()
} else {
span_err!(tcx.sess, sp, E0060,
"this function takes at least {} parameter{} \
but {} parameter{} supplied",
expected_arg_count,
if expected_arg_count == 1 {""} else {"s"},
supplied_arg_count,
if supplied_arg_count == 1 {" was"} else {"s were"});
expected_arg_tys = &[];
err_args(fcx.tcx(), supplied_arg_count)
}
} else {
span_err!(tcx.sess, sp, E0061,
"this function takes {} parameter{} but {} parameter{} supplied",
expected_arg_count,
if expected_arg_count == 1 {""} else {"s"},
supplied_arg_count,
if supplied_arg_count == 1 {" was"} else {"s were"});
expected_arg_tys = &[];
err_args(fcx.tcx(), supplied_arg_count)
};
debug!("check_argument_types: formal_tys={:?}",
formal_tys.iter().map(|t| fcx.infcx().ty_to_string(*t)).collect::<Vec<String>>());
// Check the arguments.
// We do this in a pretty awful way: first we typecheck any arguments
// that are not anonymous functions, then we typecheck the anonymous
// functions. This is so that we have more information about the types
// of arguments when we typecheck the functions. This isn't really the
// right way to do this.
let xs = [false, true];
for check_blocks in &xs {
let check_blocks = *check_blocks;
debug!("check_blocks={}", check_blocks);
// More awful hacks: before we check argument types, try to do
// an "opportunistic" vtable resolution of any trait bounds on
// the call. This helps coercions.
if check_blocks {
fcx.select_new_obligations();
}
// For variadic functions, we don't have a declared type for all of
// the arguments, hence we only do our usual type checking with
// the arguments whose types we do know.
let t = if variadic {
expected_arg_count
} else if tuple_arguments == TupleArguments {
args.len()
} else {
supplied_arg_count
};
for (i, arg) in args.iter().take(t).enumerate() {
let is_block = match arg.node {
ast::ExprClosure(..) => true,
_ => false
};
if is_block == check_blocks {
debug!("checking the argument");
let formal_ty = formal_tys[i];
// The special-cased logic below serves three purposes:
// 1. Provide as good of an expected type as possible.
let expected = expected_arg_tys.get(i).map(|&ty| {
Expectation::rvalue_hint(ty)
});
check_expr_with_unifier(fcx, &**arg,
expected.unwrap_or(ExpectHasType(formal_ty)),
NoPreference, || {
// 2. Coerce to the most detailed type that could be coerced
// to, which is `expected_ty` if `rvalue_hint` returns an
// `ExprHasType(expected_ty)`, or the `formal_ty` otherwise.
let coerce_ty = expected.and_then(|e| e.only_has_type(fcx));
demand::coerce(fcx, arg.span, coerce_ty.unwrap_or(formal_ty), &**arg);
// 3. Relate the expected type and the formal one,
// if the expected type was used for the coercion.
coerce_ty.map(|ty| demand::suptype(fcx, arg.span, formal_ty, ty));
});
}
}
}
// We also need to make sure we at least write the ty of the other
// arguments which we skipped above.
if variadic {
for arg in args.iter().skip(expected_arg_count) {
check_expr(fcx, &**arg);
// There are a few types which get autopromoted when passed via varargs
// in C but we just error out instead and require explicit casts.
let arg_ty = structurally_resolved_type(fcx, arg.span,
fcx.expr_ty(&**arg));
match arg_ty.sty {
ty::ty_float(ast::TyF32) => {
fcx.type_error_message(arg.span,
|t| {
format!("can't pass an {} to variadic \
function, cast to c_double", t)
}, arg_ty, None);
}
ty::ty_int(ast::TyI8) | ty::ty_int(ast::TyI16) | ty::ty_bool => {
fcx.type_error_message(arg.span, |t| {
format!("can't pass {} to variadic \
function, cast to c_int",
t)
}, arg_ty, None);
}
ty::ty_uint(ast::TyU8) | ty::ty_uint(ast::TyU16) => {
fcx.type_error_message(arg.span, |t| {
format!("can't pass {} to variadic \
function, cast to c_uint",
t)
}, arg_ty, None);
}
_ => {}
}
}
}
}
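// An illustrative sketch of the two-pass scheme above: in a call such as
// `foo(bar, |x| x + 1)`, the non-closure argument `bar` is checked in the
// first pass, so any obligations it unlocks can help pin down the
// closure's expected signature before `|x| x + 1` is checked in the
// second pass.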
// FIXME(#17596) Ty<'tcx> is incorrectly invariant w.r.t 'tcx.
fn err_args<'tcx>(tcx: &ty::ctxt<'tcx>, len: usize) -> Vec<Ty<'tcx>> {
(0..len).map(|_| tcx.types.err).collect()
}
fn write_call<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
call_expr: &ast::Expr,
output: ty::FnOutput<'tcx>) {
fcx.write_ty(call_expr.id, match output {
ty::FnConverging(output_ty) => output_ty,
ty::FnDiverging => fcx.infcx().next_diverging_ty_var()
});
}
// AST fragment checking
fn check_lit<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
lit: &ast::Lit,
expected: Expectation<'tcx>)
-> Ty<'tcx>
{
let tcx = fcx.ccx.tcx;
match lit.node {
ast::LitStr(..) => ty::mk_str_slice(tcx, tcx.mk_region(ty::ReStatic), ast::MutImmutable),
ast::LitBinary(ref v) => {
ty::mk_rptr(tcx, tcx.mk_region(ty::ReStatic), ty::mt {
ty: ty::mk_vec(tcx, tcx.types.u8, Some(v.len())),
mutbl: ast::MutImmutable,
})
}
ast::LitByte(_) => tcx.types.u8,
ast::LitChar(_) => tcx.types.char,
ast::LitInt(_, ast::SignedIntLit(t, _)) => ty::mk_mach_int(tcx, t),
ast::LitInt(_, ast::UnsignedIntLit(t)) => ty::mk_mach_uint(tcx, t),
ast::LitInt(_, ast::UnsuffixedIntLit(_)) => {
let opt_ty = expected.to_option(fcx).and_then(|ty| {
match ty.sty {
ty::ty_int(_) | ty::ty_uint(_) => Some(ty),
ty::ty_char => Some(tcx.types.u8),
ty::ty_ptr(..) => Some(tcx.types.usize),
ty::ty_bare_fn(..) => Some(tcx.types.usize),
_ => None
}
});
opt_ty.unwrap_or_else(
|| ty::mk_int_var(tcx, fcx.infcx().next_int_var_id()))
}
ast::LitFloat(_, t) => ty::mk_mach_float(tcx, t),
ast::LitFloatUnsuffixed(_) => {
let opt_ty = expected.to_option(fcx).and_then(|ty| {
match ty.sty {
ty::ty_float(_) => Some(ty),
_ => None
}
});
opt_ty.unwrap_or_else(
|| ty::mk_float_var(tcx, fcx.infcx().next_float_var_id()))
}
ast::LitBool(_) => tcx.types.bool
}
}
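// An illustrative sketch of the expectation-driven cases above: in
// `let x: u8 = 1;` the unsuffixed literal adopts the expected type `u8`,
// while a bare `let y = 1;` yields a fresh integer variable that is
// defaulted later (see `default_type_parameters`).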
pub fn check_expr_has_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
expr: &'tcx ast::Expr,
expected: Ty<'tcx>) {
check_expr_with_unifier(
fcx, expr, ExpectHasType(expected), NoPreference,
|| demand::suptype(fcx, expr.span, expected, fcx.expr_ty(expr)));
}
fn check_expr_coercable_to_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
expr: &'tcx ast::Expr,
expected: Ty<'tcx>) {
check_expr_with_unifier(
fcx, expr, ExpectHasType(expected), NoPreference,
|| demand::coerce(fcx, expr.span, expected, expr));
}
fn check_expr_with_hint<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, expr: &'tcx ast::Expr,
expected: Ty<'tcx>) {
check_expr_with_unifier(
fcx, expr, ExpectHasType(expected), NoPreference,
|| ())
}
fn check_expr_with_expectation<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
expr: &'tcx ast::Expr,
expected: Expectation<'tcx>) {
check_expr_with_unifier(
fcx, expr, expected, NoPreference,
|| ())
}
fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
expr: &'tcx ast::Expr,
expected: Expectation<'tcx>,
lvalue_pref: LvaluePreference)
{
check_expr_with_unifier(fcx, expr, expected, lvalue_pref, || ())
}
fn check_expr<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, expr: &'tcx ast::Expr) {
check_expr_with_unifier(fcx, expr, NoExpectation, NoPreference, || ())
}
fn check_expr_with_lvalue_pref<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, expr: &'tcx ast::Expr,
lvalue_pref: LvaluePreference) {
check_expr_with_unifier(fcx, expr, NoExpectation, lvalue_pref, || ())
}
// Determine the `self` type, using fresh variables for all variables
// declared on the impl declaration, e.g., `impl<A, B> Foo for Vec<(A, B)>`
// would return `Vec<($0, $1)>` where `$0` and `$1` are freshly
// instantiated type variables.
pub fn impl_self_ty<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span: Span, // (potential) receiver for this impl
did: ast::DefId)
-> TypeAndSubsts<'tcx> {
let tcx = fcx.tcx();
let ity = ty::lookup_item_type(tcx, did);
let (n_tps, rps, raw_ty) =
(ity.generics.types.len(subst::TypeSpace),
ity.generics.regions.get_slice(subst::TypeSpace),
ity.ty);
let rps = fcx.inh.infcx.region_vars_for_defs(span, rps);
let tps = fcx.inh.infcx.next_ty_vars(n_tps);
let substs = subst::Substs::new_type(tps, rps);
let substd_ty = fcx.instantiate_type_scheme(span, &substs, &raw_ty);
TypeAndSubsts { substs: substs, ty: substd_ty }
}
/// Controls whether the arguments are tupled. This is used for the call
/// operator.
///
/// Tupling means that all call-side arguments are packed into a tuple and
/// passed as a single parameter. For example, if tupling is enabled, this
/// function:
///
/// fn f(x: (isize, isize))
///
/// Can be called as:
///
/// f(1, 2);
///
/// Instead of:
///
/// f((1, 2));
#[derive(Clone, Eq, PartialEq)]
enum TupleArgumentsFlag {
DontTupleArguments,
TupleArguments,
}
/// Unifies the return type with the expected type early, for more coercions
/// and forward type information on the argument expressions.
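///
/// For example (an illustrative sketch): given `fn id<T>(t: T) -> T`
/// called as `let x: u32 = id(0);`, subtyping the formal return `T`
/// against the expected `u32` resolves `T`, so the literal `0` is then
/// checked against the hint `u32` rather than a bare inference variable.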
fn expected_types_for_fn_args<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
call_span: Span,
expected_ret: Expectation<'tcx>,
formal_ret: ty::FnOutput<'tcx>,
formal_args: &[Ty<'tcx>])
-> Vec<Ty<'tcx>> {
let expected_args = expected_ret.only_has_type(fcx).and_then(|ret_ty| {
if let ty::FnConverging(formal_ret_ty) = formal_ret {
fcx.infcx().commit_regions_if_ok(|| {
// Attempt to apply a subtyping relationship between the formal
// return type (likely containing type variables if the function
// is polymorphic) and the expected return type.
// No argument expectations are produced if unification fails.
let origin = infer::Misc(call_span);
let ures = fcx.infcx().sub_types(false, origin, formal_ret_ty, ret_ty);
// FIXME(#15760) can't use try! here, FromError doesn't default
// to identity so the resulting type is not constrained.
if let Err(e) = ures {
return Err(e);
}
// Record all the argument types, with the substitutions
// produced from the above subtyping unification.
Ok(formal_args.iter().map(|ty| {
fcx.infcx().resolve_type_vars_if_possible(ty)
}).collect())
}).ok()
} else {
None
}
}).unwrap_or(vec![]);
debug!("expected_types_for_fn_args(formal={} -> {}, expected={} -> {})",
formal_args.repr(fcx.tcx()), formal_ret.repr(fcx.tcx()),
expected_args.repr(fcx.tcx()), expected_ret.repr(fcx.tcx()));
expected_args
}
/// Invariant:
/// If an expression has any sub-expressions that result in a type error,
/// inspecting that expression's type with `ty::type_is_error` will return
/// true. Likewise, if an expression is known to diverge, inspecting its
/// type with `ty::type_is_bot` will return true (n.b.: since Rust is
/// strict, `_|_` can appear in the type of an expression that does not,
/// itself, diverge: for example, `fn() -> _|_`.)
/// Note that inspecting a type's structure *directly* may expose the fact
/// that there are actually multiple representations for `ty_err`, so avoid
/// that when err needs to be handled differently.
fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
expr: &'tcx ast::Expr,
expected: Expectation<'tcx>,
lvalue_pref: LvaluePreference,
unifier: F) where
F: FnOnce(),
{
debug!(">> typechecking: expr={} expected={}",
expr.repr(fcx.tcx()), expected.repr(fcx.tcx()));
// Checks a method call.
fn check_method_call<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
expr: &'tcx ast::Expr,
method_name: ast::SpannedIdent,
args: &'tcx [P<ast::Expr>],
tps: &[P<ast::Ty>],
expected: Expectation<'tcx>,
lvalue_pref: LvaluePreference) {
let rcvr = &*args[0];
check_expr_with_lvalue_pref(fcx, &*rcvr, lvalue_pref);
// no need to check for bot/err -- callee does that
let expr_t = structurally_resolved_type(fcx,
expr.span,
fcx.expr_ty(&*rcvr));
let tps = tps.iter().map(|ast_ty| fcx.to_ty(&**ast_ty)).collect::<Vec<_>>();
let fn_ty = match method::lookup(fcx,
method_name.span,
method_name.node.name,
expr_t,
tps,
expr,
rcvr) {
Ok(method) => {
let method_ty = method.ty;
let method_call = MethodCall::expr(expr.id);
fcx.inh.method_map.borrow_mut().insert(method_call, method);
method_ty
}
Err(error) => {
method::report_error(fcx, method_name.span, expr_t,
method_name.node.name, Some(rcvr), error);
fcx.write_error(expr.id);
fcx.tcx().types.err
}
};
// Call the generic checker.
let ret_ty = check_method_argument_types(fcx,
method_name.span,
fn_ty,
expr,
&args[1..],
DontTupleArguments,
expected);
write_call(fcx, expr, ret_ty);
}
// A generic function for checking the then and else in an if
// or if-else.
fn check_then_else<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
cond_expr: &'tcx ast::Expr,
then_blk: &'tcx ast::Block,
opt_else_expr: Option<&'tcx ast::Expr>,
id: ast::NodeId,
sp: Span,
expected: Expectation<'tcx>) {
check_expr_has_type(fcx, cond_expr, fcx.tcx().types.bool);
let expected = expected.adjust_for_branches(fcx);
check_block_with_expected(fcx, then_blk, expected);
let then_ty = fcx.node_ty(then_blk.id);
let branches_ty = match opt_else_expr {
Some(ref else_expr) => {
check_expr_with_expectation(fcx, &**else_expr, expected);
let else_ty = fcx.expr_ty(&**else_expr);
infer::common_supertype(fcx.infcx(),
infer::IfExpression(sp),
true,
then_ty,
else_ty)
}
None => {
infer::common_supertype(fcx.infcx(),
infer::IfExpressionWithNoElse(sp),
false,
then_ty,
ty::mk_nil(fcx.tcx()))
}
};
let cond_ty = fcx.expr_ty(cond_expr);
let if_ty = if ty::type_is_error(cond_ty) {
fcx.tcx().types.err
} else {
branches_ty
};
fcx.write_ty(id, if_ty);
}
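// An illustrative sketch of the branch unification above: for
// `if c { 1u8 } else { 2u8 }` the if-expression gets the common
// supertype `u8`; for `if c { 1u8 }` with no else, the then-branch must
// instead unify with `()`, which fails with a type mismatch.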
// Check field access expressions
fn check_field<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
expr: &'tcx ast::Expr,
lvalue_pref: LvaluePreference,
base: &'tcx ast::Expr,
field: &ast::SpannedIdent) {
let tcx = fcx.ccx.tcx;
check_expr_with_lvalue_pref(fcx, base, lvalue_pref);
let expr_t = structurally_resolved_type(fcx, expr.span,
fcx.expr_ty(base));
// FIXME(eddyb) #12808 Integrate privacy into this auto-deref loop.
let (_, autoderefs, field_ty) = autoderef(fcx,
expr.span,
expr_t,
Some(base),
UnresolvedTypeAction::Error,
lvalue_pref,
|base_t, _| {
match base_t.sty {
ty::ty_struct(base_id, substs) => {
debug!("struct named {}", ppaux::ty_to_string(tcx, base_t));
let fields = ty::lookup_struct_fields(tcx, base_id);
fcx.lookup_field_ty(expr.span, base_id, &fields[..],
field.node.name, &(*substs))
}
_ => None
}
});
match field_ty {
Some(field_ty) => {
fcx.write_ty(expr.id, field_ty);
fcx.write_autoderef_adjustment(base.id, autoderefs);
return;
}
None => {}
}
if method::exists(fcx, field.span, field.node.name, expr_t, expr.id) {
fcx.type_error_message(
field.span,
|actual| {
format!("attempted to take value of method `{}` on type \
`{}`", token::get_ident(field.node), actual)
},
expr_t, None);
tcx.sess.fileline_help(field.span,
"maybe a `()` to call it is missing? \
If not, try an anonymous function");
} else {
fcx.type_error_message(
expr.span,
|actual| {
format!("attempted access of field `{}` on \
type `{}`, but no field with that \
name was found",
token::get_ident(field.node),
actual)
},
expr_t, None);
if let ty::ty_struct(did, _) = expr_t.sty {
suggest_field_names(did, field, tcx, vec![]);
}
}
fcx.write_error(expr.id);
}
// displays hints about the closest matches in field names
fn suggest_field_names<'tcx>(id : DefId,
field : &ast::SpannedIdent,
tcx : &ty::ctxt<'tcx>,
skip : Vec<&str>) {
let ident = token::get_ident(field.node);
let name = &ident;
// only suggest candidates with at least one matching letter
let mut best_dist = name.len();
let fields = ty::lookup_struct_fields(tcx, id);
let mut best = None;
for elem in &fields {
let n = elem.name.as_str();
// ignore already set fields
if skip.iter().any(|&x| x == n) {
continue;
}
// ignore private fields from non-local crates
if id.krate != ast::LOCAL_CRATE && elem.vis != Visibility::Public {
continue;
}
let dist = lev_distance(n, name);
if dist < best_dist {
best = Some(n);
best_dist = dist;
}
}
if let Some(n) = best {
tcx.sess.span_help(field.span,
&format!("did you mean `{}`?", n));
}
}
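// An illustrative sketch: for `struct S { banana: u32 }` and a typo'd
// access `s.banan`, `lev_distance("banana", "banan")` is 1, which beats
// the initial `best_dist` of `"banan".len() == 5`, so the help message
// suggests `banana`.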
// Check tuple index expressions
fn check_tup_field<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
expr: &'tcx ast::Expr,
lvalue_pref: LvaluePreference,
base: &'tcx ast::Expr,
idx: codemap::Spanned<usize>) {
let tcx = fcx.ccx.tcx;
check_expr_with_lvalue_pref(fcx, base, lvalue_pref);
let expr_t = structurally_resolved_type(fcx, expr.span,
fcx.expr_ty(base));
let mut tuple_like = false;
// FIXME(eddyb) #12808 Integrate privacy into this auto-deref loop.
let (_, autoderefs, field_ty) = autoderef(fcx,
expr.span,
expr_t,
Some(base),
UnresolvedTypeAction::Error,
lvalue_pref,
|base_t, _| {
match base_t.sty {
ty::ty_struct(base_id, substs) => {
tuple_like = ty::is_tuple_struct(tcx, base_id);
if tuple_like {
debug!("tuple struct named {}", ppaux::ty_to_string(tcx, base_t));
let fields = ty::lookup_struct_fields(tcx, base_id);
fcx.lookup_tup_field_ty(expr.span, base_id, &fields[..],
idx.node, &(*substs))
} else {
None
}
}
ty::ty_tup(ref v) => {
tuple_like = true;
if idx.node < v.len() { Some(v[idx.node]) } else { None }
}
_ => None
}
});
match field_ty {
Some(field_ty) => {
fcx.write_ty(expr.id, field_ty);
fcx.write_autoderef_adjustment(base.id, autoderefs);
return;
}
None => {}
}
fcx.type_error_message(
expr.span,
|actual| {
if tuple_like {
format!("attempted out-of-bounds tuple index `{}` on \
type `{}`",
idx.node,
actual)
} else {
format!("attempted tuple index `{}` on type `{}`, but the \
type was not a tuple or tuple struct",
idx.node,
actual)
}
},
expr_t, None);
fcx.write_error(expr.id);
}
fn check_struct_or_variant_fields<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
struct_ty: Ty<'tcx>,
span: Span,
class_id: ast::DefId,
node_id: ast::NodeId,
substitutions: &'tcx subst::Substs<'tcx>,
field_types: &[ty::field_ty],
ast_fields: &'tcx [ast::Field],
check_completeness: bool,
enum_id_opt: Option<ast::DefId>) {
let tcx = fcx.ccx.tcx;
let mut class_field_map = FnvHashMap();
let mut fields_found = 0;
for field in field_types {
class_field_map.insert(field.name, (field.id, false));
}
let mut error_happened = false;
// Typecheck each field.
for field in ast_fields {
let mut expected_field_type = tcx.types.err;
let pair = class_field_map.get(&field.ident.node.name).cloned();
match pair {
None => {
fcx.type_error_message(
field.ident.span,
|actual| match enum_id_opt {
Some(enum_id) => {
let variant_type = ty::enum_variant_with_id(tcx,
enum_id,
class_id);
format!("struct variant `{}::{}` has no field named `{}`",
actual, variant_type.name.as_str(),
token::get_ident(field.ident.node))
}
None => {
format!("structure `{}` has no field named `{}`",
actual,
token::get_ident(field.ident.node))
}
},
struct_ty,
None);
// prevent all specified fields from being suggested
let skip_fields = ast_fields.iter().map(|ref x| x.ident.node.name.as_str());
let actual_id = match enum_id_opt {
Some(_) => class_id,
None => ty::ty_to_def_id(struct_ty).unwrap()
};
suggest_field_names(actual_id, &field.ident, tcx, skip_fields.collect());
error_happened = true;
}
Some((_, true)) => {
span_err!(fcx.tcx().sess, field.ident.span, E0062,
"field `{}` specified more than once",
token::get_ident(field.ident.node));
error_happened = true;
}
Some((field_id, false)) => {
expected_field_type =
ty::lookup_field_type(
tcx, class_id, field_id, substitutions);
expected_field_type =
fcx.normalize_associated_types_in(
field.span, &expected_field_type);
class_field_map.insert(
field.ident.node.name, (field_id, true));
fields_found += 1;
}
}
// Make sure to give a type to the field even if there's
// an error, so we can continue typechecking
check_expr_coercable_to_type(fcx, &*field.expr, expected_field_type);
}
if error_happened {
fcx.write_error(node_id);
}
if check_completeness && !error_happened {
// Make sure the programmer specified all the fields.
assert!(fields_found <= field_types.len());
if fields_found < field_types.len() {
let mut missing_fields = Vec::new();
for class_field in field_types {
let name = class_field.name;
let (_, seen) = *class_field_map.get(&name).unwrap();
if !seen {
missing_fields.push(
format!("`{}`", &token::get_name(name)))
}
}
span_err!(tcx.sess, span, E0063,
"missing field{}: {}",
if missing_fields.len() == 1 {""} else {"s"},
missing_fields.connect(", "));
}
}
if !error_happened {
fcx.write_ty(node_id, ty::mk_struct(fcx.ccx.tcx,
class_id, substitutions));
}
}
fn check_struct_constructor<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
id: ast::NodeId,
span: codemap::Span,
class_id: ast::DefId,
fields: &'tcx [ast::Field],
base_expr: Option<&'tcx ast::Expr>) {
let tcx = fcx.ccx.tcx;
// Generate the struct type.
let TypeAndSubsts {
ty: mut struct_type,
substs: struct_substs
} = fcx.instantiate_type(span, class_id);
// Look up and check the fields.
let class_fields = ty::lookup_struct_fields(tcx, class_id);
check_struct_or_variant_fields(fcx,
struct_type,
span,
class_id,
id,
fcx.ccx.tcx.mk_substs(struct_substs),
&class_fields[..],
fields,
base_expr.is_none(),
None);
if ty::type_is_error(fcx.node_ty(id)) {
struct_type = tcx.types.err;
}
// Check the base expression if necessary.
match base_expr {
None => {}
Some(base_expr) => {
check_expr_has_type(fcx, &*base_expr, struct_type);
}
}
// Write in the resulting type.
fcx.write_ty(id, struct_type);
}
fn check_struct_enum_variant<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
id: ast::NodeId,
span: codemap::Span,
enum_id: ast::DefId,
variant_id: ast::DefId,
fields: &'tcx [ast::Field]) {
let tcx = fcx.ccx.tcx;
// Look up the number of type parameters and the raw type, and
// determine whether the enum is region-parameterized.
let TypeAndSubsts {
ty: enum_type,
substs: substitutions
} = fcx.instantiate_type(span, enum_id);
// Look up and check the enum variant fields.
let variant_fields = ty::lookup_struct_fields(tcx, variant_id);
check_struct_or_variant_fields(fcx,
enum_type,
span,
variant_id,
id,
fcx.ccx.tcx.mk_substs(substitutions),
&variant_fields[..],
fields,
true,
Some(enum_id));
fcx.write_ty(id, enum_type);
}
fn check_struct_fields_on_error<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
id: ast::NodeId,
fields: &'tcx [ast::Field],
base_expr: &'tcx Option<P<ast::Expr>>) {
// Make sure to still write the types
// otherwise we might ICE
fcx.write_error(id);
for field in fields {
check_expr(fcx, &*field.expr);
}
match *base_expr {
Some(ref base) => check_expr(fcx, &**base),
None => {}
}
}
type ExprCheckerWithTy = fn(&FnCtxt, &ast::Expr, Ty);
let tcx = fcx.ccx.tcx;
let id = expr.id;
match expr.node {
ast::ExprBox(ref opt_place, ref subexpr) => {
opt_place.as_ref().map(|place|check_expr(fcx, &**place));
check_expr(fcx, &**subexpr);
let mut checked = false;
opt_place.as_ref().map(|place| match place.node {
ast::ExprPath(None, ref path) => {
// FIXME(pcwalton): For now we hardcode the only permissible
// place: the exchange heap.
let definition = lookup_full_def(tcx, path.span, place.id);
let def_id = definition.def_id();
let referent_ty = fcx.expr_ty(&**subexpr);
if tcx.lang_items.exchange_heap() == Some(def_id) {
fcx.write_ty(id, ty::mk_uniq(tcx, referent_ty));
checked = true
}
}
_ => {}
});
if !checked {
span_err!(tcx.sess, expr.span, E0066,
"only the exchange heap is currently supported");
fcx.write_ty(id, tcx.types.err);
}
}
ast::ExprLit(ref lit) => {
let typ = check_lit(fcx, &**lit, expected);
fcx.write_ty(id, typ);
}
ast::ExprBinary(op, ref lhs, ref rhs) => {
op::check_binop(fcx, expr, op, lhs, rhs);
}
ast::ExprAssignOp(op, ref lhs, ref rhs) => {
op::check_binop_assign(fcx, expr, op, lhs, rhs);
}
ast::ExprUnary(unop, ref oprnd) => {
let expected_inner = expected.to_option(fcx).map_or(NoExpectation, |ty| {
match unop {
ast::UnUniq => match ty.sty {
ty::ty_uniq(ty) => {
Expectation::rvalue_hint(ty)
}
_ => {
NoExpectation
}
},
ast::UnNot | ast::UnNeg => {
expected
}
ast::UnDeref => {
NoExpectation
}
}
});
let lvalue_pref = match unop {
ast::UnDeref => lvalue_pref,
_ => NoPreference
};
check_expr_with_expectation_and_lvalue_pref(
fcx, &**oprnd, expected_inner, lvalue_pref);
let mut oprnd_t = fcx.expr_ty(&**oprnd);
if !ty::type_is_error(oprnd_t) {
match unop {
ast::UnUniq => {
oprnd_t = ty::mk_uniq(tcx, oprnd_t);
}
ast::UnDeref => {
oprnd_t = structurally_resolved_type(fcx, expr.span, oprnd_t);
oprnd_t = match ty::deref(oprnd_t, true) {
Some(mt) => mt.ty,
None => match try_overloaded_deref(fcx, expr.span,
Some(MethodCall::expr(expr.id)),
Some(&**oprnd), oprnd_t, lvalue_pref) {
Some(mt) => mt.ty,
None => {
fcx.type_error_message(expr.span, |actual| {
format!("type `{}` cannot be \
dereferenced", actual)
}, oprnd_t, None);
tcx.types.err
}
}
};
}
ast::UnNot => {
oprnd_t = structurally_resolved_type(fcx, oprnd.span,
oprnd_t);
if !(ty::type_is_integral(oprnd_t) ||
oprnd_t.sty == ty::ty_bool) {
oprnd_t = op::check_user_unop(fcx, "!", "not",
tcx.lang_items.not_trait(),
expr, &**oprnd, oprnd_t, unop);
}
}
ast::UnNeg => {
oprnd_t = structurally_resolved_type(fcx, oprnd.span,
oprnd_t);
if !(ty::type_is_integral(oprnd_t) ||
ty::type_is_fp(oprnd_t)) {
oprnd_t = op::check_user_unop(fcx, "-", "neg",
tcx.lang_items.neg_trait(),
expr, &**oprnd, oprnd_t, unop);
}
if let ty::ty_uint(_) = oprnd_t.sty {
if !tcx.sess.features.borrow().negate_unsigned {
feature_gate::emit_feature_err(
&tcx.sess.parse_sess.span_diagnostic,
"negate_unsigned",
expr.span,
"unary negation of unsigned integers may be removed in the future");
}
}
}
}
}
fcx.write_ty(id, oprnd_t);
}
ast::ExprAddrOf(mutbl, ref oprnd) => {
let hint = expected.only_has_type(fcx).map_or(NoExpectation, |ty| {
match ty.sty {
ty::ty_rptr(_, ref mt) | ty::ty_ptr(ref mt) => {
if ty::expr_is_lval(fcx.tcx(), &**oprnd) {
// Lvalues may legitimately have unsized types.
// For example, dereferences of a fat pointer and
// the last field of a struct can be unsized.
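// E.g. (sketch) in `let s: &str = &*owned;` with `owned: String`,
// the operand `*owned` is an lvalue of unsized type `str`, so
// expecting exactly `mt.ty` (`str`) is fine here.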
ExpectHasType(mt.ty)
} else {
Expectation::rvalue_hint(mt.ty)
}
}
_ => NoExpectation
}
});
let lvalue_pref = match mutbl {
ast::MutMutable => PreferMutLvalue,
ast::MutImmutable => NoPreference
};
check_expr_with_expectation_and_lvalue_pref(fcx,
&**oprnd,
hint,
lvalue_pref);
let tm = ty::mt { ty: fcx.expr_ty(&**oprnd), mutbl: mutbl };
let oprnd_t = if ty::type_is_error(tm.ty) {
tcx.types.err
} else {
// Note: at this point, we cannot say what the best lifetime
// is to use for resulting pointer. We want to use the
// shortest lifetime possible so as to avoid spurious borrowck
// errors. Moreover, the longest lifetime will depend on the
// precise details of the value whose address is being taken
// (and how long it is valid), which we don't know yet until type
// inference is complete.
//
// Therefore, here we simply generate a region variable. The
// region inferencer will then select the ultimate value.
// Finally, borrowck is charged with guaranteeing that the
// value whose address was taken can actually be made to live
// as long as it needs to live.
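// Sketch: for `let p = &x;` the borrow initially gets a fresh
// region variable 'r, giving `p` the type `&'r T`; region
// inference later picks the smallest 'r consistent with every
// use of `p`.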
let region = fcx.infcx().next_region_var(infer::AddrOfRegion(expr.span));
ty::mk_rptr(tcx, tcx.mk_region(region), tm)
};
fcx.write_ty(id, oprnd_t);
}
ast::ExprPath(ref maybe_qself, ref path) => {
let opt_self_ty = maybe_qself.as_ref().map(|qself| {
fcx.to_ty(&qself.ty)
});
let path_res = if let Some(&d) = tcx.def_map.borrow().get(&id) {
d
} else if let Some(ast::QSelf { position: 0, .. }) = *maybe_qself {
// Create some fake resolution that can't possibly be a type.
def::PathResolution {
base_def: def::DefMod(local_def(ast::CRATE_NODE_ID)),
last_private: LastMod(AllPublic),
depth: path.segments.len()
}
} else {
tcx.sess.span_bug(expr.span,
&format!("unbound path {}", expr.repr(tcx)))
};
if let Some((opt_ty, segments, def)) =
resolve_ty_and_def_ufcs(fcx, path_res, opt_self_ty, path,
expr.span, expr.id) {
let (scheme, predicates) = type_scheme_and_predicates_for_def(fcx,
expr.span,
def);
instantiate_path(fcx,
segments,
scheme,
&predicates,
opt_ty,
def,
expr.span,
id);
}
// We always require that the type provided as the value for
// a type parameter outlives the moment of instantiation.
constrain_path_type_parameters(fcx, expr);
}
ast::ExprInlineAsm(ref ia) => {
for &(_, ref input) in &ia.inputs {
check_expr(fcx, &**input);
}
for &(_, ref out, _) in &ia.outputs {
check_expr(fcx, &**out);
}
fcx.write_nil(id);
}
ast::ExprMac(_) => tcx.sess.bug("unexpanded macro"),
ast::ExprBreak(_) => { fcx.write_ty(id, fcx.infcx().next_diverging_ty_var()); }
ast::ExprAgain(_) => { fcx.write_ty(id, fcx.infcx().next_diverging_ty_var()); }
ast::ExprRet(ref expr_opt) => {
match fcx.ret_ty {
ty::FnConverging(result_type) => {
match *expr_opt {
None =>
if let Err(_) = fcx.mk_eqty(false, infer::Misc(expr.span),
result_type, ty::mk_nil(fcx.tcx())) {
span_err!(tcx.sess, expr.span, E0069,
"`return;` in a function whose return type is \
not `()`");
},
Some(ref e) => {
check_expr_coercable_to_type(fcx, &**e, result_type);
}
}
}
ty::FnDiverging => {
if let Some(ref e) = *expr_opt {
check_expr(fcx, &**e);
}
span_err!(tcx.sess, expr.span, E0166,
"`return` in a function declared as diverging");
}
}
fcx.write_ty(id, fcx.infcx().next_diverging_ty_var());
}
ast::ExprParen(ref a) => {
check_expr_with_expectation_and_lvalue_pref(fcx,
&**a,
expected,
lvalue_pref);
fcx.write_ty(id, fcx.expr_ty(&**a));
}
ast::ExprAssign(ref lhs, ref rhs) => {
check_expr_with_lvalue_pref(fcx, &**lhs, PreferMutLvalue);
let tcx = fcx.tcx();
if !ty::expr_is_lval(tcx, &**lhs) {
span_err!(tcx.sess, expr.span, E0070,
"illegal left-hand side expression");
}
let lhs_ty = fcx.expr_ty(&**lhs);
check_expr_coercable_to_type(fcx, &**rhs, lhs_ty);
let rhs_ty = fcx.expr_ty(&**rhs);
fcx.require_expr_have_sized_type(&**lhs, traits::AssignmentLhsSized);
if ty::type_is_error(lhs_ty) || ty::type_is_error(rhs_ty) {
fcx.write_error(id);
} else {
fcx.write_nil(id);
}
}
ast::ExprIf(ref cond, ref then_blk, ref opt_else_expr) => {
check_then_else(fcx, &**cond, &**then_blk, opt_else_expr.as_ref().map(|e| &**e),
id, expr.span, expected);
}
ast::ExprIfLet(..) => {
tcx.sess.span_bug(expr.span, "non-desugared ExprIfLet");
}
ast::ExprWhile(ref cond, ref body, _) => {
check_expr_has_type(fcx, &**cond, tcx.types.bool);
check_block_no_value(fcx, &**body);
let cond_ty = fcx.expr_ty(&**cond);
let body_ty = fcx.node_ty(body.id);
if ty::type_is_error(cond_ty) || ty::type_is_error(body_ty) {
fcx.write_error(id);
}
else {
fcx.write_nil(id);
}
}
ast::ExprWhileLet(..) => {
tcx.sess.span_bug(expr.span, "non-desugared ExprWhileLet");
}
ast::ExprForLoop(..) => {
tcx.sess.span_bug(expr.span, "non-desugared ExprForLoop");
}
ast::ExprLoop(ref body, _) => {
check_block_no_value(fcx, &**body);
if !may_break(tcx, expr.id, &**body) {
fcx.write_ty(id, fcx.infcx().next_diverging_ty_var());
} else {
fcx.write_nil(id);
}
}
ast::ExprMatch(ref discrim, ref arms, match_src) => {
_match::check_match(fcx, expr, &**discrim, arms, expected, match_src);
}
ast::ExprClosure(capture, ref decl, ref body) => {
closure::check_expr_closure(fcx, expr, capture, &**decl, &**body, expected);
}
ast::ExprBlock(ref b) => {
check_block_with_expected(fcx, &**b, expected);
fcx.write_ty(id, fcx.node_ty(b.id));
}
ast::ExprCall(ref callee, ref args) => {
callee::check_call(fcx, expr, &**callee, &args[..], expected);
}
ast::ExprMethodCall(ident, ref tps, ref args) => {
check_method_call(fcx, expr, ident, &args[..], &tps[..], expected, lvalue_pref);
let arg_tys = args.iter().map(|a| fcx.expr_ty(&**a));
let args_err = arg_tys.fold(false,
|rest_err, a| {
rest_err || ty::type_is_error(a)});
if args_err {
fcx.write_error(id);
}
}
ast::ExprCast(ref e, ref t) => {
if let ast::TyFixedLengthVec(_, ref count_expr) = t.node {
check_expr_with_hint(fcx, &**count_expr, tcx.types.usize);
}
// Find the type of `e`. Supply hints based on the type we are casting to,
// if appropriate.
let t_cast = fcx.to_ty(t);
let t_cast = structurally_resolved_type(fcx, expr.span, t_cast);
check_expr_with_expectation(fcx, e, ExpectCastableToType(t_cast));
let t_expr = fcx.expr_ty(e);
// Eagerly check for some obvious errors.
if ty::type_is_error(t_expr) {
fcx.write_error(id);
} else if !fcx.type_is_known_to_be_sized(t_cast, expr.span) {
report_cast_to_unsized_type(fcx, expr.span, t.span, e.span, t_cast, t_expr, id);
} else {
// Write a type for the whole expression, assuming everything is going
// to work out Ok.
fcx.write_ty(id, t_cast);
// Defer other checks until we're done type checking.
let mut deferred_cast_checks = fcx.inh.deferred_cast_checks.borrow_mut();
let cast_check = cast::CastCheck::new((**e).clone(), t_expr, t_cast, expr.span);
deferred_cast_checks.push(cast_check);
}
}
ast::ExprVec(ref args) => {
let uty = expected.to_option(fcx).and_then(|uty| {
match uty.sty {
ty::ty_vec(ty, _) => Some(ty),
_ => None
}
});
let typ = match uty {
Some(uty) => {
for e in args {
check_expr_coercable_to_type(fcx, &**e, uty);
}
uty
}
None => {
let t: Ty = fcx.infcx().next_ty_var();
for e in args {
check_expr_has_type(fcx, &**e, t);
}
t
}
};
let typ = ty::mk_vec(tcx, typ, Some(args.len()));
fcx.write_ty(id, typ);
}
ast::ExprRepeat(ref element, ref count_expr) => {
check_expr_has_type(fcx, &**count_expr, tcx.types.usize);
let count = ty::eval_repeat_count(fcx.tcx(), &**count_expr);
let uty = match expected {
ExpectHasType(uty) => {
match uty.sty {
ty::ty_vec(ty, _) => Some(ty),
_ => None
}
}
_ => None
};
let (element_ty, t) = match uty {
Some(uty) => {
check_expr_coercable_to_type(fcx, &**element, uty);
(uty, uty)
}
None => {
let t: Ty = fcx.infcx().next_ty_var();
check_expr_has_type(fcx, &**element, t);
(fcx.expr_ty(&**element), t)
}
};
if count > 1 {
// For [foo, ..n] where n > 1, `foo` must have
// Copy type:
fcx.require_type_meets(
t,
expr.span,
traits::RepeatVec,
ty::BoundCopy);
}
if ty::type_is_error(element_ty) {
fcx.write_error(id);
} else {
let t = ty::mk_vec(tcx, t, Some(count));
fcx.write_ty(id, t);
}
}
ast::ExprTup(ref elts) => {
let flds = expected.only_has_type(fcx).and_then(|ty| {
match ty.sty {
ty::ty_tup(ref flds) => Some(&flds[..]),
_ => None
}
});
let mut err_field = false;
let elt_ts = elts.iter().enumerate().map(|(i, e)| {
let t = match flds {
Some(ref fs) if i < fs.len() => {
let ety = fs[i];
check_expr_coercable_to_type(fcx, &**e, ety);
ety
}
_ => {
check_expr_with_expectation(fcx, &**e, NoExpectation);
fcx.expr_ty(&**e)
}
};
err_field = err_field || ty::type_is_error(t);
t
}).collect();
if err_field {
fcx.write_error(id);
} else {
let typ = ty::mk_tup(tcx, elt_ts);
fcx.write_ty(id, typ);
}
}
ast::ExprStruct(ref path, ref fields, ref base_expr) => {
// Resolve the path.
let def = lookup_full_def(tcx, path.span, id);
let struct_id = match def {
def::DefVariant(enum_id, variant_id, true) => {
check_struct_enum_variant(fcx, id, expr.span, enum_id,
variant_id, &fields[..]);
enum_id
}
def::DefTrait(def_id) => {
span_err!(tcx.sess, path.span, E0159,
"use of trait `{}` as a struct constructor",
pprust::path_to_string(path));
check_struct_fields_on_error(fcx,
id,
&fields[..],
base_expr);
def_id
},
def => {
// Verify that this was actually a struct.
let typ = ty::lookup_item_type(fcx.ccx.tcx, def.def_id());
match typ.ty.sty {
ty::ty_struct(struct_did, _) => {
check_struct_constructor(fcx,
id,
expr.span,
struct_did,
&fields[..],
base_expr.as_ref().map(|e| &**e));
}
_ => {
span_err!(tcx.sess, path.span, E0071,
"`{}` does not name a structure",
pprust::path_to_string(path));
check_struct_fields_on_error(fcx,
id,
&fields[..],
base_expr);
}
}
def.def_id()
}
};
// Turn the path into a type and verify that that type unifies with
// the resulting structure type. This is needed to handle type
// parameters correctly.
let actual_structure_type = fcx.expr_ty(&*expr);
if !ty::type_is_error(actual_structure_type) {
let type_and_substs = fcx.instantiate_struct_literal_ty(struct_id, path);
match fcx.mk_subty(false,
infer::Misc(path.span),
actual_structure_type,
type_and_substs.ty) {
Ok(()) => {}
Err(type_error) => {
let type_error_description =
ty::type_err_to_str(tcx, &type_error);
span_err!(fcx.tcx().sess, path.span, E0235,
"structure constructor specifies a \
structure of type `{}`, but this \
structure has type `{}`: {}",
fcx.infcx()
.ty_to_string(type_and_substs.ty),
fcx.infcx()
.ty_to_string(
actual_structure_type),
type_error_description);
ty::note_and_explain_type_err(tcx, &type_error, path.span);
}
}
}
fcx.require_expr_have_sized_type(expr, traits::StructInitializerSized);
}
ast::ExprField(ref base, ref field) => {
check_field(fcx, expr, lvalue_pref, &**base, field);
}
ast::ExprTupField(ref base, idx) => {
check_tup_field(fcx, expr, lvalue_pref, &**base, idx);
}
ast::ExprIndex(ref base, ref idx) => {
check_expr_with_lvalue_pref(fcx, &**base, lvalue_pref);
check_expr(fcx, &**idx);
let base_t = fcx.expr_ty(&**base);
let idx_t = fcx.expr_ty(&**idx);
if ty::type_is_error(base_t) {
fcx.write_ty(id, base_t);
} else if ty::type_is_error(idx_t) {
fcx.write_ty(id, idx_t);
} else {
let base_t = structurally_resolved_type(fcx, expr.span, base_t);
match lookup_indexing(fcx, expr, base, base_t, idx_t, lvalue_pref) {
Some((index_ty, element_ty)) => {
let idx_expr_ty = fcx.expr_ty(idx);
demand::eqtype(fcx, expr.span, index_ty, idx_expr_ty);
fcx.write_ty(id, element_ty);
}
None => {
check_expr_has_type(fcx, &**idx, fcx.tcx().types.err);
fcx.type_error_message(
expr.span,
|actual| {
format!("cannot index a value of type `{}`",
actual)
},
base_t,
None);
fcx.write_ty(id, fcx.tcx().types.err);
}
}
}
}
ast::ExprRange(ref start, ref end) => {
let t_start = start.as_ref().map(|e| {
check_expr(fcx, &**e);
fcx.expr_ty(&**e)
});
let t_end = end.as_ref().map(|e| {
check_expr(fcx, &**e);
fcx.expr_ty(&**e)
});
let idx_type = match (t_start, t_end) {
(Some(ty), None) | (None, Some(ty)) => {
Some(ty)
}
(Some(t_start), Some(t_end)) if (ty::type_is_error(t_start) ||
ty::type_is_error(t_end)) => {
Some(fcx.tcx().types.err)
}
(Some(t_start), Some(t_end)) => {
Some(infer::common_supertype(fcx.infcx(),
infer::RangeExpression(expr.span),
true,
t_start,
t_end))
}
_ => None
};
// Note that we don't check the type of start/end satisfy any
// bounds because right now the range structs do not have any. If we add
// some bounds, then we'll need to check `t_start` against them here.
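// Sketch of the desugaring target chosen below (std::ops lang items):
//
//     a..b  =>  Range     { start: a, end: b }
//     a..   =>  RangeFrom { start: a }
//     ..b   =>  RangeTo   { end: b }
//     ..    =>  RangeFull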
let range_type = match idx_type {
Some(idx_type) if ty::type_is_error(idx_type) => {
fcx.tcx().types.err
}
Some(idx_type) => {
// Find the did from the appropriate lang item.
let did = match (start, end) {
(&Some(_), &Some(_)) => tcx.lang_items.range_struct(),
(&Some(_), &None) => tcx.lang_items.range_from_struct(),
(&None, &Some(_)) => tcx.lang_items.range_to_struct(),
(&None, &None) => {
tcx.sess.span_bug(expr.span, "full range should be dealt with above")
}
};
if let Some(did) = did {
let predicates = ty::lookup_predicates(tcx, did);
let substs = Substs::new_type(vec![idx_type], vec![]);
let bounds = fcx.instantiate_bounds(expr.span, &substs, &predicates);
fcx.add_obligations_for_parameters(
traits::ObligationCause::new(expr.span,
fcx.body_id,
traits::ItemObligation(did)),
&bounds);
ty::mk_struct(tcx, did, tcx.mk_substs(substs))
} else {
span_err!(tcx.sess, expr.span, E0236, "no lang item for range syntax");
fcx.tcx().types.err
}
}
None => {
// Neither start nor end => RangeFull
if let Some(did) = tcx.lang_items.range_full_struct() {
let substs = Substs::new_type(vec![], vec![]);
ty::mk_struct(tcx, did, tcx.mk_substs(substs))
} else {
span_err!(tcx.sess, expr.span, E0237, "no lang item for range syntax");
fcx.tcx().types.err
}
}
};
fcx.write_ty(id, range_type);
}
}
debug!("type of expr({}) {} is...", expr.id,
syntax::print::pprust::expr_to_string(expr));
debug!("... {}, expected is {}",
ppaux::ty_to_string(tcx, fcx.expr_ty(expr)),
expected.repr(tcx));
unifier();
}
pub fn resolve_ty_and_def_ufcs<'a, 'b, 'tcx>(fcx: &FnCtxt<'b, 'tcx>,
path_res: def::PathResolution,
opt_self_ty: Option<Ty<'tcx>>,
path: &'a ast::Path,
span: Span,
node_id: ast::NodeId)
-> Option<(Option<Ty<'tcx>>,
&'a [ast::PathSegment],
def::Def)>
{
// If fully resolved already, we don't have to do anything.
if path_res.depth == 0 {
Some((opt_self_ty, &path.segments, path_res.base_def))
} else {
let mut def = path_res.base_def;
let ty_segments = path.segments.init();
let base_ty_end = path.segments.len() - path_res.depth;
let ty = astconv::finish_resolving_def_to_ty(fcx, fcx, span,
PathParamMode::Optional,
&mut def,
opt_self_ty,
&ty_segments[..base_ty_end],
&ty_segments[base_ty_end..]);
let item_segment = path.segments.last().unwrap();
let item_name = item_segment.identifier.name;
match method::resolve_ufcs(fcx, span, item_name, ty, node_id) {
Ok((def, lp)) => {
// Write back the new resolution.
fcx.ccx.tcx.def_map.borrow_mut()
.insert(node_id, def::PathResolution {
base_def: def,
last_private: path_res.last_private.or(lp),
depth: 0
});
Some((Some(ty), slice::ref_slice(item_segment), def))
}
Err(error) => {
method::report_error(fcx, span, ty,
item_name, None, error);
fcx.write_error(node_id);
None
}
}
}
}
fn constrain_path_type_parameters(fcx: &FnCtxt,
expr: &ast::Expr)
{
fcx.opt_node_ty_substs(expr.id, |item_substs| {
fcx.add_default_region_param_bounds(&item_substs.substs, expr);
});
}
impl<'tcx> Expectation<'tcx> {
/// Provide an expectation for an rvalue expression given an *optional*
/// hint, which is not required for type safety (the resulting type might
/// be checked higher up, as is the case with `&expr` and `box expr`), but
/// is useful in determining the concrete type.
///
/// The primary use case is where the expected type is a fat pointer,
/// like `&[isize]`. For example, consider the following statement:
///
/// let x: &[isize] = &[1, 2, 3];
///
/// In this case, the expected type for the `&[1, 2, 3]` expression is
/// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
/// expectation `ExpectHasType([isize])`, that would be too strong --
/// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`.
/// It is only the `&[1, 2, 3]` expression as a whole that can be coerced
/// to the type `&[isize]`. Therefore, we propagate this more limited hint,
/// which still is useful, because it informs integer literals and the like.
/// See the test case `test/run-pass/coerce-expect-unsized.rs` and #20169
/// for examples of where this comes up.
fn rvalue_hint(ty: Ty<'tcx>) -> Expectation<'tcx> {
match ty.sty {
ty::ty_vec(_, None) | ty::ty_trait(..) => {
ExpectRvalueLikeUnsized(ty)
}
_ => ExpectHasType(ty)
}
}
// Resolves `expected` by a single level if it is a variable. If
// there is no expected type or resolution is not possible (e.g.,
// no constraints yet present), just returns `None`.
fn resolve<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Expectation<'tcx> {
match self {
NoExpectation => {
NoExpectation
}
ExpectCastableToType(t) => {
ExpectCastableToType(
fcx.infcx().resolve_type_vars_if_possible(&t))
}
ExpectHasType(t) => {
ExpectHasType(
fcx.infcx().resolve_type_vars_if_possible(&t))
}
ExpectRvalueLikeUnsized(t) => {
ExpectRvalueLikeUnsized(
fcx.infcx().resolve_type_vars_if_possible(&t))
}
}
}
fn to_option<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Option<Ty<'tcx>> {
match self.resolve(fcx) {
NoExpectation => None,
ExpectCastableToType(ty) |
ExpectHasType(ty) |
ExpectRvalueLikeUnsized(ty) => Some(ty),
}
}
fn only_has_type<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Option<Ty<'tcx>> {
match self.resolve(fcx) {
ExpectHasType(ty) => Some(ty),
_ => None
}
}
}
impl<'tcx> Repr<'tcx> for Expectation<'tcx> {
fn repr(&self, tcx: &ty::ctxt<'tcx>) -> String {
match *self {
NoExpectation => format!("NoExpectation"),
ExpectHasType(t) => format!("ExpectHasType({})",
t.repr(tcx)),
ExpectCastableToType(t) => format!("ExpectCastableToType({})",
t.repr(tcx)),
ExpectRvalueLikeUnsized(t) => format!("ExpectRvalueLikeUnsized({})",
t.repr(tcx)),
}
}
}
pub fn check_decl_initializer<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
local: &'tcx ast::Local,
init: &'tcx ast::Expr)
{
let ref_bindings = fcx.tcx().pat_contains_ref_binding(&local.pat);
let local_ty = fcx.local_ty(init.span, local.id);
if !ref_bindings {
check_expr_coercable_to_type(fcx, init, local_ty)
} else {
// Somewhat subtle: if we have a `ref` binding in the pattern,
// we want to avoid introducing coercions for the RHS. This is
// both because it helps preserve sanity and, in the case of
// ref mut, for soundness (issue #23116). In particular, in
// the latter case, we need to be clear that the type of the
// referent for the reference that results is *equal to* the
// type of the lvalue it is referencing, and not some
// supertype thereof.
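// Rough sketch of the concern (see #23116): with
//
//     let ref mut r = rhs;
//
// `r` borrows the value produced by `rhs` directly, so the referent's
// type must be *exactly* the declared local type. If we coerced `rhs`
// instead, `r` could point at a value whose type is only a supertype
// of what the pattern claims, which is unsound for `ref mut`.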
check_expr(fcx, init);
let init_ty = fcx.expr_ty(init);
demand::eqtype(fcx, init.span, init_ty, local_ty);
};
}
pub fn check_decl_local<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, local: &'tcx ast::Local) {
let tcx = fcx.ccx.tcx;
let t = fcx.local_ty(local.span, local.id);
fcx.write_ty(local.id, t);
if let Some(ref init) = local.init {
check_decl_initializer(fcx, local, &**init);
let init_ty = fcx.expr_ty(&**init);
if ty::type_is_error(init_ty) {
fcx.write_ty(local.id, init_ty);
}
}
let pcx = pat_ctxt {
fcx: fcx,
map: pat_id_map(&tcx.def_map, &*local.pat),
};
_match::check_pat(&pcx, &*local.pat, t);
let pat_ty = fcx.node_ty(local.pat.id);
if ty::type_is_error(pat_ty) {
fcx.write_ty(local.id, pat_ty);
}
}
pub fn check_stmt<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, stmt: &'tcx ast::Stmt) {
let node_id;
let mut saw_bot = false;
let mut saw_err = false;
match stmt.node {
ast::StmtDecl(ref decl, id) => {
node_id = id;
match decl.node {
ast::DeclLocal(ref l) => {
check_decl_local(fcx, &**l);
let l_t = fcx.node_ty(l.id);
saw_bot = saw_bot || fcx.infcx().type_var_diverges(l_t);
saw_err = saw_err || ty::type_is_error(l_t);
}
ast::DeclItem(_) => {/* ignore for now */ }
}
}
ast::StmtExpr(ref expr, id) => {
node_id = id;
// Check with expected type of ()
check_expr_has_type(fcx, &**expr, ty::mk_nil(fcx.tcx()));
let expr_ty = fcx.expr_ty(&**expr);
saw_bot = saw_bot || fcx.infcx().type_var_diverges(expr_ty);
saw_err = saw_err || ty::type_is_error(expr_ty);
}
ast::StmtSemi(ref expr, id) => {
node_id = id;
check_expr(fcx, &**expr);
let expr_ty = fcx.expr_ty(&**expr);
saw_bot |= fcx.infcx().type_var_diverges(expr_ty);
saw_err |= ty::type_is_error(expr_ty);
}
ast::StmtMac(..) => fcx.ccx.tcx.sess.bug("unexpanded macro")
}
if saw_bot {
fcx.write_ty(node_id, fcx.infcx().next_diverging_ty_var());
}
else if saw_err {
fcx.write_error(node_id);
}
else {
fcx.write_nil(node_id)
}
}
pub fn check_block_no_value<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, blk: &'tcx ast::Block) {
check_block_with_expected(fcx, blk, ExpectHasType(ty::mk_nil(fcx.tcx())));
let blkty = fcx.node_ty(blk.id);
if ty::type_is_error(blkty) {
fcx.write_error(blk.id);
} else {
let nilty = ty::mk_nil(fcx.tcx());
demand::suptype(fcx, blk.span, nilty, blkty);
}
}
fn check_block_with_expected<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
blk: &'tcx ast::Block,
expected: Expectation<'tcx>) {
let prev = {
let mut fcx_ps = fcx.ps.borrow_mut();
let unsafety_state = fcx_ps.recurse(blk);
replace(&mut *fcx_ps, unsafety_state)
};
let mut warned = false;
let mut any_diverges = false;
let mut any_err = false;
for s in &blk.stmts {
check_stmt(fcx, &**s);
let s_id = ast_util::stmt_id(&**s);
let s_ty = fcx.node_ty(s_id);
if any_diverges && !warned && match s.node {
ast::StmtDecl(ref decl, _) => {
match decl.node {
ast::DeclLocal(_) => true,
_ => false,
}
}
ast::StmtExpr(_, _) | ast::StmtSemi(_, _) => true,
_ => false
} {
fcx.ccx
.tcx
.sess
.add_lint(lint::builtin::UNREACHABLE_CODE,
s_id,
s.span,
"unreachable statement".to_string());
warned = true;
}
any_diverges = any_diverges || fcx.infcx().type_var_diverges(s_ty);
any_err = any_err || ty::type_is_error(s_ty);
}
match blk.expr {
None => if any_err {
fcx.write_error(blk.id);
} else if any_diverges {
fcx.write_ty(blk.id, fcx.infcx().next_diverging_ty_var());
} else {
fcx.write_nil(blk.id);
},
Some(ref e) => {
if any_diverges && !warned {
fcx.ccx
.tcx
.sess
.add_lint(lint::builtin::UNREACHABLE_CODE,
e.id,
e.span,
"unreachable expression".to_string());
}
let ety = match expected {
ExpectHasType(ety) => {
check_expr_coercable_to_type(fcx, &**e, ety);
ety
}
_ => {
check_expr_with_expectation(fcx, &**e, expected);
fcx.expr_ty(&**e)
}
};
if any_err {
fcx.write_error(blk.id);
} else if any_diverges {
fcx.write_ty(blk.id, fcx.infcx().next_diverging_ty_var());
} else {
fcx.write_ty(blk.id, ety);
}
}
};
*fcx.ps.borrow_mut() = prev;
}
/// Checks a constant appearing in a type. At the moment this is just the
/// length expression in a fixed-length vector, but someday it might be
/// extended to type-level numeric literals.
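///
/// E.g. in `[u8; 2 * SIZE]` (with `SIZE` a `const`), the length
/// expression `2 * SIZE` is checked here with expected type `usize`.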
fn check_const_in_type<'a,'tcx>(ccx: &'a CrateCtxt<'a,'tcx>,
expr: &'tcx ast::Expr,
expected_type: Ty<'tcx>) {
let inh = static_inherited_fields(ccx);
let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(expected_type), expr.id);
check_const_with_ty(&fcx, expr.span, expr, expected_type);
}
fn check_const<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
sp: Span,
e: &'tcx ast::Expr,
id: ast::NodeId) {
let inh = static_inherited_fields(ccx);
let rty = ty::node_id_to_type(ccx.tcx, id);
let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(rty), e.id);
let declty = fcx.ccx.tcx.tcache.borrow().get(&local_def(id)).unwrap().ty;
check_const_with_ty(&fcx, sp, e, declty);
}
fn check_const_with_ty<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
_: Span,
e: &'tcx ast::Expr,
declty: Ty<'tcx>) {
// Gather locals in statics (because of block expressions).
// This is technically unnecessary because locals in static items are forbidden,
// but it prevents type checking from blowing up before const checking can
// properly emit an error.
GatherLocalsVisitor { fcx: fcx }.visit_expr(e);
check_expr_with_hint(fcx, e, declty);
demand::coerce(fcx, e.span, declty, e);
fcx.select_all_obligations_or_error();
fcx.check_casts();
regionck::regionck_expr(fcx, e);
writeback::resolve_type_vars_in_expr(fcx, e);
}
/// Checks whether a type can be represented in memory. In particular, it
/// identifies types that contain themselves without indirection through a
/// pointer, which would mean their size is unbounded. This is different from
/// the question of whether a type can be instantiated. See the definition of
/// `check_instantiable`.
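///
/// For example, `struct List { next: List }` is rejected as self-recursive,
/// while `struct List { next: Option<Box<List>> }` is representable because
/// the recursion goes through a pointer.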
pub fn check_representable(tcx: &ty::ctxt,
sp: Span,
item_id: ast::NodeId,
designation: &str) -> bool {
let rty = ty::node_id_to_type(tcx, item_id);
// Check that it is possible to represent this type. This call identifies
// (1) types that contain themselves and (2) types that contain a different
// recursive type. It is only necessary to throw an error on those that
// contain themselves. For case 2, there must be an inner type that will be
// caught by case 1.
match ty::is_type_representable(tcx, sp, rty) {
ty::SelfRecursive => {
span_err!(tcx.sess, sp, E0072,
"illegal recursive {} type; \
wrap the inner value in a box to make it representable",
designation);
return false
}
ty::Representable | ty::ContainsRecursive => (),
}
return true
}
/// Checks whether a type can be created without an instance of itself.
/// This is similar but different from the question of whether a type
/// can be represented. For example, the following type:
///
/// enum foo { None, Some(foo) }
///
/// is instantiable but is not representable. Similarly, the type
///
/// enum foo { Some(@foo) }
///
/// is representable, but not instantiable.
pub fn check_instantiable(tcx: &ty::ctxt,
sp: Span,
item_id: ast::NodeId)
-> bool {
let item_ty = ty::node_id_to_type(tcx, item_id);
if !ty::is_instantiable(tcx, item_ty) {
span_err!(tcx.sess, sp, E0073,
"this type cannot be instantiated without an \
instance of itself");
fileline_help!(tcx.sess, sp, "consider using `Option<{}>`",
ppaux::ty_to_string(tcx, item_ty));
false
} else {
true
}
}
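/// Checks that a SIMD type is a non-empty, homogeneous tuple struct of
/// machine types and is not generic. A well-formed example (using this
/// era's unstable `#[simd]` attribute) is:
///
///     #[simd] struct f32x4(f32, f32, f32, f32);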
pub fn check_simd(tcx: &ty::ctxt, sp: Span, id: ast::NodeId) {
let t = ty::node_id_to_type(tcx, id);
if ty::type_needs_subst(t) {
span_err!(tcx.sess, sp, E0074, "SIMD vector cannot be generic");
return;
}
match t.sty {
ty::ty_struct(did, substs) => {
let fields = ty::lookup_struct_fields(tcx, did);
if fields.is_empty() {
span_err!(tcx.sess, sp, E0075, "SIMD vector cannot be empty");
return;
}
let e = ty::lookup_field_type(tcx, did, fields[0].id, substs);
if !fields.iter().all(
|f| ty::lookup_field_type(tcx, did, f.id, substs) == e) {
span_err!(tcx.sess, sp, E0076, "SIMD vector should be homogeneous");
return;
}
if !ty::type_is_machine(e) {
span_err!(tcx.sess, sp, E0077,
"SIMD vector element type should be machine type");
return;
}
}
_ => ()
}
}
pub fn check_enum_variants<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
sp: Span,
vs: &'tcx [P<ast::Variant>],
id: ast::NodeId) {
fn disr_in_range(ccx: &CrateCtxt,
ty: attr::IntType,
disr: ty::Disr) -> bool {
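// E.g. (sketch) `#[repr(u8)] enum E { A = 300 }`: 300 does not survive a
// round-trip through `u8`, so this returns false and the caller reports
// E0082 ("discriminant value outside specified type").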
fn uint_in_range(ccx: &CrateCtxt, ty: ast::UintTy, disr: ty::Disr) -> bool {
match ty {
ast::TyU8 => disr as u8 as Disr == disr,
ast::TyU16 => disr as u16 as Disr == disr,
ast::TyU32 => disr as u32 as Disr == disr,
ast::TyU64 => disr as u64 as Disr == disr,
ast::TyUs => uint_in_range(ccx, ccx.tcx.sess.target.uint_type, disr)
}
}
fn int_in_range(ccx: &CrateCtxt, ty: ast::IntTy, disr: ty::Disr) -> bool {
match ty {
ast::TyI8 => disr as i8 as Disr == disr,
ast::TyI16 => disr as i16 as Disr == disr,
ast::TyI32 => disr as i32 as Disr == disr,
ast::TyI64 => disr as i64 as Disr == disr,
ast::TyIs => int_in_range(ccx, ccx.tcx.sess.target.int_type, disr)
}
}
match ty {
attr::UnsignedInt(ty) => uint_in_range(ccx, ty, disr),
attr::SignedInt(ty) => int_in_range(ccx, ty, disr)
}
}
fn do_check<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
vs: &'tcx [P<ast::Variant>],
id: ast::NodeId,
hint: attr::ReprAttr) {
#![allow(trivial_numeric_casts)]
let rty = ty::node_id_to_type(ccx.tcx, id);
let mut disr_vals: Vec<ty::Disr> = Vec::new();
let inh = static_inherited_fields(ccx);
let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(rty), id);
let (_, repr_type_ty) = ty::enum_repr_type(ccx.tcx, Some(&hint));
for v in vs {
if let Some(ref e) = v.node.disr_expr {
check_const_with_ty(&fcx, e.span, e, repr_type_ty);
}
}
let def_id = local_def(id);
// ty::enum_variants guards against discriminant overflows, so
// we need not check for that.
let variants = ty::enum_variants(ccx.tcx, def_id);
for (v, variant) in vs.iter().zip(variants.iter()) {
let current_disr_val = variant.disr_val;
// Check for duplicate discriminant values
match disr_vals.iter().position(|&x| x == current_disr_val) {
Some(i) => {
span_err!(ccx.tcx.sess, v.span, E0081,
"discriminant value `{}` already exists", disr_vals[i]);
span_note!(ccx.tcx.sess, ccx.tcx.map.span(variants[i].id.node),
"conflicting discriminant here")
}
None => {}
}
// Check for unrepresentable discriminant values
match hint {
attr::ReprAny | attr::ReprExtern => (),
attr::ReprInt(sp, ity) => {
if !disr_in_range(ccx, ity, current_disr_val) {
span_err!(ccx.tcx.sess, v.span, E0082,
"discriminant value outside specified type");
span_note!(ccx.tcx.sess, sp,
"discriminant type specified here");
}
}
attr::ReprPacked => {
ccx.tcx.sess.bug("range_to_inttype: found ReprPacked on an enum");
}
}
disr_vals.push(current_disr_val);
}
}
let hint = *ty::lookup_repr_hints(ccx.tcx, ast::DefId { krate: ast::LOCAL_CRATE, node: id })
.get(0).unwrap_or(&attr::ReprAny);
if hint != attr::ReprAny && vs.len() <= 1 {
if vs.len() == 1 {
span_err!(ccx.tcx.sess, sp, E0083,
"unsupported representation for univariant enum");
} else {
span_err!(ccx.tcx.sess, sp, E0084,
"unsupported representation for zero-variant enum");
};
}
do_check(ccx, vs, id, hint);
check_representable(ccx.tcx, sp, id, "enum");
// Check that it is possible to instantiate this enum:
//
// This *sounds* like the same thing as representable, but it's
// not. See the definition of `check_instantiable()` for details.
check_instantiable(ccx.tcx, sp, id);
}
// Returns the type parameter count and the type for the given definition.
fn type_scheme_and_predicates_for_def<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
sp: Span,
defn: def::Def)
-> (TypeScheme<'tcx>, GenericPredicates<'tcx>) {
match defn {
def::DefLocal(nid) | def::DefUpvar(nid, _) => {
let typ = fcx.local_ty(sp, nid);
(ty::TypeScheme { generics: ty::Generics::empty(), ty: typ },
ty::GenericPredicates::empty())
}
def::DefFn(id, _) | def::DefMethod(id, _) |
def::DefStatic(id, _) | def::DefVariant(_, id, _) |
def::DefStruct(id) | def::DefConst(id) | def::DefAssociatedConst(id, _) => {
(ty::lookup_item_type(fcx.tcx(), id), ty::lookup_predicates(fcx.tcx(), id))
}
def::DefTrait(_) |
def::DefTy(..) |
def::DefAssociatedTy(..) |
def::DefPrimTy(_) |
def::DefTyParam(..) |
def::DefMod(..) |
def::DefForeignMod(..) |
def::DefUse(..) |
def::DefRegion(..) |
def::DefLabel(..) |
def::DefSelfTy(..) => {
fcx.ccx.tcx.sess.span_bug(sp, &format!("expected value, found {:?}", defn));
}
}
}
// Instantiates the given path, which must refer to an item with the given
// number of type parameters and type.
pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
segments: &[ast::PathSegment],
type_scheme: TypeScheme<'tcx>,
type_predicates: &ty::GenericPredicates<'tcx>,
opt_self_ty: Option<Ty<'tcx>>,
def: def::Def,
span: Span,
node_id: ast::NodeId) {
debug!("instantiate_path(path={:?}, def={}, node_id={}, type_scheme={})",
segments,
def.repr(fcx.tcx()),
node_id,
type_scheme.repr(fcx.tcx()));
// We need to extract the type parameters supplied by the user in
// the path `path`. Due to the current setup, this is a bit of a
// tricky process; the problem is that resolve only tells us the
// end-point of the path resolution, and not the intermediate steps.
// Luckily, we can (at least for now) deduce the intermediate steps
// just from the end-point.
//
// There are basically four cases to consider:
//
// 1. Reference to a *type*, such as a struct or enum:
//
// mod a { struct Foo<T> { ... } }
//
// Because we don't allow types to be declared within one
// another, a path that leads to a type will always look like
// `a::b::Foo<T>` where `a` and `b` are modules. This implies
// that only the final segment can have type parameters, and
// they are located in the TypeSpace.
//
// *Note:* Generally speaking, references to types don't
// actually pass through this function, but rather the
// `ast_ty_to_ty` function in `astconv`. However, in the case
// of struct patterns (and maybe literals) we do invoke
// `instantiate_path` to get the general type of an instance of
// a struct. (In these cases, there are actually no type
// parameters permitted at present, but perhaps we will allow
// them in the future.)
//
// 1b. Reference to an enum variant or tuple-like struct:
//
// struct foo<T>(...)
// enum E<T> { foo(...) }
//
// In these cases, the parameters are declared in the type
// space.
//
// 2. Reference to a *fn item*:
//
// fn foo<T>() { }
//
// In this case, the path will again always have the form
// `a::b::foo::<T>` where only the final segment should have
// type parameters. However, in this case, those parameters are
// declared on a value, and hence are in the `FnSpace`.
//
// 3. Reference to a *method*:
//
// impl<A> SomeStruct<A> {
// fn foo<B>(...)
// }
//
// Here we can have a path like
// `a::b::SomeStruct::<A>::foo::<B>`, in which case parameters
// may appear in two places. The penultimate segment,
// `SomeStruct::<A>`, contains parameters in TypeSpace, and the
// final segment, `foo::<B>` contains parameters in fn space.
//
// 4. Reference to an *associated const*:
//
// impl<A> AnotherStruct<A> {
// const FOO: B = BAR;
// }
//
// The path in this case will look like
// `a::b::AnotherStruct::<A>::FOO`, so the penultimate segment
// only will have parameters in TypeSpace.
//
// The first step then is to categorize the segments appropriately.
assert!(!segments.is_empty());
let mut ufcs_method = None;
let mut segment_spaces: Vec<_>;
match def {
// Case 1 and 1b. Reference to a *type* or *enum variant*.
def::DefSelfTy(..) |
def::DefStruct(..) |
def::DefVariant(..) |
def::DefTy(..) |
def::DefAssociatedTy(..) |
def::DefTrait(..) |
def::DefPrimTy(..) |
def::DefTyParam(..) => {
// Everything but the final segment should have no
// parameters at all.
segment_spaces = repeat(None).take(segments.len() - 1).collect();
segment_spaces.push(Some(subst::TypeSpace));
}
// Case 2. Reference to a top-level value.
def::DefFn(..) |
def::DefConst(..) |
def::DefStatic(..) => {
segment_spaces = repeat(None).take(segments.len() - 1).collect();
segment_spaces.push(Some(subst::FnSpace));
}
// Case 3. Reference to a method.
def::DefMethod(_, provenance) => {
match provenance {
def::FromTrait(trait_did) => {
callee::check_legal_trait_for_method_call(fcx.ccx, span, trait_did)
}
def::FromImpl(_) => {}
}
if segments.len() >= 2 {
segment_spaces = repeat(None).take(segments.len() - 2).collect();
segment_spaces.push(Some(subst::TypeSpace));
segment_spaces.push(Some(subst::FnSpace));
} else {
// `<T>::method` will end up here, and so can `T::method`.
let self_ty = opt_self_ty.expect("UFCS sugared method missing Self");
segment_spaces = vec![Some(subst::FnSpace)];
ufcs_method = Some((provenance, self_ty));
}
}
def::DefAssociatedConst(_, provenance) => {
match provenance {
def::FromTrait(trait_did) => {
callee::check_legal_trait_for_method_call(fcx.ccx, span, trait_did)
}
def::FromImpl(_) => {}
}
if segments.len() >= 2 {
segment_spaces = repeat(None).take(segments.len() - 2).collect();
segment_spaces.push(Some(subst::TypeSpace));
segment_spaces.push(None);
} else {
segment_spaces = vec![None];
}
}
// Other cases. Various nonsense that really shouldn't show up
// here. If they do, an error will have been reported
// elsewhere. (I hope)
def::DefMod(..) |
def::DefForeignMod(..) |
def::DefLocal(..) |
def::DefUse(..) |
def::DefRegion(..) |
def::DefLabel(..) |
def::DefUpvar(..) => {
segment_spaces = repeat(None).take(segments.len()).collect();
}
}
assert_eq!(segment_spaces.len(), segments.len());
// In `<T as Trait<A, B>>::method`, `A` and `B` are mandatory, but
// `opt_self_ty` can also be Some for `Foo::method`, where Foo's
// type parameters are not mandatory.
let require_type_space = opt_self_ty.is_some() && ufcs_method.is_none();
debug!("segment_spaces={:?}", segment_spaces);
// Next, examine the definition, and determine how many type
// parameters we expect from each space.
let type_defs = &type_scheme.generics.types;
let region_defs = &type_scheme.generics.regions;
// Now that we have categorized what space the parameters for each
// segment belong to, let's sort out the parameters that the user
// provided (if any) into their appropriate spaces. We'll also report
// errors if type parameters are provided in an inappropriate place.
let mut substs = Substs::empty();
for (opt_space, segment) in segment_spaces.iter().zip(segments.iter()) {
match *opt_space {
None => {
check_path_args(fcx.tcx(), slice::ref_slice(segment),
NO_TPS | NO_REGIONS);
}
Some(space) => {
push_explicit_parameters_from_segment_to_substs(fcx,
space,
span,
type_defs,
region_defs,
segment,
&mut substs);
}
}
}
if let Some(self_ty) = opt_self_ty {
if type_defs.len(subst::SelfSpace) == 1 {
substs.types.push(subst::SelfSpace, self_ty);
}
}
// Now we have to compare the types that the user *actually*
// provided against the types that were *expected*. If the user
// did not provide any types, then we want to substitute inference
// variables. If the user provided some types, we may still need
// to add defaults. If the user provided *too many* types, that's
// a problem.
for &space in &ParamSpace::all() {
adjust_type_parameters(fcx, span, space, type_defs,
require_type_space, &mut substs);
assert_eq!(substs.types.len(space), type_defs.len(space));
adjust_region_parameters(fcx, span, space, region_defs, &mut substs);
assert_eq!(substs.regions().len(space), region_defs.len(space));
}
// The things we are substituting into the type should not contain
// escaping late-bound regions, and nor should the base type scheme.
assert!(!substs.has_regions_escaping_depth(0));
assert!(!type_scheme.has_escaping_regions());
// Add all the obligations that are required, substituting and
// normalized appropriately.
let bounds = fcx.instantiate_bounds(span, &substs, &type_predicates);
fcx.add_obligations_for_parameters(
traits::ObligationCause::new(span, fcx.body_id, traits::ItemObligation(def.def_id())),
&bounds);
// Substitute the values for the type parameters into the type of
// the referenced item.
let ty_substituted = fcx.instantiate_type_scheme(span, &substs, &type_scheme.ty);
if let Some((def::FromImpl(impl_def_id), self_ty)) = ufcs_method {
// In the case of `Foo<T>::method` and `<Foo<T>>::method`, if `method`
// is inherent, there is no `Self` parameter, instead, the impl needs
// type parameters, which we can infer by unifying the provided `Self`
// with the substituted impl type.
let impl_scheme = ty::lookup_item_type(fcx.tcx(), impl_def_id);
assert_eq!(substs.types.len(subst::TypeSpace),
impl_scheme.generics.types.len(subst::TypeSpace));
assert_eq!(substs.regions().len(subst::TypeSpace),
impl_scheme.generics.regions.len(subst::TypeSpace));
let impl_ty = fcx.instantiate_type_scheme(span, &substs, &impl_scheme.ty);
if fcx.mk_subty(false, infer::Misc(span), self_ty, impl_ty).is_err() {
fcx.tcx().sess.span_bug(span,
&format!(
"instantiate_path: (UFCS) {} was a subtype of {} but now is not?",
self_ty.repr(fcx.tcx()),
impl_ty.repr(fcx.tcx())));
}
}
fcx.write_ty(node_id, ty_substituted);
fcx.write_substs(node_id, ty::ItemSubsts { substs: substs });
return;
/// Finds the parameters that the user provided and adds them to `substs`. If too many
/// parameters are provided, then reports an error and clears the output vector.
///
/// We clear the output vector because that will cause the `adjust_XXX_parameters()` later to
/// use inference variables. This seems less likely to lead to derived errors.
///
/// Note that we *do not* check for *too few* parameters here; due to the presence of defaults
/// etc., that is more complicated. However, we do report *too many* parameters
/// here because we can easily use the precise span of the N+1'th parameter.
fn push_explicit_parameters_from_segment_to_substs<'a, 'tcx>(
fcx: &FnCtxt<'a, 'tcx>,
space: subst::ParamSpace,
span: Span,
type_defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
region_defs: &VecPerParamSpace<ty::RegionParameterDef>,
segment: &ast::PathSegment,
substs: &mut Substs<'tcx>)
{
match segment.parameters {
ast::AngleBracketedParameters(ref data) => {
push_explicit_angle_bracketed_parameters_from_segment_to_substs(
fcx, space, type_defs, region_defs, data, substs);
}
ast::ParenthesizedParameters(ref data) => {
span_err!(fcx.tcx().sess, span, E0238,
"parenthesized parameters may only be used with a trait");
push_explicit_parenthesized_parameters_from_segment_to_substs(
fcx, space, span, type_defs, data, substs);
}
}
}
fn push_explicit_angle_bracketed_parameters_from_segment_to_substs<'a, 'tcx>(
fcx: &FnCtxt<'a, 'tcx>,
space: subst::ParamSpace,
type_defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
region_defs: &VecPerParamSpace<ty::RegionParameterDef>,
data: &ast::AngleBracketedParameterData,
substs: &mut Substs<'tcx>)
{
{
let type_count = type_defs.len(space);
assert_eq!(substs.types.len(space), 0);
for (i, typ) in data.types.iter().enumerate() {
let t = fcx.to_ty(&**typ);
if i < type_count {
substs.types.push(space, t);
} else if i == type_count {
span_err!(fcx.tcx().sess, typ.span, E0087,
"too many type parameters provided: \
expected at most {} parameter(s), \
found {} parameter(s)",
type_count, data.types.len());
substs.types.truncate(space, 0);
break;
}
}
}
if !data.bindings.is_empty() {
span_err!(fcx.tcx().sess, data.bindings[0].span, E0182,
"unexpected binding of associated item in expression path \
(only allowed in type paths)");
}
{
let region_count = region_defs.len(space);
assert_eq!(substs.regions().len(space), 0);
for (i, lifetime) in data.lifetimes.iter().enumerate() {
let r = ast_region_to_region(fcx.tcx(), lifetime);
if i < region_count {
substs.mut_regions().push(space, r);
} else if i == region_count {
span_err!(fcx.tcx().sess, lifetime.span, E0088,
"too many lifetime parameters provided: \
expected {} parameter(s), found {} parameter(s)",
region_count,
data.lifetimes.len());
substs.mut_regions().truncate(space, 0);
break;
}
}
}
}
/// As with
/// `push_explicit_angle_bracketed_parameters_from_segment_to_substs`,
/// but intended for `Foo(A,B) -> C` form. This expands to
/// roughly the same thing as `Foo<(A,B),C>`. One important
/// difference has to do with the treatment of anonymous
/// regions, which are translated into bound regions (NYI).
fn push_explicit_parenthesized_parameters_from_segment_to_substs<'a, 'tcx>(
fcx: &FnCtxt<'a, 'tcx>,
space: subst::ParamSpace,
span: Span,
type_defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
data: &ast::ParenthesizedParameterData,
substs: &mut Substs<'tcx>)
{
let type_count = type_defs.len(space);
if type_count < 2 {
span_err!(fcx.tcx().sess, span, E0167,
"parenthesized form always supplies 2 type parameters, \
but only {} parameter(s) were expected",
type_count);
}
let input_tys: Vec<Ty> =
data.inputs.iter().map(|ty| fcx.to_ty(&**ty)).collect();
let tuple_ty =
ty::mk_tup(fcx.tcx(), input_tys);
if type_count >= 1 {
substs.types.push(space, tuple_ty);
}
let output_ty: Option<Ty> =
data.output.as_ref().map(|ty| fcx.to_ty(&**ty));
let output_ty =
output_ty.unwrap_or(ty::mk_nil(fcx.tcx()));
if type_count >= 2 {
substs.types.push(space, output_ty);
}
}
fn adjust_type_parameters<'a, 'tcx>(
fcx: &FnCtxt<'a, 'tcx>,
span: Span,
space: ParamSpace,
defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
require_type_space: bool,
substs: &mut Substs<'tcx>)
{
let provided_len = substs.types.len(space);
let desired = defs.get_slice(space);
let required_len = desired.iter()
.take_while(|d| d.default.is_none())
.count();
debug!("adjust_type_parameters(space={:?}, \
provided_len={}, \
desired_len={}, \
required_len={})",
space,
provided_len,
desired.len(),
required_len);
// Enforced by `push_explicit_parameters_from_segment_to_substs()`.
assert!(provided_len <= desired.len());
// Nothing specified at all: supply inference variables for
// everything.
if provided_len == 0 && !(require_type_space && space == subst::TypeSpace) {
substs.types.replace(space, fcx.infcx().next_ty_vars(desired.len()));
return;
}
// Too few parameters specified: report an error and use Err
// for everything.
if provided_len < required_len {
let qualifier =
if desired.len() != required_len { "at least " } else { "" };
span_err!(fcx.tcx().sess, span, E0089,
"too few type parameters provided: expected {}{} parameter(s) \
, found {} parameter(s)",
qualifier, required_len, provided_len);
substs.types.replace(space, repeat(fcx.tcx().types.err).take(desired.len()).collect());
return;
}
// Otherwise, add in any optional parameters that the user
// omitted. The case of *too many* parameters is handled
// already by
// push_explicit_parameters_from_segment_to_substs(). Note
// that the *default* types are expressed in terms of all prior
// parameters, so we have to substitute as we go with the
// partial substitution that we have built up.
for i in provided_len..desired.len() {
let default = desired[i].default.unwrap();
let default = default.subst_spanned(fcx.tcx(), substs, Some(span));
substs.types.push(space, default);
}
assert_eq!(substs.types.len(space), desired.len());
debug!("Final substs: {}", substs.repr(fcx.tcx()));
}
fn adjust_region_parameters(
fcx: &FnCtxt,
span: Span,
space: ParamSpace,
defs: &VecPerParamSpace<ty::RegionParameterDef>,
substs: &mut Substs)
{
let provided_len = substs.mut_regions().len(space);
let desired = defs.get_slice(space);
// Enforced by `push_explicit_parameters_from_segment_to_substs()`.
assert!(provided_len <= desired.len());
// If nothing was provided, just use inference variables.
if provided_len == 0 {
substs.mut_regions().replace(
space,
fcx.infcx().region_vars_for_defs(span, desired));
return;
}
// If just the right number were provided, everybody is happy.
if provided_len == desired.len() {
return;
}
// Otherwise, too few were provided. Report an error and then
// use inference variables.
span_err!(fcx.tcx().sess, span, E0090,
"too few lifetime parameters provided: expected {} parameter(s), \
found {} parameter(s)",
desired.len(), provided_len);
substs.mut_regions().replace(
space,
fcx.infcx().region_vars_for_defs(span, desired));
}
}
fn structurally_resolve_type_or_else<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
sp: Span,
ty: Ty<'tcx>,
f: F) -> Ty<'tcx>
where F: Fn() -> Ty<'tcx>
{
let mut ty = fcx.resolve_type_vars_if_possible(ty);
if ty::type_is_ty_var(ty) {
let alternative = f();
// If the alternative is also unresolved or erroneous, report an error.
if ty::type_is_ty_var(alternative) || ty::type_is_error(alternative) {
fcx.type_error_message(sp, |_actual| {
"the type of this value must be known in this context".to_string()
}, ty, None);
demand::suptype(fcx, sp, fcx.tcx().types.err, ty);
ty = fcx.tcx().types.err;
} else {
demand::suptype(fcx, sp, alternative, ty);
ty = alternative;
}
}
ty
}
// Resolves `typ` by a single level if `typ` is a type variable. If no
// resolution is possible, then an error is reported.
pub fn structurally_resolved_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
sp: Span,
ty: Ty<'tcx>)
-> Ty<'tcx>
{
structurally_resolve_type_or_else(fcx, sp, ty, || {
fcx.tcx().types.err
})
}
// Returns true if b contains a break that can exit from b
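// E.g. `loop { break; }` may break, so the `ExprLoop` arm above gives the
// loop type `()`; a `loop { }` with no reachable `break` instead gets a
// fresh diverging type variable.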
pub fn may_break(cx: &ty::ctxt, id: ast::NodeId, b: &ast::Block) -> bool {
// First: is there an unlabeled break immediately
// inside the loop?
(loop_query(&*b, |e| {
match *e {
ast::ExprBreak(None) => true,
_ => false
}
})) ||
// Second: is there a labeled break with label
// <id> nested anywhere inside the loop?
(block_query(b, |e| {
if let ast::ExprBreak(Some(_)) = e.node {
lookup_full_def(cx, e.span, e.id) == def::DefLabel(id)
} else {
false
}
}))
}
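/// E.g. (sketch) `type Pair<A, B> = (A, A);` uses `A` but never `B`, so
/// `B` is reported as unused via E0091 below.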
pub fn check_bounds_are_used<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
span: Span,
tps: &OwnedSlice<ast::TyParam>,
ty: Ty<'tcx>) {
debug!("check_bounds_are_used(n_tps={}, ty={})",
tps.len(), ppaux::ty_to_string(ccx.tcx, ty));
if tps.is_empty() { return; }
// Make a vector of booleans, initially false; each entry is set to true
// when the corresponding type parameter is used in `ty`.
let mut tps_used: Vec<_> = repeat(false).take(tps.len()).collect();
ty::walk_ty(ty, |t| {
match t.sty {
ty::ty_param(ParamTy {idx, ..}) => {
debug!("Found use of ty param num {}", idx);
tps_used[idx as usize] = true;
}
_ => ()
}
});
for (i, b) in tps_used.iter().enumerate() {
if !*b {
span_err!(ccx.tcx.sess, span, E0091,
"type parameter `{}` is unused",
token::get_ident(tps[i].ident));
}
}
}
/// Remember to add all intrinsics here, in librustc_trans/trans/intrinsic.rs,
/// and in libcore/intrinsics.rs
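///
/// Intrinsics are declared by users roughly as (sketch):
///
///     extern "rust-intrinsic" {
///         fn transmute<T, U>(e: T) -> U;
///     }
///
/// and this function compares such a declaration against the signature
/// expected for the named intrinsic.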
pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
fn param<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, n: u32) -> Ty<'tcx> {
let name = token::intern(&format!("P{}", n));
ty::mk_param(ccx.tcx, subst::FnSpace, n, name)
}
let tcx = ccx.tcx;
let name = token::get_ident(it.ident);
let (n_tps, inputs, output) = if name.starts_with("atomic_") {
let split : Vec<&str> = name.split('_').collect();
assert!(split.len() >= 2, "atomic intrinsic name has an unexpected format");
// We only care about the operation here.
let (n_tps, inputs, output) = match split[1] {
"cxchg" => (1, vec!(ty::mk_mut_ptr(tcx, param(ccx, 0)),
param(ccx, 0),
param(ccx, 0)),
param(ccx, 0)),
"load" => (1, vec!(ty::mk_imm_ptr(tcx, param(ccx, 0))),
param(ccx, 0)),
"store" => (1, vec!(ty::mk_mut_ptr(tcx, param(ccx, 0)), param(ccx, 0)),
ty::mk_nil(tcx)),
"xchg" | "xadd" | "xsub" | "and" | "nand" | "or" | "xor" | "max" |
"min" | "umax" | "umin" => {
(1, vec!(ty::mk_mut_ptr(tcx, param(ccx, 0)), param(ccx, 0)),
param(ccx, 0))
}
"fence" | "singlethreadfence" => {
(0, Vec::new(), ty::mk_nil(tcx))
}
op => {
span_err!(tcx.sess, it.span, E0092,
"unrecognized atomic operation function: `{}`", op);
return;
}
};
(n_tps, inputs, ty::FnConverging(output))
} else if &name[..] == "abort" || &name[..] == "unreachable" {
(0, Vec::new(), ty::FnDiverging)
} else {
let (n_tps, inputs, output) = match &name[..] {
"breakpoint" => (0, Vec::new(), ty::mk_nil(tcx)),
"size_of" |
"pref_align_of" | "min_align_of" => (1, Vec::new(), ccx.tcx.types.usize),
"size_of_val" | "min_align_of_val" => {
(1, vec![
ty::mk_imm_rptr(tcx,
tcx.mk_region(ty::ReLateBound(ty::DebruijnIndex::new(1),
ty::BrAnon(0))),
param(ccx, 0))
], ccx.tcx.types.usize)
}
"init" | "init_dropped" => (1, Vec::new(), param(ccx, 0)),
"uninit" => (1, Vec::new(), param(ccx, 0)),
"forget" => (1, vec!( param(ccx, 0) ), ty::mk_nil(tcx)),
"transmute" => (2, vec!( param(ccx, 0) ), param(ccx, 1)),
"move_val_init" => {
(1,
vec!(
ty::mk_mut_rptr(tcx,
tcx.mk_region(ty::ReLateBound(ty::DebruijnIndex::new(1),
ty::BrAnon(0))),
param(ccx, 0)),
param(ccx, 0)
),
ty::mk_nil(tcx))
}
"drop_in_place" => {
(1, vec![ty::mk_mut_ptr(tcx, param(ccx, 0))], ty::mk_nil(tcx))
}
"needs_drop" => (1, Vec::new(), ccx.tcx.types.bool),
"type_name" => (1, Vec::new(), ty::mk_str_slice(tcx, tcx.mk_region(ty::ReStatic),
ast::MutImmutable)),
"type_id" => (1, Vec::new(), ccx.tcx.types.u64),
"offset" | "arith_offset" => {
(1,
vec!(
ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0),
mutbl: ast::MutImmutable
}),
ccx.tcx.types.isize
),
ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0),
mutbl: ast::MutImmutable
}))
}
"copy" | "copy_nonoverlapping" => {
(1,
vec!(
ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0),
mutbl: ast::MutImmutable
}),
ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0),
mutbl: ast::MutMutable
}),
tcx.types.usize,
),
ty::mk_nil(tcx))
}
"volatile_copy_memory" | "volatile_copy_nonoverlapping_memory" => {
(1,
vec!(
ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0),
mutbl: ast::MutMutable
}),
ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0),
mutbl: ast::MutImmutable
}),
tcx.types.usize,
),
ty::mk_nil(tcx))
}
"write_bytes" | "volatile_set_memory" => {
(1,
vec!(
ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0),
mutbl: ast::MutMutable
}),
tcx.types.u8,
tcx.types.usize,
),
ty::mk_nil(tcx))
}
"sqrtf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32),
"sqrtf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64),
"powif32" => {
(0,
vec!( tcx.types.f32, tcx.types.i32 ),
tcx.types.f32)
}
"powif64" => {
(0,
vec!( tcx.types.f64, tcx.types.i32 ),
tcx.types.f64)
}
"sinf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32),
"sinf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64),
"cosf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32),
"cosf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64),
"powf32" => {
(0,
vec!( tcx.types.f32, tcx.types.f32 ),
tcx.types.f32)
}
"powf64" => {
(0,
vec!( tcx.types.f64, tcx.types.f64 ),
tcx.types.f64)
}
"expf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32),
"expf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64),
"exp2f32" => (0, vec!( tcx.types.f32 ), tcx.types.f32),
"exp2f64" => (0, vec!( tcx.types.f64 ), tcx.types.f64),
"logf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32),
"logf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64),
"log10f32" => (0, vec!( tcx.types.f32 ), tcx.types.f32),
"log10f64" => (0, vec!( tcx.types.f64 ), tcx.types.f64),
"log2f32" => (0, vec!( tcx.types.f32 ), tcx.types.f32),
"log2f64" => (0, vec!( tcx.types.f64 ), tcx.types.f64),
"fmaf32" => {
(0,
vec!( tcx.types.f32, tcx.types.f32, tcx.types.f32 ),
tcx.types.f32)
}
"fmaf64" => {
(0,
vec!( tcx.types.f64, tcx.types.f64, tcx.types.f64 ),
tcx.types.f64)
}
"fabsf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32),
"fabsf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64),
"copysignf32" => (0, vec!( tcx.types.f32, tcx.types.f32 ), tcx.types.f32),
"copysignf64" => (0, vec!( tcx.types.f64, tcx.types.f64 ), tcx.types.f64),
"floorf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32),
"floorf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64),
"ceilf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32),
"ceilf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64),
"truncf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32),
"truncf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64),
"rintf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32),
"rintf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64),
"nearbyintf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32),
"nearbyintf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64),
"roundf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32),
"roundf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64),
"ctpop8" => (0, vec!( tcx.types.u8 ), tcx.types.u8),
"ctpop16" => (0, vec!( tcx.types.u16 ), tcx.types.u16),
"ctpop32" => (0, vec!( tcx.types.u32 ), tcx.types.u32),
"ctpop64" => (0, vec!( tcx.types.u64 ), tcx.types.u64),
"ctlz8" => (0, vec!( tcx.types.u8 ), tcx.types.u8),
"ctlz16" => (0, vec!( tcx.types.u16 ), tcx.types.u16),
"ctlz32" => (0, vec!( tcx.types.u32 ), tcx.types.u32),
"ctlz64" => (0, vec!( tcx.types.u64 ), tcx.types.u64),
"cttz8" => (0, vec!( tcx.types.u8 ), tcx.types.u8),
"cttz16" => (0, vec!( tcx.types.u16 ), tcx.types.u16),
"cttz32" => (0, vec!( tcx.types.u32 ), tcx.types.u32),
"cttz64" => (0, vec!( tcx.types.u64 ), tcx.types.u64),
"bswap16" => (0, vec!( tcx.types.u16 ), tcx.types.u16),
"bswap32" => (0, vec!( tcx.types.u32 ), tcx.types.u32),
"bswap64" => (0, vec!( tcx.types.u64 ), tcx.types.u64),
"volatile_load" =>
(1, vec!( ty::mk_imm_ptr(tcx, param(ccx, 0)) ), param(ccx, 0)),
"volatile_store" =>
(1, vec!( ty::mk_mut_ptr(tcx, param(ccx, 0)), param(ccx, 0) ), ty::mk_nil(tcx)),
"i8_add_with_overflow" | "i8_sub_with_overflow" | "i8_mul_with_overflow" =>
(0, vec!(tcx.types.i8, tcx.types.i8),
ty::mk_tup(tcx, vec!(tcx.types.i8, tcx.types.bool))),
"i16_add_with_overflow" | "i16_sub_with_overflow" | "i16_mul_with_overflow" =>
(0, vec!(tcx.types.i16, tcx.types.i16),
ty::mk_tup(tcx, vec!(tcx.types.i16, tcx.types.bool))),
"i32_add_with_overflow" | "i32_sub_with_overflow" | "i32_mul_with_overflow" =>
(0, vec!(tcx.types.i32, tcx.types.i32),
ty::mk_tup(tcx, vec!(tcx.types.i32, tcx.types.bool))),
"i64_add_with_overflow" | "i64_sub_with_overflow" | "i64_mul_with_overflow" =>
(0, vec!(tcx.types.i64, tcx.types.i64),
ty::mk_tup(tcx, vec!(tcx.types.i64, tcx.types.bool))),
"u8_add_with_overflow" | "u8_sub_with_overflow" | "u8_mul_with_overflow" =>
(0, vec!(tcx.types.u8, tcx.types.u8),
ty::mk_tup(tcx, vec!(tcx.types.u8, tcx.types.bool))),
"u16_add_with_overflow" | "u16_sub_with_overflow" | "u16_mul_with_overflow" =>
(0, vec!(tcx.types.u16, tcx.types.u16),
ty::mk_tup(tcx, vec!(tcx.types.u16, tcx.types.bool))),
"u32_add_with_overflow" | "u32_sub_with_overflow" | "u32_mul_with_overflow"=>
(0, vec!(tcx.types.u32, tcx.types.u32),
ty::mk_tup(tcx, vec!(tcx.types.u32, tcx.types.bool))),
"u64_add_with_overflow" | "u64_sub_with_overflow" | "u64_mul_with_overflow" =>
(0, vec!(tcx.types.u64, tcx.types.u64),
ty::mk_tup(tcx, vec!(tcx.types.u64, tcx.types.bool))),
"unchecked_udiv" | "unchecked_sdiv" | "unchecked_urem" | "unchecked_srem" =>
(1, vec![param(ccx, 0), param(ccx, 0)], param(ccx, 0)),
"overflowing_add" | "overflowing_sub" | "overflowing_mul" =>
(1, vec![param(ccx, 0), param(ccx, 0)], param(ccx, 0)),
"return_address" => (0, vec![], ty::mk_imm_ptr(tcx, tcx.types.u8)),
"assume" => (0, vec![tcx.types.bool], ty::mk_nil(tcx)),
"discriminant_value" => (1, vec![
ty::mk_imm_rptr(tcx,
tcx.mk_region(ty::ReLateBound(ty::DebruijnIndex::new(1),
ty::BrAnon(0))),
param(ccx, 0))], tcx.types.u64),
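        // Each arm above evaluates to (n_tps, inputs, output): the number of
        // type parameters the intrinsic takes, its argument types, and its
        // return type. The fallback arm below rejects unknown intrinsic names.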
ref other => {
span_err!(tcx.sess, it.span, E0093,
"unrecognized intrinsic function: `{}`", *other);
return;
}
};
(n_tps, inputs, ty::FnConverging(output))
};
let fty = ty::mk_bare_fn(tcx, None, tcx.mk_bare_fn(ty::BareFnTy {
unsafety: ast::Unsafety::Unsafe,
abi: abi::RustIntrinsic,
sig: ty::Binder(FnSig {
inputs: inputs,
output: output,
variadic: false,
}),
}));
let i_ty = ty::lookup_item_type(ccx.tcx, local_def(it.id));
let i_n_tps = i_ty.generics.types.len(subst::FnSpace);
if i_n_tps != n_tps {
span_err!(tcx.sess, it.span, E0094,
"intrinsic has wrong number of type \
parameters: found {}, expected {}",
i_n_tps, n_tps);
} else {
require_same_types(tcx,
None,
false,
it.span,
i_ty.ty,
fty,
|| {
format!("intrinsic has wrong type: expected `{}`",
ppaux::ty_to_string(ccx.tcx, fty))
});
}
}<|fim▁end|> | did: ast::DefId)
-> TypeAndSubsts<'tcx> { |
<|file_name|>config_env.rs<|end_file_name|><|fim▁begin|>use envconfig::Envconfig;
use crate::domain::key_derivation::KeyDerivationFunction;
lazy_static! {
static ref APP_ENV_CONFIG: AppEnvConfig = AppEnvConfig::init().unwrap();
}
pub fn get_app_env_config() -> &'static AppEnvConfig {
    &APP_ENV_CONFIG
}
#[derive(Envconfig, Debug)]
pub struct AppEnvConfig {
#[envconfig(from = "NEW_AGENT_KDF", default = "RAW")]
pub new_agent_kdf: KeyDerivationFunction,
#[envconfig(from = "RESTORE_ON_DEMAND", default = "false")]
pub restore_on_demand: bool,
}
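// Usage sketch (names as defined above; the lazy_static cache means the
// environment is read only once per process):
//
//     let cfg = get_app_env_config();
//     if cfg.restore_on_demand {
//         // defer restoring state until it is first requested
//     }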
#[cfg(test)]
mod tests {
use super::*;
use std::env;
#[test]
fn should_construct_app_env_config_with_correct_kdf() {
env::remove_var("NEW_AGENT_KDF");
let app_config = AppEnvConfig::init().unwrap();
assert_eq!(app_config.new_agent_kdf, KeyDerivationFunction::Raw, "Default new_agent_kdf should be Raw");<|fim▁hole|> assert_eq!(app_config.new_agent_kdf, KeyDerivationFunction::Raw, "Expected new_agent_kdf to be Raw.");
env::set_var("NEW_AGENT_KDF", "ARGON2I_INT");
let app_config = AppEnvConfig::init().unwrap();
assert_eq!(app_config.new_agent_kdf, KeyDerivationFunction::Argon2iInt, "Expected new_agent_kdf to be Argon2iInt.");
env::set_var("NEW_AGENT_KDF", "ARGON2I_MOD");
let app_config = AppEnvConfig::init().unwrap();
assert_eq!(app_config.new_agent_kdf, KeyDerivationFunction::Argon2iMod, "Expected new_agent_kdf to be Argon2iMod.");
env::set_var("NEW_AGENT_KDF", "FOOBAR");
assert!(AppEnvConfig::init().is_err())
}
}<|fim▁end|> |
env::set_var("NEW_AGENT_KDF", "RAW");
let app_config = AppEnvConfig::init().unwrap(); |
<|file_name|>fish.ts<|end_file_name|><|fim▁begin|>import { FishingSpot } from './fishing-spot';
export interface Fish {
guide: string;
icon: number;<|fim▁hole|><|fim▁end|> | spots: FishingSpot[];
folklore?: number;
} |
<|file_name|>class-implement-trait-cross-crate.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:cci_class_trait.rs
extern crate cci_class_trait;
use cci_class_trait::animals::noisy;
struct cat {
meows: uint,
how_hungry : int,
name : String,
}
impl cat {
pub fn eat(&mut self) -> bool {
if self.how_hungry > 0 {
println!("OM NOM NOM");
self.how_hungry -= 2;<|fim▁hole|> return true;
}
else {
println!("Not hungry!");
return false;
}
}
}
impl noisy for cat {
fn speak(&mut self) { self.meow(); }
}
impl cat {
fn meow(&mut self) {
println!("Meow");
self.meows += 1u;
if self.meows % 5u == 0u {
self.how_hungry += 1;
}
}
}
fn cat(in_x : uint, in_y : int, in_name: String) -> cat {
cat {
meows: in_x,
how_hungry: in_y,
name: in_name
}
}
pub fn main() {
let mut nyan = cat(0u, 2, "nyan".to_string());
nyan.eat();
assert!((!nyan.eat()));
for _ in 1u..10u { nyan.speak(); };
assert!((nyan.eat()));
}<|fim▁end|> | |
<|file_name|>webpack.config.js<|end_file_name|><|fim▁begin|>const path = require('path');
const nodeExternals = require('webpack-node-externals'); // loaded so that external Node.js modules are not bundled.
const WebpackShellPlugin = require('webpack-shell-plugin');
const OutputFileName = 'aether.agent.package.js';
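// Intent (sketch): with target 'node' and webpack-node-externals, require()
// calls into node_modules stay as runtime requires instead of being inlined,
// keeping the server bundle small and native modules out of the bundle.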
var serverCfg = {
context: path.resolve(__dirname, 'src'),
entry: './app.js',
target: 'node',
    externals: [nodeExternals()], // configured so node_modules is ignored
output: {<|fim▁hole|> rules:[{
test: /\.js$/,
include: path.resolve(__dirname, 'src'),
exclude: /node_modules/,
use: [{
loader: 'babel-loader',
options: {
presets: [
['es2016']
],
                    plugins: [require('babel-plugin-transform-strict-mode')] // auto-insert 'use strict' into every module at build time, so no warnings appear even when it is omitted
}
}]
}]
},
plugins: [
new WebpackShellPlugin({
onBuildStart:['echo "Webpack Start"'],
onBuildEnd:['echo "Build End: ' + path.resolve(__dirname, 'dist') + '/' + OutputFileName + '"'],
})
]
};
module.exports = serverCfg;<|fim▁end|> | path: path.resolve(__dirname, 'dist'),
filename: OutputFileName
},
module: { |
<|file_name|>config.py<|end_file_name|><|fim▁begin|># Authors: Karl MacMillan <[email protected]>
#
# Copyright (C) 2007 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import
# pylint: disable=deprecated-module
from optparse import (
Option, Values, OptionParser, IndentedHelpFormatter, OptionValueError)
# pylint: enable=deprecated-module
from copy import copy
from configparser import SafeConfigParser
from urllib.parse import urlsplit
import socket
import functools
from dns.exception import DNSException
import dns.name
from ipaplatform.paths import paths
from ipapython.dn import DN
from ipapython.dnsutil import query_srv
from ipapython.ipautil import CheckedIPAddress, CheckedIPAddressLoopback
class IPAConfigError(Exception):
def __init__(self, msg=''):
self.msg = msg
Exception.__init__(self, msg)
def __repr__(self):
return self.msg
__str__ = __repr__
class IPAFormatter(IndentedHelpFormatter):
"""Our own optparse formatter that indents multiple lined usage string."""
def format_usage(self, usage):
usage_string = "Usage:"
spacing = " " * len(usage_string)
lines = usage.split("\n")
ret = "%s %s\n" % (usage_string, lines[0])
for line in lines[1:]:
ret += "%s %s\n" % (spacing, line)
return ret
def check_ip_option(option, opt, value, allow_loopback=False):
try:
if allow_loopback:
return CheckedIPAddressLoopback(value)
else:
return CheckedIPAddress(value)
except Exception as e:
raise OptionValueError("option {}: invalid IP address {}: {}"
.format(opt, value, e))
def check_dn_option(option, opt, value):
try:
return DN(value)
except Exception as e:
raise OptionValueError("option %s: invalid DN: %s" % (opt, e))
def check_constructor(option, opt, value):
con = option.constructor
assert con is not None, "Oops! Developer forgot to set 'constructor' kwarg"
try:
return con(value)
except Exception as e:
raise OptionValueError("option {} invalid: {}".format(opt, e))
class IPAOption(Option):
"""
optparse.Option subclass with support of options labeled as
security-sensitive such as passwords.
"""
ATTRS = Option.ATTRS + ["sensitive", "constructor"]
TYPES = Option.TYPES + ("ip", "dn", "constructor", "ip_with_loopback")
TYPE_CHECKER = copy(Option.TYPE_CHECKER)
TYPE_CHECKER["ip"] = check_ip_option
TYPE_CHECKER["ip_with_loopback"] = functools.partial(check_ip_option,
allow_loopback=True)
TYPE_CHECKER["dn"] = check_dn_option
TYPE_CHECKER["constructor"] = check_constructor
class IPAOptionParser(OptionParser):
"""
optparse.OptionParser subclass that uses IPAOption by default
for storing options.
"""
def __init__(self,
usage=None,
option_list=None,
option_class=IPAOption,
version=None,
conflict_handler="error",
description=None,
formatter=None,
add_help_option=True,
prog=None):
OptionParser.__init__(self, usage, option_list, option_class,
version, conflict_handler, description,
formatter, add_help_option, prog)
def get_safe_opts(self, opts):
"""
Returns all options except those with sensitive=True in the same
fashion as parse_args would
"""
all_opts_dict = {
o.dest: o for o in self._get_all_options()
if hasattr(o, 'sensitive')
}
safe_opts_dict = {}
for option, value in opts.__dict__.items():
if not all_opts_dict[option].sensitive:
safe_opts_dict[option] = value
return Values(safe_opts_dict)
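    # Usage sketch: options declared with sensitive=True are dropped from the
    # returned Values, so the result is safe to log. `log` below is illustrative:
    #
    #   parser = IPAOptionParser()
    #   parser.add_option("--password", dest="password", sensitive=True)
    #   options, args = parser.parse_args()
    #   log.debug("options: %s", parser.get_safe_opts(options))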
def verify_args(parser, args, needed_args = None):
"""Verify that we have all positional arguments we need, if not, exit."""
if needed_args:
needed_list = needed_args.split(" ")
else:
needed_list = []
len_need = len(needed_list)
len_have = len(args)
if len_have > len_need:
parser.error("too many arguments")
elif len_have < len_need:
parser.error("no %s specified" % needed_list[len_have])
class IPAConfig:
def __init__(self):
self.default_realm = None
self.default_server = []
self.default_domain = None
def get_realm(self):
if self.default_realm:
return self.default_realm
else:
raise IPAConfigError("no default realm")
def get_server(self):
if len(self.default_server):
return self.default_server
else:
raise IPAConfigError("no default server")
def get_domain(self):
if self.default_domain:
return self.default_domain
else:
raise IPAConfigError("no default domain")
# Global library config
config = IPAConfig()
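# Typical flow (sketch): call init_config(options) below after option parsing;
# values are taken from the command line first, then /etc/ipa/default.conf,
# then DNS SRV discovery, and this module-level singleton is filled in.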
def __parse_config(discover_server = True):
p = SafeConfigParser()
p.read(paths.IPA_DEFAULT_CONF)
try:
if not config.default_realm:
config.default_realm = p.get("global", "realm")
except Exception:
pass
if discover_server:
try:
s = p.get("global", "xmlrpc_uri")
server = urlsplit(s)
config.default_server.append(server.netloc)
except Exception:
pass
try:
if not config.default_domain:
config.default_domain = p.get("global", "domain")
except Exception:
pass
def __discover_config(discover_server = True):
servers = []
try:
if not config.default_domain:
# try once with REALM -> domain
domain = str(config.default_realm).lower()
name = "_ldap._tcp." + domain
try:
servers = query_srv(name)
except DNSException:
# try cycling on domain components of FQDN
try:
domain = dns.name.from_text(socket.getfqdn())
except DNSException:
return False
while True:
domain = domain.parent()
if str(domain) == '.':
return False
name = "_ldap._tcp.%s" % domain
try:
servers = query_srv(name)
break
except DNSException:
pass
<|fim▁hole|> if discover_server:
if not servers:
name = "_ldap._tcp.%s." % config.default_domain
try:
servers = query_srv(name)
except DNSException:
pass
for server in servers:
hostname = str(server.target).rstrip(".")
config.default_server.append(hostname)
except Exception:
pass
return None
def add_standard_options(parser):
parser.add_option("--realm", dest="realm", help="Override default IPA realm")
parser.add_option("--server", dest="server",
help="Override default FQDN of IPA server")
parser.add_option("--domain", dest="domain", help="Override default IPA DNS domain")
def init_config(options=None):
if options:
config.default_realm = options.realm
config.default_domain = options.domain
if options.server:
config.default_server.extend(options.server.split(","))
if len(config.default_server):
discover_server = False
else:
discover_server = True
__parse_config(discover_server)
__discover_config(discover_server)
# make sure the server list only contains unique items
new_server = []
for server in config.default_server:
if server not in new_server:
new_server.append(server)
config.default_server = new_server
if not config.default_realm:
raise IPAConfigError("IPA realm not found in DNS, in the config file (/etc/ipa/default.conf) or on the command line.")
if not config.default_server:
raise IPAConfigError("IPA server not found in DNS, in the config file (/etc/ipa/default.conf) or on the command line.")
if not config.default_domain:
raise IPAConfigError("IPA domain not found in the config file (/etc/ipa/default.conf) or on the command line.")<|fim▁end|> | config.default_domain = str(domain).rstrip(".")
|
<|file_name|>admin.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from . import models<|fim▁end|> | from django.contrib import admin |
<|file_name|>issue-4542.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed<|fim▁hole|>// except according to those terms.
// xfail-test
pub fn main() {
for os::args().each |arg| {
match arg.clone() {
s => { }
}
}
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Package initialization."""
from .core import *<|fim▁hole|><|fim▁end|> | from .main import main
from .pandocattributes import PandocAttributes |
<|file_name|>intro_evals.py<|end_file_name|><|fim▁begin|>from datetime import datetime
import structlog
from flask import Blueprint, request
from conditional.util.ldap import ldap_get_intro_members
from conditional.models.models import FreshmanCommitteeAttendance
from conditional.models.models import CommitteeMeeting
from conditional.models.models import FreshmanAccount
from conditional.models.models import FreshmanEvalData
from conditional.models.models import FreshmanHouseMeetingAttendance
from conditional.models.models import FreshmanSeminarAttendance
from conditional.models.models import MemberHouseMeetingAttendance
from conditional.models.models import MemberSeminarAttendance
from conditional.models.models import HouseMeeting
from conditional.models.models import TechnicalSeminar
from conditional.util.flask import render_template
from conditional.util.member import get_cm, get_hm
from conditional import start_of_year
intro_evals_bp = Blueprint('intro_evals_bp', __name__)
logger = structlog.get_logger()
@intro_evals_bp.route('/intro_evals/')
def display_intro_evals(internal=False):
log = logger.new(request=request)
log.info('Display Intro Evals Listing')
# get user data
def get_fid_cm_count(member_id):
return len([a for a in FreshmanCommitteeAttendance.query.filter(
FreshmanCommitteeAttendance.fid == member_id)
if CommitteeMeeting.query.filter(CommitteeMeeting.id == a.meeting_id).first().approved])
user_name = None
if not internal:
user_name = request.headers.get('x-webauth-user')
members = [account for account in ldap_get_intro_members()]
ie_members = []
# freshmen who don't have accounts
fids = [f for f in FreshmanAccount.query.filter(
FreshmanAccount.eval_date > start_of_year(),
FreshmanAccount.eval_date > datetime.now())]
for fid in fids:
h_meetings = [m.meeting_id for m in
FreshmanHouseMeetingAttendance.query.filter(
FreshmanHouseMeetingAttendance.fid == fid.id
).filter(
FreshmanHouseMeetingAttendance.attendance_status == "Absent"
)]
if fid.signatures_missed is None:
signatures_missed = -1
else:
signatures_missed = fid.signatures_missed
freshman = {
'name': fid.name,
'uid': fid.id,
'eval_date': fid.eval_date.strftime("%Y-%m-%d"),
'signatures_missed': signatures_missed,<|fim▁hole|> 'committee_meetings': get_fid_cm_count(fid.id),
'committee_meetings_passed': get_fid_cm_count(fid.id) >= 10,
'house_meetings_missed':
[
{
"date": m.date.strftime("%Y-%m-%d"),
"reason":
FreshmanHouseMeetingAttendance.query.filter(
FreshmanHouseMeetingAttendance.fid == fid.id).filter(
FreshmanHouseMeetingAttendance.meeting_id == m.id).first().excuse
}
for m in HouseMeeting.query.filter(
HouseMeeting.id.in_(h_meetings)
)
],
'technical_seminars':
[s.name for s in TechnicalSeminar.query.filter(
TechnicalSeminar.id.in_(
[a.seminar_id for a in FreshmanSeminarAttendance.query.filter(
FreshmanSeminarAttendance.fid == fid.id)
if TechnicalSeminar.query.filter(TechnicalSeminar.id == a.seminar_id).first().approved]
))
],
'social_events': '',
'freshman_project': "Pending",
'comments': "",
'ldap_account': False,
'status': "Pending"
}
ie_members.append(freshman)
# freshmen who have accounts
for member in members:
uid = member.uid
name = member.cn
freshman_data = FreshmanEvalData.query.filter(
FreshmanEvalData.eval_date > start_of_year(),
FreshmanEvalData.uid == uid).first()
if freshman_data is None:
continue
elif freshman_data.freshman_eval_result != "Pending" and internal:
continue
h_meetings = [m.meeting_id for m in get_hm(member)]
member_info = {
'name': name,
'uid': uid,
'eval_date': freshman_data.eval_date.strftime("%Y-%m-%d"),
'signatures_missed': freshman_data.signatures_missed,
'committee_meetings': len(get_cm(member)),
'committee_meetings_passed': len(get_cm(member)) >= 10,
'house_meetings_missed':
[
{
"date": m.date.strftime("%Y-%m-%d"),
"reason":
MemberHouseMeetingAttendance.query.filter(
MemberHouseMeetingAttendance.uid == uid,
MemberHouseMeetingAttendance.meeting_id == m.id).first().excuse
}
for m in HouseMeeting.query.filter(
HouseMeeting.id.in_(h_meetings)
)
],
'technical_seminars':
[s.name for s in TechnicalSeminar.query.filter(
TechnicalSeminar.id.in_(
[a.seminar_id for a in MemberSeminarAttendance.query.filter(
MemberSeminarAttendance.uid == uid)
if TechnicalSeminar.query.filter(
TechnicalSeminar.id == a.seminar_id,
TechnicalSeminar.timestamp > start_of_year()).first().approved]
))
],
'social_events': freshman_data.social_events,
'freshman_project': freshman_data.freshman_project,
'comments': freshman_data.other_notes,
'ldap_account': True,
'status': freshman_data.freshman_eval_result
}
ie_members.append(member_info)
ie_members.sort(key=lambda x: x['freshman_project'] == "Passed")
ie_members.sort(key=lambda x: len(x['house_meetings_missed']))
ie_members.sort(key=lambda x: x['committee_meetings'], reverse=True)
ie_members.sort(key=lambda x: x['signatures_missed'])
ie_members.sort(key=lambda x: x['status'] == "Passed")
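    # Python's sort is stable, so chaining the sorts above from least to most
    # significant key orders the listing primarily by pass/fail status, then
    # signatures missed, committee meetings, house meetings, and project.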
if internal:
return ie_members
# return names in 'first last (username)' format
return render_template(request,
'intro_evals.html',
username=user_name,
members=ie_members)<|fim▁end|> | |
<|file_name|>table_test.py<|end_file_name|><|fim▁begin|># coding=utf-8
import numpy as np
import bs
<|fim▁hole|>n_rows = 10
n_cols = 5
t.init (n_rows, n_cols);
for i in xrange (n_cols):
t.set_col_name (i, "Col " + str (i))
a = np.linspace (float (i), float (i + 1), n_rows)
t.set_col_values (i, a)
print t<|fim▁end|> | t = bs.common_types.table ()
|
<|file_name|>submit.py<|end_file_name|><|fim▁begin|>import os
import logging
from jsub.util import safe_mkdir
from jsub.util import safe_rmdir
class Submit(object):
def __init__(self, manager, task_id, sub_ids=None, dry_run=False, resubmit=False):
self.__manager = manager
self.__task = self.__manager.load_task(task_id)
self.__sub_ids = sub_ids
self.__dry_run = dry_run
self.__resubmit = resubmit
self.__logger = logging.getLogger('JSUB')
if self.__sub_ids==None:
self.__sub_ids=range(len(self.__task.data['jobvar']))
self.__initialize_manager()
def __initialize_manager(self):
self.__config_mgr = self.__manager.load_config_manager()
self.__backend_mgr = self.__manager.load_backend_manager()
self.__bootstrap_mgr = self.__manager.load_bootstrap_manager()
self.__navigator_mgr = self.__manager.load_navigator_manager()
self.__context_mgr = self.__manager.load_context_manager()
self.__action_mgr = self.__manager.load_action_manager()
self.__launcher_mgr = self.__manager.load_launcher_manager()
def handle(self):
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
main_root = os.path.join(run_root, 'main')
safe_rmdir(main_root)
safe_mkdir(main_root)
self.__create_input(main_root)
self.__create_context(main_root)
self.__create_action(main_root)
self.__create_navigator(main_root)
self.__create_bootstrap(main_root)
launcher_param = self.__create_launcher(run_root)
self.__submit(launcher_param)
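    # Layout produced under <run_root> by handle() (sketch):
    #   main/input  main/context  main/action  main/navigator  main/bootstrap
    # plus the launcher created directly under <run_root>.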
def __create_input(self, main_root):
content = self.__manager.load_content()
input_dir = os.path.join(main_root,'input')
try:
content.get(self.__task.data['id'], 'input', os.path.join(main_root, 'input'))
except:
safe_mkdir(input_dir)
def __create_context(self, main_root):
context_dir = os.path.join(main_root, 'context')
safe_mkdir(context_dir)
action_default = {}
for unit, param in self.__task.data['workflow'].items():
action_default[unit] = self.__action_mgr.default_config(param['type'])
navigators = self.__config_mgr.navigator()
context_format = self.__navigator_mgr.context_format(navigators)
self.__context_mgr.create_context_file(self.__task.data, action_default, context_format, context_dir)
def __create_action(self, main_root):
action_dir = os.path.join(main_root, 'action')
safe_mkdir(action_dir)
actions = set()
for unit, param in self.__task.data['workflow'].items():
actions.add(param['type'])
self.__action_mgr.create_actions(actions, action_dir)
def __create_navigator(self, main_root):
navigator_dir = os.path.join(main_root, 'navigator')
safe_mkdir(navigator_dir)<|fim▁hole|>
navigators = self.__config_mgr.navigator()
self.__navigator_mgr.create_navigators(navigators, navigator_dir)
def __create_bootstrap(self, main_root):
bootstrap_dir = os.path.join(main_root, 'bootstrap')
safe_mkdir(bootstrap_dir)
bootstrap = self.__config_mgr.bootstrap()
self.__bootstrap_mgr.create_bootstrap(bootstrap, bootstrap_dir)
def __create_launcher(self, run_root):
launcher = self.__task.data['backend']['launcher']
return self.__launcher_mgr.create_launcher(launcher, run_root)
def __submit(self, launcher_param):
if self.__dry_run:
return
if self.__resubmit==False:
if self.__task.data.get('backend_job_ids') or self.__task.data.get('backend_task_id'):
self.__logger.info('This task has already been submitted to backend, rerun the command with "-r" option if you wish to delete current jobs and resubmit the task.')
return
else:
self.__logger.info('Removing submitted jobs on backend before resubmission.')
task_id = self.__task.data.get('backend_task_id')
#remove previously generated files in job folder
job_ids = self.__task.data.get('backend_job_ids')
run_root = self.__backend_mgr.get_run_root(self.__task.data['backend'], self.__task.data['id'])
job_root=os.path.join(run_root,'subjobs')
safe_rmdir(job_root)
if task_id:
self.__backend_mgr.delete_task(self.__task.data['backend'],backend_task_id = task_id)
elif job_ids:
self.__backend_mgr.delete_jobs(self.__task.data['backend'],backend_job_ids = job_ids)
result = self.__backend_mgr.submit(self.__task.data['backend'], self.__task.data['id'], launcher_param, sub_ids = self.__sub_ids)
if not type(result) is dict:
result = {}
if 'backend_job_ids' in result:
njobs = len(result['backend_job_ids'])
else:
njobs = len(result)
if njobs>0:
self.__logger.info('%d jobs successfully submitted to backend.'%(njobs))
self.__task.data.setdefault('backend_job_ids',{})
backend_job_ids=result.get('backend_job_ids',{})
backend_task_id=result.get('backend_task_id',0)
self.__task.data['backend_job_ids'].update(backend_job_ids)
self.__task.data['backend_task_id']=backend_task_id
self.__task.data['status'] = 'Submitted'
task_pool = self.__manager.load_task_pool()
task_pool.save(self.__task)
self.__logger.debug(result)<|fim▁end|> | |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
'''Check online DNSSEC signing module (just basic checks).'''
import dns.rdatatype
from dnstest.test import Test
from dnstest.utils import *
from dnstest.module import ModOnlineSign
t = Test(stress=False)
ModOnlineSign.check()
knot = t.server("knot")
zones = t.zone_rnd(4, dnssec=False, records=5)
t.link(zones, knot, journal_content="none")
knot.add_module(zones[0], ModOnlineSign())
knot.add_module(zones[1], ModOnlineSign("ECDSAP384SHA384", key_size="384"))
knot.dnssec(zones[2]).enable = True
knot.dnssec(zones[3]).enable = True
knot.dnssec(zones[3]).nsec3 = True
def check_zone(zone, dnskey_rdata_start):
# Check SOA record.
soa1 = knot.dig(zone.name, "SOA", dnssec=True)
soa1.check(rcode="NOERROR", flags="QR AA")
soa1.check_count(1, "RRSIG")
t.sleep(1) # Ensure different RRSIGs.
soa2 = knot.dig(zone.name, "SOA", dnssec=True)
soa2.check(rcode="NOERROR", flags="QR AA")
soa2.check_count(1, "RRSIG")
for rrset in soa1.resp.answer:
if rrset.rdtype == dns.rdatatype.SOA:
if rrset not in soa2.resp.answer:
set_err("DIFFERENT SOA")
check_log("ERROR: DIFFERENT SOA")
elif rrset.rdtype == dns.rdatatype.RRSIG:
if rrset in soa2.resp.answer:
set_err("UNCHANGED RRSIG")
check_log("ERROR: UNCHANGED RRSIG")
else:
set_err("UNEXPECTED RRSET")
check_log("ERROR: UNEXPECTED RRSET")
detail_log("%s" % rrset)
# Check DNSKEY record.
resp = knot.dig(zone.name, "DNSKEY", dnssec=True)
resp.check(rcode="NOERROR", flags="QR AA")
resp.check_count(1, "DNSKEY")
resp.check_count(1, "RRSIG")
for rrset in resp.resp.answer:<|fim▁hole|> isset(dnskey_rdata_start in rrset.to_text(), "DNSKEY ALGORITHM")
# Check NSEC record.
resp = knot.dig("nx." + zone.name, "A", dnssec=True)
resp.check(rcode="NOERROR", flags="QR AA")
resp.check_count(0, section="answer")
resp.check_count(1, "SOA", section="authority")
resp.check_count(1, "NSEC", section="authority")
resp.check_count(2, "RRSIG", section="authority")
t.start()
serial = knot.zones_wait(zones)
check_zone(zones[0], "257 3 13")
check_zone(zones[1], "257 3 14")
for z in zones:
knot.update_zonefile(z, random=True)
knot.reload()
knot.zones_wait(zones, serial)
t.end()<|fim▁end|> | if rrset.rdtype != dns.rdatatype.DNSKEY:
continue
else: |
<|file_name|>TransformatorStructure.java<|end_file_name|><|fim▁begin|>package at.ac.tuwien.big.xmlintelledit.xmltext.ecoretransform.impl;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.activation.UnsupportedDataTypeException;
import org.apache.commons.lang3.StringEscapeUtils;
import org.eclipse.emf.common.util.Enumerator;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EAnnotation;
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EDataType;
import org.eclipse.emf.ecore.EEnum;
import org.eclipse.emf.ecore.EEnumLiteral;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.ecore.EcoreFactory;
import org.eclipse.emf.ecore.EcorePackage;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl;
import org.eclipse.emf.ecore.util.BasicExtendedMetaData;
import org.eclipse.emf.ecore.util.ExtendedMetaData;
import org.eclipse.emf.ecore.util.FeatureMap;
import org.eclipse.emf.ecore.util.FeatureMapUtil;
import org.eclipse.emf.ecore.xmi.XMLResource;
import org.eclipse.emf.ecore.xmi.impl.EcoreResourceFactoryImpl;
import org.eclipse.emf.ecore.xmi.impl.XMIResourceFactoryImpl;
import org.eclipse.emf.ecore.xmi.impl.XMIResourceImpl;
import at.ac.tuwien.big.xmlintelledit.xmltext.ecoretransform.CollectionValueTransformation;
import at.ac.tuwien.big.xmlintelledit.xmltext.ecoretransform.EAttributeTransformator;
import at.ac.tuwien.big.xmlintelledit.xmltext.ecoretransform.EReferenceTransformator;
import at.ac.tuwien.big.xmlintelledit.xmltext.ecoretransform.PartialObjectCopier;
import at.ac.tuwien.big.xmlintelledit.xmltext.ecoretransform.SingleObjectTransformator;
import at.ac.tuwien.big.xmlintelledit.xmltext.ecoretransform.Transformator;
import at.ac.tuwien.big.xmlintelledit.xmltext.ecoretransform.ValueTransformator;
import at.ac.tuwien.big.xtext.util.MyEcoreUtil;
@SuppressWarnings({"rawtypes", "unchecked", "unused"})
public class TransformatorStructure {
private Map<EAttribute, EAttributeTransformator> xmlToEcoreAttr = new HashMap<EAttribute, EAttributeTransformator>();
private Map<EAttribute, EAttributeTransformator> ecoreToXmlAttr = new HashMap<EAttribute, EAttributeTransformator>();
private Map<EStructuralFeature, PartialObjectCopier> xmlToEcoreChanger = new HashMap<>();
private Map<EStructuralFeature, PartialObjectCopier> ecoreToXmlChanger = new HashMap<>();
private TypeTransformatorStore store;
private Map<EReference, EReferenceTransformator> xmlToEcoreRef = new HashMap<EReference, EReferenceTransformator>();
private Map<EReference, EReferenceTransformator> ecoreToXmlRef = new HashMap<EReference, EReferenceTransformator>();
private Map<String, EObject> fragmentToXmlObject = new HashMap<String, EObject>();
private Map<EReference,EReference> xmlToEcoreReferences = new HashMap<EReference, EReference>();
private Map<EAttribute,EAttribute> xmlToEcoreAttribute = new HashMap<>();
private Map<EEnum,EEnum> copiedEEnums = new HashMap<EEnum, EEnum>();
private Map<EEnumLiteral,EEnumLiteral> copiedEEnumLiterals = new HashMap<EEnumLiteral, EEnumLiteral>();
private Map<EEnumLiteral,EEnumLiteral> backEEnumLiteral = new HashMap<EEnumLiteral, EEnumLiteral>();
private Map<String,EEnumLiteral> backEEnumLiteralStr = new HashMap<String, EEnumLiteral>();
private Map<EClass, EClass> xmlToEcoreClasses = new HashMap<EClass, EClass>();
private Map<EClass, EClass> ecoreToXmlClasses = new HashMap<EClass, EClass>();
private Map<EStructuralFeature,EStructuralFeature> ecoreToXmlFeature = new HashMap<EStructuralFeature, EStructuralFeature>();
private Map<String,EObject> targetMap = new HashMap<String, EObject>();
private Set<EObject> handledTargets = new HashSet<EObject>();
private Map<String,String> restrictedDatatypes = new HashMap<String,String>();
public Map<String, String> getRestrictedDatatypes() {
return restrictedDatatypes;
}
public String getStoreName(EObject eobj) {
if (eobj instanceof EClass) {
return ((EClass) eobj).getName();
} else if (eobj instanceof EEnum) {
return ((EEnum)eobj).getName();
} else if (eobj instanceof EStructuralFeature) {
EStructuralFeature esf = (EStructuralFeature)eobj;
return esf.getEContainingClass().getName()+"."+esf.getName();
}
return null;
}
public void readInBasicTarget(Resource targetRes) {
for (EObject eobj: (Iterable<EObject>)()->targetRes.getAllContents()) {
String storeName = getStoreName(eobj);
if (storeName != null) {
targetMap.put(storeName, eobj);
}
}
}
public EObject getIfExists(String targetName) {
EObject ret = targetMap.get(targetName);
if (ret != null) {
handledTargets.add(ret);
}
return ret;
}
//1 to one correspondance
public TransformatorStructure(Resource source, Resource target) {
for (EObject eobj: (Iterable<EObject>)source.getAllContents()) {
if (eobj instanceof EClass) {
EClass cl = (EClass)eobj;
EClass ecoreClass = (EClass)target.getEObject(source.getURIFragment(eobj));
xmlToEcoreClasses.put(cl, ecoreClass);
ecoreToXmlClasses.put(ecoreClass,cl);
System.out.println("Associating "+ cl+ " to "+ecoreClass);
//Not all, because then we would do something multiple times
for (EAttribute eattr: cl.getEAttributes()) {
EAttribute newA = (EAttribute)target.getEObject(source.getURIFragment(eattr));
xmlToEcoreAttribute.put(eattr, newA);
}
for (EReference eattr: cl.getEReferences()) {
EReference newRef = (EReference)target.getEObject(source.getURIFragment(eattr));
xmlToEcoreReferences.put(eattr, newRef);
}
} else if (eobj instanceof EEnum) {
EEnum eenum = (EEnum)eobj;
copiedEEnums.put(eenum, (EEnum)target.getEObject(source.getURIFragment(eenum)));
for (EEnumLiteral lit: eenum.getELiterals()) {
EEnumLiteral back = (EEnumLiteral)target.getEObject(source.getURIFragment(lit));
copiedEEnumLiterals.put(lit, back);
backEEnumLiteral.put(back, lit);
backEEnumLiteralStr.put(eenum.getName()+"."+lit.getLiteral(), lit);
}
//Ignore for now
} else if (eobj instanceof EDataType) {
//???
} else if (eobj instanceof EAttribute) {
//Have handled every important above?
} else if (eobj instanceof EReference) {
//Have handled every important above?
}
}
	//TODO: For copied elements this is dangerous ...
for (Entry<EClass,EClass> entr: xmlToEcoreClasses.entrySet()) {
if (!augmentEClassBasic(entr.getKey(), entr.getValue())) {
			//TODO: This is not quite right ...
entr.setValue(null);
}
}
for (Entry<EAttribute,EAttribute> entr: xmlToEcoreAttribute.entrySet()) {
if (!augmentAttributeBasic(entr.getKey(), entr.getValue())) {
entr.setValue(null);
}
}
for (Entry<EReference,EReference> entr: xmlToEcoreReferences.entrySet()) {
if (!augmentReferenceBasic(entr.getKey(), entr.getValue())) {
entr.setValue(null);
}
}
}
private EClass mixedData;
private EClass mixedText;
private EClass mixedFeature;
private EClass mixedBaseClass;
private EReference mixedBaseMixedAttr;
private EAttribute mixedValueAttr;
private EPackage ecorePackage;
public void generateMixClasses() {
if (mixedData == null) {
mixedData = (EClass)getIfExists("MixedData");
if (mixedData == null) {
mixedData = EcoreFactory.eINSTANCE.createEClass();
mixedData.setName("MixedData");
mixedData.setAbstract(true);
mixedValueAttr = EcoreFactory.eINSTANCE.createEAttribute();
mixedValueAttr.setName("value");
mixedValueAttr.setEType(EcorePackage.Literals.ESTRING);
mixedValueAttr.setLowerBound(1);
mixedValueAttr.setUpperBound(1);
mixedData.getEStructuralFeatures().add(mixedValueAttr);
ecorePackage.getEClassifiers().add(mixedData);
} else {
mixedValueAttr = (EAttribute)mixedData.getEStructuralFeature("value");
}
mixedText = (EClass)getIfExists("MixedText");
if (mixedText == null) {
mixedText = EcoreFactory.eINSTANCE.createEClass();
mixedText.setName("MixedText");
mixedText.getESuperTypes().add(mixedData);
ecorePackage.getEClassifiers().add(mixedText);
}
mixedFeature = (EClass)getIfExists("MixedFeature");
if (mixedFeature == null) {
mixedFeature = EcoreFactory.eINSTANCE.createEClass();
mixedFeature.setName("MixedFeature");
mixedFeature.getESuperTypes().add(mixedData);
ecorePackage.getEClassifiers().add(mixedFeature);
}
mixedBaseClass = (EClass)getIfExists("MixedBaseClass");
if (mixedBaseClass == null) {
mixedBaseClass = EcoreFactory.eINSTANCE.createEClass();
mixedBaseClass.setName("MixedBaseClass");
mixedBaseClass.setAbstract(true);
mixedBaseMixedAttr = EcoreFactory.eINSTANCE.createEReference();
mixedBaseMixedAttr.setName("mixed");
mixedBaseMixedAttr.setLowerBound(0);
mixedBaseMixedAttr.setUpperBound(-1);
mixedBaseMixedAttr.setContainment(true);
mixedBaseMixedAttr.setEType(mixedData);
mixedBaseClass.getEStructuralFeatures().add(mixedBaseMixedAttr);
ecorePackage.getEClassifiers().add(mixedBaseClass);
} else {
mixedBaseMixedAttr = (EReference)mixedBaseClass.getEStructuralFeature("mixed");
}
}
}
public boolean isMixed(EStructuralFeature feat) {
//TODO: ... faster
if (!"mixed".equals(feat.getName())) {
return false;
}
if ((feat.getEType() instanceof EClass && mixedData.isSuperTypeOf((EClass)feat.getEType())) || (feat.getEType() != null && "EFeatureMapEntry".equals(feat.getEType().getName()))) {
return true;
}
return false;
}
public TransformatorStructure(TypeTransformatorStore store, ResourceSet resourceSet, File xmlEcore) {
this.store = store;
parseXmlEcore(resourceSet,xmlEcore);
}
private TransformatorStructure() {
}
public static TransformatorStructure withKnownResult(TypeTransformatorStore store, ResourceSet resourceSet,
Resource xmlResource, Resource ecoreResource) {
TransformatorStructure ret = new TransformatorStructure();
ret.store = store;
ret.xmlResource = ()->xmlResource.getAllContents();
ret.ecoreResources.add(ecoreResource);
ret.readInBasicTarget(ecoreResource);
ret.parseXmlEcoreBasic(ecoreResource, resourceSet, xmlResource.getURI(), ()->xmlResource.getAllContents(), false);
return ret;
}
public static TransformatorStructure fromXmlEcore(TypeTransformatorStore store,
ResourceSet resourceSet, Resource ecoreXmlResource, String targetFilename) {
TransformatorStructure ret = new TransformatorStructure();
ret.store = store;
ret.xmlResource = ()->ecoreXmlResource.getAllContents();
ret.parseXmlEcore(null,resourceSet,targetFilename==null?null:URI.createFileURI(targetFilename),ret.xmlResource, false);
return ret;
}
public static TransformatorStructure fromXmlEcores(TypeTransformatorStore store,
ResourceSet resourceSet, List<Resource> ecoreXmlResources, String targetFilename) {
TransformatorStructure ret = new TransformatorStructure();
ret.store = store;
int ind = 0;
for (Resource ecoreXmlResource: ecoreXmlResources) {
ret.xmlResource = ()->ecoreXmlResource.getAllContents();
ret.parseXmlEcore(null,resourceSet,targetFilename==null?null:URI.createFileURI(targetFilename+(++ind)+".ecore"),ret.xmlResource, false);
}
return ret;
}
public TransformatorStructure(TypeTransformatorStore store, ResourceSet resourceSet, Resource xmlResource) {
this.store = store;
this.xmlResource = ()->xmlResource.getAllContents();
parseXmlEcore(null,resourceSet,URI.createURI(xmlResource.getURI()+"simplified"),this.xmlResource,false);
}
public TransformatorStructure(TypeTransformatorStore store, ResourceSet resourceSet, File xmlResourceFile, Iterable<EObject> xmlResource) {
this.store = store;
this.xmlResource = xmlResource;
parseXmlEcore(null,resourceSet,URI.createFileURI(xmlResourceFile.getAbsolutePath()+".simple.ecore"),xmlResource,false);
}
private EAttribute commonIdAttribute = null;
private EClass commonIdClass = null;
private Map<EClass, EAttribute> realId = new HashMap<EClass, EAttribute>();
//private Map<EAttribute, EReference> attributeToReference = new HashMap<>();
//private Map<EReference, EAttribute> referenceToAttribute = new HashMap<>();
private void buildChangers() {
for (Entry<EAttribute,EAttributeTransformator> entry: xmlToEcoreAttr.entrySet()) {
// EAttribute attr = entry.getKey(); // TODO remove unused?
EAttributeTransformator tf = entry.getValue();
PartialObjectCopier poc = new PartialObjectCopier() {
@Override
public void copyFrom(TransformatorImpl trans, EObject xmlObject, EObject ret) {
//Workaround - remove if ressource is always correct
try {
if (xmlObject.eIsSet(tf.getXml())) {
Collection c = MyEcoreUtil.getAsCollection(xmlObject, tf.getXml());
c = tf.convertToEcore(c);
MyEcoreUtil.setAsCollectionBasic(ret,tf.getEcore(),c);
} else {
ret.eUnset(tf.getEcore());
}
} catch (IllegalArgumentException e) {
EStructuralFeature esf = xmlObject.eClass().getEStructuralFeature(tf.getXml().getName());
System.err.println(e.getMessage()+" => replaced by " + esf);
if (esf != null) {
if (xmlObject.eIsSet(esf)) {
Collection c = MyEcoreUtil.getAsCollection(xmlObject, esf);
c = tf.convertToEcore(c);
MyEcoreUtil.setAsCollectionBasic(ret,tf.getEcore(),c);
} else {
ret.eUnset(tf.getEcore());
}
}
}
}
};
xmlToEcoreChanger.put(tf.getXml(),poc);
xmlToEcoreChanger.put(tf.getEcore(),poc);
}
for (Entry<EAttribute,EAttributeTransformator> entry: ecoreToXmlAttr.entrySet()) {
// EAttribute attr = entry.getKey(); // TODO remove unused?
EAttributeTransformator tf = entry.getValue();
PartialObjectCopier poc = new PartialObjectCopier() {
@Override
public void copyFrom(TransformatorImpl trans, EObject eobject, EObject ret) {
if (eobject.eIsSet(tf.getEcore())) {
// String bla = attr.getName(); // TODO remove unused?
Collection c = MyEcoreUtil.getAsCollection(eobject, tf.getEcore());
c = tf.convertToXml(c);
MyEcoreUtil.setAsCollectionBasic(ret,tf.getXml(),c);
} else {
ret.eUnset(tf.getXml());
}
}
};
ecoreToXmlChanger.put(tf.getXml(),poc);
ecoreToXmlChanger.put(tf.getEcore(),poc);
}
for (Entry<EReference,EReferenceTransformator> entry: xmlToEcoreRef.entrySet()) {
// EReference ref = entry.getKey(); // TODO remove unused?
EReferenceTransformator tf = entry.getValue();
// ResourceSet rs; // TODO remove unused?
PartialObjectCopier poc = new PartialObjectCopier() {
@Override
public void copyFrom(TransformatorImpl trans, EObject xmlObject, EObject ret) {
try {
if (xmlObject.eIsSet(tf.getXml())) {
Collection c = MyEcoreUtil.getAsCollection(xmlObject, tf.getXml());
c = tf.convertToEcore(c, trans);
MyEcoreUtil.setAsCollectionBasic(ret,tf.getEcore(),c);
} else {
ret.eUnset(tf.getEcore());
}
} catch (IllegalArgumentException e) {
EStructuralFeature esf = xmlObject.eClass().getEStructuralFeature(tf.getXml().getName());
System.err.println(e.getMessage()+" => replaced by " + esf);
if (esf != null) {
if (xmlObject.eIsSet(esf)) {
Collection c = MyEcoreUtil.getAsCollection(xmlObject, esf);
c = tf.convertToEcore(c, trans);
MyEcoreUtil.setAsCollectionBasic(ret,tf.getEcore(),c);
} else {
ret.eUnset(tf.getEcore());
}
}
}
}
};
xmlToEcoreChanger.put(tf.getXml(),poc);
xmlToEcoreChanger.put(tf.getEcore(),poc);
}
for (Entry<EReference,EReferenceTransformator> entry: ecoreToXmlRef.entrySet()) {
// EReference ref = entry.getKey(); // TODO remove unused?
EReferenceTransformator tf = entry.getValue();
PartialObjectCopier poc = new PartialObjectCopier() {
@Override
public void copyFrom(TransformatorImpl trans, EObject eobject, EObject ret) {
if (eobject.eIsSet(tf.getEcore())) {
Collection c = MyEcoreUtil.getAsCollection(eobject, tf.getEcore());
c = tf.convertToXml(c, trans);
MyEcoreUtil.setAsCollectionBasic(ret,tf.getXml(),c);
} else {
ret.eUnset(tf.getXml());
}
}
};
ecoreToXmlChanger.put(tf.getXml(),poc);
ecoreToXmlChanger.put(tf.getEcore(),poc);
}
}
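	// Sketch of what buildChangers() produced: each PartialObjectCopier reads a
	// single feature from the source object, converts the values through the
	// registered transformator, and writes the counterpart feature on the
	// target, keyed here under both the XML and the Ecore feature.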
private void calcId() {
		//Build a common superclass over all ID classes
List<EClass> allIdClasses = new ArrayList<EClass>();
for (EClass ecl: ecoreToXmlClasses.keySet()) {
for (EAttribute attr: ecl.getEAttributes()) {
if (attr.isID()) {
allIdClasses.add(ecl);
}
}
}
Set<EClass> allIdClassesSet = new HashSet<EClass>(allIdClasses);
if (allIdClasses.isEmpty()) {
//Nothing to do
return;
}
//If there is only one, just pick the first ID you find and you are done!
if (allIdClassesSet.size() == 1) {
commonIdClass = allIdClasses.get(0);
commonIdAttribute = commonIdClass.getEIDAttribute();
} else {
//Check if there is a superclass which is a superclass of all id classes
Set<EClass> superClasses = new HashSet<EClass>();
EClass first = allIdClasses.get(0);
superClasses.add(first);
superClasses.addAll(first.getEAllSuperTypes());
for (int i = 1; i < allIdClasses.size(); ++i) {
EClass cl = allIdClasses.get(i);
Set<EClass> subSuper = new HashSet<EClass>(cl.getEAllSuperTypes());
subSuper.add(cl);
superClasses.retainAll(subSuper);
}
			//All of these classes are candidates, but a candidate is only valid if
			//no class outside the ID set would inherit the id attribute through it
for (EClass cl: ecoreToXmlClasses.keySet()) {
if (allIdClassesSet.contains(cl)) {
continue;
}
Set<EClass> superTypes = new HashSet<>(cl.getEAllSuperTypes());
superTypes.retainAll(allIdClassesSet);
if (!superTypes.isEmpty()) {
continue;
}
superClasses.remove(cl);
superClasses.removeAll(cl.getEAllSuperTypes());
}
<|fim▁hole|> commonIdClass = superClasses.iterator().next();
} else {
//Create
commonIdClass = (EClass)getIfExists("CommonIdClass");
if (commonIdClass == null) {
commonIdClass = EcoreFactory.eINSTANCE.createEClass();
commonIdClass.setAbstract(true);
commonIdClass.setName("CommonIdClass");
ecorePackage.getEClassifiers().add(commonIdClass);
} else {
idAttributeExisted = true;
}
}
Object commonIdAttributeO = getIfExists("CommonIdClass.name");
if (commonIdAttributeO instanceof EAttribute) {
commonIdAttribute = (EAttribute)commonIdAttributeO;
} else {
commonIdAttribute = EcoreFactory.eINSTANCE.createEAttribute();
}
commonIdAttribute.setName("name"); //Good to provide an xtext ID!
commonIdAttribute.setUnique(true);
commonIdAttribute.setID(true);
commonIdAttribute.setLowerBound(1);
commonIdAttribute.setUpperBound(1);
commonIdAttribute.setEType(EcorePackage.Literals.ESTRING);
if (!idAttributeExisted) {
commonIdClass.getEStructuralFeatures().add(commonIdAttribute);
}
for (EClass cl: ecoreToXmlClasses.keySet()) {
realId.put(cl, cl.getEIDAttribute());
}
if (!idAttributeExisted) {
for (EClass cl: allIdClasses) {
EAttribute id = cl.getEIDAttribute();
if (cl != commonIdClass) {
if (id != null && id.getEContainingClass() == cl) {
cl.getEStructuralFeatures().remove(id);
}
if (!cl.getEAllSuperTypes().contains(commonIdClass)) {
cl.getESuperTypes().add(commonIdClass);
}
}
}
}
}
//Whenever you have an attribute which is an IDREF, replace it by a reference
for (Entry<EAttribute,EAttributeTransformator> entry: xmlToEcoreAttr.entrySet()) {
EAttribute attr = entry.getKey();
String attrEType = (attr.getEType() != null)?attr.getEType().getName():"";
if ("IDREF".equals(attrEType)) {
EAttribute ecoreAttr = entry.getValue().getEcore();
EObject erefO = getIfExists(getEcoreClassName(attr.getEContainingClass())+"."+getEcoreAttributeName(attr));
EReference ref = null;
boolean hadReference = false;
if (erefO instanceof EReference) {
ref = (EReference)erefO;
hadReference = true;
} else {
ref = EcoreFactory.eINSTANCE.createEReference();
setSimple(ecoreAttr, ref);
ref.setName(ecoreAttr.getName());
ref.setEType(commonIdClass);
}
EReference fref = ref;
//attributeToReference.put(ecoreAttr, ref);
//referenceToAttribute.put(ref, ecoreAttr);
if (!hadReference && ecoreAttr.getEContainingClass() != null) {
int idx = ecoreAttr.getEContainingClass().getEStructuralFeatures().indexOf(ecoreAttr);
ecoreAttr.getEContainingClass().getEStructuralFeatures().add(idx,ref);
ecoreAttr.getEContainingClass().getEStructuralFeatures().remove(ecoreAttr);
}
				//Convert each object into its ID
PartialObjectCopier poc = new PartialObjectCopier() {
@Override
public void copyFrom(TransformatorImpl transformator, EObject from, EObject to) {
Collection c = MyEcoreUtil.getAsCollection(from, fref);
List<Object> targetIds = new ArrayList<Object>();
for (Object o: c) {
EObject eo = (EObject)o;
EAttribute idAttr = null;
if (eo != null && eo.eClass() != null && eo.eClass().getEIDAttribute() != null) {
idAttr = eo.eClass().getEIDAttribute();
}
Collection ids = MyEcoreUtil.getAsCollection(eo, idAttr);
targetIds.addAll(ids);
}
MyEcoreUtil.setAsCollectionBasic(to, attr, targetIds);
}
};
ecoreToXmlChanger.put(ref,poc);
ecoreToXmlChanger.put(attr,poc);
ecoreToXmlFeature.put(ref, attr);
poc = new PartialObjectCopier() {
@Override
public void copyFrom(TransformatorImpl transformator, EObject from, EObject to) {
Collection c = MyEcoreUtil.getAsCollection(from, attr);
List<EObject> targetObjects = new ArrayList<>();
for (Object o: c) {
EObject eo = transformator.getXmlObject(String.valueOf(o));
if (eo != null) {
targetObjects.add(transformator.xml2Eobject(eo));
}
}
MyEcoreUtil.setAsCollectionBasic(to, fref, targetObjects);
}
};
xmlToEcoreChanger.put(attr, poc);
xmlToEcoreChanger.put(ref, poc);
}
if (attr.isID()) {
				//Convert each object into its ID
PartialObjectCopier poc = new PartialObjectCopier() {
@Override
public void copyFrom(TransformatorImpl transformator, EObject from, EObject to) {
Collection c = MyEcoreUtil.getAsCollection(from, commonIdAttribute);
MyEcoreUtil.setAsCollectionBasic(to, to.eClass().getEIDAttribute(), c);
}
};
ecoreToXmlChanger.put(commonIdAttribute,poc);
ecoreToXmlChanger.put(attr,poc);
poc = new PartialObjectCopier() {
@Override
public void copyFrom(TransformatorImpl transformator, EObject from, EObject to) {
Collection c = MyEcoreUtil.getAsCollection(from, from.eClass().getEIDAttribute());
MyEcoreUtil.setAsCollectionBasic(to, commonIdAttribute, c);
}
};
xmlToEcoreChanger.put(attr, poc);
xmlToEcoreChanger.put(commonIdAttribute, poc);
}
}
}
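	// Sketch of calcId()'s effect: classes with an xsd:ID attribute are rebased
	// onto a shared CommonIdClass exposing a single "name" attribute, and IDREF
	// attributes are rewritten into EReferences whose values are resolved back
	// through the transformator's ID lookup on copy.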
public void augmentWithStandardDatatypes() {
//Whenever you have an attribute which is an IDREF, replace it by a reference
for (Entry<EAttribute,EAttributeTransformator> entry: xmlToEcoreAttr.entrySet()) {
EAttribute attr = entry.getKey();
String attrEType = (attr.getEType() != null)?attr.getEType().getName():"";
EAttribute ecoreAttr = entry.getValue().getEcore();
if (ecoreAttr != null) {
String name = attr.getEAttributeType().getName();
String instanceClassName = attr.getEAttributeType().getInstanceClassName();
System.out.println("Have attribute with name "+name+ " of type "+attrEType+" with instance class "+instanceClassName);
			//TODO: Why is that the case? Are there other differences as well?
if ("AnyURI".equals(attrEType)) {
attrEType = "URI";
}
if (store.isStandardDatatype(attrEType)) {
EAnnotation annot = ecoreAttr.getEAnnotation("http://big.tuwien.ac.at/standardXMLDatatype");
if (annot == null) {
ecoreAttr.getEAnnotations().add(annot = EcoreFactory.eINSTANCE.createEAnnotation());
annot.setSource("http://big.tuwien.ac.at/standardXMLDatatype");
}
annot.getDetails().put("type",attrEType);
}
}
}
}
public EAttribute transformatorEcoreAttribute(EClass cl, EAttribute base) {
if (base == commonIdAttribute) {
return realId.getOrDefault(cl,base);
}
return base;
}
public EAttribute getIdAttribute() {
if (commonIdAttribute == null) {
calcId();
}
return commonIdAttribute;
}
private List<Resource> ecoreResources = new ArrayList<Resource>();
private Iterable<EObject> xmlResource;
public EClass getEcoreEClass(EClass xml) {
return xmlToEcoreClasses.get(xml);
}
public EClass getXmlEClass(EClass ecore) {
return ecoreToXmlClasses.get(ecore);
}
public PartialObjectCopier getChangerForXml(EStructuralFeature ecorefeat) {
return ecoreToXmlChanger.get(ecorefeat);
}
public PartialObjectCopier getChangerForEcore(EStructuralFeature xmlfeat) {
return xmlToEcoreChanger.get(xmlfeat);
}
private EAttributeTransformator getTransformatorForXml(EAttribute xml) {
EAttributeTransformator trafo = xmlToEcoreAttr.get(xml);
if (trafo == null) {
String fragment = xml.eResource().getURIFragment(xml);
EObject eobj = fragmentToXmlObject.get(fragment);
EAttributeTransformator ftrafo = xmlToEcoreAttr.get(eobj);
if (ftrafo == null) {
System.err.println("No transformator for "+xml +" found, eobject: " +eobj+"!");
} else {
trafo = new EAttributeTransformatorImpl(xml, ftrafo.getEcore(), ftrafo.getTransformation());
}
}
		// trafo can still be null here if no fallback transformator was found
		if (trafo != null && trafo.getEcore().isID() && trafo.getEcore() != commonIdAttribute) {
EAttributeTransformator ftrafo = trafo;
return new EAttributeTransformator() {
@Override
public EAttribute getXml() {
return xml;
}
@Override
public CollectionValueTransformation getTransformation() {
return ftrafo.getTransformation();
}
@Override
public EAttribute getEcore() {
return commonIdAttribute;
}
};
}
return trafo;
}
private EAttributeTransformator getTransformatorForEcore(EClass eClass, EAttribute ecore) {
return ecoreToXmlAttr.get(transformatorEcoreAttribute(eClass,ecore));
}
private EReferenceTransformator getTransformatorForXml(EReference xml) {
EReferenceTransformator trafo = xmlToEcoreRef.get(xml);
if (trafo == null) {
String fragment = xml.eResource().getURIFragment(xml);
EObject eobj = fragmentToXmlObject.get(fragment);
trafo = xmlToEcoreRef.get((EReference)eobj);
if (trafo == null) {
System.err.println("No transformator for "+xml +" found, eobject: " +eobj+"!");
} else {
trafo = new EReferenceTransformatorImpl(xml, trafo.getEcore(), trafo.getTransformation());
}
}
return trafo;
}
private EReferenceTransformator getTransformatorForEcore(EReference ecore) {
return ecoreToXmlRef.get(ecore);
}
private boolean addedAnyAnnotations = false;
private EClass documentRootClassXml;
private EClass rootClassXml;
private EClass rootClassEcore;
private EReference rootReferenceXml;
public void parseXmlEcoreBasic(Resource localEcore, ResourceSet resourceSet, URI targetEcoreUri, Iterable<EObject> xmlResource, boolean generateFile) {
EPackage xmlEPkg = null;
for (EObject eobj: xmlResource) {
if (eobj instanceof EPackage) {
xmlEPkg = (EPackage)eobj;
resourceSet.getPackageRegistry().put(xmlEPkg.getNsURI(), eobj);
}
}
ecorePackage = (EPackage)localEcore.getContents().get(0);
List<EAttribute> eattrs = new ArrayList<>();
List<EReference> erefs = new ArrayList<>();
List<EClass> eclasses = new ArrayList<>();
List<EEnum> eenums = new ArrayList<>();
resourceSet.getPackageRegistry().put(ecorePackage.getNsURI(), ecorePackage);
for (EObject eobj: xmlResource) {
if (eobj.eResource() != null) {
fragmentToXmlObject.put(eobj.eResource().getURIFragment(eobj),eobj);
}
if (eobj instanceof EClass) {
EClass cl = (EClass)eobj;
if (!cl.getName().equals("DocumentRoot")) {
EClass ecoreClass = generateShallowEClass(cl);
eclasses.add(cl);
xmlToEcoreClasses.put(cl, ecoreClass);
ecoreToXmlClasses.put(ecoreClass,cl);
System.out.println("Associating "+ cl+ " to "+ecoreClass);
//Not all, because then we would do something multiple times
for (EAttribute eattr: cl.getEAttributes()) {
xmlToEcoreAttribute.put(eattr, generateShallowAttribute(cl, ecoreClass, eattr));
eattrs.add(eattr);
}
for (EReference eattr: cl.getEReferences()) {
xmlToEcoreReferences.put(eattr, generateShallowReference(cl, ecoreClass, eattr));
erefs.add(eattr);
}
} else {
//Analyze subclass
documentRootClassXml = cl;
rootReferenceXml = TransformatorImpl.getRootFeature(cl);
rootClassXml = rootReferenceXml.getEReferenceType();
}
} else if (eobj instanceof EEnum) {
// EEnum eenum = (EEnum)eobj; // TODO remove unused?
EEnum targetEEnum = generateEEnum((EEnum)eobj);
eenums.add(targetEEnum);
//Ignore for now
} else if (eobj instanceof EDataType) {
//??
} else if (eobj instanceof EAttribute) {
//Have handled every important above?
} else if (eobj instanceof EReference) {
//Have handled every important above?
}
}
rootClassEcore = xmlToEcoreClasses.get(rootClassXml);
for (EClass key: eclasses) {
if (!augmentEClassBasic(key, xmlToEcoreClasses.get(key))) {
//TODO: Das stimmt so nicht ...
xmlToEcoreClasses.remove(key);
}
}
for (EAttribute attr: eattrs) {
if (!augmentAttributeBasic(attr, xmlToEcoreAttribute.get(attr))) {
xmlToEcoreAttribute.remove(attr);
}
}
for (EReference key: erefs) {
if (!augmentReferenceBasic(key, xmlToEcoreReferences.get(key))) {
xmlToEcoreReferences.remove(key);
}
}
buildChangers();
calcId();
augmentWithStandardDatatypes();
if (generateFile) {
try {
int ind = 0;
for (Resource ecoreRes: ecoreResources) {
ecoreRes.save(new FileOutputStream("testoutput"+(++ind)+".ecore"),null);
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
public void parseXmlEcore(Resource localECoreResource, ResourceSet resourceSet, /*String xmlEcoreName, */URI targetEcoreUri, Iterable<EObject> xmlResource, boolean generateFile) {
EPackage xmlEPkg = null;
for (EObject eobj: xmlResource) {
if (eobj instanceof EPackage) {
xmlEPkg = (EPackage)eobj;
resourceSet.getPackageRegistry().put(xmlEPkg.getNsURI(), eobj);
}
}
if (xmlEPkg == null) {
for (EObject eobj: xmlResource) {
System.out.println("Found object: "+eobj);
}
}
if (localECoreResource == null) {
localECoreResource = targetEcoreUri==null?new XMIResourceImpl(): new XMIResourceImpl(
resourceSet.getURIConverter().normalize(targetEcoreUri)
);
this.ecoreResources.add(localECoreResource);
ecorePackage = EcoreFactory.eINSTANCE.createEPackage();
ecorePackage.setNsURI(xmlEPkg.getNsURI()+"simplified");
//epkg.setNsURI(xmlEPkg.getNsURI()+"-simplified");
//String xmlEcoreShortName = xmlEcoreName.split("\\.", 2)[0];
ecorePackage.setName((xmlEPkg.getName()+"Simplified").replace(".", ""));
ecorePackage.setNsPrefix(xmlEPkg.getNsPrefix()+"s");
localECoreResource.getContents().add(ecorePackage);
} else {
ecorePackage = (EPackage)localECoreResource.getContents().get(0);
}
List<EAttribute> eattrs = new ArrayList<>();
List<EReference> erefs = new ArrayList<>();
List<EClass> eclasses = new ArrayList<>();
List<EEnum> eenums = new ArrayList<>();
resourceSet.getPackageRegistry().put(ecorePackage.getNsURI(), ecorePackage);
for (EObject eobj: xmlResource) {
if (eobj.eResource() != null) {
fragmentToXmlObject.put(eobj.eResource().getURIFragment(eobj),eobj);
}
if (eobj instanceof EClass) {
EClass cl = (EClass)eobj;
if (!cl.getName().equals("DocumentRoot")) {
EClass ecoreClass = generateShallowEClass(cl);
eclasses.add(cl);
xmlToEcoreClasses.put(cl, ecoreClass);
ecoreToXmlClasses.put(ecoreClass,cl);
System.out.println("Associating "+ cl+ " to "+ecoreClass);
//Only the locally declared features (not getEAll*), otherwise inherited ones would be processed multiple times
for (EAttribute eattr: cl.getEAttributes()) {
xmlToEcoreAttribute.put(eattr, generateShallowAttribute(cl, ecoreClass, eattr));
eattrs.add(eattr);
}
for (EReference eattr: cl.getEReferences()) {
xmlToEcoreReferences.put(eattr, generateShallowReference(cl, ecoreClass, eattr));
erefs.add(eattr);
}
ecorePackage.getEClassifiers().add(ecoreClass);
} else {
//Analyze subclass
if (rootReferenceXml == null) {
rootReferenceXml = TransformatorImpl.getRootFeature(cl);
if (rootReferenceXml != null) {
rootClassXml = rootReferenceXml.getEReferenceType();
documentRootClassXml = cl;
}
}
}
} else if (eobj instanceof EEnum) {
EEnum targetEEnum = generateEEnum((EEnum)eobj);
ecorePackage.getEClassifiers().add(targetEEnum);
eenums.add(targetEEnum);
//Ignore for now
} else if (eobj instanceof EDataType) {
//???
} else if (eobj instanceof EAttribute) {
//Have handled every important above?
} else if (eobj instanceof EReference) {
//Have handled every important above?
}
}
rootClassEcore = xmlToEcoreClasses.get(rootClassXml);
for (EClass key: eclasses) {
if (!augmentEClass(key, xmlToEcoreClasses.get(key))) {
//TODO: This is not quite right ...
xmlToEcoreClasses.remove(key);
}
}
for (EAttribute attr: eattrs) {
if (!augmentAttribute(attr, xmlToEcoreAttribute.get(attr))) {
xmlToEcoreAttribute.remove(attr);
}
}
for (EReference key: erefs) {
if (!augmentReference(key, xmlToEcoreReferences.get(key))) {
xmlToEcoreReferences.remove(key);
}
}
//Add OCL expressions
for (EObject eobj: xmlResource) {
parseExtendedMetadata(eobj);
}
if (addedAnyAnnotations) {
EAnnotation annot = ecorePackage.getEAnnotation("http://www.eclipse.org/emf/2002/Ecore");
if (annot == null) {
annot = EcoreFactory.eINSTANCE.createEAnnotation();
annot.setSource("http://www.eclipse.org/emf/2002/Ecore");
ecorePackage.getEAnnotations().add(annot);
}
annot.getDetails().put("invocationDelegates","http://www.eclipse.org/emf/2002/Ecore/OCL/Pivot");
annot.getDetails().put("settingDelegates","http://www.eclipse.org/emf/2002/Ecore/OCL/Pivot");
annot.getDetails().put("validationDelegates","http://www.eclipse.org/emf/2002/Ecore/OCL/Pivot");
}
buildChangers();
calcId();
augmentWithStandardDatatypes();
if (generateFile) {
try {
int ind = 0;
for (Resource ecoreRes: ecoreResources) {
ecoreRes.save(new FileOutputStream("testoutput"+(++ind)+".ecore"),null);
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
public void parseXmlEcore(ResourceSet resourceSet, File xmlEcore) {
Resource.Factory.Registry reg = resourceSet.getResourceFactoryRegistry();
reg.getExtensionToFactoryMap().put(
"xmi",
new XMIResourceFactoryImpl());
reg.getExtensionToFactoryMap().put(
"ecore",
new EcoreResourceFactoryImpl());
//Register ecore file
final ExtendedMetaData extendedMetaData = new BasicExtendedMetaData(resourceSet.getPackageRegistry());
resourceSet.getLoadOptions().put(XMLResource.OPTION_EXTENDED_META_DATA, extendedMetaData);
Resource res = resourceSet.getResource(resourceSet.getURIConverter().normalize(URI.createFileURI(xmlEcore.getAbsolutePath())), true);
this.xmlResource = ()->res.getAllContents();
parseXmlEcore(null,resourceSet, URI.createFileURI(xmlEcore.getAbsolutePath()+".simple.ecore"), xmlResource, true);
}
public void parseExtendedMetadata(EClass xml, EClass ecore) {
}
public String toFirstUpper(String str) {
if (str.length() <= 1) {
return str.toUpperCase();
}
return Character.toUpperCase(str.charAt(0))+str.substring(1);
}
public void parseExtendedMetadata(EAttribute xmlAttr, EAttribute ecoreAttr, EClass xmlClass, EClass ecoreClass) {
if (ecoreAttr == null) {
System.err.println("No attribute matching for "+xmlAttr);
return;
}
EDataType dataType = xmlAttr.getEAttributeType();
//Also parse that
for (EAnnotation dataTypeAnnot: dataType.getEAnnotations()) {
System.out.println("DataTypeAnnotation: "+dataTypeAnnot.getSource());
if ("http:///org/eclipse/emf/ecore/util/ExtendedMetaData".equals(dataTypeAnnot.getSource())) {
String pattern = dataTypeAnnot.getDetails().get("pattern");
EAnnotation additonal = ecoreClass.getEAnnotation("http://www.eclipse.org/emf/2002/Ecore/OCL/Pivot");
/* <eAnnotations source="http://www.eclipse.org/emf/2002/Ecore">
<details key="constraints" value="sameServics goodSpeed onlyOneImportant backupDifferent"/>
</eAnnotations>
<eAnnotations source="http://www.eclipse.org/emf/2002/Ecore/OCL/Pivot">
<details key="sameServics" value="backup = null or backup.services->includesAll(services)"/>
<details key="goodSpeed" value="designSpeed <= server.speed->sum()"/>
<details key="onlyOneImportant" value="services->select(s | s.type = ServiceType::IMPORTANT)->size() <= 1"/>
<details key="backupDifferent" value="backup <> self"/>
</eAnnotations>*/
boolean needAdd = false;
boolean needAdd2 = false;
String curConstraints = "";
if (additonal == null) {
needAdd = true;
additonal = EcoreFactory.eINSTANCE.createEAnnotation();
additonal.setSource("http://www.eclipse.org/emf/2002/Ecore/OCL/Pivot");
}
EAnnotation general = ecoreClass.getEAnnotation("http://www.eclipse.org/emf/2002/Ecore");
if (general != null) {
curConstraints = general.getDetails().get("constraints");
if (curConstraints == null) {
curConstraints = "";
}
} else {
needAdd2 = true;
general = EcoreFactory.eINSTANCE.createEAnnotation();
general.setSource("http://www.eclipse.org/emf/2002/Ecore");
}
String prepend = "self."+ecoreAttr.getName()+(MyEcoreUtil.isMany(ecoreAttr)?"->forAll(x | x":"");
String postpend = MyEcoreUtil.isMany(ecoreAttr)?")":"";
if (pattern != null) {
// 162 occurrences in eclass case study, but where do they all come from? there are only 84 occurrences of restrictions, which are not enumerations or length, and 143 in total
EAnnotation typeAnnotation = ((EClass) xmlAttr.eContainer()).getEAnnotations().get(0);
restrictedDatatypes.put(typeAnnotation.getDetails().get("name"), xmlAttr.getEAttributeType().getName());
String constraintName = "pattern"+toFirstUpper(ecoreAttr.getName());
String[] baseConstraintValues = pattern.split("\\ ");
StringBuilder totalValue = new StringBuilder();
for (int bc = 0; bc < baseConstraintValues.length; ++bc) {
if (bc > 0) {
totalValue.append(" or ");
}
String spattern = baseConstraintValues[bc];
constraintValue = ".matches('"+StringEscapeUtils.unescapeXml(spattern).replace("%20"," ").replace("\\", "\\\\").replace("'", "\\\"")+"')";
String newValue = prepend+constraintValue+postpend;
totalValue.append(newValue);
}
String totalString = totalValue.toString();
if (xmlAttr.getLowerBound() == 0 && !xmlAttr.isMany() && baseConstraintValues.length > 0) {
totalString = "("+prepend+"=null) or "+totalString;
}
additonal.getDetails().put(constraintName, totalString);
curConstraints = curConstraints+ " "+constraintName;
}
String minLength = dataTypeAnnot.getDetails().get("minLength");
if (minLength != null) {
String constraintName = "minLength"+toFirstUpper(ecoreAttr.getName());
String constraintValue = ".size() >= "+minLength;
String prefix = (!xmlAttr.isMany()&&xmlAttr.getLowerBound()==0)?("("+prepend + " = null) or " + prepend):prepend;
additonal.getDetails().put(constraintName, prefix+constraintValue+postpend);
curConstraints = curConstraints+ " "+constraintName;
}
String maxLength = dataTypeAnnot.getDetails().get("maxLength");
if (maxLength != null) {
String constraintName = "maxLength"+toFirstUpper(ecoreAttr.getName());
String constraintValue = ".size() <= "+maxLength;
String prefix = (!xmlAttr.isMany()&&xmlAttr.getLowerBound()==0)?("("+prepend + " = null) or " + prepend):prepend;
additonal.getDetails().put(constraintName, prefix+constraintValue+postpend);
curConstraints = curConstraints+ " "+constraintName;
}
general.getDetails().put("constraints", curConstraints.trim());
if (needAdd2 && !curConstraints.trim().isEmpty()) {
ecoreClass.getEAnnotations().add(general);
}
if (needAdd && !additonal.getDetails().isEmpty()) {
ecoreClass.getEAnnotations().add(additonal);
addedAnyAnnotations = true;
}
}
}
}
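// Illustrative outcome (hypothetical attribute): for an XML attribute "isbn" whose
// XSD datatype carries pattern "\d{13}", the simplified EClass would gain an OCL
// Pivot annotation detail such as
//   patternIsbn = "self.isbn.matches('\\d{13}')"
// and "patternIsbn" would be appended to the "constraints" detail of the
// http://www.eclipse.org/emf/2002/Ecore annotation.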
public void parseExtendedMetadata(EReference xmlAttr, EReference ecoreAttr, EClass xmlClass, EClass ecoreClass) {
}
public void parseExtendedMetadata(EEnum xmlEnum, EEnum ecoreEnum) {
}
public void parseExtendedMetadata(EObject eobj) {
if (eobj instanceof EClass) {
parseExtendedMetadata((EClass)eobj,(EClass)xmlToEcoreClasses.get(eobj));
} else if (eobj instanceof EStructuralFeature) {
EStructuralFeature esf = (EStructuralFeature)eobj;
EClass srcCl = esf.getEContainingClass();
EClass trgCl = xmlToEcoreClasses.get(srcCl);
if (eobj instanceof EAttribute) {
parseExtendedMetadata((EAttribute)eobj,
(EAttribute)xmlToEcoreAttribute.get(eobj),srcCl,trgCl );
} else {
parseExtendedMetadata((EReference)eobj,
(EReference)xmlToEcoreReferences.get(eobj),srcCl,trgCl );
}
} else if (eobj instanceof EEnum) {
parseExtendedMetadata((EEnum)eobj,this.copiedEEnums.get(eobj));
}
}
public SingleObjectTransformator matchingObjectTransformation = new SingleObjectTransformator() {
@Override
public EObject convertToXml(EObject eobject, Transformator transformator) {
return transformator.eobject2xml(eobject);
}
@Override
public EObject convertToEcore(EObject xml, Transformator transformator) {
return transformator.xml2Eobject(xml);
}
};
private void setSimple(EStructuralFeature xmlFeature, EStructuralFeature target) {
target.setChangeable(true);
target.setLowerBound(xmlFeature.getLowerBound());
target.setUpperBound(xmlFeature.getUpperBound());
target.setOrdered(xmlFeature.isOrdered());
target.setTransient(false);
target.setUnique(xmlFeature.isUnique());
target.setVolatile(false);
}
public EEnum generateEEnum(EEnum from) {
EEnum ret = copiedEEnums.get(from);
if (ret != null) {
return ret;
}
copiedEEnums.put(from, ret = EcoreFactory.eINSTANCE.createEEnum());
ret.setName(from.getName());
for (EEnumLiteral lit: from.getELiterals()) {
EEnumLiteral target = copiedEEnumLiterals.get(lit);
if (target == null) {
copiedEEnumLiterals.put(lit, target = EcoreFactory.eINSTANCE.createEEnumLiteral());
backEEnumLiteral.put(target, lit);
backEEnumLiteralStr.put(from.getName()+"."+lit.getLiteral(), lit);
target.setLiteral(lit.getLiteral());
target.setName(lit.getName());
target.setValue(lit.getValue());
}
ret.getELiterals().add(target);
}
return ret;
}
public ValueTransformator<Object, Object> eenumTransformator(EEnum forEEnum) {
return new ValueTransformator<Object,Object>() {
@Override
public Object convertToEcore(Object xml) {
System.err.println("Convert to ecore needs to be reworked: was enumliteral->enumliteral, but appearanly others can be there too");
Object ret = copiedEEnumLiterals.get(xml);
if (ret == null && xml instanceof EEnumLiteral) {
String fragment = ((EEnumLiteral)xml).eResource().getURIFragment((EEnumLiteral)xml);
EObject eobj = fragmentToXmlObject.get(fragment);
ret = copiedEEnumLiterals.get(eobj);
}
return ret;
}
@Override
public Object convertToXml(Object eobject) {
Object ret = backEEnumLiteral.get(eobject);
if (ret == null && eobject instanceof Enumerator) {
Enumerator enumerator = (Enumerator)eobject;
String totalStr = forEEnum.getName()+"."+enumerator.getLiteral();
ret = backEEnumLiteralStr.get(totalStr);
}
return ret;
}
};
}
public boolean augmentAttributeBasic(EAttribute xmlAttribute, EAttribute ecoreAttribute) {
EClass contCl = xmlToEcoreClasses.get(xmlAttribute.getEContainingClass());
if (contCl == null) {
System.err.println("No matching source found for "+xmlAttribute);
return false;
}
if (xmlAttribute.getEAttributeType() instanceof EEnum) {
//Directly reuse that enum (is this supported in the grammar?)
EEnum targetEEnum = copiedEEnums.get(xmlAttribute.getEAttributeType());
if (targetEEnum == null) {
System.err.println("I have not copied the eenum "+xmlAttribute.getEAttributeType());
return false;
} else {
EAttributeTransformatorImpl tfi = new EAttributeTransformatorImpl(xmlAttribute, ecoreAttribute,
new CollectionValueTransformationImpl(EEnumLiteral.class,
EEnumLiteral.class, eenumTransformator(targetEEnum)));
xmlToEcoreAttr.put(xmlAttribute, tfi);
ecoreToXmlAttr.put(ecoreAttribute, tfi);
return true;
}
}
CollectionValueTransformation trafo = store.getValueTransformationOrNull(xmlAttribute);
if (trafo == null) {
Boolean ret = checkMixedAttribute(contCl,xmlAttribute);
if (ret != null) {
return ret;
}
System.err.println("No transformation found for "+xmlAttribute);
return false;
}
EAttributeTransformatorImpl tfi = new EAttributeTransformatorImpl(xmlAttribute, ecoreAttribute, trafo);
xmlToEcoreAttr.put(xmlAttribute, tfi);
ecoreToXmlAttr.put(ecoreAttribute, tfi);
return true;
}
//There does not need to be a 1:1 correspondence!
public EStructuralFeature getXmlFeature(EStructuralFeature ecoreFeature) {
//Check id
if (java.util.Objects.equals(ecoreFeature,commonIdAttribute)) {
ecoreFeature = realId.getOrDefault(ecoreFeature,(EAttribute)ecoreFeature);
}
//Check reference - not necessary, I added it to ecoreToXmlFeature!
return ecoreToXmlFeature.get(ecoreFeature);
}
public Object getXmlValue(EObject eobj, EStructuralFeature ecoreFeature, int index) {
Collection col = MyEcoreUtil.getAsCollection(eobj, getXmlFeature(ecoreFeature));
if (col instanceof List) {
return ((List)col).get(index);
} else {
Object ret = null;
Iterator iter = col.iterator();
while (index >= 0) {
if (iter.hasNext()) {
ret = iter.next();
} else {
if (ecoreFeature instanceof EAttribute) {
EDataType dt = ((EAttribute)ecoreFeature).getEAttributeType();
ret = dt.getDefaultValue();
} else {
EReference ref = (EReference)ecoreFeature;
ret = MyEcoreUtil.createInstanceStatic(ref.getEReferenceType());
}
}
--index;
}
return ret;
}
}
public Boolean checkMixedAttribute(EClass contCl, EAttribute xmlAttribute) {
EDataType dt = xmlAttribute.getEAttributeType();
if (dt != null && "EFeatureMapEntry".equals(dt.getName()) && "mixed".equals(xmlAttribute.getName())) {
generateMixClasses();
contCl.getESuperTypes().add(mixedBaseClass);
PartialObjectCopier poc = new PartialObjectCopier() {
@Override
public void copyFrom(TransformatorImpl transformator, EObject from, EObject to) {
//This must NOT refer to ecoreAttribute!! //TODO: Store in a map or something like that ...
//Because there is only one target attribute
EStructuralFeature ecoreAttribute = from.eClass().getEStructuralFeature("mixed");
Collection c = MyEcoreUtil.getAsCollection(from, ecoreAttribute);
Collection t = MyEcoreUtil.getAsCollection(to, mixedBaseMixedAttr);
t.clear();
for (Object o: c) {
FeatureMap.Entry entry = (FeatureMap.Entry)o;
EStructuralFeature esf = entry.getEStructuralFeature();
if (esf.getEContainingClass().isSuperTypeOf(from.eClass())) {
//It is a class attribute
EObject feature = MyEcoreUtil.createInstanceStatic(mixedFeature);
feature.eSet(mixedValueAttr, getTargetName(esf));
t.add(feature);
} else if ("text".equals(esf.getName())) { //TODO: Improve filtering
//It is a string literal
EObject feature = MyEcoreUtil.createInstanceStatic(mixedText);
feature.eSet(mixedValueAttr, entry.getValue());
t.add(feature);
} else {
//TODO: Implement me
throw new RuntimeException(new UnsupportedDataTypeException("I currently only support text features and owned structural features in mixed content"));
}
}
}
};
//1. Add Object-Delta of this object (!) - this is automatically done by other methods
//2. Add Feature-Map-Delta of this object, so this POC has to be executed last
//That is fine: since the attribute is known, it can be applied later in the transformer;
//it just has to be the last one per object!
xmlToEcoreChanger.put(xmlAttribute, poc);
xmlToEcoreChanger.put(mixedBaseMixedAttr, poc);
PartialObjectCopier ecoreToXmlPoc = new PartialObjectCopier() {
@Override
public void copyFrom(TransformatorImpl transformator, EObject ecore, EObject xml) {
//This must NOT use any of this attributes since it must be generic!
Collection c = MyEcoreUtil.getAsCollection(ecore, mixedBaseMixedAttr);
EStructuralFeature xmlFeature = xml.eClass().getEStructuralFeature("mixed");
List t = new ArrayList<>();
//TODO: Also handle directly the case where the target is a sequence ...
Map<EStructuralFeature,Integer> usedIndices = new HashMap<EStructuralFeature, Integer>();
for (Object o: c) {
EObject eo = (EObject)o;
if (mixedFeature.isSuperTypeOf(eo.eClass())) {
EStructuralFeature ecorefeat = ecore.eClass().getEStructuralFeature((String)eo.eGet(mixedValueAttr));
//But now we need the corresponding value (and the corresponding feature)
//If it is a reference, it might not be stored
EStructuralFeature xmlFeat = getXmlFeature(ecorefeat);
Integer index = usedIndices.getOrDefault(xmlFeat, 0);
Object value = getXmlValue(xml, ecorefeat, index);
FeatureMap.Entry entry = FeatureMapUtil.createEntry(xmlFeat, value);
usedIndices.put(xmlFeat, index+1);
t.add(entry);
} else if (mixedText.isSuperTypeOf(eo.eClass())) {
FeatureMap.Entry entry = FeatureMapUtil.createTextEntry((String)eo.eGet(mixedValueAttr));
t.add(entry);
}
}
//Add remaining features
for (EStructuralFeature esf: xml.eClass().getEAllStructuralFeatures()) {
if (isMixed(esf)) {continue;}
Integer curIndex = usedIndices.getOrDefault(esf, 0);
Collection col = MyEcoreUtil.getAsCollection(xml, esf);
Iterator iter = col.iterator();
int lind = 0;
while (iter.hasNext() && lind < curIndex) {
iter.next();
}
while(iter.hasNext()) {
FeatureMap.Entry entry = FeatureMapUtil.createEntry(esf, iter.next());
t.add(entry);
}
}
MyEcoreUtil.setAsCollectionBasic(xml, xmlFeature, t);
}
};
ecoreToXmlChanger.put(xmlAttribute, ecoreToXmlPoc);
ecoreToXmlChanger.put(mixedBaseMixedAttr, ecoreToXmlPoc);
return false; //Remove this attribute because it is replaced!
}
return null;
}
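// Illustrative mapping (hypothetical input): mixed XML content such as
//   Hello <b>world</b>
// stored in the "mixed" feature map becomes, on the Ecore side, a list on
// mixedBaseMixedAttr holding a text node with value "Hello " followed by a
// feature node whose value attribute names the "b" structural feature.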
public boolean augmentAttribute(EAttribute xmlAttribute, EAttribute ecoreAttribute) {
if (handledTargets.contains(ecoreAttribute)) {
return augmentAttributeBasic(xmlAttribute, ecoreAttribute);
}
EClass contCl = xmlToEcoreClasses.get(xmlAttribute.getEContainingClass());
if (contCl == null) {
System.err.println("No matching source found for "+xmlAttribute);
return false;
}
if (xmlAttribute.getEAttributeType() instanceof EEnum) {
//Directly reuse that enum (is this supported in the grammar?)
EEnum targetEEnum = copiedEEnums.get(xmlAttribute.getEAttributeType());
if (targetEEnum == null) {
System.err.println("I have not copied the eenum "+xmlAttribute.getEAttributeType());
return false;
} else {
ecoreAttribute.setEType(targetEEnum);
contCl.getEStructuralFeatures().add(ecoreAttribute);
EAttributeTransformatorImpl tfi = new EAttributeTransformatorImpl(xmlAttribute, ecoreAttribute,
new CollectionValueTransformationImpl(EEnumLiteral.class,
EEnumLiteral.class, eenumTransformator(targetEEnum)));
xmlToEcoreAttr.put(xmlAttribute, tfi);
ecoreToXmlAttr.put(ecoreAttribute, tfi);
return true;
}
}
CollectionValueTransformation trafo = store.getValueTransformationOrNull(xmlAttribute);
if (trafo == null) {
//Check special case: mixed + EFeatureMapEntry
Boolean ret = checkMixedAttribute(contCl,xmlAttribute);
if (ret != null) {
return ret;
}
System.err.println("Cannot translate attribute "+xmlAttribute.getEContainingClass().getName()+"."+xmlAttribute.getName()+" of type "+xmlAttribute.getEAttributeType()+" (cannot find transformator)");
return false;
}
EDataType dt = store.getStandardDatatypeOrNull(trafo.getEcoreClass());
if (dt == null) {
System.err.println("Cannot translate attribute "+xmlAttribute.getEContainingClass().getName()+"."+xmlAttribute.getName()+" of type "+xmlAttribute.getEAttributeType()+" (cannot transform datatype)");
return false;
}
EAttributeTransformatorImpl tfi = new EAttributeTransformatorImpl(xmlAttribute, ecoreAttribute, trafo);
xmlToEcoreAttr.put(xmlAttribute, tfi);
ecoreToXmlAttr.put(ecoreAttribute, tfi);
ecoreAttribute.setEType(dt);
contCl.getEStructuralFeatures().add(ecoreAttribute);
return true;
}
public boolean augmentReferenceBasic(EReference xmlReference, EReference ecoreReference) {
EClass contCl = xmlToEcoreClasses.get(xmlReference.getEContainingClass());
if (contCl == null) {
System.err.println("No matching source found for "+xmlReference);
return false;
}
EClass targetClass = xmlToEcoreClasses.get(xmlReference.getEReferenceType());
if (targetClass == null) {
System.err.println("No matching type found for "+xmlReference.getEContainingClass().getName()+"."+xmlReference.getName()+" ("+xmlReference.getEReferenceType()+")");
return false;
}
EReferenceTransformatorImpl tfi = new EReferenceTransformatorImpl(xmlReference, ecoreReference,
new SingleBasedCollectionObjectTransformation(new InformatedSingleObjectTransformation(xmlReference.getEReferenceType(),
ecoreReference.getEReferenceType(), matchingObjectTransformation)));
xmlToEcoreRef.put(xmlReference, tfi);
ecoreToXmlRef.put(ecoreReference, tfi);
//contCl.getEStructuralFeatures().add(ecoreReference);
return true;
}
public boolean augmentReference(EReference xmlReference, EReference ecoreReference) {
if (handledTargets.contains(ecoreReference)) {
return augmentReferenceBasic(xmlReference, ecoreReference);
}
EClass contCl = xmlToEcoreClasses.get(xmlReference.getEContainingClass());
if (contCl == null) {
System.err.println("No matching source found for "+xmlReference);
return false;
}
EClass targetClass = xmlToEcoreClasses.get(xmlReference.getEReferenceType());
if (targetClass == null) {
System.err.println("No matching type found for "+xmlReference.getEContainingClass().getName()+"."+xmlReference.getName()+" ("+xmlReference.getEReferenceType()+")");
return false;
}
ecoreReference.setEType(targetClass);
EReferenceTransformatorImpl tfi = new EReferenceTransformatorImpl(xmlReference, ecoreReference,
new SingleBasedCollectionObjectTransformation(new InformatedSingleObjectTransformation(xmlReference.getEReferenceType(),
ecoreReference.getEReferenceType(), matchingObjectTransformation)));
xmlToEcoreRef.put(xmlReference, tfi);
ecoreToXmlRef.put(ecoreReference, tfi);
contCl.getEStructuralFeatures().add(ecoreReference);
return true;
}
public boolean augmentEClass(EClass xmlClass, EClass ecoreClass) {
if (handledTargets.contains(ecoreClass)) {
return augmentEClassBasic(xmlClass, ecoreClass);
}
for (EClass superType: xmlClass.getESuperTypes()) {
EClass ecoreSup = xmlToEcoreClasses.get(superType);
ecoreClass.getESuperTypes().add(ecoreSup);
}
//I believe nothing else needs to be done here?
return true;
}
public boolean augmentEClassBasic(EClass xmlClass, EClass ecoreClass) {
return true;
}
public String getTargetName(EStructuralFeature xmlFeature){
String targetName = xmlFeature.getName();
if (xmlFeature.isMany() && !targetName.endsWith("s")) {
targetName = targetName+"s";
}
return targetName;
}
public String getEcoreAttributeName(EStructuralFeature xmlFeature) {
return getTargetName(xmlFeature);
}
public EAttribute generateShallowAttribute(EClass xmlClass, EClass ecoreClass, EAttribute xmlAttribute) {
String featName = getTargetName(xmlAttribute);
Object existing = getIfExists(ecoreClass.getName()+"."+featName);
EAttribute target = (existing instanceof EAttribute)?((EAttribute)existing):null;
if (target == null) {
target = EcoreFactory.eINSTANCE.createEAttribute();
target.setName(featName);
setSimple(xmlAttribute, target);
target.setID(xmlAttribute.isID());
}
ecoreToXmlFeature.put(target, xmlAttribute);
return target;
}
public void fixOpposites() {
//Don't fix it, since it can't be handled by Xtext!
}
public EReference generateShallowReference(EClass xmlClass, EClass ecoreClass, EReference xmlReference) {
String featName = getTargetName(xmlReference);
EReference target = (EReference)getIfExists(ecoreClass.getName()+"."+featName);
if (target == null) {
target = EcoreFactory.eINSTANCE.createEReference();
target.setName(featName);
setSimple(xmlReference, target);
target.setContainment(xmlReference.isContainment());
}
ecoreToXmlFeature.put(target, xmlReference);
return target;
}
public String getEcoreClassName(EClass xmlClass) {
String targetName = xmlClass.getName();
//Stripping a trailing "Type" suffix is intentionally disabled for now:
//if (targetName.endsWith("Type")) {
//	targetName = targetName.substring(0, targetName.length() - "Type".length());
//}
return targetName;
}
public EClass generateShallowEClass(EClass xmlClass) {
String targetName = getEcoreClassName(xmlClass);
EClass target = (EClass)getIfExists(targetName);
if (target == null) {
target = EcoreFactory.eINSTANCE.createEClass();
target.setName(targetName);
}
return target;
}
// TODO move this to a test class
public static void main(String[] args) {
TypeTransformatorStore store = new TypeTransformatorStore();
ResourceSet basicSet = new ResourceSetImpl();
TransformatorStructure structure = new TransformatorStructure(store, basicSet, new File("library3-base.ecore"));
}
public EObject getXmlEObject(String uriFragment) {
return fragmentToXmlObject.get(uriFragment);
}
public EClass getDocumentRoot() {
return documentRootClassXml;
}
public EClass getXmlRoot() {
return rootClassXml;
}
public EReference getXmlRootReference() {
return rootReferenceXml;
}
public EClass getEcoreRoot() {
return rootClassEcore;
}
public List<Resource> getEcoreResources() {
return ecoreResources;
}
}<|fim▁end|> | boolean idAttributeExisted = false;
//Now you can arbitrarily pick one of the remaining candidates to add the ID attribute
if (!superClasses.isEmpty()) {
|
<|file_name|>export_test.go<|end_file_name|><|fim▁begin|>package ec2
import (
"github.com/crowdmob/goamz/aws"
"time"
)
func Sign(auth aws.Auth, method, path string, params map[string]string, host string) {
sign(auth, method, path, params, host)
}
func fixedTime() time.Time {
return time.Date(2012, 1, 1, 0, 0, 0, 0, time.UTC)
}
func FakeTime(fakeIt bool) {
if fakeIt {
timeNow = fixedTime
} else {
timeNow = time.Now
}<|fim▁hole|><|fim▁end|> | } |
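// Illustrative test usage (hypothetical):
//
//	FakeTime(true)        // pin timeNow to 2012-01-01 for reproducible request signatures
//	defer FakeTime(false) // restore the real clock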
<|file_name|>support.py<|end_file_name|><|fim▁begin|>from pathlib import Path
from unittest import mock, TestCase
from rpg.spec import Spec
class RpgTestCase(TestCase):
test_project_dir = Path("tests/project")
def assertExistInDir(self, expected, pathlibobject):
path = Path(pathlibobject)
for files in expected:
self.assertTrue((path / files).exists(), msg=files)
class PluginTestCase(RpgTestCase):<|fim▁hole|><|fim▁end|> | sack = mock.MagicMock()
spec = Spec() |
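# Illustrative subclass (hypothetical plugin test):
#
# class MyPluginTest(PluginTestCase):
#     def test_generated_files(self):
#         self.assertExistInDir(["pkg.spec"], self.test_project_dir)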
<|file_name|>dup_averages_wgs_nexome.py<|end_file_name|><|fim▁begin|>import csv
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
WGS = ['G69145', 'G71602', 'G71608', 'G76270']
### scratch: import duplicate-metrics text files
# fname = "/seq/picard_aggregation/G69145/NA12878/current/NA12878.duplicate_metrics"
# gdup = pd.read_csv(fname, "\t", skiprows=range(10), comment="#", names=['bin', 'val'])
### read in duplication metric details
metric_dict = {}
for wg in WGS:
fname = "/seq/picard_aggregation/" + wg + "/NA12878/current/NA12878.duplicate_metrics"
lines = list(csv.reader(open(fname)))
nMetrics = lines[6][0].split('\t')
mvals = lines[7][0].split('\t')
# read in depth info
# fdepth = "/seq/picard_aggregation/" + wg + "/NA12878/current/NA12878.depthSM"
metric_dict[wg] = mvals
# put into dataframe
df_wgs = pd.DataFrame.from_dict(metric_dict,orient='index')
df_wgs.columns = nMetrics
df_wgs['platform'] = 'WGS'
### insert size
metric_dict = {}
for wg in WGS:
fname = "/seq/picard_aggregation/" + wg + "/NA12878/current/NA12878.insert_size_metrics"
lines = list(csv.reader(open(fname)))
nMetrics = lines[6][0].split('\t')
mvals = lines[7][0].split('\t')
metric_dict[wg] = mvals
# put into dataframe
insert_wgs = pd.DataFrame.from_dict(metric_dict,orient='index')
insert_wgs.columns = nMetrics
insert_wgs['platform'] = 'WGS'
### Nexome dup data
NexomeIDs = ['359781',
'359877',
'360457',
'361337',
'388072',
'381153',
'364464',
'377582',
'384210',
'384498',
'372754',
'386674',
'393370',
'385972',
'373158',
'379118',
'385390',
'391382',
'383609',
'386068',
'383320',
'383416',
'382684',
'392292',
'376734',
'376014']
metric_dict = {}
for nx in NexomeIDs:
fname = "/seq/picard_aggregation/D5227/NexPond-" + nx + "/current/NexPond-" + nx + ".duplicate_metrics"
lines = list(csv.reader(open(fname)))
nMetrics = lines[6][0].split('\t')
mvals = lines[7][0].split('\t')
metric_dict[nx] = mvals<|fim▁hole|>df_nex['platform'] = 'Nexome'
#concatenate wgs and nexome
frames = [df_wgs, df_nex]
df_merge = pd.concat(frames)
fout = '/home/unix/hogstrom/nexpond_wgs_dup_metrics.txt'
df_merge.to_csv(fout)
### read in locally
import matplotlib.pyplot as plt
import pandas as pd
fin = "/Users/hogstrom/Dropbox (MIT)/genome_analysis/published_data/nexpond_wgs_dup_metrics.txt"
g = pd.read_csv(fin,index_col=0)
g.boxplot('ESTIMATED_LIBRARY_SIZE', by='platform')
g.boxplot('PERCENT_DUPLICATION', by='platform')
g.boxplot('READ_PAIR_DUPLICATES', by='platform')
# plt.plot(g['ESTIMATED_LIBRARY_SIZE'].values,g['PERCENT_DUPLICATION'].values)<|fim▁end|> |
# put into dataframe
df_nex = pd.DataFrame.from_dict(metric_dict,orient='index')
df_nex.columns = nMetrics |
<|file_name|>anonymize.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from uuid import uuid4
from gluon import current
def rlpcm_person_anonymize():
""" Rules to anonymize a case file """
auth = current.auth
s3db = current.s3db
ANONYMOUS = "-"
# Standard anonymizers
from s3db.pr import pr_address_anonymise as anonymous_address, \
pr_person_obscure_dob as obscure_dob
# Helper to produce an anonymous ID (pe_label)
anonymous_id = lambda record_id, f, v: "NN%s" % uuid4().hex[-8:].upper()
anonymous_code = lambda record_id, f, v: uuid4().hex
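# e.g. anonymous_id(record_id, "pe_label", "EVR-1234") could yield "NN6B0F3A9C"
# (illustrative only: the eight-character hex suffix is random)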
# Case Activity Default Closure
activity_closed = s3db.br_case_activity_default_status(closing=True)
# General rule for attachments
documents = ("doc_document", {
"key": "doc_id",
"match": "doc_id",
"fields": {"name": ("set", ANONYMOUS),
"file": "remove",
"url": "remove",
"comments": "remove",
},
"delete": True,
})
# Rule for direct offers (from the offerer perspective)
direct_offers = ("br_direct_offer", {
"key": "offer_id",
"match": "id",
"delete": True,
})
# Rules for user accounts
account = ("auth_user", {
"key": "id",
"match": "user_id",
"fields": {"id": auth.s3_anonymise_roles,
"first_name": ("set", "-"),
"last_name": "remove",
"email": anonymous_code,
"organisation_id": "remove",
"password": auth.s3_anonymise_password,
"deleted": ("set", True),
},
})
# Rules
rules = [
# Rules to remove PID from person record and case file
{"name": "default",
"title": "Names, IDs, Reference Numbers, Contact Information, Addresses",
"fields": {"first_name": ("set", ANONYMOUS),
"last_name": ("set", ANONYMOUS),
"pe_label": anonymous_id,
"date_of_birth": obscure_dob,
"comments": "remove",
},
<|fim▁hole|> },
"cascade": [documents,
],
}),
("pr_contact", {
"key": "pe_id",
"match": "pe_id",
"fields": {"contact_description": "remove",
"value": ("set", ""),
"comments": "remove",
},
"delete": True,
}),
("pr_contact_emergency", {
"key": "pe_id",
"match": "pe_id",
"fields": {"name": ("set", ANONYMOUS),
"relationship": "remove",
"phone": "remove",
"comments": "remove",
},
"delete": True,
}),
("pr_address", {
"key": "pe_id",
"match": "pe_id",
"fields": {"location_id": anonymous_address,
"comments": "remove",
},
}),
("pr_person_details", {
"key": "person_id",
"match": "id",
"fields": {"education": "remove",
"occupation": "remove",
},
}),
("pr_image", {
"key": "pe_id",
"match": "pe_id",
"fields": {"image": "remove",
"url": "remove",
"description": "remove",
},
"delete": True,
}),
("hrm_human_resource", {
"key": "person_id",
"match": "id",
"fields": {"status": ("set", 2),
"site_id": "remove",
"comments": "remove",
},
}),
],
},
# Rules to remove PID from activities and offers
{"name": "activities",
"title": "Needs Reports and Offers of Assistance",
"cascade": [("br_case_activity", {
"key": "person_id",
"match": "id",
"fields": {"location_id": anonymous_address,
"subject": ("set", ANONYMOUS),
"need_details": "remove",
"activity_details": "remove",
"outcome": "remove",
"comments": "remove",
"status_id": ("set", activity_closed),
},
"cascade": [documents,
],
}),
("br_assistance_offer", {
"key": "pe_id",
"match": "pe_id",
"fields": {"name": ("set", ANONYMOUS),
"description": "remove",
"capacity": "remove",
"location_id": anonymous_address,
"contact_name": "remove",
"contact_phone": "remove",
"contact_email": "remove",
"availability": ("set", "RTD"),
"comments": "remove",
},
"cascade": [direct_offers,
],
}),
],
},
# Rules to unlink and remove user account
{"name": "account",
"title": "User Account",
"cascade": [("pr_person_user", {
"key": "pe_id",
"match": "pe_id",
"cascade": [account,
],
"delete": True,
}),
],
},
]
return rules<|fim▁end|> | "cascade": [("br_case", {
"key": "person_id",
"match": "id",
"fields": {"comments": "remove", |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
<|fim▁hole|>from Tools.Directories import resolveFilename, SCOPE_PLUGINS
import gettext
PluginLanguageDomain = "OpenWebif"
PluginLanguagePath = "Extensions/OpenWebif/locale"
def localeInit():
gettext.bindtextdomain(PluginLanguageDomain, resolveFilename(SCOPE_PLUGINS, PluginLanguagePath))
def _(txt):
t = gettext.dgettext(PluginLanguageDomain, txt)
if t == txt:
t = gettext.gettext(txt)
return t
localeInit()
language.addCallback(localeInit)<|fim▁end|> | from Components.Language import language |
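# Illustrative usage (hypothetical caller elsewhere in the plugin):
# from Plugins.Extensions.OpenWebif import _
# label = _("Movies")  # plugin catalogue first, then global gettext fallback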
<|file_name|>html.js<|end_file_name|><|fim▁begin|>var Taco = Taco || {};
Taco.Util = Taco.Util || {};
Taco.Util.HTML = Taco.Util.HTML || {};
Taco.Util.HTML.render = function(str, html) {
html = (typeof html != 'undefined')
? html
: false;
return (html === true)
? str
: Taco.Util.Str.htmlEntities(str);
};
Taco.Util.HTML.attribs = function(attribs, leading_space) {
<|fim▁hole|>
var out = [];
for(var key in attribs) {
var value = attribs[key];
value = (typeof value == 'object') ? Taco.Util.Obj.objectJoin(' ', value) : value;
out.push(key + '="' + String(value).replace(/\"/, '\"') + '"');
}
return ((leading_space) ? ' ' : '') + out.join(' ');
};
Taco.Util.HTML.getTextInputTypes = function() {
return [
'text',
'image',
'file',
'search',
'email',
'url',
'tel',
'number',
'range',
'date',
'month',
'week',
'time',
'datetime',
'datetime-local',
'color'
];
};
Taco.Util.HTML.tag = function(element_type, body, attribs, close, is_html) {
body = (typeof body == 'undefined' || body === null)
? ''
: body;
attribs = (typeof attribs == 'undefined')
? []
: attribs;
close = (typeof close == 'undefined')
? true
: close;
is_html = (typeof is_html == 'undefined')
? false
: is_html;
var not_self_closing = ['a', 'div', 'iframe', 'textarea'];
var is_self_closing = false;
if(close && Taco.Util.General.empty(body) && !Taco.Util.Arr.inArray(
element_type.toLowerCase(),
not_self_closing
)) {
is_self_closing = true;
}
if(is_self_closing) {
return '<' + element_type + this.attribs(attribs) + ' />';
}
return [
'<' + element_type + this.attribs(attribs) + '>',
this.render(body, is_html),
(close) ? '</' + element_type + '>' : ''
].join('');
};
Taco.Util.HTML.selecty = function(options, selected, attribs) {
selected = (typeof selected != 'undefined')
? selected
: null;
attribs = (typeof attribs != 'undefined')
? attribs
: [];
var htmls = [];
htmls.push(this.tag('select', null, attribs, false));
if(Taco.Util.Obj.isIterable(options)) {
for(var key in options) {
value = options[key];
var option_attribs = { value: key };
if(String(selected) === String(value)) {
option_attribs.selected = 'selected';
}
htmls.push(this.tag('option', value, option_attribs));
}
}
htmls.push('</select>');
return htmls.join("\n");
};<|fim▁end|> | leading_space = (typeof leading_space != 'undefined')
? leading_space
: true;
if(Taco.Util.Obj.getObjectLength(attribs) < 1) return ''; |
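// Illustrative usage of Taco.Util.HTML.selecty (hypothetical options):
// Taco.Util.HTML.selecty({ a: 'Apple', b: 'Banana' }, 'Banana', { name: 'fruit' });
// // -> '<select name="fruit">' plus one option tag per entry, with the "Banana" option marked selected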
<|file_name|>resumeButtons.js<|end_file_name|><|fim▁begin|>/**
* A script for handling the bootstrap switch on the resume page.
*/
// Import css.
require("bootstrap-switch/dist/css/bootstrap3/bootstrap-switch.min.css");
require('bootstrap-switch');
$("[name='my-checkbox']").bootstrapSwitch();
// http://www.bootstrap-switch.org/events.html
$('input[name="my-checkbox"]').on('switchChange.bootstrapSwitch', function(event, state) {
var img = document.getElementById("resume-image");<|fim▁hole|> if (state) {
img.src = "/Content/Resume/resume.png";
// Changes the download link.
pdf_link.href = "/Content/Resume/resume.pdf";
pdf_link.download = 'MattGaikemaResume';
}
else {
img.src = "/Content/Resume/cv.png";
pdf_link.href = "/Content/Resume/cv.pdf";
pdf_link.download = 'MattGaikemaCV';
}
});<|fim▁end|> | var pdf_link = document.getElementById('pdf_link');
|
<|file_name|>init-twice-retains-filter.rs<|end_file_name|><|fim▁begin|>extern crate env_logger;
extern crate log;
use std::env;
use std::process;
use std::str;
fn main() {
if env::var("YOU_ARE_TESTING_NOW").is_ok() {
// Init from the env (which should set the max level to `Debug`)
env_logger::init();
assert_eq!(log::LevelFilter::Debug, log::max_level());
// Init again using a different max level
// This shouldn't clobber the level that was previously set
env_logger::Builder::new()
.parse_filters("info")
.try_init()
.unwrap_err();
assert_eq!(log::LevelFilter::Debug, log::max_level());
return;
}
let exe = env::current_exe().unwrap();
let out = process::Command::new(exe)<|fim▁hole|> .env("RUST_LOG", "debug")
.output()
.unwrap_or_else(|e| panic!("Unable to start child process: {}", e));
if out.status.success() {
return;
}
println!("test failed: {}", out.status);
println!("--- stdout\n{}", str::from_utf8(&out.stdout).unwrap());
println!("--- stderr\n{}", str::from_utf8(&out.stderr).unwrap());
process::exit(1);
}<|fim▁end|> | .env("YOU_ARE_TESTING_NOW", "1") |
<|file_name|>htmlbrelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLBRElementBinding;
use dom::bindings::root::DomRoot;
use dom::document::Document;
use dom::htmlelement::HTMLElement;
use dom::node::Node;
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
#[dom_struct]
pub struct HTMLBRElement {<|fim▁hole|> fn new_inherited(local_name: LocalName, prefix: Option<Prefix>, document: &Document) -> HTMLBRElement {
HTMLBRElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(local_name: LocalName,
prefix: Option<Prefix>,
document: &Document) -> DomRoot<HTMLBRElement> {
Node::reflect_node(Box::new(HTMLBRElement::new_inherited(local_name, prefix, document)),
document,
HTMLBRElementBinding::Wrap)
}
}<|fim▁end|> | htmlelement: HTMLElement,
}
impl HTMLBRElement { |
<|file_name|>server.go<|end_file_name|><|fim▁begin|>package server
import (
"bytes"
"context"
"encoding/json"
"io/ioutil"
"log"
"net/http"
"strings"
"sync"
"sync/atomic"
"text/template"
"time"
"github.com/TV4/graceful"
"github.com/gogap/config"
"github.com/gogap/go-wkhtmltox/wkhtmltox"
"github.com/gorilla/mux"
"github.com/phyber/negroni-gzip/gzip"
"github.com/rs/cors"
"github.com/spf13/cast"
"github.com/urfave/negroni"
)
const (
defaultTemplateText = `{"code":{{.Code}},"message":"{{.Message}}"{{if .Result}},"result":{{.Result|jsonify}}{{end}}}`
)
var (
htmlToX *wkhtmltox.WKHtmlToX
renderTmpls = make(map[string]*template.Template)
defaultTmpl *template.Template
)
type ConvertData struct {
Data []byte `json:"data"`
}
type ConvertArgs struct {
To string `json:"to"`
Fetcher wkhtmltox.FetcherOptions `json:"fetcher"`
Converter json.RawMessage `json:"converter"`
Template string `json:"template"`
}
type TemplateArgs struct {
To string
ConvertResponse
Response *RespHelper
}
type ConvertResponse struct {
Code int `json:"code"`
Message string `json:"message"`
Result interface{} `json:"result"`
}
type serverWrapper struct {
tls bool
certFile string
keyFile string
reqNumber int64
addr string
n *negroni.Negroni
timeout time.Duration
}
func (p *serverWrapper) ServeHTTP(w http.ResponseWriter, r *http.Request) {
atomic.AddInt64(&p.reqNumber, 1)
defer atomic.AddInt64(&p.reqNumber, -1)
p.n.ServeHTTP(w, r)
}
func (p *serverWrapper) ListenAndServe() (err error) {
if p.tls {
err = http.ListenAndServeTLS(p.addr, p.certFile, p.keyFile, p)
} else {
err = http.ListenAndServe(p.addr, p)
}
return
}
func (p *serverWrapper) Shutdown(ctx context.Context) error {
num := atomic.LoadInt64(&p.reqNumber)
schema := "HTTP"
if p.tls {
schema = "HTTPS"
}
beginTime := time.Now()
for num > 0 {
time.Sleep(time.Second)
num = atomic.LoadInt64(&p.reqNumber) // re-check in-flight requests each tick
if time.Since(beginTime) > p.timeout {
break
}
}
log.Printf("[%s] Shutdown finished, Address: %s\n", schema, p.addr)
return nil
}
type WKHtmlToXServer struct {
conf config.Configuration
servers []*serverWrapper
}
func New(conf config.Configuration) (srv *WKHtmlToXServer, err error) {
serviceConf := conf.GetConfig("service")
wkHtmlToXConf := conf.GetConfig("wkhtmltox")
htmlToX, err = wkhtmltox.New(wkHtmlToXConf)
if err != nil {
return
}
// init templates
defaultTmpl, err = template.New("default").Funcs(funcMap).Parse(defaultTemplateText)
if err != nil {
return
}
err = loadTemplates(
serviceConf.GetConfig("templates"),
)
if err != nil {
return
}
// init http server
c := cors.New(
cors.Options{
AllowedOrigins: serviceConf.GetStringList("cors.allowed-origins"),
AllowedMethods: serviceConf.GetStringList("cors.allowed-methods"),
AllowedHeaders: serviceConf.GetStringList("cors.allowed-headers"),
ExposedHeaders: serviceConf.GetStringList("cors.exposed-headers"),
AllowCredentials: serviceConf.GetBoolean("cors.allow-credentials"),
MaxAge: int(serviceConf.GetInt64("cors.max-age")),
OptionsPassthrough: serviceConf.GetBoolean("cors.options-passthrough"),
Debug: serviceConf.GetBoolean("cors.debug"),
},
)
r := mux.NewRouter()
pathPrefix := serviceConf.GetString("path", "/")
r.PathPrefix(pathPrefix).Path("/convert").
Methods("POST").
HandlerFunc(handleHtmlToX)
r.PathPrefix(pathPrefix).Path("/ping").
Methods("GET", "HEAD").HandlerFunc(
func(rw http.ResponseWriter, req *http.Request) {
rw.Header().Set("Content-Type", "text/plain; charset=utf-8")
rw.Write([]byte("pong"))
},
)
n := negroni.Classic()
n.Use(c) // use cors
if serviceConf.GetBoolean("gzip-enabled", true) {
n.Use(gzip.Gzip(gzip.DefaultCompression))
}
n.UseHandler(r)
gracefulTimeout := serviceConf.GetTimeDuration("graceful.timeout", time.Second*3)
enableHTTP := serviceConf.GetBoolean("http.enabled", true)
enableHTTPS := serviceConf.GetBoolean("https.enabled", false)
var servers []*serverWrapper
if enableHTTP {
listenAddr := serviceConf.GetString("http.address", "127.0.0.1:8080")
httpServer := &serverWrapper{
n: n,
timeout: gracefulTimeout,
addr: listenAddr,
}
servers = append(servers, httpServer)
}
if enableHTTPS {
listenAddr := serviceConf.GetString("http.address", "127.0.0.1:443")
certFile := serviceConf.GetString("https.cert")
keyFile := serviceConf.GetString("https.key")
httpsServer := &serverWrapper{
n: n,
timeout: gracefulTimeout,
addr: listenAddr,
tls: true,
certFile: certFile,
keyFile: keyFile,
}
servers = append(servers, httpsServer)
}
srv = &WKHtmlToXServer{
conf: conf,
servers: servers,
}
return
}
func (p *WKHtmlToXServer) Run() (err error) {
wg := sync.WaitGroup{}
wg.Add(len(p.servers))
for i := 0; i < len(p.servers); i++ {
go func(srv *serverWrapper) {
defer wg.Done()
schema := "HTTP"
if srv.tls {
schema = "HTTPS"
}
log.Printf("[%s] Listening on %s\n", schema, srv.addr)
graceful.ListenAndServe(srv)
}(p.servers[i])
}
wg.Wait()
return
}
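// Illustrative wiring (hypothetical main; assumes a config.Configuration
// value `conf` produced by this project's config loader):
//
//	srv, err := New(conf)
//	if err != nil {
//		log.Fatal(err)
//	}
//	srv.Run()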
func writeResp(rw http.ResponseWriter, convertArgs ConvertArgs, resp ConvertResponse) {
var tmpl *template.Template
if len(convertArgs.Template) == 0 {
tmpl = defaultTmpl
} else {
var exist bool
tmpl, exist = renderTmpls[convertArgs.Template]
if !exist {
tmpl = defaultTmpl
}
}
respHelper := newRespHelper(rw)
args := TemplateArgs{
To: convertArgs.To,
ConvertResponse: resp,
Response: respHelper,
}
buf := bytes.NewBuffer(nil)
err := tmpl.Execute(buf, args)
if err != nil {
log.Println(err)
}
if !respHelper.Holding() {
rw.Write(buf.Bytes())
}
}
func handleHtmlToX(rw http.ResponseWriter, req *http.Request) {
decoder := json.NewDecoder(req.Body)
decoder.UseNumber()
args := ConvertArgs{}
err := decoder.Decode(&args)
if err != nil {
writeResp(rw, args, ConvertResponse{http.StatusBadRequest, err.Error(), nil})
return
}
if len(args.Converter) == 0 {
writeResp(rw, args, ConvertResponse{http.StatusBadRequest, "converter is nil", nil})
return
}
to := strings.ToUpper(args.To)
var opts wkhtmltox.ConvertOptions
if to == "IMAGE" {
opts = &wkhtmltox.ToImageOptions{}
} else if to == "PDF" {
opts = &wkhtmltox.ToPDFOptions{}
} else {
writeResp(rw, args, ConvertResponse{http.StatusBadRequest, "argument of to is illegal (image|pdf)", nil})
return
}
err = json.Unmarshal(args.Converter, opts)
if err != nil {
writeResp(rw, args, ConvertResponse{http.StatusBadRequest, err.Error(), nil})
return
}
var convData []byte
convData, err = htmlToX.Convert(args.Fetcher, opts)
if err != nil {
writeResp(rw, args, ConvertResponse{http.StatusBadRequest, err.Error(), nil})
return
}
writeResp(rw, args, ConvertResponse{0, "", ConvertData{Data: convData}})
}
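// Illustrative request body for /convert (hypothetical field values; the exact
// fetcher/converter parameters depend on the wkhtmltox package configuration):
//
//	{
//	  "to": "image",
//	  "fetcher": {"name": "http", "params": {"url": "https://example.com"}},
//	  "converter": {"format": "png"},
//	  "template": "default"
//	}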
func loadTemplates(tmplsConf config.Configuration) (err error) {
if tmplsConf == nil {
return
}
tmpls := tmplsConf.Keys()
for _, name := range tmpls {
file := tmplsConf.GetString(name + ".template")
tmpl := template.New(name).Funcs(funcMap)
var data []byte
data, err = ioutil.ReadFile(file)
if err != nil {
return
}
tmpl, err = tmpl.Parse(string(data))
if err != nil {
return
}
renderTmpls[name] = tmpl
}
return
}
type RespHelper struct {
rw http.ResponseWriter
hold bool
}
<|fim▁hole|>func newRespHelper(rw http.ResponseWriter) *RespHelper {
return &RespHelper{
rw: rw,
hold: false,
}
}
func (p *RespHelper) SetHeader(key, value interface{}) error {
k := cast.ToString(key)
v := cast.ToString(value)
p.rw.Header().Set(k, v)
return nil
}
func (p *RespHelper) Hold(v interface{}) error {
h := cast.ToBool(v)
p.hold = h
return nil
}
func (p *RespHelper) Holding() bool {
return p.hold
}
func (p *RespHelper) Write(data []byte) error {
p.rw.Write(data)
return nil
}
func (p *RespHelper) WriteHeader(code interface{}) error {
c, err := cast.ToIntE(code)
if err != nil {
return err
}
p.rw.WriteHeader(c)
return nil
}<|fim▁end|> | |
<|file_name|>perf.js<|end_file_name|><|fim▁begin|>;(function() {
/** Used to access the Firebug Lite panel (set by `run`). */
var fbPanel;
/** Used as a safe reference for `undefined` in pre ES5 environments. */
var undefined;
/** Used as a reference to the global object. */
var root = typeof global == 'object' && global || this;
/** Method and object shortcuts. */
var phantom = root.phantom,
amd = root.define && define.amd,
argv = root.process && process.argv,
document = !phantom && root.document,
noop = function() {},
params = root.arguments,
system = root.system;
/** Add `console.log()` support for Narwhal, Rhino, and RingoJS. */
var console = root.console || (root.console = { 'log': root.print });
/** The file path of the Lo-Dash file to test. */
var filePath = (function() {
var min = 0,
result = [];
if (phantom) {
result = params = phantom.args;
} else if (system) {
min = 1;
result = params = system.args;
} else if (argv) {
min = 2;
result = params = argv;
} else if (params) {
result = params;
}
var last = result[result.length - 1];
result = (result.length > min && !/perf(?:\.js)?$/.test(last)) ? last : '../lodash.js';
if (!amd) {
try {
result = require('fs').realpathSync(result);
} catch(e) {}
try {
result = require.resolve(result);
} catch(e) {}
}
return result;
}());
/** Used to match path separators. */
var rePathSeparator = /[\/\\]/;
/** Used to detect primitive types. */
var rePrimitive = /^(?:boolean|number|string|undefined)$/;
/** Used to match RegExp special characters. */
var reSpecialChars = /[.*+?^=!:${}()|[\]\/\\]/g;
/** The `ui` object. */
var ui = root.ui || (root.ui = {
'buildPath': basename(filePath, '.js'),
'otherPath': 'underscore'
});
/** The Lo-Dash build basename. */
var buildName = root.buildName = basename(ui.buildPath, '.js');
/** The other library basename. */
var otherName = root.otherName = (function() {
var result = basename(ui.otherPath, '.js');
return result + (result == buildName ? ' (2)' : '');
}());
/** Used to score performance. */
var score = { 'a': [], 'b': [] };
/** Used to queue benchmark suites. */
var suites = [];
/** Used to resolve a value's internal [[Class]]. */
var toString = Object.prototype.toString;
/** Detect if in a browser environment. */
var isBrowser = isHostType(root, 'document') && isHostType(root, 'navigator');
/** Detect if in a Java environment. */
var isJava = !isBrowser && /Java/.test(toString.call(root.java));
/** Use a single "load" function. */
var load = (typeof require == 'function' && !amd)
? require
: (isJava && root.load) || noop;
/** Load Lo-Dash. */
var lodash = root.lodash || (root.lodash = (
lodash = load(filePath) || root._,
lodash = lodash._ || lodash,
(lodash.runInContext ? lodash.runInContext(root) : lodash),
lodash.noConflict()
));
/** Load Benchmark.js. */
var Benchmark = root.Benchmark || (root.Benchmark = (
Benchmark = load('../vendor/benchmark.js/benchmark.js') || root.Benchmark,
Benchmark = Benchmark.Benchmark || Benchmark,
Benchmark.runInContext(lodash.extend({}, root, { '_': lodash }))
));
/** Load Underscore. */
var _ = root._ || (root._ = (
_ = load('../vendor/underscore/underscore.js') || root._,
_._ || _
));
/*--------------------------------------------------------------------------*/
/**
* Gets the basename of the given `filePath`. If the file `extension` is passed,
* it will be removed from the basename.
*
* @private
* @param {string} path The file path to inspect.
* @param {string} extension The extension to remove.
* @returns {string} Returns the basename.
*/
function basename(filePath, extension) {
var result = (filePath || '').split(rePathSeparator).pop();
return (arguments.length < 2)
? result
: result.replace(RegExp(extension.replace(reSpecialChars, '\\$&') + '$'), '');
}
/**
* Computes the geometric mean (log-average) of an array of values.
* See http://en.wikipedia.org/wiki/Geometric_mean#Relationship_with_arithmetic_mean_of_logarithms.
*
* @private
* @param {Array} array The array of values.
* @returns {number} The geometric mean.
*/
function getGeometricMean(array) {
return Math.pow(Math.E, lodash.reduce(array, function(sum, x) {
return sum + Math.log(x);
}, 0) / array.length) || 0;
}
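// Illustrative check: getGeometricMean([1, 10, 100]) -> 10,
// since exp((ln(1) + ln(10) + ln(100)) / 3) == 10.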
/**
* Gets the Hz, i.e. operations per second, of `bench` adjusted for the
* margin of error.
*
* @private
* @param {Object} bench The benchmark object.
* @returns {number} Returns the adjusted Hz.
*/
function getHz(bench) {
var result = 1 / (bench.stats.mean + bench.stats.moe);
return isFinite(result) ? result : 0;
}
/**
* Host objects can return type values that are different from their actual
* data type. The objects we are concerned with usually return non-primitive
* types of "object", "function", or "unknown".
*
* @private
* @param {*} object The owner of the property.
* @param {string} property The property to check.
* @returns {boolean} Returns `true` if the property value is a non-primitive, else `false`.
*/
function isHostType(object, property) {
if (object == null) {
return false;
}
var type = typeof object[property];
return !rePrimitive.test(type) && (type != 'object' || !!object[property]);
}
/**
* Logs text to the console.
*
* @private
* @param {string} text The text to log.
*/
function log(text) {
console.log(text + '');
if (fbPanel) {
// Scroll the Firebug Lite panel down.
fbPanel.scrollTop = fbPanel.scrollHeight;
}
}
/**
* Runs all benchmark suites.
*
* @private (@public in the browser)
*/
function run() {
fbPanel = (fbPanel = root.document && document.getElementById('FirebugUI')) &&
(fbPanel = (fbPanel = fbPanel.contentWindow || fbPanel.contentDocument).document || fbPanel) &&
fbPanel.getElementById('fbPanel1');
log('\nSit back and relax, this may take a while.');
suites[0].run({ 'async': !isJava });
}
/*--------------------------------------------------------------------------*/
lodash.extend(Benchmark.Suite.options, {
'onStart': function() {
log('\n' + this.name + ':');
},
'onCycle': function(event) {
log(event.target);
},
'onComplete': function() {
for (var index = 0, length = this.length; index < length; index++) {
var bench = this[index];
if (bench.error) {
var errored = true;
}
}
if (errored) {
log('There was a problem, skipping...');
}
else {
var formatNumber = Benchmark.formatNumber,
fastest = this.filter('fastest'),
fastestHz = getHz(fastest[0]),
slowest = this.filter('slowest'),
slowestHz = getHz(slowest[0]),
aHz = getHz(this[0]),
bHz = getHz(this[1]);
if (fastest.length > 1) {
log('It\'s too close to call.');
aHz = bHz = slowestHz;
}
else {
var percent = ((fastestHz / slowestHz) - 1) * 100;
log(
fastest[0].name + ' is ' +
formatNumber(percent < 1 ? percent.toFixed(2) : Math.round(percent)) +
'% faster.'
);
}
// Add score adjusted for margin of error.
score.a.push(aHz);
score.b.push(bHz);
}
// Remove current suite from queue.
suites.shift();
if (suites.length) {
// Run next suite.
suites[0].run({ 'async': !isJava });
}
else {
var aMeanHz = getGeometricMean(score.a),
bMeanHz = getGeometricMean(score.b),
fastestMeanHz = Math.max(aMeanHz, bMeanHz),
slowestMeanHz = Math.min(aMeanHz, bMeanHz),
xFaster = fastestMeanHz / slowestMeanHz,
percentFaster = formatNumber(Math.round((xFaster - 1) * 100)),
message = 'is ' + percentFaster + '% ' + (xFaster == 1 ? '' : '(' + formatNumber(xFaster.toFixed(2)) + 'x) ') + 'faster than';
// Report results.
if (aMeanHz >= bMeanHz) {
log('\n' + buildName + ' ' + message + ' ' + otherName + '.');
} else {
log('\n' + otherName + ' ' + message + ' ' + buildName + '.');
}
}
}
});
/*--------------------------------------------------------------------------*/
lodash.extend(Benchmark.options, {
'async': true,
'setup': '\
var _ = global._,\
lodash = global.lodash,\
belt = this.name == buildName ? lodash : _;\
\
var date = new Date,\
limit = 20,\
regexp = /x/,\
object = {},\
objects = Array(limit),\
numbers = Array(limit),\
fourNumbers = [5, 25, 10, 30],\
nestedNumbers = [1, [2], [3, [[4]]]],\
nestedObjects = [{}, [{}], [{}, [[{}]]]],\
twoNumbers = [12, 23];\
\
for (var index = 0; index < limit; index++) {\
numbers[index] = index;\
object["key" + index] = index;\
objects[index] = { "num": index };\
}\
var strNumbers = numbers + "";\
\
if (typeof bind != "undefined") {\
var thisArg = { "name": "fred" };\
\
var func = function(greeting, punctuation) {\
return (greeting || "hi") + " " + this.name + (punctuation || ".");\
};\
\
var _boundNormal = _.bind(func, thisArg),\
_boundMultiple = _boundNormal,\
_boundPartial = _.bind(func, thisArg, "hi");\
\
var lodashBoundNormal = lodash.bind(func, thisArg),\
lodashBoundMultiple = lodashBoundNormal,\
lodashBoundPartial = lodash.bind(func, thisArg, "hi");\
\
for (index = 0; index < 10; index++) {\
_boundMultiple = _.bind(_boundMultiple, { "name": "fred" + index });\
lodashBoundMultiple = lodash.bind(lodashBoundMultiple, { "name": "fred" + index });\
}\
}\
if (typeof bindAll != "undefined") {\
var bindAllCount = -1,\
bindAllObjects = Array(this.count);\
\
var funcNames = belt.reject(belt.functions(belt).slice(0, 40), function(funcName) {\
return /^_/.test(funcName);\
});\
\
// Potentially expensive.\n\
for (index = 0; index < this.count; index++) {\
bindAllObjects[index] = belt.reduce(funcNames, function(object, funcName) {\
object[funcName] = belt[funcName];\
return object;\
}, {});\
}\
}\
if (typeof chaining != "undefined") {\
var even = function(v) { return v % 2 == 0; },\
square = function(v) { return v * v; };\
\
var largeArray = belt.range(10000),\
_chaining = _.chain ? _(largeArray).chain() : _(largeArray),\
lodashChaining = lodash(largeArray);\
}\
if (typeof compact != "undefined") {\
var uncompacted = numbers.slice();\
uncompacted[2] = false;\
uncompacted[6] = null;\
uncompacted[18] = "";\
}\
if (typeof compose != "undefined") {\
var compAddOne = function(n) { return n + 1; },\
compAddTwo = function(n) { return n + 2; },\
compAddThree = function(n) { return n + 3; };\
\
var _composed = _.compose(compAddThree, compAddTwo, compAddOne),\
lodashComposed = lodash.compose(compAddThree, compAddTwo, compAddOne);\
}\
if (typeof countBy != "undefined" || typeof omit != "undefined") {\
var wordToNumber = {\
"one": 1,\
"two": 2,\
"three": 3,\
"four": 4,\
"five": 5,\
"six": 6,\
"seven": 7,\
"eight": 8,\
"nine": 9,\
"ten": 10,\
"eleven": 11,\
"twelve": 12,\
"thirteen": 13,\
"fourteen": 14,\
"fifteen": 15,\
"sixteen": 16,\
"seventeen": 17,\
"eighteen": 18,\
"nineteen": 19,\
"twenty": 20,\
"twenty-one": 21,\
"twenty-two": 22,\
"twenty-three": 23,\
"twenty-four": 24,\
"twenty-five": 25,\
"twenty-six": 26,\
"twenty-seven": 27,\
"twenty-eight": 28,\
"twenty-nine": 29,\
"thirty": 30,\
"thirty-one": 31,\
"thirty-two": 32,\
"thirty-three": 33,\
"thirty-four": 34,\
"thirty-five": 35,\
"thirty-six": 36,\
"thirty-seven": 37,\
"thirty-eight": 38,\
"thirty-nine": 39,\
"forty": 40\
};\
\
var words = belt.keys(wordToNumber).slice(0, limit);\
}\
if (typeof flatten != "undefined") {\
var _flattenDeep = _.flatten([[1]])[0] !== 1,\
lodashFlattenDeep = lodash.flatten([[1]])[0] !== 1;\
}\
if (typeof isEqual != "undefined") {\
var objectOfPrimitives = {\
"boolean": true,\
"number": 1,\
"string": "a"\
};\
\
var objectOfObjects = {\
"boolean": new Boolean(true),\
"number": new Number(1),\
"string": new String("a")\
};\
\
var objectOfObjects2 = {\
"boolean": new Boolean(true),\
"number": new Number(1),\
"string": new String("A")\
};\
\
var object2 = {},\
object3 = {},\
objects2 = Array(limit),\
objects3 = Array(limit),\
numbers2 = Array(limit),\
numbers3 = Array(limit),\
nestedNumbers2 = [1, [2], [3, [[4]]]],\
nestedNumbers3 = [1, [2], [3, [[6]]]];\
\
for (index = 0; index < limit; index++) {\
object2["key" + index] = index;\
object3["key" + index] = index;\
objects2[index] = { "num": index };\
objects3[index] = { "num": index };\
numbers2[index] = index;\
numbers3[index] = index;\
}\
object3["key" + (limit - 1)] = -1;\
objects3[limit - 1].num = -1;\
numbers3[limit - 1] = -1;\
}\
if (typeof matches != "undefined") {\
var source = { "num": 9 };\<|fim▁hole|> \
var _findWhere = _.findWhere || _.find,\
_match = (_.matches || _.createCallback || _.noop)(source);\
\
var lodashFindWhere = lodash.findWhere || lodash.find,\
lodashMatch = (lodash.matches || lodash.createCallback || lodash.noop)(source);\
}\
if (typeof multiArrays != "undefined") {\
var twentyValues = belt.shuffle(belt.range(20)),\
fortyValues = belt.shuffle(belt.range(40)),\
hundredSortedValues = belt.range(100),\
hundredValues = belt.shuffle(hundredSortedValues),\
hundredValues2 = belt.shuffle(hundredValues),\
hundredTwentyValues = belt.shuffle(belt.range(120)),\
hundredTwentyValues2 = belt.shuffle(hundredTwentyValues),\
twoHundredValues = belt.shuffle(belt.range(200)),\
twoHundredValues2 = belt.shuffle(twoHundredValues);\
}\
if (typeof partial != "undefined") {\
var func = function(greeting, punctuation) {\
return greeting + " fred" + (punctuation || ".");\
};\
\
var _partial = _.partial(func, "hi"),\
lodashPartial = lodash.partial(func, "hi");\
}\
if (typeof template != "undefined") {\
var tplData = {\
"header1": "Header1",\
"header2": "Header2",\
"header3": "Header3",\
"header4": "Header4",\
"header5": "Header5",\
"header6": "Header6",\
"list": ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10"]\
};\
\
var tpl =\
"<div>" +\
"<h1 class=\'header1\'><%= header1 %></h1>" +\
"<h2 class=\'header2\'><%= header2 %></h2>" +\
"<h3 class=\'header3\'><%= header3 %></h3>" +\
"<h4 class=\'header4\'><%= header4 %></h4>" +\
"<h5 class=\'header5\'><%= header5 %></h5>" +\
"<h6 class=\'header6\'><%= header6 %></h6>" +\
"<ul class=\'list\'>" +\
"<% for (var index = 0, length = list.length; index < length; index++) { %>" +\
"<li class=\'item\'><%= list[index] %></li>" +\
"<% } %>" +\
"</ul>" +\
"</div>";\
\
var tplVerbose =\
"<div>" +\
"<h1 class=\'header1\'><%= data.header1 %></h1>" +\
"<h2 class=\'header2\'><%= data.header2 %></h2>" +\
"<h3 class=\'header3\'><%= data.header3 %></h3>" +\
"<h4 class=\'header4\'><%= data.header4 %></h4>" +\
"<h5 class=\'header5\'><%= data.header5 %></h5>" +\
"<h6 class=\'header6\'><%= data.header6 %></h6>" +\
"<ul class=\'list\'>" +\
"<% for (var index = 0, length = data.list.length; index < length; index++) { %>" +\
"<li class=\'item\'><%= data.list[index] %></li>" +\
"<% } %>" +\
"</ul>" +\
"</div>";\
\
var settingsObject = { "variable": "data" };\
\
var _tpl = _.template(tpl),\
_tplVerbose = _.template(tplVerbose, null, settingsObject);\
\
var lodashTpl = lodash.template(tpl),\
lodashTplVerbose = lodash.template(tplVerbose, null, settingsObject);\
}\
if (typeof wrap != "undefined") {\
var add = function(a, b) {\
return a + b;\
};\
\
var average = function(func, a, b) {\
return (func(a, b) / 2).toFixed(2);\
};\
\
var _wrapped = _.wrap(add, average);\
lodashWrapped = lodash.wrap(add, average);\
}\
if (typeof zip != "undefined") {\
var unzipped = [["a", "b", "c"], [1, 2, 3], [true, false, true]];\
}'
});
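  // How the fixture blocks in the shared `setup` string are switched on:
  // Benchmark.js compiles `setup`, `fn`, and `teardown` into one function
  // body, so a suite that declares 'teardown': 'function bind(){}' makes
  // `typeof bind != "undefined"` evaluate to true inside `setup`, enabling
  // only the fixtures that suite actually needs.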
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_(...).map(...).filter(...).take(...).value()`')
.add(buildName, {
'fn': 'lodashChaining.map(square).filter(even).take(100).value()',
'teardown': 'function chaining(){}'
})
.add(otherName, {
'fn': '_chaining.map(square).filter(even).take(100).value()',
'teardown': 'function chaining(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.bind` (slow path)')
.add(buildName, {
'fn': 'lodash.bind(function() { return this.name; }, { "name": "fred" })',
'teardown': 'function bind(){}'
})
.add(otherName, {
'fn': '_.bind(function() { return this.name; }, { "name": "fred" })',
'teardown': 'function bind(){}'
})
);
suites.push(
Benchmark.Suite('bound call with arguments')
.add(buildName, {
'fn': 'lodashBoundNormal("hi", "!")',
'teardown': 'function bind(){}'
})
.add(otherName, {
'fn': '_boundNormal("hi", "!")',
'teardown': 'function bind(){}'
})
);
suites.push(
Benchmark.Suite('bound and partially applied call with arguments')
.add(buildName, {
'fn': 'lodashBoundPartial("!")',
'teardown': 'function bind(){}'
})
.add(otherName, {
'fn': '_boundPartial("!")',
'teardown': 'function bind(){}'
})
);
suites.push(
Benchmark.Suite('bound multiple times')
.add(buildName, {
'fn': 'lodashBoundMultiple()',
'teardown': 'function bind(){}'
})
.add(otherName, {
'fn': '_boundMultiple()',
'teardown': 'function bind(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.bindAll` iterating arguments')
.add(buildName, {
'fn': 'lodash.bindAll.apply(lodash, [bindAllObjects[++bindAllCount]].concat(funcNames))',
'teardown': 'function bindAll(){}'
})
.add(otherName, {
'fn': '_.bindAll.apply(_, [bindAllObjects[++bindAllCount]].concat(funcNames))',
'teardown': 'function bindAll(){}'
})
);
suites.push(
Benchmark.Suite('`_.bindAll` iterating the `object`')
.add(buildName, {
'fn': 'lodash.bindAll(bindAllObjects[++bindAllCount])',
'teardown': 'function bindAll(){}'
})
.add(otherName, {
'fn': '_.bindAll(bindAllObjects[++bindAllCount])',
'teardown': 'function bindAll(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.clone` with an array')
.add(buildName, '\
lodash.clone(numbers)'
)
.add(otherName, '\
_.clone(numbers)'
)
);
suites.push(
Benchmark.Suite('`_.clone` with an object')
.add(buildName, '\
lodash.clone(object)'
)
.add(otherName, '\
_.clone(object)'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.compact`')
.add(buildName, {
'fn': 'lodash.compact(uncompacted)',
'teardown': 'function compact(){}'
})
.add(otherName, {
'fn': '_.compact(uncompacted)',
'teardown': 'function compact(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.compose`')
.add(buildName, {
'fn': 'lodash.compose(compAddThree, compAddTwo, compAddOne)',
'teardown': 'function compose(){}'
})
.add(otherName, {
'fn': '_.compose(compAddThree, compAddTwo, compAddOne)',
'teardown': 'function compose(){}'
})
);
suites.push(
Benchmark.Suite('composed call')
.add(buildName, {
'fn': 'lodashComposed(0)',
'teardown': 'function compose(){}'
})
.add(otherName, {
'fn': '_composed(0)',
'teardown': 'function compose(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.countBy` with `callback` iterating an array')
.add(buildName, '\
lodash.countBy(numbers, function(num) { return num >> 1; })'
)
.add(otherName, '\
_.countBy(numbers, function(num) { return num >> 1; })'
)
);
suites.push(
Benchmark.Suite('`_.countBy` with `property` name iterating an array')
.add(buildName, {
'fn': 'lodash.countBy(words, "length")',
'teardown': 'function countBy(){}'
})
.add(otherName, {
'fn': '_.countBy(words, "length")',
'teardown': 'function countBy(){}'
})
);
suites.push(
Benchmark.Suite('`_.countBy` with `callback` iterating an object')
.add(buildName, {
'fn': 'lodash.countBy(wordToNumber, function(num) { return num >> 1; })',
'teardown': 'function countBy(){}'
})
.add(otherName, {
'fn': '_.countBy(wordToNumber, function(num) { return num >> 1; })',
'teardown': 'function countBy(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.defaults`')
.add(buildName, '\
lodash.defaults({ "key2": 2, "key6": 6, "key18": 18 }, object)'
)
.add(otherName, '\
_.defaults({ "key2": 2, "key6": 6, "key18": 18 }, object)'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.difference`')
.add(buildName, '\
lodash.difference(numbers, twoNumbers, fourNumbers)'
)
.add(otherName, '\
_.difference(numbers, twoNumbers, fourNumbers)'
)
);
suites.push(
Benchmark.Suite('`_.difference` iterating 200 elements')
.add(buildName, {
'fn': 'lodash.difference(twoHundredValues, twoHundredValues2)',
'teardown': 'function multiArrays(){}'
})
.add(otherName, {
'fn': '_.difference(twoHundredValues, twoHundredValues2)',
'teardown': 'function multiArrays(){}'
})
);
suites.push(
Benchmark.Suite('`_.difference` iterating 20 and 40 elements')
.add(buildName, {
'fn': 'lodash.difference(twentyValues, fortyValues)',
'teardown': 'function multiArrays(){}'
})
.add(otherName, {
'fn': '_.difference(twentyValues, fortyValues)',
'teardown': 'function multiArrays(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.each` iterating an array')
.add(buildName, '\
var result = [];\
lodash.each(numbers, function(num) {\
result.push(num * 2);\
})'
)
.add(otherName, '\
var result = [];\
_.each(numbers, function(num) {\
result.push(num * 2);\
})'
)
);
suites.push(
Benchmark.Suite('`_.each` iterating an array with `thisArg` (slow path)')
.add(buildName, '\
var result = [];\
lodash.each(numbers, function(num, index) {\
result.push(num + this["key" + index]);\
}, object)'
)
.add(otherName, '\
var result = [];\
_.each(numbers, function(num, index) {\
result.push(num + this["key" + index]);\
}, object)'
)
);
suites.push(
Benchmark.Suite('`_.each` iterating an object')
.add(buildName, '\
var result = [];\
lodash.each(object, function(num) {\
result.push(num * 2);\
})'
)
.add(otherName, '\
var result = [];\
_.each(object, function(num) {\
result.push(num * 2);\
})'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.every` iterating an array')
.add(buildName, '\
lodash.every(numbers, function(num) {\
return num < limit;\
})'
)
.add(otherName, '\
_.every(numbers, function(num) {\
return num < limit;\
})'
)
);
suites.push(
Benchmark.Suite('`_.every` iterating an object')
.add(buildName, '\
lodash.every(object, function(num) {\
return num < limit;\
})'
)
.add(otherName, '\
_.every(object, function(num) {\
return num < limit;\
})'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.extend`')
.add(buildName, '\
lodash.extend({}, object)'
)
.add(otherName, '\
_.extend({}, object)'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.filter` iterating an array')
.add(buildName, '\
lodash.filter(numbers, function(num) {\
return num % 2;\
})'
)
.add(otherName, '\
_.filter(numbers, function(num) {\
return num % 2;\
})'
)
);
suites.push(
Benchmark.Suite('`_.filter` iterating an array with `thisArg` (slow path)')
.add(buildName, '\
lodash.filter(numbers, function(num, index) {\
return this["key" + index] % 2;\
}, object)'
)
.add(otherName, '\
_.filter(numbers, function(num, index) {\
return this["key" + index] % 2;\
}, object)'
)
);
suites.push(
Benchmark.Suite('`_.filter` iterating an object')
.add(buildName, '\
lodash.filter(object, function(num) {\
return num % 2\
})'
)
.add(otherName, '\
_.filter(object, function(num) {\
return num % 2\
})'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.find` iterating an array')
.add(buildName, '\
lodash.find(numbers, function(num) {\
return num === (limit - 1);\
})'
)
.add(otherName, '\
_.find(numbers, function(num) {\
return num === (limit - 1);\
})'
)
);
suites.push(
Benchmark.Suite('`_.find` iterating an object')
.add(buildName, '\
lodash.find(object, function(value, key) {\
return /\\D9$/.test(key);\
})'
)
.add(otherName, '\
_.find(object, function(value, key) {\
return /\\D9$/.test(key);\
})'
)
);
// Avoid Underscore induced `OutOfMemoryError` in Rhino, Narwhal, and Ringo.
if (!isJava) {
suites.push(
Benchmark.Suite('`_.find` with `properties`')
.add(buildName, {
'fn': 'lodashFindWhere(objects, source)',
'teardown': 'function matches(){}'
})
.add(otherName, {
'fn': '_findWhere(objects, source)',
'teardown': 'function matches(){}'
})
);
}
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.flatten`')
.add(buildName, {
'fn': 'lodash.flatten(nestedNumbers, !lodashFlattenDeep)',
'teardown': 'function flatten(){}'
})
.add(otherName, {
'fn': '_.flatten(nestedNumbers, !_flattenDeep)',
'teardown': 'function flatten(){}'
})
);
suites.push(
Benchmark.Suite('`_.flatten` nested arrays of numbers with `isDeep`')
.add(buildName, {
'fn': 'lodash.flatten(nestedNumbers, lodashFlattenDeep)',
'teardown': 'function flatten(){}'
})
.add(otherName, {
'fn': '_.flatten(nestedNumbers, _flattenDeep)',
'teardown': 'function flatten(){}'
})
);
suites.push(
Benchmark.Suite('`_.flatten` nest arrays of objects with `isDeep`')
.add(buildName, {
'fn': 'lodash.flatten(nestedObjects, lodashFlattenDeep)',
'teardown': 'function flatten(){}'
})
.add(otherName, {
'fn': '_.flatten(nestedObjects, _flattenDeep)',
'teardown': 'function flatten(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.functions`')
.add(buildName, '\
lodash.functions(lodash)'
)
.add(otherName, '\
_.functions(lodash)'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.groupBy` with `callback` iterating an array')
.add(buildName, '\
lodash.groupBy(numbers, function(num) { return num >> 1; })'
)
.add(otherName, '\
_.groupBy(numbers, function(num) { return num >> 1; })'
)
);
suites.push(
Benchmark.Suite('`_.groupBy` with `property` name iterating an array')
.add(buildName, {
'fn': 'lodash.groupBy(words, "length")',
'teardown': 'function countBy(){}'
})
.add(otherName, {
'fn': '_.groupBy(words, "length")',
'teardown': 'function countBy(){}'
})
);
suites.push(
Benchmark.Suite('`_.groupBy` with `callback` iterating an object')
.add(buildName, {
'fn': 'lodash.groupBy(wordToNumber, function(num) { return num >> 1; })',
'teardown': 'function countBy(){}'
})
.add(otherName, {
'fn': '_.groupBy(wordToNumber, function(num) { return num >> 1; })',
'teardown': 'function countBy(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.include` iterating an array')
.add(buildName, '\
lodash.include(numbers, limit - 1)'
)
.add(otherName, '\
_.include(numbers, limit - 1)'
)
);
suites.push(
Benchmark.Suite('`_.include` iterating an object')
.add(buildName, '\
lodash.include(object, limit - 1)'
)
.add(otherName, '\
_.include(object, limit - 1)'
)
);
if (lodash.include('ab', 'ab') && _.include('ab', 'ab')) {
suites.push(
Benchmark.Suite('`_.include` iterating a string')
.add(buildName, '\
lodash.include(strNumbers, "," + (limit - 1))'
)
.add(otherName, '\
_.include(strNumbers, "," + (limit - 1))'
)
);
}
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.indexBy` with `callback` iterating an array')
.add(buildName, '\
lodash.indexBy(numbers, function(num) { return num >> 1; })'
)
.add(otherName, '\
_.indexBy(numbers, function(num) { return num >> 1; })'
)
);
suites.push(
Benchmark.Suite('`_.indexBy` with `property` name iterating an array')
.add(buildName, {
'fn': 'lodash.indexBy(words, "length")',
'teardown': 'function countBy(){}'
})
.add(otherName, {
'fn': '_.indexBy(words, "length")',
'teardown': 'function countBy(){}'
})
);
suites.push(
Benchmark.Suite('`_.indexBy` with `callback` iterating an object')
.add(buildName, {
'fn': 'lodash.indexBy(wordToNumber, function(num) { return num >> 1; })',
'teardown': 'function countBy(){}'
})
.add(otherName, {
'fn': '_.indexBy(wordToNumber, function(num) { return num >> 1; })',
'teardown': 'function countBy(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.indexOf`')
.add(buildName, {
'fn': 'lodash.indexOf(hundredSortedValues, 99)',
'teardown': 'function multiArrays(){}'
})
.add(otherName, {
'fn': '_.indexOf(hundredSortedValues, 99)',
'teardown': 'function multiArrays(){}'
})
);
suites.push(
Benchmark.Suite('`_.indexOf` performing a binary search')
.add(buildName, {
'fn': 'lodash.indexOf(hundredSortedValues, 99, true)',
'teardown': 'function multiArrays(){}'
})
.add(otherName, {
'fn': '_.indexOf(hundredSortedValues, 99, true)',
'teardown': 'function multiArrays(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.intersection`')
.add(buildName, '\
lodash.intersection(numbers, twoNumbers, fourNumbers)'
)
.add(otherName, '\
_.intersection(numbers, twoNumbers, fourNumbers)'
)
);
suites.push(
Benchmark.Suite('`_.intersection` iterating 120 elements')
.add(buildName, {
'fn': 'lodash.intersection(hundredTwentyValues, hundredTwentyValues2)',
'teardown': 'function multiArrays(){}'
})
.add(otherName, {
'fn': '_.intersection(hundredTwentyValues, hundredTwentyValues2)',
'teardown': 'function multiArrays(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.invert`')
.add(buildName, '\
lodash.invert(object)'
)
.add(otherName, '\
_.invert(object)'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.invoke` iterating an array')
.add(buildName, '\
lodash.invoke(numbers, "toFixed")'
)
.add(otherName, '\
_.invoke(numbers, "toFixed")'
)
);
suites.push(
Benchmark.Suite('`_.invoke` with arguments iterating an array')
.add(buildName, '\
lodash.invoke(numbers, "toFixed", 1)'
)
.add(otherName, '\
_.invoke(numbers, "toFixed", 1)'
)
);
suites.push(
Benchmark.Suite('`_.invoke` with a function for `methodName` iterating an array')
.add(buildName, '\
lodash.invoke(numbers, Number.prototype.toFixed, 1)'
)
.add(otherName, '\
_.invoke(numbers, Number.prototype.toFixed, 1)'
)
);
suites.push(
Benchmark.Suite('`_.invoke` iterating an object')
.add(buildName, '\
lodash.invoke(object, "toFixed", 1)'
)
.add(otherName, '\
_.invoke(object, "toFixed", 1)'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.isEqual` comparing primitives')
.add(buildName, {
'fn': '\
lodash.isEqual(1, "1");\
lodash.isEqual(1, 1)',
'teardown': 'function isEqual(){}'
})
.add(otherName, {
'fn': '\
_.isEqual(1, "1");\
_.isEqual(1, 1);',
'teardown': 'function isEqual(){}'
})
);
suites.push(
Benchmark.Suite('`_.isEqual` comparing primitives and their object counterparts (edge case)')
.add(buildName, {
'fn': '\
lodash.isEqual(objectOfPrimitives, objectOfObjects);\
lodash.isEqual(objectOfPrimitives, objectOfObjects2)',
'teardown': 'function isEqual(){}'
})
.add(otherName, {
'fn': '\
_.isEqual(objectOfPrimitives, objectOfObjects);\
_.isEqual(objectOfPrimitives, objectOfObjects2)',
'teardown': 'function isEqual(){}'
})
);
suites.push(
Benchmark.Suite('`_.isEqual` comparing arrays')
.add(buildName, {
'fn': '\
lodash.isEqual(numbers, numbers2);\
lodash.isEqual(numbers2, numbers3)',
'teardown': 'function isEqual(){}'
})
.add(otherName, {
'fn': '\
_.isEqual(numbers, numbers2);\
_.isEqual(numbers2, numbers3)',
'teardown': 'function isEqual(){}'
})
);
suites.push(
Benchmark.Suite('`_.isEqual` comparing nested arrays')
.add(buildName, {
'fn': '\
lodash.isEqual(nestedNumbers, nestedNumbers2);\
lodash.isEqual(nestedNumbers2, nestedNumbers3)',
'teardown': 'function isEqual(){}'
})
.add(otherName, {
'fn': '\
_.isEqual(nestedNumbers, nestedNumbers2);\
_.isEqual(nestedNumbers2, nestedNumbers3)',
'teardown': 'function isEqual(){}'
})
);
suites.push(
Benchmark.Suite('`_.isEqual` comparing arrays of objects')
.add(buildName, {
'fn': '\
lodash.isEqual(objects, objects2);\
lodash.isEqual(objects2, objects3)',
'teardown': 'function isEqual(){}'
})
.add(otherName, {
'fn': '\
_.isEqual(objects, objects2);\
_.isEqual(objects2, objects3)',
'teardown': 'function isEqual(){}'
})
);
suites.push(
Benchmark.Suite('`_.isEqual` comparing objects')
.add(buildName, {
'fn': '\
lodash.isEqual(object, object2);\
lodash.isEqual(object2, object3)',
'teardown': 'function isEqual(){}'
})
.add(otherName, {
'fn': '\
_.isEqual(object, object2);\
_.isEqual(object2, object3)',
'teardown': 'function isEqual(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.isArguments`, `_.isDate`, `_.isFunction`, `_.isNumber`, `_.isObject`, `_.isRegExp`')
.add(buildName, '\
lodash.isArguments(arguments);\
lodash.isArguments(object);\
lodash.isDate(date);\
lodash.isDate(object);\
lodash.isFunction(lodash);\
lodash.isFunction(object);\
lodash.isNumber(1);\
lodash.isNumber(object);\
lodash.isObject(object);\
lodash.isObject(1);\
lodash.isRegExp(regexp);\
lodash.isRegExp(object)'
)
.add(otherName, '\
_.isArguments(arguments);\
_.isArguments(object);\
_.isDate(date);\
_.isDate(object);\
_.isFunction(_);\
_.isFunction(object);\
_.isNumber(1);\
_.isNumber(object);\
_.isObject(object);\
_.isObject(1);\
_.isRegExp(regexp);\
_.isRegExp(object)'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.keys` (uses native `Object.keys` if available)')
.add(buildName, '\
lodash.keys(object)'
)
.add(otherName, '\
_.keys(object)'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.lastIndexOf`')
.add(buildName, {
'fn': 'lodash.lastIndexOf(hundredSortedValues, 0)',
'teardown': 'function multiArrays(){}'
})
.add(otherName, {
'fn': '_.lastIndexOf(hundredSortedValues, 0)',
'teardown': 'function multiArrays(){}'
})
);
suites.push(
Benchmark.Suite('`_.lastIndexOf` performing a binary search')
.add(buildName, {
'fn': 'lodash.lastIndexOf(hundredSortedValues, 0, true)',
'teardown': 'function multiArrays(){}'
})
.add(otherName, {
'fn': '_.lastIndexOf(hundredSortedValues, 0, true)',
'teardown': 'function multiArrays(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.map` iterating an array')
.add(buildName, '\
lodash.map(objects, function(value) {\
return value.num;\
})'
)
.add(otherName, '\
_.map(objects, function(value) {\
return value.num;\
})'
)
);
suites.push(
Benchmark.Suite('`_.map` with `thisArg` iterating an array (slow path)')
.add(buildName, '\
lodash.map(objects, function(value, index) {\
return this["key" + index] + value.num;\
}, object)'
)
.add(otherName, '\
_.map(objects, function(value, index) {\
return this["key" + index] + value.num;\
}, object)'
)
);
suites.push(
Benchmark.Suite('`_.map` iterating an object')
.add(buildName, '\
lodash.map(object, function(value) {\
return value;\
})'
)
.add(otherName, '\
_.map(object, function(value) {\
return value;\
})'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.matches` predicate')
.add(buildName, {
'fn': 'lodash.filter(objects, lodashMatch)',
'teardown': 'function matches(){}'
})
.add(otherName, {
'fn': '_.filter(objects, _match)',
'teardown': 'function matches(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.max`')
.add(buildName, '\
lodash.max(numbers)'
)
.add(otherName, '\
_.max(numbers)'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.min`')
.add(buildName, '\
lodash.min(numbers)'
)
.add(otherName, '\
_.min(numbers)'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.omit` iterating 20 properties, omitting 2 keys')
.add(buildName, '\
lodash.omit(object, "key6", "key13")'
)
.add(otherName, '\
_.omit(object, "key6", "key13")'
)
);
suites.push(
Benchmark.Suite('`_.omit` iterating 40 properties, omitting 20 keys')
.add(buildName, {
'fn': 'lodash.omit(wordToNumber, words)',
'teardown': 'function omit(){}'
})
.add(otherName, {
'fn': '_.omit(wordToNumber, words)',
'teardown': 'function omit(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.pairs`')
.add(buildName, '\
lodash.pairs(object)'
)
.add(otherName, '\
_.pairs(object)'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.partial` (slow path)')
.add(buildName, {
'fn': 'lodash.partial(function(greeting) { return greeting + " " + this.name; }, "hi")',
'teardown': 'function partial(){}'
})
.add(otherName, {
'fn': '_.partial(function(greeting) { return greeting + " " + this.name; }, "hi")',
'teardown': 'function partial(){}'
})
);
suites.push(
Benchmark.Suite('partially applied call with arguments')
.add(buildName, {
'fn': 'lodashPartial("!")',
'teardown': 'function partial(){}'
})
.add(otherName, {
'fn': '_partial("!")',
'teardown': 'function partial(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.partition` iterating an array')
.add(buildName, '\
lodash.partition(numbers, function(num) {\
return num % 2;\
})'
)
.add(otherName, '\
_.partition(numbers, function(num) {\
return num % 2;\
})'
)
);
suites.push(
Benchmark.Suite('`_.partition` iterating an array with `thisArg` (slow path)')
.add(buildName, '\
lodash.partition(numbers, function(num, index) {\
return this["key" + index] % 2;\
}, object)'
)
.add(otherName, '\
_.partition(numbers, function(num, index) {\
return this["key" + index] % 2;\
}, object)'
)
);
suites.push(
Benchmark.Suite('`_.partition` iterating an object')
.add(buildName, '\
lodash.partition(object, function(num) {\
return num % 2;\
})'
)
.add(otherName, '\
_.partition(object, function(num) {\
return num % 2;\
})'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.pick`')
.add(buildName, '\
lodash.pick(object, "key6", "key13")'
)
.add(otherName, '\
_.pick(object, "key6", "key13")'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.pluck`')
.add(buildName, '\
lodash.pluck(objects, "num")'
)
.add(otherName, '\
_.pluck(objects, "num")'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.reduce` iterating an array')
.add(buildName, '\
lodash.reduce(numbers, function(result, value, index) {\
result[index] = value;\
return result;\
}, {})'
)
.add(otherName, '\
_.reduce(numbers, function(result, value, index) {\
result[index] = value;\
return result;\
}, {})'
)
);
suites.push(
Benchmark.Suite('`_.reduce` iterating an object')
.add(buildName, '\
lodash.reduce(object, function(result, value, key) {\
result.push(key, value);\
return result;\
}, [])'
)
.add(otherName, '\
_.reduce(object, function(result, value, key) {\
result.push(key, value);\
return result;\
}, [])'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.reduceRight` iterating an array')
.add(buildName, '\
lodash.reduceRight(numbers, function(result, value, index) {\
result[index] = value;\
return result;\
}, {})'
)
.add(otherName, '\
_.reduceRight(numbers, function(result, value, index) {\
result[index] = value;\
return result;\
}, {})'
)
);
suites.push(
Benchmark.Suite('`_.reduceRight` iterating an object')
.add(buildName, '\
lodash.reduceRight(object, function(result, value, key) {\
result.push(key, value);\
return result;\
}, [])'
)
.add(otherName, '\
_.reduceRight(object, function(result, value, key) {\
result.push(key, value);\
return result;\
}, [])'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.reject` iterating an array')
.add(buildName, '\
lodash.reject(numbers, function(num) {\
return num % 2;\
})'
)
.add(otherName, '\
_.reject(numbers, function(num) {\
return num % 2;\
})'
)
);
suites.push(
Benchmark.Suite('`_.reject` iterating an array with `thisArg` (slow path)')
.add(buildName, '\
lodash.reject(numbers, function(num, index) {\
return this["key" + index] % 2;\
}, object)'
)
.add(otherName, '\
_.reject(numbers, function(num, index) {\
return this["key" + index] % 2;\
}, object)'
)
);
suites.push(
Benchmark.Suite('`_.reject` iterating an object')
.add(buildName, '\
lodash.reject(object, function(num) {\
return num % 2;\
})'
)
.add(otherName, '\
_.reject(object, function(num) {\
return num % 2;\
})'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.sample` with an `n`')
.add(buildName, '\
lodash.sample(numbers, limit / 2)'
)
.add(otherName, '\
_.sample(numbers, limit / 2)'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.shuffle`')
.add(buildName, '\
lodash.shuffle(numbers)'
)
.add(otherName, '\
_.shuffle(numbers)'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.size` with an object')
.add(buildName, '\
lodash.size(object)'
)
.add(otherName, '\
_.size(object)'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.some` iterating an array')
.add(buildName, '\
lodash.some(numbers, function(num) {\
return num == (limit - 1);\
})'
)
.add(otherName, '\
_.some(numbers, function(num) {\
return num == (limit - 1);\
})'
)
);
suites.push(
Benchmark.Suite('`_.some` with `thisArg` iterating an array (slow path)')
.add(buildName, '\
lodash.some(objects, function(value, index) {\
return this["key" + index] == (limit - 1);\
}, object)'
)
.add(otherName, '\
_.some(objects, function(value, index) {\
return this["key" + index] == (limit - 1);\
}, object)'
)
);
suites.push(
Benchmark.Suite('`_.some` iterating an object')
.add(buildName, '\
lodash.some(object, function(num) {\
return num == (limit - 1);\
})'
)
.add(otherName, '\
_.some(object, function(num) {\
return num == (limit - 1);\
})'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.sortBy` with `callback`')
.add(buildName, '\
lodash.sortBy(numbers, function(num) { return Math.sin(num); })'
)
.add(otherName, '\
_.sortBy(numbers, function(num) { return Math.sin(num); })'
)
);
suites.push(
Benchmark.Suite('`_.sortBy` with `callback` and `thisArg` (slow path)')
.add(buildName, '\
lodash.sortBy(numbers, function(num) { return this.sin(num); }, Math)'
)
.add(otherName, '\
_.sortBy(numbers, function(num) { return this.sin(num); }, Math)'
)
);
suites.push(
Benchmark.Suite('`_.sortBy` with `property` name')
.add(buildName, {
'fn': 'lodash.sortBy(words, "length")',
'teardown': 'function countBy(){}'
})
.add(otherName, {
'fn': '_.sortBy(words, "length")',
'teardown': 'function countBy(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.sortedIndex` with `callback`')
.add(buildName, {
'fn': '\
lodash.sortedIndex(words, "twenty-five", function(value) {\
return wordToNumber[value];\
})',
'teardown': 'function countBy(){}'
})
.add(otherName, {
'fn': '\
_.sortedIndex(words, "twenty-five", function(value) {\
return wordToNumber[value];\
})',
'teardown': 'function countBy(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.template` (slow path)')
.add(buildName, {
'fn': 'lodash.template(tpl)(tplData)',
'teardown': 'function template(){}'
})
.add(otherName, {
'fn': '_.template(tpl)(tplData)',
'teardown': 'function template(){}'
})
);
suites.push(
Benchmark.Suite('compiled template')
.add(buildName, {
'fn': 'lodashTpl(tplData)',
'teardown': 'function template(){}'
})
.add(otherName, {
'fn': '_tpl(tplData)',
'teardown': 'function template(){}'
})
);
suites.push(
Benchmark.Suite('compiled template without a with-statement')
.add(buildName, {
'fn': 'lodashTplVerbose(tplData)',
'teardown': 'function template(){}'
})
.add(otherName, {
'fn': '_tplVerbose(tplData)',
'teardown': 'function template(){}'
})
);
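  // The three template suites above separate one-time compile-and-render cost
  // (the "slow path") from the render-only cost of a precompiled function,
  // with and without the `variable` option, which omits the `with` statement
  // from the generated source in both libraries.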
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.times`')
.add(buildName, '\
var result = [];\
lodash.times(limit, function(n) { result.push(n); })'
)
.add(otherName, '\
var result = [];\
_.times(limit, function(n) { result.push(n); })'
)
);
suites.push(
Benchmark.Suite('`_.times` with `thisArg` (slow path)')
.add(buildName, '\
var result = [];\
lodash.times(limit, function(n) { result.push(this.sin(n)); }, Math)'
)
.add(otherName, '\
var result = [];\
_.times(limit, function(n) { result.push(this.sin(n)); }, Math)'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.toArray` with an array (edge case)')
.add(buildName, '\
lodash.toArray(numbers)'
)
.add(otherName, '\
_.toArray(numbers)'
)
);
suites.push(
Benchmark.Suite('`_.toArray` with an object')
.add(buildName, '\
lodash.toArray(object)'
)
.add(otherName, '\
_.toArray(object)'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.unescape` string without html entities')
.add(buildName, '\
lodash.unescape("`&`, `<`, `>`, `\\"`, and `\'`")'
)
.add(otherName, '\
_.unescape("`&`, `<`, `>`, `\\"`, and `\'`")'
)
);
suites.push(
Benchmark.Suite('`_.unescape` string with html entities')
.add(buildName, '\
lodash.unescape("`&amp;`, `&lt;`, `&gt;`, `&quot;`, and `&#39;`")'
)
.add(otherName, '\
_.unescape("`&amp;`, `&lt;`, `&gt;`, `&quot;`, and `&#39;`")'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.union`')
.add(buildName, '\
lodash.union(numbers, twoNumbers, fourNumbers)'
)
.add(otherName, '\
_.union(numbers, twoNumbers, fourNumbers)'
)
);
suites.push(
Benchmark.Suite('`_.union` iterating an array of 200 elements')
.add(buildName, {
'fn': 'lodash.union(hundredValues, hundredValues2)',
'teardown': 'function multiArrays(){}'
})
.add(otherName, {
'fn': '_.union(hundredValues, hundredValues2)',
'teardown': 'function multiArrays(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.uniq`')
.add(buildName, '\
lodash.uniq(numbers.concat(twoNumbers, fourNumbers))'
)
.add(otherName, '\
_.uniq(numbers.concat(twoNumbers, fourNumbers))'
)
);
suites.push(
Benchmark.Suite('`_.uniq` with `callback`')
.add(buildName, '\
lodash.uniq(numbers.concat(twoNumbers, fourNumbers), function(num) {\
return num % 2;\
})'
)
.add(otherName, '\
_.uniq(numbers.concat(twoNumbers, fourNumbers), function(num) {\
return num % 2;\
})'
)
);
suites.push(
Benchmark.Suite('`_.uniq` iterating an array of 200 elements')
.add(buildName, {
'fn': 'lodash.uniq(twoHundredValues)',
'teardown': 'function multiArrays(){}'
})
.add(otherName, {
'fn': '_.uniq(twoHundredValues)',
'teardown': 'function multiArrays(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.values`')
.add(buildName, '\
lodash.values(object)'
)
.add(otherName, '\
_.values(object)'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.where`')
.add(buildName, {
'fn': 'lodash.where(objects, source)',
'teardown': 'function matches(){}'
})
.add(otherName, {
'fn': '_.where(objects, source)',
'teardown': 'function matches(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.without`')
.add(buildName, '\
lodash.without(numbers, 9, 12, 14, 15)'
)
.add(otherName, '\
_.without(numbers, 9, 12, 14, 15)'
)
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.wrap` result called')
.add(buildName, {
'fn': 'lodashWrapped(2, 5)',
'teardown': 'function wrap(){}'
})
.add(otherName, {
'fn': '_wrapped(2, 5)',
'teardown': 'function wrap(){}'
})
);
/*--------------------------------------------------------------------------*/
suites.push(
Benchmark.Suite('`_.zip`')
.add(buildName, {
'fn': 'lodash.zip.apply(lodash, unzipped)',
'teardown': 'function zip(){}'
})
.add(otherName, {
'fn': '_.zip.apply(_, unzipped)',
'teardown': 'function zip(){}'
})
);
/*--------------------------------------------------------------------------*/
if (Benchmark.platform + '') {
log(Benchmark.platform);
}
// Expose `run` to be called later when executing in a browser.
if (document) {
root.run = run;
} else {
run();
}
}.call(this));
// file: test_util.rs
extern crate mazth;
#[allow(unused_imports)]
use std::ops::Div;

#[allow(unused_imports)]
use std::cmp::Ordering;

use self::mazth::i_comparable::IComparableError;
use self::mazth::mat::{Mat3x1, Mat4};
use implement::math::util;
#[test]
fn test_math_util(){
//look_at
{
let eye : Mat3x1<f32> = Mat3x1 { _val: [5.0,5.0,5.0] };
let center : Mat3x1<f32> = Mat3x1 { _val: [0.0,0.0,0.0] };
let up : Mat3x1<f32> = Mat3x1 { _val: [0.0,1.0,0.0] };
let lookat = util::look_at( eye, center, up );
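        // Derivation note (not from the source): the third row of the expected
        // matrix is the normalized view direction, 1/sqrt(3) ~= 0.57735 per
        // component, and its translation entry is -|eye| = -5 * sqrt(3) ~= -8.66025.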
assert!( lookat.is_equal( &Mat4{ _val: [ 0.70711, 0.0, -0.70711, 0.0,
-0.40825, 0.81650, -0.40825, 0.0,
0.57735, 0.57735, 0.57735, -8.66025,
0.0, 0.0, 0.0, 1.0 ], _is_row_major: true }, 0.0001f32 ).expect("look_at result unexpected") );
}
//perspective transform
{
let fov = 90.0;
let aspect = 1.0;
let near = 0.1;
let far = 100.0;
let persp = util::perspective( fov, aspect, near, far );
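        // Expected entries (derived, not from the source): with fov = 90 degrees
        // and aspect = 1 the focal scale is 1/tan(45 deg) = 1.0, while the depth
        // terms are (far + near)/(near - far) ~= -1.002 and
        // 2*far*near/(near - far) ~= -0.2002, matching -1.0 and -0.2 within the
        // 0.01 tolerance below.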
println!( "{:?}", persp );
assert!( persp.is_equal( &Mat4{ _val: [ 1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, -1.0, -0.2,
0.0, 0.0, -1.0, 0.0 ], _is_row_major: true }, 0.01f32 ).expect("perspective result unexpected") );
}
}
use std::cmp::Ordering;
|
// file: keen-tracker.js
(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
/*jslint evil: true, regexp: true */
/*members $ref, apply, call, decycle, hasOwnProperty, length, prototype, push,
retrocycle, stringify, test, toString
*/
(function (exports) {
if (typeof exports.decycle !== 'function') {
exports.decycle = function decycle(object) {
'use strict';
var objects = [],
paths = [];
return (function derez(value, path) {
var i,
name,
nu;
switch (typeof value) {
case 'object':
if (!value) {
return null;
}
for (i = 0; i < objects.length; i += 1) {
if (objects[i] === value) {
return {$ref: paths[i]};
}
}
objects.push(value);
paths.push(path);
if (Object.prototype.toString.apply(value) === '[object Array]') {
nu = [];
for (i = 0; i < value.length; i += 1) {
nu[i] = derez(value[i], path + '[' + i + ']');
}
} else {
nu = {};
for (name in value) {
if (Object.prototype.hasOwnProperty.call(value, name)) {
nu[name] = derez(value[name],
path + '[' + JSON.stringify(name) + ']');
}
}
}
return nu;
case 'number':
case 'string':
case 'boolean':
return value;
}
}(object, '$'));
};
}
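    // Usage sketch (assumed, mirroring the cycle.js API): given
    //   var a = []; a.push(a);
    // decycle(a) returns [{"$ref": "$"}]; the self-reference is replaced by a
    // JSONPath-style pointer that retrocycle() below resolves back into a cycle.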
if (typeof exports.retrocycle !== 'function') {
exports.retrocycle = function retrocycle($) {
'use strict';
var px =
/^\$(?:\[(?:\d+|\"(?:[^\\\"\u0000-\u001f]|\\([\\\"\/bfnrt]|u[0-9a-zA-Z]{4}))*\")\])*$/;
(function rez(value) {
var i, item, name, path;
if (value && typeof value === 'object') {
if (Object.prototype.toString.apply(value) === '[object Array]') {
for (i = 0; i < value.length; i += 1) {
item = value[i];
if (item && typeof item === 'object') {
path = item.$ref;
if (typeof path === 'string' && px.test(path)) {
value[i] = eval(path);
} else {
rez(item);
}
}
}
} else {
for (name in value) {
if (typeof value[name] === 'object') {
item = value[name];
if (item) {
path = item.$ref;
if (typeof path === 'string' && px.test(path)) {
value[name] = eval(path);
} else {
rez(item);
}
}
}
}
}
}
}($));
return $;
};
}
}) (
(typeof exports !== 'undefined') ?
exports :
(window.JSON ?
(window.JSON) :
(window.JSON = {})
)
);
},{}],2:[function(require,module,exports){
var JSON2 = require('./json2');
var cycle = require('./cycle');
JSON2.decycle = cycle.decycle;
JSON2.retrocycle = cycle.retrocycle;
module.exports = JSON2;
},{"./cycle":1,"./json2":3}],3:[function(require,module,exports){
/*
json2.js
2011-10-19
Public Domain.
NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
See http://www.JSON.org/js.html
This code should be minified before deployment.
See http://javascript.crockford.com/jsmin.html
USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO
NOT CONTROL.
This file creates a global JSON object containing two methods: stringify
and parse.
JSON.stringify(value, replacer, space)
value any JavaScript value, usually an object or array.
replacer an optional parameter that determines how object
values are stringified for objects. It can be a
function or an array of strings.
space an optional parameter that specifies the indentation
of nested structures. If it is omitted, the text will
be packed without extra whitespace. If it is a number,
it will specify the number of spaces to indent at each
level. If it is a string (such as '\t' or ' '),
it contains the characters used to indent at each level.
This method produces a JSON text from a JavaScript value.
When an object value is found, if the object contains a toJSON
method, its toJSON method will be called and the result will be
stringified. A toJSON method does not serialize: it returns the
value represented by the name/value pair that should be serialized,
or undefined if nothing should be serialized. The toJSON method
will be passed the key associated with the value, and this will be
bound to the value
For example, this would serialize Dates as ISO strings.
Date.prototype.toJSON = function (key) {
function f(n) {
return n < 10 ? '0' + n : n;
}
return this.getUTCFullYear() + '-' +
f(this.getUTCMonth() + 1) + '-' +
f(this.getUTCDate()) + 'T' +
f(this.getUTCHours()) + ':' +
f(this.getUTCMinutes()) + ':' +
f(this.getUTCSeconds()) + 'Z';
};
You can provide an optional replacer method. It will be passed the
key and value of each member, with this bound to the containing
object. The value that is returned from your method will be
serialized. If your method returns undefined, then the member will
be excluded from the serialization.
If the replacer parameter is an array of strings, then it will be
used to select the members to be serialized. It filters the results
such that only members with keys listed in the replacer array are
stringified.
Values that do not have JSON representations, such as undefined or
functions, will not be serialized. Such values in objects will be
dropped; in arrays they will be replaced with null. You can use
a replacer function to replace those with JSON values.
JSON.stringify(undefined) returns undefined.
The optional space parameter produces a stringification of the
value that is filled with line breaks and indentation to make it
easier to read.
If the space parameter is a non-empty string, then that string will
be used for indentation. If the space parameter is a number, then
the indentation will be that many spaces.
Example:
text = JSON.stringify(['e', {pluribus: 'unum'}]);
text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t');
text = JSON.stringify([new Date()], function (key, value) {
return this[key] instanceof Date ?
'Date(' + this[key] + ')' : value;
});
JSON.parse(text, reviver)
This method parses a JSON text to produce an object or array.
It can throw a SyntaxError exception.
The optional reviver parameter is a function that can filter and
transform the results. It receives each of the keys and values,
and its return value is used instead of the original value.
If it returns what it received, then the structure is not modified.
If it returns undefined then the member is deleted.
Example:
myData = JSON.parse(text, function (key, value) {
var a;
if (typeof value === 'string') {
a =
/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value);
if (a) {
return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4],
+a[5], +a[6]));
}
}
return value;
});
myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) {
var d;
if (typeof value === 'string' &&
value.slice(0, 5) === 'Date(' &&
value.slice(-1) === ')') {
d = new Date(value.slice(5, -1));
if (d) {
return d;
}
}
return value;
});
This is a reference implementation. You are free to copy, modify, or
redistribute.
*/
/*jslint evil: true, regexp: true */
/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply,
call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours,
getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join,
lastIndex, length, parse, prototype, push, replace, slice, stringify,
test, toJSON, toString, valueOf
*/
(function (JSON) {
'use strict';
function f(n) {
return n < 10 ? '0' + n : n;
}
/* DDOPSON-2012-04-16 - mutating global prototypes is NOT allowed for a well-behaved module.
* It's also unneeded, since Date already defines toJSON() to the same ISOwhatever format below
* Thus, we skip this logic for the CommonJS case where 'exports' is defined
*/
if (typeof exports === 'undefined') {
if (typeof Date.prototype.toJSON !== 'function') {
Date.prototype.toJSON = function (key) {
return isFinite(this.valueOf())
? this.getUTCFullYear() + '-' +
f(this.getUTCMonth() + 1) + '-' +
f(this.getUTCDate()) + 'T' +
f(this.getUTCHours()) + ':' +
f(this.getUTCMinutes()) + ':' +
f(this.getUTCSeconds()) + 'Z'
: null;
};
}
if (typeof String.prototype.toJSON !== 'function') {
String.prototype.toJSON = function (key) { return this.valueOf(); };
}
if (typeof Number.prototype.toJSON !== 'function') {
Number.prototype.toJSON = function (key) { return this.valueOf(); };
}
if (typeof Boolean.prototype.toJSON !== 'function') {
Boolean.prototype.toJSON = function (key) { return this.valueOf(); };
}
}
var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
gap,
indent,
meta = {
'\b': '\\b',
'\t': '\\t',
'\n': '\\n',
'\f': '\\f',
'\r': '\\r',
'"' : '\\"',
'\\': '\\\\'
},
rep;
function quote(string) {
escapable.lastIndex = 0;
return escapable.test(string) ? '"' + string.replace(escapable, function (a) {
var c = meta[a];
return typeof c === 'string'
? c
: '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4);
}) + '"' : '"' + string + '"';
}
function str(key, holder) {
var i,
k,
v,
length,
mind = gap,
partial,
value = holder[key];
if (value && typeof value === 'object' &&
typeof value.toJSON === 'function') {
value = value.toJSON(key);
}
if (typeof rep === 'function') {
value = rep.call(holder, key, value);
}
switch (typeof value) {
case 'string':
return quote(value);
case 'number':
return isFinite(value) ? String(value) : 'null';
case 'boolean':
case 'null':
return String(value);
case 'object':
if (!value) {
return 'null';
}
gap += indent;
partial = [];
if (Object.prototype.toString.apply(value) === '[object Array]') {
length = value.length;
for (i = 0; i < length; i += 1) {
partial[i] = str(i, value) || 'null';
}
v = partial.length === 0
? '[]'
: gap
? '[\n' + gap + partial.join(',\n' + gap) + '\n' + mind + ']'
: '[' + partial.join(',') + ']';
gap = mind;
return v;
}
if (rep && typeof rep === 'object') {
length = rep.length;
for (i = 0; i < length; i += 1) {
if (typeof rep[i] === 'string') {
k = rep[i];
v = str(k, value);
if (v) {
partial.push(quote(k) + (gap ? ': ' : ':') + v);
}
}
}
} else {
for (k in value) {
if (Object.prototype.hasOwnProperty.call(value, k)) {
v = str(k, value);
if (v) {
partial.push(quote(k) + (gap ? ': ' : ':') + v);
}
}
}
}
v = partial.length === 0
? '{}'
: gap
? '{\n' + gap + partial.join(',\n' + gap) + '\n' + mind + '}'
: '{' + partial.join(',') + '}';
gap = mind;
return v;
}
}
if (typeof JSON.stringify !== 'function') {
JSON.stringify = function (value, replacer, space) {
var i;
gap = '';
indent = '';
if (typeof space === 'number') {
for (i = 0; i < space; i += 1) {
indent += ' ';
}
} else if (typeof space === 'string') {
indent = space;
}
rep = replacer;
if (replacer && typeof replacer !== 'function' &&
(typeof replacer !== 'object' ||
typeof replacer.length !== 'number')) {
throw new Error('JSON.stringify');
}
return str('', {'': value});
};
}
if (typeof JSON.parse !== 'function') {
JSON.parse = function (text, reviver) {
var j;
function walk(holder, key) {
var k, v, value = holder[key];
if (value && typeof value === 'object') {
for (k in value) {
if (Object.prototype.hasOwnProperty.call(value, k)) {
v = walk(value, k);
if (v !== undefined) {
value[k] = v;
} else {
delete value[k];
}
}
}
}
return reviver.call(holder, key, value);
}
text = String(text);
cx.lastIndex = 0;
if (cx.test(text)) {
text = text.replace(cx, function (a) {
return '\\u' +
('0000' + a.charCodeAt(0).toString(16)).slice(-4);
});
}
if (/^[\],:{}\s]*$/
.test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, '@')
.replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']')
.replace(/(?:^|:|,)(?:\s*\[)+/g, ''))) {
j = eval('(' + text + ')');
return typeof reviver === 'function'
? walk({'': j}, '')
: j;
}
throw new SyntaxError('JSON.parse');
};
}
})(
(typeof exports !== 'undefined') ?
exports :
(window.JSON ?
(window.JSON) :
(window.JSON = {})
)
);
},{}],4:[function(require,module,exports){
/**
* Expose `Emitter`.
*/
module.exports = Emitter;
/**
* Initialize a new `Emitter`.
*
* @api public
*/
function Emitter(obj) {
if (obj) return mixin(obj);
};
/**
* Mixin the emitter properties.
*
* @param {Object} obj
* @return {Object}
* @api private
*/
function mixin(obj) {
for (var key in Emitter.prototype) {
obj[key] = Emitter.prototype[key];
}
return obj;
}
/**
* Listen on the given `event` with `fn`.
*
* @param {String} event
* @param {Function} fn
* @return {Emitter}
* @api public
*/
Emitter.prototype.on =
Emitter.prototype.addEventListener = function(event, fn){
this._callbacks = this._callbacks || {};
(this._callbacks[event] = this._callbacks[event] || [])
.push(fn);
return this;
};
/**
* Adds an `event` listener that will be invoked a single
* time then automatically removed.
*
* @param {String} event
* @param {Function} fn
* @return {Emitter}
* @api public
*/
Emitter.prototype.once = function(event, fn){
var self = this;
this._callbacks = this._callbacks || {};
function on() {
self.off(event, on);
fn.apply(this, arguments);
}
on.fn = fn;
this.on(event, on);
return this;
};
/**
* Remove the given callback for `event` or all
* registered callbacks.
*
* @param {String} event
* @param {Function} fn
* @return {Emitter}
* @api public
*/
Emitter.prototype.off =
Emitter.prototype.removeListener =
Emitter.prototype.removeAllListeners =
Emitter.prototype.removeEventListener = function(event, fn){
this._callbacks = this._callbacks || {};
if (0 == arguments.length) {
this._callbacks = {};
return this;
}
var callbacks = this._callbacks[event];
if (!callbacks) return this;
if (1 == arguments.length) {
delete this._callbacks[event];
return this;
}
var cb;
for (var i = 0; i < callbacks.length; i++) {
cb = callbacks[i];
if (cb === fn || cb.fn === fn) {
callbacks.splice(i, 1);
break;
}
}
return this;
};
/**
* Emit `event` with the given args.
*
* @param {String} event
* @param {Mixed} ...
* @return {Emitter}
*/
Emitter.prototype.emit = function(event){
this._callbacks = this._callbacks || {};
var args = [].slice.call(arguments, 1)
, callbacks = this._callbacks[event];
if (callbacks) {
callbacks = callbacks.slice(0);
for (var i = 0, len = callbacks.length; i < len; ++i) {
callbacks[i].apply(this, args);
}
}
return this;
};
/**
* Return array of callbacks for `event`.
*
* @param {String} event
* @return {Array}
* @api public
*/
Emitter.prototype.listeners = function(event){
this._callbacks = this._callbacks || {};
return this._callbacks[event] || [];
};
/**
* Check if this emitter has `event` handlers.
*
* @param {String} event
* @return {Boolean}
* @api public
*/
Emitter.prototype.hasListeners = function(event){
return !! this.listeners(event).length;
};
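// Usage sketch (assumed): mix the emitter into a plain object and wire events.
//   var user = Emitter({ name: 'tobi' });
//   user.on('saved', function(){ console.log(this.name + ' saved'); });
//   user.emit('saved'); // logs "tobi saved"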
},{}],5:[function(require,module,exports){
/*!
* domready (c) Dustin Diaz 2012 - License MIT
*/
!function (name, definition) {
if (typeof module != 'undefined') module.exports = definition()
else if (typeof define == 'function' && typeof define.amd == 'object') define(definition)
else this[name] = definition()
}('domready', function (ready) {
var fns = [], fn, f = false
, doc = document
, testEl = doc.documentElement
, hack = testEl.doScroll
, domContentLoaded = 'DOMContentLoaded'
, addEventListener = 'addEventListener'
, onreadystatechange = 'onreadystatechange'
, readyState = 'readyState'
, loadedRgx = hack ? /^loaded|^c/ : /^loaded|c/
, loaded = loadedRgx.test(doc[readyState])
function flush(f) {
loaded = 1
while (f = fns.shift()) f()
}
doc[addEventListener] && doc[addEventListener](domContentLoaded, fn = function () {
doc.removeEventListener(domContentLoaded, fn, f)
flush()
}, f)
hack && doc.attachEvent(onreadystatechange, fn = function () {
if (/^c/.test(doc[readyState])) {
doc.detachEvent(onreadystatechange, fn)
flush()
}
})
return (ready = hack ?
function (fn) {
self != top ?
loaded ? fn() : fns.push(fn) :
function () {
try {
testEl.doScroll('left')
} catch (e) {
return setTimeout(function() { ready(fn) }, 50)
}
fn()
}()
} :
function (fn) {
loaded ? fn() : fns.push(fn)
})
})
},{}],6:[function(require,module,exports){
/**
* Module dependencies.
*/
var Emitter = require('emitter');
var reduce = require('reduce');
/**
* Root reference for iframes.
*/
var root = 'undefined' == typeof window
? this
: window;
/**
* Noop.
*/
function noop(){};
/**
* Check if `obj` is a host object,
* we don't want to serialize these :)
*
* TODO: future proof, move to component land
*
* @param {Object} obj
* @return {Boolean}
* @api private
*/
function isHost(obj) {
var str = {}.toString.call(obj);
switch (str) {
case '[object File]':
case '[object Blob]':
case '[object FormData]':
return true;
default:
return false;
}
}
/**
* Determine XHR.
*/
function getXHR() {
if (root.XMLHttpRequest
&& ('file:' != root.location.protocol || !root.ActiveXObject)) {
return new XMLHttpRequest;
} else {
try { return new ActiveXObject('Microsoft.XMLHTTP'); } catch(e) {}
try { return new ActiveXObject('Msxml2.XMLHTTP.6.0'); } catch(e) {}
try { return new ActiveXObject('Msxml2.XMLHTTP.3.0'); } catch(e) {}
try { return new ActiveXObject('Msxml2.XMLHTTP'); } catch(e) {}
}
return false;
}
/**
* Removes leading and trailing whitespace, added to support IE.
*
* @param {String} s
* @return {String}
* @api private
*/
var trim = ''.trim
? function(s) { return s.trim(); }
: function(s) { return s.replace(/(^\s*|\s*$)/g, ''); };
/**
* Check if `obj` is an object.
*
* @param {Object} obj
* @return {Boolean}
* @api private
*/
function isObject(obj) {
return obj === Object(obj);
}
/**
* Serialize the given `obj`.
*
* @param {Object} obj
* @return {String}
* @api private
*/
function serialize(obj) {
if (!isObject(obj)) return obj;
var pairs = [];
for (var key in obj) {
if (null != obj[key]) {
pairs.push(encodeURIComponent(key)
+ '=' + encodeURIComponent(obj[key]));
}
}
return pairs.join('&');
}
/**
* Expose serialization method.
*/
request.serializeObject = serialize;
/**
* Parse the given x-www-form-urlencoded `str`.
*
* @param {String} str
* @return {Object}
* @api private
*/
function parseString(str) {
var obj = {};
var pairs = str.split('&');
var parts;
var pair;
for (var i = 0, len = pairs.length; i < len; ++i) {
pair = pairs[i];
parts = pair.split('=');
obj[decodeURIComponent(parts[0])] = decodeURIComponent(parts[1]);
}
return obj;
}
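// Illustrative round trip (not part of the library): serialize and
// parseString are inverses for flat, string-valued objects:
//
//   serialize({ name: 'tobi', species: 'ferret' })
//   // => "name=tobi&species=ferret"
//   parseString('name=tobi&species=ferret')
//   // => { name: 'tobi', species: 'ferret' }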
/**
* Expose parser.
*/
request.parseString = parseString;
/**
* Default MIME type map.
*
* superagent.types.xml = 'application/xml';
*
*/
request.types = {
html: 'text/html',
json: 'application/json',
xml: 'application/xml',
urlencoded: 'application/x-www-form-urlencoded',
'form': 'application/x-www-form-urlencoded',
'form-data': 'application/x-www-form-urlencoded'
};
/**
* Default serialization map.
*
* superagent.serialize['application/xml'] = function(obj){
* return 'generated xml here';
* };
*
*/
request.serialize = {
'application/x-www-form-urlencoded': serialize,
'application/json': JSON.stringify
};
/**
* Default parsers.
*
* superagent.parse['application/xml'] = function(str){
* return { object parsed from str };
* };
*
*/
request.parse = {
'application/x-www-form-urlencoded': parseString,
'application/json': JSON.parse
};
/**
* Parse the given header `str` into
* an object containing the mapped fields.
*
* @param {String} str
* @return {Object}
* @api private
*/
function parseHeader(str) {
var lines = str.split(/\r?\n/);
var fields = {};
var index;
var line;
var field;
var val;
lines.pop();
for (var i = 0, len = lines.length; i < len; ++i) {
line = lines[i];
index = line.indexOf(':');
field = line.slice(0, index).toLowerCase();
val = trim(line.slice(index + 1));
fields[field] = val;
}
return fields;
}
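// Example (illustrative): for the raw block
// "Content-Type: text/html; charset=utf-8\r\nContent-Length: 42\r\n"
// parseHeader returns
// { 'content-type': 'text/html; charset=utf-8', 'content-length': '42' }
// -- field names are lower-cased, values trimmed, and the trailing blank
// line produced by the final CRLF is dropped by the pop() above.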
/**
* Return the mime type for the given `str`.
*
* @param {String} str
* @return {String}
* @api private
*/
function type(str){
return str.split(/ *; */).shift();
};
/**
* Return header field parameters.
*
* @param {String} str
* @return {Object}
* @api private
*/
function params(str){
return reduce(str.split(/ *; */), function(obj, str){
var parts = str.split(/ *= */)
, key = parts.shift()
, val = parts.shift();
if (key && val) obj[key] = val;
return obj;
}, {});
};
/**
* Initialize a new `Response` with the given `xhr`.
*
* - set flags (.ok, .error, etc)
* - parse header
*
* Examples:
*
* Aliasing `superagent` as `request` is nice:
*
* request = superagent;
*
* We can use the promise-like API, or pass callbacks:
*
* request.get('/').end(function(res){});
* request.get('/', function(res){});
*
* Sending data can be chained:
*
* request
* .post('/user')
* .send({ name: 'tj' })
* .end(function(res){});
*
* Or passed to `.send()`:
*
* request
* .post('/user')
* .send({ name: 'tj' }, function(res){});
*
* Or passed to `.post()`:
*
* request
* .post('/user', { name: 'tj' })
* .end(function(res){});
*
* Or further reduced to a single call for simple cases:
*
* request
* .post('/user', { name: 'tj' }, function(res){});
*
* @param {Request} req
* @param {Object} options
* @api private
*/
function Response(req, options) {
options = options || {};
this.req = req;
this.xhr = this.req.xhr;
this.text = this.req.method !='HEAD'
? this.xhr.responseText
: null;
this.setStatusProperties(this.xhr.status);
this.header = this.headers = parseHeader(this.xhr.getAllResponseHeaders());
this.header['content-type'] = this.xhr.getResponseHeader('content-type');
this.setHeaderProperties(this.header);
this.body = this.req.method != 'HEAD'
? this.parseBody(this.text)
: null;
}
/**
* Get case-insensitive `field` value.
*
* @param {String} field
* @return {String}
* @api public
*/
Response.prototype.get = function(field){
return this.header[field.toLowerCase()];
};
/**
* Set header related properties:
*
* - `.type` the content type without params
*
* A response of "Content-Type: text/plain; charset=utf-8"
* will provide you with a `.type` of "text/plain".
*
* @param {Object} header
* @api private
*/
Response.prototype.setHeaderProperties = function(header){
var ct = this.header['content-type'] || '';
this.type = type(ct);
var obj = params(ct);
for (var key in obj) this[key] = obj[key];
};
/**
* Parse the given body `str`.
*
* Used for auto-parsing of bodies. Parsers
* are defined on the `superagent.parse` object.
*
* @param {String} str
* @return {Mixed}
* @api private
*/
Response.prototype.parseBody = function(str){
var parse = request.parse[this.type];
return parse && str && str.length
? parse(str)
: null;
};
/**
* Set flags such as `.ok` based on `status`.
*
* For example a 2xx response will give you a `.ok` of __true__
* whereas 5xx will be __false__ and `.error` will be __true__. The
* `.clientError` and `.serverError` are also available to be more
* specific, and `.statusType` is the class of error ranging from 1..5
* sometimes useful for mapping response colors etc.
*
* "sugar" properties are also defined for common cases. Currently providing:
*
* - .noContent
* - .badRequest
* - .unauthorized
* - .notAcceptable
* - .notFound
*
* @param {Number} status
* @api private
*/
Response.prototype.setStatusProperties = function(status){
var type = status / 100 | 0;
this.status = status;
this.statusType = type;
this.info = 1 == type;
this.ok = 2 == type;
this.clientError = 4 == type;
this.serverError = 5 == type;
this.error = (4 == type || 5 == type)
? this.toError()
: false;
this.accepted = 202 == status;
this.noContent = 204 == status || 1223 == status;
this.badRequest = 400 == status;
this.unauthorized = 401 == status;
this.notAcceptable = 406 == status;
this.notFound = 404 == status;
this.forbidden = 403 == status;
};
/**
* Return an `Error` representative of this response.
*
* @return {Error}
* @api public
*/
Response.prototype.toError = function(){
var req = this.req;
var method = req.method;
var url = req.url;
var msg = 'cannot ' + method + ' ' + url + ' (' + this.status + ')';
var err = new Error(msg);
err.status = this.status;
err.method = method;
err.url = url;
return err;
};
/**
* Expose `Response`.
*/
request.Response = Response;
/**
* Initialize a new `Request` with the given `method` and `url`.
*
* @param {String} method
* @param {String} url
* @api public
*/
function Request(method, url) {
var self = this;
Emitter.call(this);
this._query = this._query || [];
this.method = method;
this.url = url;
this.header = {};
this._header = {};
this.on('end', function(){
var err = null;
var res = null;
try {
res = new Response(self);
} catch(e) {
err = new Error('Parser is unable to parse the response');
err.parse = true;
err.original = e;
}
self.callback(err, res);
});
}
/**
* Mixin `Emitter`.
*/
Emitter(Request.prototype);
/**
* Allow for extension
*/
Request.prototype.use = function(fn) {
fn(this);
return this;
}
/**
* Set timeout to `ms`.
*
* @param {Number} ms
* @return {Request} for chaining
* @api public
*/
Request.prototype.timeout = function(ms){
this._timeout = ms;
return this;
};
/**
* Clear previous timeout.
*
* @return {Request} for chaining
* @api public
*/
Request.prototype.clearTimeout = function(){
this._timeout = 0;
clearTimeout(this._timer);
return this;
};
/**
* Abort the request, and clear potential timeout.
*
* @return {Request}
* @api public
*/
Request.prototype.abort = function(){
if (this.aborted) return;
this.aborted = true;
this.xhr.abort();
this.clearTimeout();
this.emit('abort');
return this;
};
/**
* Set header `field` to `val`, or multiple fields with one object.
*
* Examples:
*
* req.get('/')
* .set('Accept', 'application/json')
* .set('X-API-Key', 'foobar')
* .end(callback);
*
* req.get('/')
* .set({ Accept: 'application/json', 'X-API-Key': 'foobar' })
* .end(callback);
*
* @param {String|Object} field
* @param {String} val
* @return {Request} for chaining
* @api public
*/
Request.prototype.set = function(field, val){
if (isObject(field)) {
for (var key in field) {
this.set(key, field[key]);
}
return this;
}
this._header[field.toLowerCase()] = val;
this.header[field] = val;
return this;
};
/**
* Remove header `field`.
*
* Example:
*
* req.get('/')
* .unset('User-Agent')
* .end(callback);
*
* @param {String} field
* @return {Request} for chaining
* @api public
*/
Request.prototype.unset = function(field){
delete this._header[field.toLowerCase()];
delete this.header[field];
return this;
};
/**
* Get case-insensitive header `field` value.
*
* @param {String} field
* @return {String}
* @api private
*/
Request.prototype.getHeader = function(field){
return this._header[field.toLowerCase()];
};
/**
* Set Content-Type to `type`, mapping values from `request.types`.
*
* Examples:
*
* superagent.types.xml = 'application/xml';
*
* request.post('/')
* .type('xml')
* .send(xmlstring)
* .end(callback);
*
* request.post('/')
* .type('application/xml')
* .send(xmlstring)
* .end(callback);
*
* @param {String} type
* @return {Request} for chaining
* @api public
*/
Request.prototype.type = function(type){
this.set('Content-Type', request.types[type] || type);
return this;
};
/**
* Set Accept to `type`, mapping values from `request.types`.
*
* Examples:
*
* superagent.types.json = 'application/json';
*
* request.get('/agent')
* .accept('json')
* .end(callback);
*
* request.get('/agent')
* .accept('application/json')
* .end(callback);
*
* @param {String} accept
* @return {Request} for chaining
* @api public
*/
Request.prototype.accept = function(type){
this.set('Accept', request.types[type] || type);
return this;
};
/**
* Set Authorization field value with `user` and `pass`.
*
* @param {String} user
* @param {String} pass
* @return {Request} for chaining
* @api public
*/
Request.prototype.auth = function(user, pass){
var str = btoa(user + ':' + pass);
this.set('Authorization', 'Basic ' + str);
return this;
};
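// Example (illustrative): req.auth('user', 'pass') base64-encodes the pair
// "user:pass" and sets the header "Authorization: Basic dXNlcjpwYXNz".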
/**
* Add query-string `val`.
*
* Examples:
*
* request.get('/shoes')
* .query('size=10')
* .query({ color: 'blue' })
*
* @param {Object|String} val
* @return {Request} for chaining
* @api public
*/
Request.prototype.query = function(val){
if ('string' != typeof val) val = serialize(val);
if (val) this._query.push(val);
return this;
};
/**
* Write the field `name` and `val` for "multipart/form-data"
* request bodies.
*
* ``` js
* request.post('/upload')
* .field('foo', 'bar')
* .end(callback);
* ```
*
* @param {String} name
* @param {String|Blob|File} val
* @return {Request} for chaining
* @api public
*/
Request.prototype.field = function(name, val){
if (!this._formData) this._formData = new FormData();
this._formData.append(name, val);
return this;
};
/**
* Queue the given `file` as an attachment to the specified `field`,
* with optional `filename`.
*
* ``` js
* request.post('/upload')
* .attach(new Blob(['<a id="a"><b id="b">hey!</b></a>'], { type: "text/html"}))
* .end(callback);
* ```
*
* @param {String} field
* @param {Blob|File} file
* @param {String} filename
* @return {Request} for chaining
* @api public
*/
Request.prototype.attach = function(field, file, filename){
if (!this._formData) this._formData = new FormData();
this._formData.append(field, file, filename);
return this;
};
/**
* Send `data`, defaulting the `.type()` to "json" when
* an object is given.
*
* Examples:
*
*
* request.get('/search')
* .end(callback)
*
*
* request.get('/search')
* .send({ search: 'query' })
* .send({ range: '1..5' })
* .send({ order: 'desc' })
* .end(callback)
*
*
* request.post('/user')
* .type('json')
* .send('{"name":"tj"}')
* .end(callback)
*
*
* request.post('/user')
* .send({ name: 'tj' })
* .end(callback)
*
*
* request.post('/user')
* .type('form')
* .send('name=tj')
* .end(callback)
*
*
* request.post('/user')
* .type('form')
* .send({ name: 'tj' })
* .end(callback)
*
*
* request.post('/user')
* .send('name=tobi')
* .send('species=ferret')
* .end(callback)
*
* @param {String|Object} data
* @return {Request} for chaining
* @api public
*/
Request.prototype.send = function(data){
var obj = isObject(data);
var type = this.getHeader('Content-Type');
if (obj && isObject(this._data)) {
for (var key in data) {
this._data[key] = data[key];
}
} else if ('string' == typeof data) {
if (!type) this.type('form');
type = this.getHeader('Content-Type');
if ('application/x-www-form-urlencoded' == type) {
this._data = this._data
? this._data + '&' + data
: data;
} else {
this._data = (this._data || '') + data;
}
} else {
this._data = data;
}
if (!obj) return this;
if (!type) this.type('json');
return this;
};
/**
* Invoke the callback with `err` and `res`
* and handle arity check.
*
* @param {Error} err
* @param {Response} res
* @api private
*/
Request.prototype.callback = function(err, res){
var fn = this._callback;
this.clearTimeout();
if (2 == fn.length) return fn(err, res);
if (err) return this.emit('error', err);
fn(res);
};
/**
* Invoke callback with x-domain error.
*
* @api private
*/
Request.prototype.crossDomainError = function(){
var err = new Error('Origin is not allowed by Access-Control-Allow-Origin');
err.crossDomain = true;
this.callback(err);
};
/**
* Invoke callback with timeout error.
*
* @api private
*/
Request.prototype.timeoutError = function(){
var timeout = this._timeout;
var err = new Error('timeout of ' + timeout + 'ms exceeded');
err.timeout = timeout;
this.callback(err);
};
/**
* Enable transmission of cookies with x-domain requests.
*
* Note that for this to work the origin must not be
* using "Access-Control-Allow-Origin" with a wildcard,
* and also must set "Access-Control-Allow-Credentials"
* to "true".
*
* @api public
*/
Request.prototype.withCredentials = function(){
this._withCredentials = true;
return this;
};
/**
* Initiate request, invoking callback `fn(res)`
* with an instanceof `Response`.
*
* @param {Function} fn
* @return {Request} for chaining
* @api public
*/
Request.prototype.end = function(fn){
var self = this;
var xhr = this.xhr = getXHR();
var query = this._query.join('&');
var timeout = this._timeout;
var data = this._formData || this._data;
this._callback = fn || noop;
xhr.onreadystatechange = function(){
if (4 != xhr.readyState) return;
if (0 == xhr.status) {
if (self.aborted) return self.timeoutError();
return self.crossDomainError();
}
self.emit('end');
};
if (xhr.upload) {
xhr.upload.onprogress = function(e){
e.percent = e.loaded / e.total * 100;
self.emit('progress', e);
};
}
if (timeout && !this._timer) {
this._timer = setTimeout(function(){
self.abort();
}, timeout);
}
if (query) {
query = request.serializeObject(query);
this.url += ~this.url.indexOf('?')
? '&' + query
: '?' + query;
}
xhr.open(this.method, this.url, true);
if (this._withCredentials) xhr.withCredentials = true;
if ('GET' != this.method && 'HEAD' != this.method && 'string' != typeof data && !isHost(data)) {
var serialize = request.serialize[this.getHeader('Content-Type')];
if (serialize) data = serialize(data);
}
for (var field in this.header) {
if (null == this.header[field]) continue;
xhr.setRequestHeader(field, this.header[field]);
}
this.emit('request', this);
xhr.send(data);
return this;
};
/**
* Expose `Request`.
*/
request.Request = Request;
/**
* Issue a request:
*
* Examples:
*
* request('GET', '/users').end(callback)
* request('/users').end(callback)
* request('/users', callback)
*
* @param {String} method
* @param {String|Function} url or callback
* @return {Request}
* @api public
*/
function request(method, url) {
if ('function' == typeof url) {
return new Request('GET', method).end(url);
}
if (1 == arguments.length) {
return new Request('GET', method);
}
return new Request(method, url);
}
/**
* GET `url` with optional callback `fn(res)`.
*
* @param {String} url
* @param {Mixed|Function} data or fn
* @param {Function} fn
* @return {Request}
* @api public
*/
request.get = function(url, data, fn){
var req = request('GET', url);
if ('function' == typeof data) fn = data, data = null;
if (data) req.query(data);
if (fn) req.end(fn);
return req;
};
/**
* HEAD `url` with optional callback `fn(res)`.
*
* @param {String} url
* @param {Mixed|Function} data or fn
* @param {Function} fn
* @return {Request}
* @api public
*/
request.head = function(url, data, fn){
var req = request('HEAD', url);
if ('function' == typeof data) fn = data, data = null;
if (data) req.send(data);
if (fn) req.end(fn);
return req;
};
/**
* DELETE `url` with optional callback `fn(res)`.
*
* @param {String} url
* @param {Function} fn
* @return {Request}
* @api public
*/
request.del = function(url, fn){
var req = request('DELETE', url);
if (fn) req.end(fn);
return req;
};
/**
* PATCH `url` with optional `data` and callback `fn(res)`.
*
* @param {String} url
* @param {Mixed} data
* @param {Function} fn
* @return {Request}
* @api public
*/
request.patch = function(url, data, fn){
var req = request('PATCH', url);
if ('function' == typeof data) fn = data, data = null;
if (data) req.send(data);
if (fn) req.end(fn);
return req;
};
/**
* POST `url` with optional `data` and callback `fn(res)`.
*
* @param {String} url
* @param {Mixed} data
* @param {Function} fn
* @return {Request}
* @api public
*/
request.post = function(url, data, fn){
var req = request('POST', url);
if ('function' == typeof data) fn = data, data = null;
if (data) req.send(data);
if (fn) req.end(fn);
return req;
};
/**
* PUT `url` with optional `data` and callback `fn(res)`.
*
* @param {String} url
* @param {Mixed|Function} data or fn
* @param {Function} fn
* @return {Request}
* @api public
*/
request.put = function(url, data, fn){
var req = request('PUT', url);
if ('function' == typeof data) fn = data, data = null;
if (data) req.send(data);
if (fn) req.end(fn);
return req;
};
/**
* Expose `request`.
*/
module.exports = request;
},{"emitter":7,"reduce":8}],7:[function(require,module,exports){
arguments[4][4][0].apply(exports,arguments)
},{"dup":4}],8:[function(require,module,exports){
/**
* Reduce `arr` with `fn`.
*
* @param {Array} arr
* @param {Function} fn
* @param {Mixed} initial
*
* TODO: compatible error handling?
*/
module.exports = function(arr, fn, initial){
var idx = 0;
var len = arr.length;
var curr = arguments.length == 3
? initial
: arr[idx++];
while (idx < len) {
curr = fn.call(null, curr, arr[idx], ++idx, arr);
}
return curr;
};
},{}],9:[function(require,module,exports){
var Keen = require("./index"),
each = require("./utils/each");
module.exports = function(){
var loaded = window['Keen'] || null,
cached = window['_' + 'Keen'] || null,
clients,
ready;
if (loaded && cached) {
clients = cached['clients'] || {},
ready = cached['ready'] || [];
each(clients, function(client, id){
each(Keen.prototype, function(method, key){
loaded.prototype[key] = method;
});
each(["Query", "Request", "Dataset", "Dataviz"], function(name){
loaded[name] = (Keen[name]) ? Keen[name] : function(){};
});
if (client._config) {
client.configure.call(client, client._config);
}
if (client._setGlobalProperties) {
each(client._setGlobalProperties, function(fn){
client.setGlobalProperties.apply(client, fn);
});
}
if (client._addEvent) {
each(client._addEvent, function(obj){
client.addEvent.apply(client, obj);
});
}
var callback = client._on || [];
if (client._on) {
each(client._on, function(obj){
client.on.apply(client, obj);
});
client.trigger('ready');
}
each(["_config", "_setGlobalProperties", "_addEvent", "_on"], function(name){
if (client[name]) {
client[name] = undefined;
try{
delete client[name];
} catch(e){}
}
});
});
each(ready, function(cb, i){
Keen.once("ready", cb);
});
}
window['_' + 'Keen'] = undefined;
try {
delete window['_' + 'Keen']
} catch(e) {}
};
},{"./index":17,"./utils/each":23}],10:[function(require,module,exports){
var Emitter = require('component-emitter');
Emitter.prototype.trigger = Emitter.prototype.emit;
module.exports = Emitter;
},{"component-emitter":4}],11:[function(require,module,exports){
module.exports = function(){
return "undefined" == typeof window ? "server" : "browser";
};
},{}],12:[function(require,module,exports){
var each = require('../utils/each'),
JSON2 = require('JSON2');
module.exports = function(params){
var query = [];
each(params, function(value, key){
if ('string' !== typeof value) {
value = JSON2.stringify(value);
}
query.push(key + '=' + encodeURIComponent(value));
});
return '?' + query.join('&');
};
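// Example (illustrative; this export is required as `getQueryString`
// elsewhere in the bundle). Non-string values are JSON-encoded before
// URI-encoding:
//
//   getQueryString({ api_key: 'KEY', page: 2 })
//   // => "?api_key=KEY&page=2"
//   getQueryString({ filters: { op: 'eq' } })
//   // => "?filters=%7B%22op%22%3A%22eq%22%7D"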
},{"../utils/each":23,"JSON2":2}],13:[function(require,module,exports){
module.exports = function(){
if ("undefined" !== typeof window) {
if (navigator.userAgent.indexOf('MSIE') !== -1 || navigator.appVersion.indexOf('Trident/') > 0) {
return 2000;
}
}
return 16000;
};
},{}],14:[function(require,module,exports){
module.exports = function() {
var root = "undefined" == typeof window ? this : window;
if (root.XMLHttpRequest && ("file:" != root.location.protocol || !root.ActiveXObject)) {
return new XMLHttpRequest;
} else {
try { return new ActiveXObject("Microsoft.XMLHTTP"); } catch(e) {}
try { return new ActiveXObject("Msxml2.XMLHTTP.6.0"); } catch(e) {}
try { return new ActiveXObject("Msxml2.XMLHTTP.3.0"); } catch(e) {}
try { return new ActiveXObject("Msxml2.XMLHTTP"); } catch(e) {}
}
return false;
};
},{}],15:[function(require,module,exports){
module.exports = function(err, res, callback) {
var cb = callback || function() {};
if (res && !res.ok) {
var is_err = res.body && res.body.error_code;
err = new Error(is_err ? res.body.message : 'Unknown error occurred');
err.code = is_err ? res.body.error_code : 'UnknownError';
}
if (err) {
cb(err, null);
}
else {
cb(null, res.body);
}
return;
};
},{}],16:[function(require,module,exports){
var superagent = require('superagent');
var each = require('../utils/each'),
getXHR = require('./get-xhr-object');
// `extend` and `noop` are referenced below but were never defined or
// imported in this module; minimal local fallbacks keep it runnable.
function extend(target, source){
for (var prop in source) {
if (source.hasOwnProperty(prop)) target[prop] = source[prop];
}
return target;
}
function noop(){}
module.exports = function(type, opts){
return function(request) {
var __super__ = request.constructor.prototype.end;
if ( 'undefined' === typeof window ) return;
request.requestType = request.requestType || {};
request.requestType['type'] = type;
request.requestType['options'] = request.requestType['options'] || {
async: true,
success: {
responseText: '{ "created": true }',
status: 201
},
error: {
responseText: '{ "error_code": "ERROR", "message": "Request failed" }',
status: 404
}
};
if (opts) {
if ( 'boolean' === typeof opts.async ) {
request.requestType['options'].async = opts.async;
}
if ( opts.success ) {
extend(request.requestType['options'].success, opts.success);
}
if ( opts.error ) {
extend(request.requestType['options'].error, opts.error);
}
}
request.end = function(fn){
var self = this,
reqType = (this.requestType) ? this.requestType['type'] : 'xhr',
query,
timeout;
if ( ('GET' !== self['method'] || 'xhr' === reqType) && self.requestType['options'].async ) {
__super__.call(self, fn);
return;
}
query = self._query.join('&');
timeout = self._timeout;
self._callback = fn || noop;
if (timeout && !self._timer) {
self._timer = setTimeout(function(){
abortRequest.call(self);
}, timeout);
}
if (query) {
query = superagent.serializeObject(query);
self.url += ~self.url.indexOf('?') ? '&' + query : '?' + query;
}
self.emit('request', self);
if ( !self.requestType['options'].async ) {
sendXhrSync.call(self);
}
else if ( 'jsonp' === reqType ) {
sendJsonp.call(self);
}
else if ( 'beacon' === reqType ) {
sendBeacon.call(self);
}
return self;
};
return request;
};
};
function sendXhrSync(){
var xhr = getXHR();
if (xhr) {
xhr.open('GET', this.url, false);
xhr.send(null);
}
return this;
}
function sendJsonp(){
var self = this,
timestamp = new Date().getTime(),
script = document.createElement('script'),
parent = document.getElementsByTagName('head')[0],
callbackName = 'keenJSONPCallback',
loaded = false;
callbackName += timestamp;
while (callbackName in window) {
callbackName += 'a';
}
window[callbackName] = function(response) {
if (loaded === true) return;
loaded = true;
handleSuccess.call(self, response);
cleanup();
};
script.src = self.url + '&jsonp=' + callbackName;
parent.appendChild(script);
script.onreadystatechange = function() {
if (loaded === false && this.readyState === 'loaded') { // `this` is the <script> element
loaded = true;
handleError.call(self);
cleanup();
}
};
script.onerror = function() {
if (loaded === false) {
loaded = true;
handleError.call(self);
cleanup();
}
};
function cleanup(){
window[callbackName] = undefined;
try {
delete window[callbackName];
} catch(e){}
parent.removeChild(script);
}
}
function sendBeacon(){
var self = this,
img = document.createElement('img'),
loaded = false;
img.onload = function() {
loaded = true;
if ('naturalHeight' in this) {
if (this.naturalHeight + this.naturalWidth === 0) {
this.onerror();
return;
}
} else if (this.width + this.height === 0) {
this.onerror();
return;
}
handleSuccess.call(self);
};
img.onerror = function() {
loaded = true;
handleError.call(self);
};
img.src = self.url + '&c=clv1';
}
function handleSuccess(res){
var opts = this.requestType['options']['success'],
response = '';
xhrShim.call(this, opts);
if (res) {
try {
response = JSON.stringify(res);
} catch(e) {}
}
else {
response = opts['responseText'];
}
this.xhr.responseText = response;
this.xhr.status = opts['status'];
this.emit('end');
}
function handleError(){
var opts = this.requestType['options']['error'];
xhrShim.call(this, opts);
this.xhr.responseText = opts['responseText'];
this.xhr.status = opts['status'];
this.emit('end');
}
function abortRequest(){
this.aborted = true;
this.clearTimeout();
this.emit('abort');
}
function xhrShim(opts){
this.xhr = {
getAllResponseHeaders: function(){ return ''; },
getResponseHeader: function(){ return 'application/json'; },
responseText: opts['responseText'],
status: opts['status']
};
return this;
}
},{"../utils/each":23,"./get-xhr-object":14,"superagent":6}],17:[function(require,module,exports){
var root = this;
var previous_Keen = root.Keen;
var extend = require('./utils/extend');
var Emitter = require('./helpers/emitter-shim');
function Keen(config) {
this.configure(config || {});
Keen.trigger('client', this);
}
Keen.debug = false;
Keen.enabled = true;
Keen.loaded = true;
Keen.version = '3.2.0';
Emitter(Keen);
Emitter(Keen.prototype);
Keen.prototype.configure = function(cfg){
var config = cfg || {};
if (config['host']) {
config['host'] = config['host'].replace(/.*?:\/\//g, '');
}
if (config.protocol && config.protocol === 'auto') {
config['protocol'] = location.protocol.replace(/:/g, '');
}
this.config = {
projectId : config.projectId,
writeKey : config.writeKey,
readKey : config.readKey,
masterKey : config.masterKey,
requestType : config.requestType || 'jsonp',
host : config['host'] || 'api.keen.io/3.0',
protocol : config['protocol'] || 'https',
globalProperties: null
};
if (Keen.debug) {
this.on('error', Keen.log);
}
this.trigger('ready');
};
Keen.prototype.projectId = function(str){
if (!arguments.length) return this.config.projectId;
this.config.projectId = (str ? String(str) : null);
return this;
};
Keen.prototype.masterKey = function(str){
if (!arguments.length) return this.config.masterKey;
this.config.masterKey = (str ? String(str) : null);
return this;
};
Keen.prototype.readKey = function(str){
if (!arguments.length) return this.config.readKey;
this.config.readKey = (str ? String(str) : null);
return this;
};
Keen.prototype.writeKey = function(str){
if (!arguments.length) return this.config.writeKey;
this.config.writeKey = (str ? String(str) : null);
return this;
};
Keen.prototype.url = function(path){
if (!this.projectId()) {
this.trigger('error', 'Client is missing projectId property');
return;
}
return this.config.protocol + '://' + this.config.host + '/projects/' + this.projectId() + path;
};
Keen.log = function(message) {
if (Keen.debug && typeof console == 'object') {
console.log('[Keen IO]', message);
}
};
Keen.noConflict = function(){
root.Keen = previous_Keen;
return Keen;
};
Keen.ready = function(fn){
if (Keen.loaded) {
fn();
} else {
Keen.once('ready', fn);
}
};
module.exports = Keen;
},{"./helpers/emitter-shim":10,"./utils/extend":24}],18:[function(require,module,exports){
var JSON2 = require('JSON2');
var request = require('superagent');
var Keen = require('../index');
var base64 = require('../utils/base64'),
each = require('../utils/each'),
getContext = require('../helpers/get-context'),
getQueryString = require('../helpers/get-query-string'),
getUrlMaxLength = require('../helpers/get-url-max-length'),
getXHR = require('../helpers/get-xhr-object'),
requestTypes = require('../helpers/superagent-request-types'),
responseHandler = require('../helpers/superagent-handle-response');
module.exports = function(collection, payload, callback, async) {
var self = this,
urlBase = this.url('/events/' + collection),
reqType = this.config.requestType,
data = {},
cb = callback,
isAsync,
getUrl;
isAsync = ('boolean' === typeof async) ? async : true;
if (!Keen.enabled) {
handleValidationError.call(self, 'Keen.enabled = false');
return;
}
if (!self.projectId()) {
handleValidationError.call(self, 'Missing projectId property');
return;
}
if (!self.writeKey()) {
handleValidationError.call(self, 'Missing writeKey property');
return;
}
if (!collection || typeof collection !== 'string') {
handleValidationError.call(self, 'Collection name must be a string');
return;
}
if (self.config.globalProperties) {
data = self.config.globalProperties(collection);
}
each(payload, function(value, key){
data[key] = value;
});
if ( !getXHR() && 'xhr' === reqType ) {
reqType = 'jsonp';
}
if ( 'xhr' !== reqType || !isAsync ) {
getUrl = prepareGetRequest.call(self, urlBase, data);
}
if ( getUrl && getContext() === 'browser' ) {
request
.get(getUrl)
.use(function(req){
req.async = isAsync;
return req;
})
.use(requestTypes(reqType))
.end(handleResponse);
}
else if ( getXHR() || getContext() === 'server' ) {
request
.post(urlBase)
.set('Content-Type', 'application/json')
.set('Authorization', self.writeKey())
.send(data)
.end(handleResponse);
}
else {
self.trigger('error', 'Request not sent: URL length exceeds current browser limit, and XHR (POST) is not supported.');
}
function handleResponse(err, res){
responseHandler(err, res, cb);
cb = callback = null;
}
function handleValidationError(msg){
var err = 'Event not recorded: ' + msg;
self.trigger('error', err);
if (cb) {
cb.call(self, err, null);
cb = callback = null;
}
}
return;
};
function prepareGetRequest(url, data){
url += getQueryString({
api_key : this.writeKey(),
data : base64.encode( JSON2.stringify(data) ),
modified : new Date().getTime()
});
return ( url.length < getUrlMaxLength() ) ? url : false;
}
},{"../helpers/get-context":11,"../helpers/get-query-string":12,"../helpers/get-url-max-length":13,"../helpers/get-xhr-object":14,"../helpers/superagent-handle-response":15,"../helpers/superagent-request-types":16,"../index":17,"../utils/base64":22,"../utils/each":23,"JSON2":2,"superagent":6}],19:[function(require,module,exports){
var Keen = require('../index');
var request = require('superagent');
var each = require('../utils/each'),
getContext = require('../helpers/get-context'),
getXHR = require('../helpers/get-xhr-object'),
requestTypes = require('../helpers/superagent-request-types'),
responseHandler = require('../helpers/superagent-handle-response');
module.exports = function(payload, callback) {
var self = this,
urlBase = this.url('/events'),
data = {},
cb = callback;
if (!Keen.enabled) {
handleValidationError.call(self, 'Keen.enabled = false');
return;
}
if (!self.projectId()) {
handleValidationError.call(self, 'Missing projectId property');
return;
}
if (!self.writeKey()) {
handleValidationError.call(self, 'Missing writeKey property');
return;
}
if (arguments.length > 2) {
handleValidationError.call(self, 'Incorrect arguments provided to #addEvents method');
return;
}
if (typeof payload !== 'object' || payload instanceof Array) {
handleValidationError.call(self, 'Request payload must be an object');
return;
}
if (self.config.globalProperties) {
each(payload, function(events, collection){
each(events, function(body, index){
var base = self.config.globalProperties(collection);
each(body, function(value, key){
base[key] = value;
});
data[collection] = data[collection] || [];
data[collection].push(base);
});
});
}
else {
data = payload;
}
if ( getXHR() || getContext() === 'server' ) {
request
.post(urlBase)
.set('Content-Type', 'application/json')
.set('Authorization', self.writeKey())
.send(data)
.end(function(err, res){
responseHandler(err, res, cb);
cb = callback = null;
});
}
else {
self.trigger('error', 'Events not recorded: XHR support is required for batch upload');
}
function handleValidationError(msg){
var err = 'Events not recorded: ' + msg;
self.trigger('error', err);
if (cb) {
cb.call(self, err, null);
cb = callback = null;
}
}
return;
};
},{"../helpers/get-context":11,"../helpers/get-xhr-object":14,"../helpers/superagent-handle-response":15,"../helpers/superagent-request-types":16,"../index":17,"../utils/each":23,"superagent":6}],20:[function(require,module,exports){
module.exports = function(newGlobalProperties) {
if (newGlobalProperties && typeof(newGlobalProperties) == "function") {
this.config.globalProperties = newGlobalProperties;
} else {
this.trigger("error", "Invalid value for global properties: " + newGlobalProperties);
}
};
},{}],21:[function(require,module,exports){
var addEvent = require("./addEvent");
module.exports = function(jsEvent, eventCollection, payload, timeout, timeoutCallback){
var evt = jsEvent,
target = (evt.currentTarget) ? evt.currentTarget : (evt.srcElement || evt.target),
timer = timeout || 500,
triggered = false,
targetAttr = "",
callback,
win;
if (target.getAttribute !== void 0) {
targetAttr = target.getAttribute("target");
} else if (target.target) {
targetAttr = target.target;
}
if ((targetAttr == "_blank" || targetAttr == "blank") && !evt.metaKey) {
win = window.open("about:blank");
win.document.location = target.href;
}
if (target.nodeName === "A") {
callback = function(){
if(!triggered && !evt.metaKey && (targetAttr !== "_blank" && targetAttr !== "blank")){
triggered = true;
window.location = target.href;
}
};
} else if (target.nodeName === "FORM") {
callback = function(){
if(!triggered){
triggered = true;
target.submit();
}
};
} else {
this.trigger("error", "#trackExternalLink method not attached to an <a> or <form> DOM element");
}
if (timeoutCallback) {
callback = function(){
if(!triggered){
triggered = true;
timeoutCallback();
}
};
}
addEvent.call(this, eventCollection, payload, callback);
setTimeout(callback, timer);
if (!evt.metaKey) {
return false;
}
};
},{"./addEvent":18}],22:[function(require,module,exports){
module.exports = {
map: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",
encode: function (n) {
"use strict";
var o = "", i = 0, m = this.map, i1, i2, i3, e1, e2, e3, e4;
n = this.utf8.encode(n);
while (i < n.length) {
i1 = n.charCodeAt(i++); i2 = n.charCodeAt(i++); i3 = n.charCodeAt(i++);
e1 = (i1 >> 2); e2 = (((i1 & 3) << 4) | (i2 >> 4)); e3 = (isNaN(i2) ? 64 : ((i2 & 15) << 2) | (i3 >> 6));
e4 = (isNaN(i2) || isNaN(i3)) ? 64 : i3 & 63;
o = o + m.charAt(e1) + m.charAt(e2) + m.charAt(e3) + m.charAt(e4);
} return o;
},
decode: function (n) {
"use strict";
var o = "", i = 0, m = this.map, cc = String.fromCharCode, e1, e2, e3, e4, c1, c2, c3;
n = n.replace(/[^A-Za-z0-9\+\/\=]/g, "");
while (i < n.length) {
e1 = m.indexOf(n.charAt(i++)); e2 = m.indexOf(n.charAt(i++));
e3 = m.indexOf(n.charAt(i++)); e4 = m.indexOf(n.charAt(i++));
c1 = (e1 << 2) | (e2 >> 4); c2 = ((e2 & 15) << 4) | (e3 >> 2);
c3 = ((e3 & 3) << 6) | e4;
o = o + (cc(c1) + ((e3 != 64) ? cc(c2) : "")) + (((e4 != 64) ? cc(c3) : ""));
} return this.utf8.decode(o);
},
utf8: {
encode: function (n) {
"use strict";
var o = "", i = 0, cc = String.fromCharCode, c;
while (i < n.length) {
c = n.charCodeAt(i++); o = o + ((c < 128) ? cc(c) : ((c > 127) && (c < 2048)) ?
(cc((c >> 6) | 192) + cc((c & 63) | 128)) : (cc((c >> 12) | 224) + cc(((c >> 6) & 63) | 128) + cc((c & 63) | 128)));
} return o;
},
decode: function (n) {
"use strict";
var o = "", i = 0, cc = String.fromCharCode, c2, c3, c;
while (i < n.length) {
c = n.charCodeAt(i);
o = o + ((c < 128) ? [cc(c), i++][0] : ((c > 191) && (c < 224)) ?
[cc(((c & 31) << 6) | ((c2 = n.charCodeAt(i + 1)) & 63)), (i += 2)][0] :
[cc(((c & 15) << 12) | (((c2 = n.charCodeAt(i + 1)) & 63) << 6) | ((c3 = n.charCodeAt(i + 2)) & 63)), (i += 3)][0]);
} return o;
}
}
};
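// Illustrative round trip for this module's export (values verified by hand):
//
//   base64.encode('Keen')     // => 'S2Vlbg=='
//   base64.decode('S2Vlbg==') // => 'Keen'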
},{}],23:[function(require,module,exports){
module.exports = function(o, cb, s){
var n;
if (!o){
return 0;
}
s = !s ? o : s;
if (o instanceof Array){
for (n=0; n<o.length; n++) {
if (cb.call(s, o[n], n, o) === false){
return 0;
}
}
} else {
for (n in o){
if (o.hasOwnProperty(n)) {
if (cb.call(s, o[n], n, o) === false){
return 0;
}
}
}
}
return 1;
};
},{}],24:[function(require,module,exports){
module.exports = function(target){
for (var i = 1; i < arguments.length; i++) {
for (var prop in arguments[i]){
target[prop] = arguments[i][prop];
}
}
return target;
};
},{}],25:[function(require,module,exports){
function parseParams(str){
var urlParams = {},
match,
pl = /\+/g,
search = /([^&=]+)=?([^&]*)/g,
decode = function (s) { return decodeURIComponent(s.replace(pl, " ")); },
query = str.split("?")[1];
while (!!(match=search.exec(query))) {
urlParams[decode(match[1])] = decode(match[2]);
}
return urlParams;
};
module.exports = parseParams;
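// Example (illustrative):
//
//   parseParams('https://example.com/page?a=1&b=two+words')
//   // => { a: '1', b: 'two words' }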
},{}],26:[function(require,module,exports){
(function (global){
;(function (f) {
if (typeof define === "function" && define.amd) {
define("keen", [], function(){ return f(); });
}
if (typeof exports === "object" && typeof module !== "undefined") {
module.exports = f();
}
var g = null;
if (typeof window !== "undefined") {
g = window;
} else if (typeof global !== "undefined") {
g = global;
} else if (typeof self !== "undefined") {
g = self;
}
if (g) {
g.Keen = f();
}
})(function() {
"use strict";
var Keen = require("./core"),
extend = require("./core/utils/extend");
extend(Keen.prototype, {
"addEvent" : require("./core/lib/addEvent"),
"addEvents" : require("./core/lib/addEvents"),
"setGlobalProperties" : require("./core/lib/setGlobalProperties"),
"trackExternalLink" : require("./core/lib/trackExternalLink")
});
Keen.Base64 = require("./core/utils/base64");
Keen.utils = {
"domready" : require("domready"),
"each" : require("./core/utils/each"),
"extend" : extend,
"parseParams" : require("./core/utils/parseParams")
};
if (Keen.loaded) {
setTimeout(function(){
Keen.utils.domready(function(){
Keen.emit("ready");
});
}, 0);
}
require("./core/async")();
module.exports = Keen;
return Keen;
});
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{"./core":17,"./core/async":9,"./core/lib/addEvent":18,"./core/lib/addEvents":19,"./core/lib/setGlobalProperties":20,"./core/lib/trackExternalLink":21,"./core/utils/base64":22,"./core/utils/each":23,"./core/utils/extend":24,"./core/utils/parseParams":25,"domready":5}]},{},[26]);<|fim▁end|> | |
<|file_name|>index.js<|end_file_name|>
/**
* The routes worker process
*
* This worker process gets the app routes by running the application dynamically
* on the server, then stores the publicly exposed routes in an intermediate format
* (just the url paths) in Redis.
* From there, the paths are used by the live app for serving /sitemap.xml and /robots.txt requests.
* The paths are also used by downstream worker processes (snapshots) to produce the site's html snapshots
* for _escaped_fragment_ requests.
*
* Run this worker anytime the app menu has changed and needs to be refreshed.
*
* PATH and NODE_ENV have to be set in the environment before running this.
* PATH has to include the phantomjs bin.
*/
var phantom = require("node-phantom");
var urlm = require("url");
var redis = require("../../../helpers/redis");
var configLib = require("../../../config");
var config = configLib.create();
// Write the appRoutes to Redis
function writeAppRoutes(appRoutes) {
// remove pattern routes, and prepend /
appRoutes = appRoutes.filter(function(appRoute) {
return !/[*\(\:]/.test(appRoute);
}).map(function(appRoute) {
return "/"+appRoute;
});
if (appRoutes.length > 0) {
var redisClient = redis.client();
redisClient.set(config.keys.routes, appRoutes, function(err) {
if (err) {
console.error("failed to store the routes for "+config.keys.routes);
} else {
console.log("successfully stored "+appRoutes.length+" routes in "+config.keys.routes);
}
redisClient.quit();
});
} else {
console.error("no routes found for "+config.app.hostname);
}
}
// Start up phantom, wait for the page, get the appRoutes
function routes(urlPath, timeout) {
var url = urlm.format({
protocol: "http",
hostname: config.app.hostname,
port: config.app.port || 80,
pathname: urlPath
});
phantom.create(function(err, ph) {
return ph.createPage(function(err, page) {
return page.open(url, function(err, status) {
if (status !== "success") {
console.error("Unable to load page " + url);
ph.exit();
} else {
setTimeout(function() {
return page.evaluate(function() {
return window.wpspa.appRoutes;
}, function(err, result) {
if (err) {
console.error("failed to get appRoutes");
} else {
writeAppRoutes(Object.keys(result));
}
ph.exit();
});
}, timeout);
}
});
});
});
}
module.exports = {
routes: routes
};
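// Example invocation (hypothetical values -- adjust host/port via config):
//
// var worker = require("./index");
// worker.routes("/", 30000); // crawl from the root, give the app 30s to settle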
<|file_name|>actions.py<|end_file_name|>
#!/usr/bin/python
from pisi.actionsapi import shelltools, get, autotools, pisitools
def setup():
autotools.configure ("--prefix=/usr\
 --disable-static\
--disable-docs\
--docdir=/usr/share/doc/fontconfig-2.10.2")
def build():
autotools.make ()
def install():
autotools.rawInstall ("DESTDIR=%s" % get.installDIR())
<|file_name|>authority.rs<|end_file_name|>
// Copyright 2015-2018 Benjamin Fry <[email protected]>
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
//! All authority related types
use std::ops::{Deref, DerefMut};
use std::path::{Path, PathBuf};
use std::pin::Pin;
use std::sync::Arc;
use futures::future::Future;
use trust_dns_client::op::LowerQuery;
use trust_dns_client::op::ResponseCode;
use trust_dns_client::proto::rr::dnssec::rdata::key::KEY;
use trust_dns_client::rr::dnssec::{DnsSecResult, Signer, SupportedAlgorithms};
use trust_dns_client::rr::{
DNSClass, LowerName, Name, RData, Record, RecordSet, RecordType, RrKey,
};
#[cfg(feature = "dnssec")]
use crate::authority::UpdateRequest;
use crate::authority::{Authority, LookupError, MessageRequest, UpdateResult, ZoneType};
use crate::error::{PersistenceErrorKind, PersistenceResult};
use crate::store::in_memory::InMemoryAuthority;
use crate::store::sqlite::{Journal, SqliteConfig};
/// SqliteAuthority is responsible for storing the resource records for a particular zone.
///
/// Authorities default to DNSClass IN. The ZoneType specifies whether this should be treated as the
/// start of authority for the zone, as a slave, or as a cached zone.
pub struct SqliteAuthority {
in_memory: InMemoryAuthority,
journal: Option<Journal>,
allow_update: bool,
is_dnssec_enabled: bool,
}
impl SqliteAuthority {
/// Creates a new Authority.
///
/// # Arguments
///
/// * `in_memory` - InMemoryAuthority for all records.
/// * `allow_update` - If true, then this zone accepts dynamic updates.
/// * `is_dnssec_enabled` - If true, then the zone will sign the zone with all registered keys,
/// (see `add_zone_signing_key()`)
///
/// # Return value
///
/// The new `Authority`.
pub fn new(in_memory: InMemoryAuthority, allow_update: bool, is_dnssec_enabled: bool) -> Self {
Self {
in_memory,
journal: None,
allow_update,
is_dnssec_enabled,
}
}
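// Illustrative construction sketch (the origin name and `ZoneType::Master`
// are placeholder values, not taken from this file):
//
// let origin = Name::from_str("example.com.").unwrap(); // via std::str::FromStr
// let in_memory = InMemoryAuthority::empty(origin, ZoneType::Master, false);
// let authority = SqliteAuthority::new(in_memory, /* allow_update */ true, /* is_dnssec_enabled */ false);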
/// load the authority from the configuration
pub fn try_from_config(
origin: Name,
zone_type: ZoneType,
allow_axfr: bool,
enable_dnssec: bool,
root_dir: Option<&Path>,
config: &SqliteConfig,
) -> Result<Self, String> {
use crate::store::file::{FileAuthority, FileConfig};
let zone_name: Name = origin;
let root_zone_dir = root_dir.map(PathBuf::from).unwrap_or_else(PathBuf::new);
// to be compatible with previous versions, the extension might be zone, not jrnl
let journal_path: PathBuf = root_zone_dir.join(&config.journal_file_path);
let zone_path: PathBuf = root_zone_dir.join(&config.zone_file_path);
// load the zone
if journal_path.exists() {
info!("recovering zone from journal: {:?}", journal_path);
let journal = Journal::from_file(&journal_path)
.map_err(|e| format!("error opening journal: {:?}: {}", journal_path, e))?;
let in_memory = InMemoryAuthority::empty(zone_name.clone(), zone_type, allow_axfr);
let mut authority = SqliteAuthority::new(in_memory, config.allow_update, enable_dnssec);
authority
.recover_with_journal(&journal)
.map_err(|e| format!("error recovering from journal: {}", e))?;
authority.set_journal(journal);
info!("recovered zone: {}", zone_name);
Ok(authority)
} else if zone_path.exists() {
// TODO: deprecate this portion of loading, instantiate the journal through a separate tool
info!("loading zone file: {:?}", zone_path);
let file_config = FileConfig {
zone_file_path: config.zone_file_path.clone(),
};
let in_memory = FileAuthority::try_from_config(
zone_name.clone(),
zone_type,
allow_axfr,
root_dir,
&file_config,
)?
.unwrap();
let mut authority = SqliteAuthority::new(in_memory, config.allow_update, enable_dnssec);
// if dynamic update is enabled, enable the journal
info!("creating new journal: {:?}", journal_path);
let journal = Journal::from_file(&journal_path)
.map_err(|e| format!("error creating journal {:?}: {}", journal_path, e))?;
authority.set_journal(journal);
// preserve to the new journal, i.e. we just loaded the zone from disk, start the journal
authority
.persist_to_journal()
.map_err(|e| format!("error persisting to journal {:?}: {}", journal_path, e))?;
info!("zone file loaded: {}", zone_name);
Ok(authority)
} else {
Err(format!(
"no zone file or journal defined at: {:?}",
zone_path
))
}
}
/// Recovers the zone from a Journal, returns an error on failure to recover the zone.
///
/// # Arguments
///
/// * `journal` - the journal from which to load the persisted zone.
pub fn recover_with_journal(&mut self, journal: &Journal) -> PersistenceResult<()> {
assert!(
self.in_memory.records().is_empty(),
"records should be empty during a recovery"
);
info!("recovering from journal");
for record in journal.iter() {
// AXFR is special, it is used to mark the dump of a full zone.
// when recovering, if an AXFR is encountered, we should remove all the records in the
// authority.
if record.rr_type() == RecordType::AXFR {
self.in_memory.clear();
} else if let Err(error) = self.update_records(&[record], false) {
return Err(PersistenceErrorKind::Recovery(error.to_str()).into());
}
}
Ok(())
}
/// Persist the state of the current zone to the journal, does nothing if there is no associated
/// Journal.
///
/// Returns an error if there was an issue writing to the persistence layer.
pub fn persist_to_journal(&self) -> PersistenceResult<()> {
if let Some(journal) = self.journal.as_ref() {
let serial = self.serial();
info!("persisting zone to journal at SOA.serial: {}", serial);
// TODO: THIS NEEDS TO BE IN A TRANSACTION!!!
journal.insert_record(serial, Record::new().set_rr_type(RecordType::AXFR))?;
for rr_set in self.in_memory.records().values() {
// TODO: should we preserve rr_sets or not?
for record in rr_set.records_without_rrsigs() {
journal.insert_record(serial, record)?;
}
}
// TODO: COMMIT THE TRANSACTION!!!
}
Ok(())
}
/// Associate a backing Journal with this Authority for Updatable zones
pub fn set_journal(&mut self, journal: Journal) {
self.journal = Some(journal);
}
/// Returns the associated Journal
pub fn journal(&self) -> Option<&Journal> {
self.journal.as_ref()
}
/// Enables the zone for dynamic DNS updates
pub fn set_allow_update(&mut self, allow_update: bool) {
self.allow_update = allow_update;
}
/// [RFC 2136](https://tools.ietf.org/html/rfc2136), DNS Update, April 1997
///
/// ```text
///
/// 3.2 - Process Prerequisite Section
///
/// Next, the Prerequisite Section is checked to see that all
/// prerequisites are satisfied by the current state of the zone. Using
/// the definitions expressed in Section 1.2, if any RR's NAME is not
/// within the zone specified in the Zone Section, signal NOTZONE to the
/// requestor.
///
/// 3.2.1. For RRs in this section whose CLASS is ANY, test to see that
/// TTL and RDLENGTH are both zero (0), else signal FORMERR to the
/// requestor. If TYPE is ANY, test to see that there is at least one RR
/// in the zone whose NAME is the same as that of the Prerequisite RR,
/// else signal NXDOMAIN to the requestor. If TYPE is not ANY, test to
/// see that there is at least one RR in the zone whose NAME and TYPE are
/// the same as that of the Prerequisite RR, else signal NXRRSET to the
/// requestor.
///
/// 3.2.2. For RRs in this section whose CLASS is NONE, test to see that
/// the TTL and RDLENGTH are both zero (0), else signal FORMERR to the
/// requestor. If the TYPE is ANY, test to see that there are no RRs in
/// the zone whose NAME is the same as that of the Prerequisite RR, else
/// signal YXDOMAIN to the requestor. If the TYPE is not ANY, test to
/// see that there are no RRs in the zone whose NAME and TYPE are the
/// same as that of the Prerequisite RR, else signal YXRRSET to the
/// requestor.
///
/// 3.2.3. For RRs in this section whose CLASS is the same as the ZCLASS,
/// test to see that the TTL is zero (0), else signal FORMERR to the
/// requestor. Then, build an RRset for each unique <NAME,TYPE> and
/// compare each resulting RRset for set equality (same members, no more,
/// no less) with RRsets in the zone. If any Prerequisite RRset is not
/// entirely and exactly matched by a zone RRset, signal NXRRSET to the
/// requestor. If any RR in this section has a CLASS other than ZCLASS
/// or NONE or ANY, signal FORMERR to the requestor.
///
/// 3.2.4 - Table Of Metavalues Used In Prerequisite Section
///
/// CLASS TYPE RDATA Meaning
/// ------------------------------------------------------------
/// ANY ANY empty Name is in use
/// ANY rrset empty RRset exists (value independent)
/// NONE ANY empty Name is not in use
/// NONE rrset empty RRset does not exist
/// zone rrset rr RRset exists (value dependent)
/// ```
pub fn verify_prerequisites(&self, pre_requisites: &[Record]) -> UpdateResult<()> {
use futures::executor::block_on;
// 3.2.5 - Pseudocode for Prerequisite Section Processing
//
// for rr in prerequisites
// if (rr.ttl != 0)
// return (FORMERR)
// if (zone_of(rr.name) != ZNAME)
// return (NOTZONE);
// if (rr.class == ANY)
// if (rr.rdlength != 0)
// return (FORMERR)
// if (rr.type == ANY)
// if (!zone_name<rr.name>)
// return (NXDOMAIN)
// else
// if (!zone_rrset<rr.name, rr.type>)
// return (NXRRSET)
// if (rr.class == NONE)
// if (rr.rdlength != 0)
// return (FORMERR)
// if (rr.type == ANY)
// if (zone_name<rr.name>)
// return (YXDOMAIN)
// else
// if (zone_rrset<rr.name, rr.type>)
// return (YXRRSET)
// if (rr.class == zclass)
// temp<rr.name, rr.type> += rr
// else
// return (FORMERR)
//
// for rrset in temp
// if (zone_rrset<rrset.name, rrset.type> != rrset)
// return (NXRRSET)
for require in pre_requisites {
let required_name = LowerName::from(require.name());
if require.ttl() != 0 {
warn!("ttl must be 0 for: {:?}", require);
return Err(ResponseCode::FormErr);
}
if !self.origin().zone_of(&require.name().into()) {
warn!("{} is not a zone_of {}", require.name(), self.origin());
return Err(ResponseCode::NotZone);
}
match require.dns_class() {
DNSClass::ANY => {
if let RData::NULL(..) = *require.rdata() {
match require.rr_type() {
// ANY ANY empty Name is in use
RecordType::ANY => {
/*TODO: this works because the future here is always complete*/
if block_on(self.lookup(
&required_name,
RecordType::ANY,
false,
SupportedAlgorithms::new(),
))
.unwrap_or_default()
.was_empty()
{
return Err(ResponseCode::NXDomain);
} else {
continue;
}
}
// ANY rrset empty RRset exists (value independent)
rrset => {
/*TODO: this works because the future here is always complete*/
if block_on(self.lookup(
&required_name,
rrset,
false,
SupportedAlgorithms::new(),
))
.unwrap_or_default()
.was_empty()
{
return Err(ResponseCode::NXRRSet);
} else {
continue;
}
}
}
} else {
return Err(ResponseCode::FormErr);
}
}
DNSClass::NONE => {
if let RData::NULL(..) = *require.rdata() {
match require.rr_type() {
// NONE ANY empty Name is not in use
RecordType::ANY => {
/*TODO: this works because the future here is always complete*/
if !block_on(self.lookup(
&required_name,
RecordType::ANY,
false,
SupportedAlgorithms::new(),
))
.unwrap_or_default()
.was_empty()
{
return Err(ResponseCode::YXDomain);
} else {
continue;
}
}
// NONE rrset empty RRset does not exist
rrset => {
/*TODO: this works because the future here is always complete*/
if !block_on(self.lookup(
&required_name,
rrset,
false,
SupportedAlgorithms::new(),
))
.unwrap_or_default()
.was_empty()
{
return Err(ResponseCode::YXRRSet);
} else {
continue;
}
}
}
} else {
return Err(ResponseCode::FormErr);
}
}
class if class == self.class() =>
// zone rrset rr RRset exists (value dependent)
{
/*TODO: this works because the future here is always complete*/
if block_on(self.lookup(
&required_name,
require.rr_type(),
false,
SupportedAlgorithms::new(),
))
.unwrap_or_default()
.iter()
.find(|rr| *rr == require)
.is_none()
{
return Err(ResponseCode::NXRRSet);
} else {
continue;
}
}
_ => return Err(ResponseCode::FormErr),
}
}
// if we didn't bail everything checked out...
Ok(())
}
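// Worked example (illustrative): to apply an update only while
// "www.example.com. A" still holds 192.0.2.1, the client sends the
// prerequisite `www.example.com. 0 IN A 192.0.2.1` (class == zone class,
// TTL 0). Per the metavalue table above this is the "RRset exists
// (value dependent)" case, and this method answers NXRRSET if the zone's
// RRset is not exactly that value.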
/// [RFC 2136](https://tools.ietf.org/html/rfc2136), DNS Update, April 1997
///
/// ```text
///
/// 3.3 - Check Requestor's Permissions
///
/// 3.3.1. Next, the requestor's permission to update the RRs named in
/// the Update Section may be tested in an implementation dependent
/// fashion or using mechanisms specified in a subsequent Secure DNS
/// Update protocol. If the requestor does not have permission to
/// perform these updates, the server may write a warning message in its
/// operations log, and may either signal REFUSED to the requestor, or
/// ignore the permission problem and proceed with the update.
///
/// 3.3.2. While the exact processing is implementation defined, if these
/// verification activities are to be performed, this is the point in the
/// server's processing where such performance should take place, since
/// if a REFUSED condition is encountered after an update has been
/// partially applied, it will be necessary to undo the partial update
/// and restore the zone to its original state before answering the
/// requestor.
/// ```
///
#[cfg(feature = "dnssec")]
#[allow(clippy::block_in_if_condition_stmt)]
pub fn authorize(&self, update_message: &MessageRequest) -> UpdateResult<()> {
use futures::executor::block_on;
use proto::rr::dnssec::Verifier;
use trust_dns_client::rr::rdata::{DNSSECRData, DNSSECRecordType};
// 3.3.3 - Pseudocode for Permission Checking
//
// if (security policy exists)
// if (this update is not permitted)
// if (local option)
// log a message about permission problem
// if (local option)
// return (REFUSED)
// does this authority allow_updates?
if !self.allow_update {
warn!(
"update attempted on non-updatable Authority: {}",
self.origin()
);
return Err(ResponseCode::Refused);
}
// verify sig0, currently the only authorization that is accepted.
let sig0s: &[Record] = update_message.sig0();
debug!("authorizing with: {:?}", sig0s);
if !sig0s.is_empty()
&& sig0s
.iter()
.filter_map(|sig0| {
if let RData::DNSSEC(DNSSECRData::SIG(ref sig)) = *sig0.rdata() {
Some(sig)
} else {
None
}
})
.any(|sig| {
let name = LowerName::from(sig.signer_name());
// TODO: updates should be async as well.
let keys = block_on(self.lookup(
&name,
RecordType::DNSSEC(DNSSECRecordType::KEY),
false,
SupportedAlgorithms::new(),
));
let keys = match keys {
Ok(keys) => keys,
Err(_) => return false,
};
debug!("found keys {:?}", keys);
// TODO: check key usage flags and restrictions
keys.iter()
.filter_map(|rr_set| {
if let RData::DNSSEC(DNSSECRData::KEY(ref key)) = *rr_set.rdata() {
Some(key)
} else {
None
}
})
.any(|key| {
key.verify_message(update_message, sig.sig(), sig)
.map(|_| {
info!("verified sig: {:?} with key: {:?}", sig, key);
true
})
.unwrap_or_else(|_| {
debug!("did not verify sig: {:?} with key: {:?}", sig, key);
false
})
})
})
{
return Ok(());
} else {
warn!(
"no sig0 matched registered records: id {}",
update_message.id()
);
}
// getting here, we will always default to rejecting the request
// the code will only ever explicitly return authorized actions.
Err(ResponseCode::Refused)
}
/// [RFC 2136](https://tools.ietf.org/html/rfc2136), DNS Update, April 1997
///
/// ```text
///
/// 3.4 - Process Update Section
///
/// Next, the Update Section is processed as follows.
///
/// 3.4.1 - Prescan
///
/// The Update Section is parsed into RRs and each RR's CLASS is checked
/// to see if it is ANY, NONE, or the same as the Zone Class, else signal
/// a FORMERR to the requestor. Using the definitions in Section 1.2,
/// each RR's NAME must be in the zone specified by the Zone Section,
/// else signal NOTZONE to the requestor.
///
/// 3.4.1.2. For RRs whose CLASS is not ANY, check the TYPE and if it is
/// ANY, AXFR, MAILA, MAILB, or any other QUERY metatype, or any
/// unrecognized type, then signal FORMERR to the requestor. For RRs
/// whose CLASS is ANY or NONE, check the TTL to see that it is zero (0),
/// else signal a FORMERR to the requestor. For any RR whose CLASS is
/// ANY, check the RDLENGTH to make sure that it is zero (0) (that is,
/// the RDATA field is empty), and that the TYPE is not AXFR, MAILA,
/// MAILB, or any other QUERY metatype besides ANY, or any unrecognized
/// type, else signal FORMERR to the requestor.
/// ```
#[allow(clippy::unused_unit)]
pub fn pre_scan(&self, records: &[Record]) -> UpdateResult<()> {
// 3.4.1.3 - Pseudocode For Update Section Prescan
//
// [rr] for rr in updates
// if (zone_of(rr.name) != ZNAME)
// return (NOTZONE);
// if (rr.class == zclass)
// if (rr.type & ANY|AXFR|MAILA|MAILB)
// return (FORMERR)
// elsif (rr.class == ANY)
// if (rr.ttl != 0 || rr.rdlength != 0
// || rr.type & AXFR|MAILA|MAILB)
// return (FORMERR)
// elsif (rr.class == NONE)
// if (rr.ttl != 0 || rr.type & ANY|AXFR|MAILA|MAILB)
// return (FORMERR)
// else
// return (FORMERR)
for rr in records {
if !self.origin().zone_of(&rr.name().into()) {
return Err(ResponseCode::NotZone);
}
let class: DNSClass = rr.dns_class();
if class == self.class() {
match rr.rr_type() {
RecordType::ANY | RecordType::AXFR | RecordType::IXFR => {
return Err(ResponseCode::FormErr);
}
_ => (),
}
} else {
match class {
DNSClass::ANY => {
if rr.ttl() != 0 {
return Err(ResponseCode::FormErr);
}
                    // CLASS ANY updates must carry empty RDATA, parsed as RData::NULL
                    match *rr.rdata() {
                        RData::NULL(..) => {}
                        _ => return Err(ResponseCode::FormErr),
                    }
match rr.rr_type() {
RecordType::AXFR | RecordType::IXFR => {
return Err(ResponseCode::FormErr);
}
_ => (),
}
}
DNSClass::NONE => {
if rr.ttl() != 0 {
return Err(ResponseCode::FormErr);
}
match rr.rr_type() {
RecordType::ANY | RecordType::AXFR | RecordType::IXFR => {
return Err(ResponseCode::FormErr);
}
_ => (),
}
}
_ => return Err(ResponseCode::FormErr),
}
}
}
Ok(())
}
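    // Editor's sketch (assumption, not in the original source): the prescan
    // rules above restated as a standalone predicate. `class_kind` is 0 for
    // the zone class, 1 for ANY, 2 for NONE; the two type flags mark the
    // query metatypes (ANY, AXFR/IXFR) that the prescan rejects.
    #[allow(dead_code)]
    fn prescan_accepts(class_kind: u8, ttl: u32, rdata_empty: bool, type_is_any: bool, type_is_xfr: bool) -> bool {
        match class_kind {
            0 => !type_is_any && !type_is_xfr,             // zone class: real record data
            1 => ttl == 0 && rdata_empty && !type_is_xfr,  // ANY: TTL 0, empty RDATA
            2 => ttl == 0 && !type_is_any && !type_is_xfr, // NONE: TTL 0, concrete type
            _ => false,
        }
    }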
/// Updates the specified records according to the update section.
///
/// [RFC 2136](https://tools.ietf.org/html/rfc2136), DNS Update, April 1997
///
/// ```text
///
/// 3.4.2.6 - Table Of Metavalues Used In Update Section
///
/// CLASS TYPE RDATA Meaning
/// ---------------------------------------------------------
/// ANY ANY empty Delete all RRsets from a name
/// ANY rrset empty Delete an RRset
/// NONE rrset rr Delete an RR from an RRset
/// zone rrset rr Add to an RRset
/// ```
///
/// # Arguments
///
/// * `records` - set of record instructions for update following above rules
/// * `auto_signing_and_increment` - if true, the zone will sign and increment the SOA, this
/// should be disabled during recovery.
pub fn update_records(
&mut self,
records: &[Record],
auto_signing_and_increment: bool,
) -> UpdateResult<bool> {
let mut updated = false;
let serial: u32 = self.serial();
        // The journal acts as a write-ahead log (WAL); the WAL is also used to
        // recover a zone after a failure of the server.
if let Some(ref journal) = self.journal {
if let Err(error) = journal.insert_records(serial, records) {
error!("could not persist update records: {}", error);
return Err(ResponseCode::ServFail);
}
}
// 3.4.2.7 - Pseudocode For Update Section Processing
//
// [rr] for rr in updates
// if (rr.class == zclass)
// if (rr.type == CNAME)
// if (zone_rrset<rr.name, ~CNAME>)
// next [rr]
// elsif (zone_rrset<rr.name, CNAME>)
// next [rr]
// if (rr.type == SOA)
// if (!zone_rrset<rr.name, SOA> ||
// zone_rr<rr.name, SOA>.serial > rr.soa.serial)
// next [rr]
// for zrr in zone_rrset<rr.name, rr.type>
// if (rr.type == CNAME || rr.type == SOA ||
// (rr.type == WKS && rr.proto == zrr.proto &&
// rr.address == zrr.address) ||
// rr.rdata == zrr.rdata)
// zrr = rr
// next [rr]
// zone_rrset<rr.name, rr.type> += rr
// elsif (rr.class == ANY)
// if (rr.type == ANY)
// if (rr.name == zname)
// zone_rrset<rr.name, ~(SOA|NS)> = Nil
// else
// zone_rrset<rr.name, *> = Nil
// elsif (rr.name == zname &&
// (rr.type == SOA || rr.type == NS))
// next [rr]
// else
// zone_rrset<rr.name, rr.type> = Nil
// elsif (rr.class == NONE)
// if (rr.type == SOA)
// next [rr]
// if (rr.type == NS && zone_rrset<rr.name, NS> == rr)
// next [rr]
// zone_rr<rr.name, rr.type, rr.data> = Nil
// return (NOERROR)
for rr in records {
let rr_name = LowerName::from(rr.name());
let rr_key = RrKey::new(rr_name.clone(), rr.rr_type());
match rr.dns_class() {
class if class == self.class() => {
// RFC 2136 - 3.4.2.2. Any Update RR whose CLASS is the same as ZCLASS is added to
// the zone. In case of duplicate RDATAs (which for SOA RRs is always
// the case, and for WKS RRs is the case if the ADDRESS and PROTOCOL
// fields both match), the Zone RR is replaced by Update RR. If the
// TYPE is SOA and there is no Zone SOA RR, or the new SOA.SERIAL is
// lower (according to [RFC1982]) than or equal to the current Zone SOA
// RR's SOA.SERIAL, the Update RR is ignored. In the case of a CNAME
// Update RR and a non-CNAME Zone RRset or vice versa, ignore the CNAME
// Update RR, otherwise replace the CNAME Zone RR with the CNAME Update
// RR.
// zone rrset rr Add to an RRset
info!("upserting record: {:?}", rr);
updated = self.upsert(rr.clone(), serial) || updated;
}
DNSClass::ANY => {
// This is a delete of entire RRSETs, either many or one. In either case, the spec is clear:
match rr.rr_type() {
t @ RecordType::SOA | t @ RecordType::NS if rr_name == *self.origin() => {
// SOA and NS records are not to be deleted if they are the origin records
info!("skipping delete of {:?} see RFC 2136 - 3.4.2.3", t);
continue;
}
RecordType::ANY => {
// RFC 2136 - 3.4.2.3. For any Update RR whose CLASS is ANY and whose TYPE is ANY,
// all Zone RRs with the same NAME are deleted, unless the NAME is the
// same as ZNAME in which case only those RRs whose TYPE is other than
// SOA or NS are deleted.
// ANY ANY empty Delete all RRsets from a name
info!(
"deleting all records at name (not SOA or NS at origin): {:?}",
rr_name
);
let to_delete = self
.records()
.keys()
                            .filter(|k| {
                                // RFC 2136 - 3.4.2.3: SOA and NS at the zone
                                // origin must survive an ANY/ANY delete
                                !((k.record_type == RecordType::SOA
                                    || k.record_type == RecordType::NS)
                                    && k.name == *self.origin())
                            })
.filter(|k| k.name == rr_name)
.cloned()
.collect::<Vec<RrKey>>();
for delete in to_delete {
self.records_mut().remove(&delete);
updated = true;
}
}
_ => {
// RFC 2136 - 3.4.2.3. For any Update RR whose CLASS is ANY and
// whose TYPE is not ANY all Zone RRs with the same NAME and TYPE are
// deleted, unless the NAME is the same as ZNAME in which case neither
// SOA or NS RRs will be deleted.
// ANY rrset empty Delete an RRset
if let RData::NULL(..) = *rr.rdata() {
let deleted = self.records_mut().remove(&rr_key);
info!("deleted rrset: {:?}", deleted);
updated = updated || deleted.is_some();
} else {
info!("expected empty rdata: {:?}", rr);
return Err(ResponseCode::FormErr);
}
}
}
}
DNSClass::NONE => {
info!("deleting specific record: {:?}", rr);
// NONE rrset rr Delete an RR from an RRset
if let Some(rrset) = self.records_mut().get_mut(&rr_key) {
                        // Because the RecordSet is behind an Arc, clone it, remove the
                        // record from the clone, then swap the new set back in.
let mut rrset_clone: RecordSet = RecordSet::clone(&*rrset);
let deleted = rrset_clone.remove(rr, serial);
info!("deleted ({}) specific record: {:?}", deleted, rr);
updated = updated || deleted;
if deleted {
*rrset = Arc::new(rrset_clone);
}
}
}
class => {
info!("unexpected DNS Class: {:?}", class);
return Err(ResponseCode::FormErr);
}
}
}
// update the serial...
if updated && auto_signing_and_increment {
if self.is_dnssec_enabled {
self.secure_zone().map_err(|e| {
error!("failure securing zone: {}", e);
ResponseCode::ServFail
})?
} else {
                // secure_zone() increments the SOA serial during its operation;
                // when DNSSEC is disabled we have to increment it here instead...
self.increment_soa_serial();
}
}
Ok(updated)
}
}
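// Editor's sketch (illustrative only; not part of the upstream source): the
// RFC 2136 section 3.4.2.6 metavalue table that `update_records` implements,
// expressed as a classifier over the shape of an update RR.
#[allow(dead_code)]
fn classify_update_rr(class_is_zone: bool, class_is_any: bool, type_is_any: bool) -> &'static str {
    match (class_is_zone, class_is_any, type_is_any) {
        (true, _, _) => "zone rrset rr: add to an RRset",
        (false, true, true) => "ANY ANY empty: delete all RRsets from a name",
        (false, true, false) => "ANY rrset empty: delete an RRset",
        (false, false, _) => "NONE rrset rr: delete an RR from an RRset",
    }
}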
impl Deref for SqliteAuthority {
type Target = InMemoryAuthority;
fn deref(&self) -> &Self::Target {
&self.in_memory
}
}
impl DerefMut for SqliteAuthority {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.in_memory
}
}
impl Authority for SqliteAuthority {
type Lookup = <InMemoryAuthority as Authority>::Lookup;
type LookupFuture = <InMemoryAuthority as Authority>::LookupFuture;
/// What type is this zone
fn zone_type(&self) -> ZoneType {
self.in_memory.zone_type()
}
/// Return true if AXFR is allowed
fn is_axfr_allowed(&self) -> bool {
self.in_memory.is_axfr_allowed()
}
/// Takes the UpdateMessage, extracts the Records, and applies the changes to the record set.
///
/// [RFC 2136](https://tools.ietf.org/html/rfc2136), DNS Update, April 1997
///
/// ```text
///
/// 3.4 - Process Update Section
///
/// Next, the Update Section is processed as follows.
///
/// 3.4.2 - Update
///
/// The Update Section is parsed into RRs and these RRs are processed in
/// order.
///
/// 3.4.2.1. If any system failure (such as an out of memory condition,
/// or a hardware error in persistent storage) occurs during the
/// processing of this section, signal SERVFAIL to the requestor and undo
/// all updates applied to the zone during this transaction.
///
/// 3.4.2.2. Any Update RR whose CLASS is the same as ZCLASS is added to
/// the zone. In case of duplicate RDATAs (which for SOA RRs is always
/// the case, and for WKS RRs is the case if the ADDRESS and PROTOCOL
/// fields both match), the Zone RR is replaced by Update RR. If the
/// TYPE is SOA and there is no Zone SOA RR, or the new SOA.SERIAL is
/// lower (according to [RFC1982]) than or equal to the current Zone SOA
/// RR's SOA.SERIAL, the Update RR is ignored. In the case of a CNAME
/// Update RR and a non-CNAME Zone RRset or vice versa, ignore the CNAME
/// Update RR, otherwise replace the CNAME Zone RR with the CNAME Update
/// RR.
///
/// 3.4.2.3. For any Update RR whose CLASS is ANY and whose TYPE is ANY,
/// all Zone RRs with the same NAME are deleted, unless the NAME is the
/// same as ZNAME in which case only those RRs whose TYPE is other than
/// SOA or NS are deleted. For any Update RR whose CLASS is ANY and
/// whose TYPE is not ANY all Zone RRs with the same NAME and TYPE are
/// deleted, unless the NAME is the same as ZNAME in which case neither
/// SOA or NS RRs will be deleted.
///
/// 3.4.2.4. For any Update RR whose class is NONE, any Zone RR whose
/// NAME, TYPE, RDATA and RDLENGTH are equal to the Update RR is deleted,
/// unless the NAME is the same as ZNAME and either the TYPE is SOA or
/// the TYPE is NS and the matching Zone RR is the only NS remaining in
/// the RRset, in which case this Update RR is ignored.
///
/// 3.4.2.5. Signal NOERROR to the requestor.
/// ```
///
/// # Arguments
///
/// * `update` - The `UpdateMessage` records will be extracted and used to perform the update
/// actions as specified in the above RFC.
///
/// # Return value
///
/// true if any of additions, updates or deletes were made to the zone, false otherwise. Err is
/// returned in the case of bad data, etc.
#[cfg(feature = "dnssec")]
fn update(&mut self, update: &MessageRequest) -> UpdateResult<bool> {
// the spec says to authorize after prereqs, seems better to auth first.
self.authorize(update)?;
self.verify_prerequisites(update.prerequisites())?;
self.pre_scan(update.updates())?;
self.update_records(update.updates(), true)
}
/// Always fail when DNSSEC is disabled.
#[cfg(not(feature = "dnssec"))]
fn update(&mut self, _update: &MessageRequest) -> UpdateResult<bool> {
Err(ResponseCode::NotImp)
}
/// Get the origin of this zone, i.e. example.com is the origin for www.example.com
fn origin(&self) -> &LowerName {
self.in_memory.origin()
}
/// Looks up all Resource Records matching the giving `Name` and `RecordType`.
///
/// # Arguments
///
/// * `name` - The `Name`, label, to lookup.
/// * `rtype` - The `RecordType`, to lookup. `RecordType::ANY` will return all records matching
/// `name`. `RecordType::AXFR` will return all record types except `RecordType::SOA`
/// due to the requirements that on zone transfers the `RecordType::SOA` must both
/// precede and follow all other records.
/// * `is_secure` - If the DO bit is set on the EDNS OPT record, then return RRSIGs as well.
///
/// # Return value
///
/// None if there are no matching records, otherwise a `Vec` containing the found records.
fn lookup(<|fim▁hole|> rtype: RecordType,
is_secure: bool,
supported_algorithms: SupportedAlgorithms,
) -> Pin<Box<dyn Future<Output = Result<Self::Lookup, LookupError>> + Send>> {
self.in_memory
.lookup(name, rtype, is_secure, supported_algorithms)
}
fn search(
&self,
query: &LowerQuery,
is_secure: bool,
supported_algorithms: SupportedAlgorithms,
) -> Pin<Box<dyn Future<Output = Result<Self::Lookup, LookupError>> + Send>> {
self.in_memory
.search(query, is_secure, supported_algorithms)
}
/// Return the NSEC records based on the given name
///
/// # Arguments
///
/// * `name` - given this name (i.e. the lookup name), return the NSEC record that is less than
/// this
/// * `is_secure` - if true then it will return RRSIG records as well
fn get_nsec_records(
&self,
name: &LowerName,
is_secure: bool,
supported_algorithms: SupportedAlgorithms,
) -> Pin<Box<dyn Future<Output = Result<Self::Lookup, LookupError>> + Send>> {
self.in_memory
.get_nsec_records(name, is_secure, supported_algorithms)
}
fn add_update_auth_key(&mut self, name: Name, key: KEY) -> DnsSecResult<()> {
self.in_memory.add_update_auth_key(name, key)
}
/// By adding a secure key, this will implicitly enable dnssec for the zone.
///
/// # Arguments
///
/// * `signer` - Signer with associated private key
fn add_zone_signing_key(&mut self, signer: Signer) -> DnsSecResult<()> {
self.in_memory.add_zone_signing_key(signer)
}
/// (Re)generates the nsec records, increments the serial number and signs the zone
fn secure_zone(&mut self) -> DnsSecResult<()> {
Authority::secure_zone(&mut self.in_memory)
}
}<|fim▁end|> | &self,
name: &LowerName, |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(macro_rules)]
#![crate_id = "num#0.11-pre"]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![license = "MIT/ASL2"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://static.rust-lang.org/doc/master")]
#![deny(deprecated_owned_vector)]
extern crate rand;
pub mod bigint;
pub mod rational;
pub mod complex;
pub trait Integer: Num + Ord
+ Div<Self, Self>
+ Rem<Self, Self> {
/// Simultaneous truncated integer division and modulus
#[inline]
fn div_rem(&self, other: &Self) -> (Self, Self) {
(*self / *other, *self % *other)
}
/// Floored integer division
///
/// # Examples
///
/// ~~~
/// # use num::Integer;
/// assert!(( 8i).div_floor(& 3) == 2);
/// assert!(( 8i).div_floor(&-3) == -3);
/// assert!((-8i).div_floor(& 3) == -3);
/// assert!((-8i).div_floor(&-3) == 2);
///
/// assert!(( 1i).div_floor(& 2) == 0);
/// assert!(( 1i).div_floor(&-2) == -1);
/// assert!((-1i).div_floor(& 2) == -1);
/// assert!((-1i).div_floor(&-2) == 0);
/// ~~~
fn div_floor(&self, other: &Self) -> Self;
/// Floored integer modulo, satisfying:
///
/// ~~~
/// # use num::Integer;
/// # let n = 1i; let d = 1i;
/// assert!(n.div_floor(&d) * d + n.mod_floor(&d) == n)
/// ~~~
///
/// # Examples
///
/// ~~~
/// # use num::Integer;
/// assert!(( 8i).mod_floor(& 3) == 2);
/// assert!(( 8i).mod_floor(&-3) == -1);
/// assert!((-8i).mod_floor(& 3) == 1);
/// assert!((-8i).mod_floor(&-3) == -2);
///
/// assert!(( 1i).mod_floor(& 2) == 1);
/// assert!(( 1i).mod_floor(&-2) == -1);
/// assert!((-1i).mod_floor(& 2) == 1);
/// assert!((-1i).mod_floor(&-2) == -1);
/// ~~~
fn mod_floor(&self, other: &Self) -> Self;
/// Simultaneous floored integer division and modulus
fn div_mod_floor(&self, other: &Self) -> (Self, Self) {
(self.div_floor(other), self.mod_floor(other))
}
/// Greatest Common Divisor (GCD)
fn gcd(&self, other: &Self) -> Self;
/// Lowest Common Multiple (LCM)
fn lcm(&self, other: &Self) -> Self;
/// Returns `true` if `other` divides evenly into `self`
fn divides(&self, other: &Self) -> bool;
/// Returns `true` if the number is even
fn is_even(&self) -> bool;
/// Returns `true` if the number is odd
fn is_odd(&self) -> bool;
}
/// Simultaneous integer division and modulus
#[inline] pub fn div_rem<T: Integer>(x: T, y: T) -> (T, T) { x.div_rem(&y) }
/// Floored integer division
#[inline] pub fn div_floor<T: Integer>(x: T, y: T) -> T { x.div_floor(&y) }
/// Floored integer modulus
#[inline] pub fn mod_floor<T: Integer>(x: T, y: T) -> T { x.mod_floor(&y) }
/// Simultaneous floored integer division and modulus
#[inline] pub fn div_mod_floor<T: Integer>(x: T, y: T) -> (T, T) { x.div_mod_floor(&y) }
/// Calculates the Greatest Common Divisor (GCD) of the number and `other`. The
/// result is always positive.
#[inline(always)] pub fn gcd<T: Integer>(x: T, y: T) -> T { x.gcd(&y) }
/// Calculates the Lowest Common Multiple (LCM) of the number and `other`.
#[inline(always)] pub fn lcm<T: Integer>(x: T, y: T) -> T { x.lcm(&y) }
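/// Editor's illustration (not part of the upstream crate): floored division
/// and modulus keep `d * q + r == n` while rounding the quotient toward
/// negative infinity, unlike the truncating `/` and `%` operators.
#[test]
fn floored_vs_truncated_illustration() {
    assert_eq!(div_rem(-8i, 3), (-2, -2));      // truncated: rounds toward zero
    assert_eq!(div_mod_floor(-8i, 3), (-3, 1)); // floored: rounds toward -inf
}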
macro_rules! impl_integer_for_int {
($T:ty, $test_mod:ident) => (
impl Integer for $T {
/// Floored integer division
#[inline]
fn div_floor(&self, other: &$T) -> $T {
// Algorithm from [Daan Leijen. _Division and Modulus for Computer Scientists_,
// December 2001](http://research.microsoft.com/pubs/151917/divmodnote-letter.pdf)
match self.div_rem(other) {
(d, r) if (r > 0 && *other < 0)
|| (r < 0 && *other > 0) => d - 1,
(d, _) => d,
}
}
/// Floored integer modulo
#[inline]
fn mod_floor(&self, other: &$T) -> $T {
// Algorithm from [Daan Leijen. _Division and Modulus for Computer Scientists_,
// December 2001](http://research.microsoft.com/pubs/151917/divmodnote-letter.pdf)
match *self % *other {
r if (r > 0 && *other < 0)
|| (r < 0 && *other > 0) => r + *other,
r => r,
}
}
/// Calculates `div_floor` and `mod_floor` simultaneously
#[inline]
fn div_mod_floor(&self, other: &$T) -> ($T,$T) {
// Algorithm from [Daan Leijen. _Division and Modulus for Computer Scientists_,
// December 2001](http://research.microsoft.com/pubs/151917/divmodnote-letter.pdf)
match self.div_rem(other) {
(d, r) if (r > 0 && *other < 0)
|| (r < 0 && *other > 0) => (d - 1, r + *other),
(d, r) => (d, r),
}
}
/// Calculates the Greatest Common Divisor (GCD) of the number and
/// `other`. The result is always positive.
#[inline]
fn gcd(&self, other: &$T) -> $T {
// Use Euclid's algorithm
let mut m = *self;
let mut n = *other;
while m != 0 {
let temp = m;
m = n % temp;
n = temp;
}
n.abs()
}
/// Calculates the Lowest Common Multiple (LCM) of the number and
/// `other`.
#[inline]
fn lcm(&self, other: &$T) -> $T {
// should not have to recalculate abs
((*self * *other) / self.gcd(other)).abs()
}
/// Returns `true` if the number can be divided by `other` without
/// leaving a remainder
#[inline]
fn divides(&self, other: &$T) -> bool { *self % *other == 0 }
/// Returns `true` if the number is divisible by `2`
#[inline]
fn is_even(&self) -> bool { self & 1 == 0 }
/// Returns `true` if the number is not divisible by `2`
#[inline]
fn is_odd(&self) -> bool { !self.is_even() }
}
#[cfg(test)]
mod $test_mod {
use Integer;
/// Checks that the division rule holds for:
///
/// - `n`: numerator (dividend)
/// - `d`: denominator (divisor)
/// - `qr`: quotient and remainder
#[cfg(test)]
fn test_division_rule((n,d): ($T,$T), (q,r): ($T,$T)) {
assert_eq!(d * q + r, n);
}
#[test]
fn test_div_rem() {
fn test_nd_dr(nd: ($T,$T), qr: ($T,$T)) {
let (n,d) = nd;
let separate_div_rem = (n / d, n % d);
let combined_div_rem = n.div_rem(&d);
assert_eq!(separate_div_rem, qr);
assert_eq!(combined_div_rem, qr);
test_division_rule(nd, separate_div_rem);
test_division_rule(nd, combined_div_rem);
}
test_nd_dr(( 8, 3), ( 2, 2));
test_nd_dr(( 8, -3), (-2, 2));
test_nd_dr((-8, 3), (-2, -2));
test_nd_dr((-8, -3), ( 2, -2));
test_nd_dr(( 1, 2), ( 0, 1));
test_nd_dr(( 1, -2), ( 0, 1));
test_nd_dr((-1, 2), ( 0, -1));
test_nd_dr((-1, -2), ( 0, -1));
}
#[test]
fn test_div_mod_floor() {
fn test_nd_dm(nd: ($T,$T), dm: ($T,$T)) {
let (n,d) = nd;
let separate_div_mod_floor = (n.div_floor(&d), n.mod_floor(&d));
let combined_div_mod_floor = n.div_mod_floor(&d);
assert_eq!(separate_div_mod_floor, dm);
assert_eq!(combined_div_mod_floor, dm);
test_division_rule(nd, separate_div_mod_floor);
test_division_rule(nd, combined_div_mod_floor);
}
test_nd_dm(( 8, 3), ( 2, 2));
test_nd_dm(( 8, -3), (-3, -1));
test_nd_dm((-8, 3), (-3, 1));
test_nd_dm((-8, -3), ( 2, -2));
test_nd_dm(( 1, 2), ( 0, 1));
test_nd_dm(( 1, -2), (-1, -1));
test_nd_dm((-1, 2), (-1, 1));
test_nd_dm((-1, -2), ( 0, -1));
}
#[test]
fn test_gcd() {
assert_eq!((10 as $T).gcd(&2), 2 as $T);
assert_eq!((10 as $T).gcd(&3), 1 as $T);
assert_eq!((0 as $T).gcd(&3), 3 as $T);
assert_eq!((3 as $T).gcd(&3), 3 as $T);
assert_eq!((56 as $T).gcd(&42), 14 as $T);
assert_eq!((3 as $T).gcd(&-3), 3 as $T);
assert_eq!((-6 as $T).gcd(&3), 3 as $T);
assert_eq!((-4 as $T).gcd(&-2), 2 as $T);
}
#[test]
fn test_lcm() {
assert_eq!((1 as $T).lcm(&0), 0 as $T);
assert_eq!((0 as $T).lcm(&1), 0 as $T);
assert_eq!((1 as $T).lcm(&1), 1 as $T);
assert_eq!((-1 as $T).lcm(&1), 1 as $T);
assert_eq!((1 as $T).lcm(&-1), 1 as $T);
assert_eq!((-1 as $T).lcm(&-1), 1 as $T);
assert_eq!((8 as $T).lcm(&9), 72 as $T);
assert_eq!((11 as $T).lcm(&5), 55 as $T);
}
#[test]
fn test_even() {
assert_eq!((-4 as $T).is_even(), true);
assert_eq!((-3 as $T).is_even(), false);
assert_eq!((-2 as $T).is_even(), true);
assert_eq!((-1 as $T).is_even(), false);
assert_eq!((0 as $T).is_even(), true);
assert_eq!((1 as $T).is_even(), false);
assert_eq!((2 as $T).is_even(), true);
assert_eq!((3 as $T).is_even(), false);
assert_eq!((4 as $T).is_even(), true);
}
#[test]
fn test_odd() {
assert_eq!((-4 as $T).is_odd(), false);
assert_eq!((-3 as $T).is_odd(), true);
assert_eq!((-2 as $T).is_odd(), false);
assert_eq!((-1 as $T).is_odd(), true);
assert_eq!((0 as $T).is_odd(), false);
assert_eq!((1 as $T).is_odd(), true);
assert_eq!((2 as $T).is_odd(), false);
assert_eq!((3 as $T).is_odd(), true);
assert_eq!((4 as $T).is_odd(), false);
}
}
)
}
impl_integer_for_int!(i8, test_integer_i8)
impl_integer_for_int!(i16, test_integer_i16)
impl_integer_for_int!(i32, test_integer_i32)
impl_integer_for_int!(i64, test_integer_i64)
impl_integer_for_int!(int, test_integer_int)<|fim▁hole|> ($T:ty, $test_mod:ident) => (
impl Integer for $T {
/// Unsigned integer division. Returns the same result as `div` (`/`).
#[inline]
fn div_floor(&self, other: &$T) -> $T { *self / *other }
/// Unsigned integer modulo operation. Returns the same result as `rem` (`%`).
#[inline]
fn mod_floor(&self, other: &$T) -> $T { *self % *other }
/// Calculates the Greatest Common Divisor (GCD) of the number and `other`
#[inline]
fn gcd(&self, other: &$T) -> $T {
// Use Euclid's algorithm
let mut m = *self;
let mut n = *other;
while m != 0 {
let temp = m;
m = n % temp;
n = temp;
}
n
}
/// Calculates the Lowest Common Multiple (LCM) of the number and `other`
#[inline]
fn lcm(&self, other: &$T) -> $T {
(*self * *other) / self.gcd(other)
}
/// Returns `true` if the number can be divided by `other` without leaving a remainder
#[inline]
fn divides(&self, other: &$T) -> bool { *self % *other == 0 }
/// Returns `true` if the number is divisible by `2`
#[inline]
fn is_even(&self) -> bool { self & 1 == 0 }
/// Returns `true` if the number is not divisible by `2`
#[inline]
fn is_odd(&self) -> bool { !self.is_even() }
}
#[cfg(test)]
mod $test_mod {
use Integer;
#[test]
fn test_div_mod_floor() {
assert_eq!((10 as $T).div_floor(&(3 as $T)), 3 as $T);
assert_eq!((10 as $T).mod_floor(&(3 as $T)), 1 as $T);
assert_eq!((10 as $T).div_mod_floor(&(3 as $T)), (3 as $T, 1 as $T));
assert_eq!((5 as $T).div_floor(&(5 as $T)), 1 as $T);
assert_eq!((5 as $T).mod_floor(&(5 as $T)), 0 as $T);
assert_eq!((5 as $T).div_mod_floor(&(5 as $T)), (1 as $T, 0 as $T));
assert_eq!((3 as $T).div_floor(&(7 as $T)), 0 as $T);
assert_eq!((3 as $T).mod_floor(&(7 as $T)), 3 as $T);
assert_eq!((3 as $T).div_mod_floor(&(7 as $T)), (0 as $T, 3 as $T));
}
#[test]
fn test_gcd() {
assert_eq!((10 as $T).gcd(&2), 2 as $T);
assert_eq!((10 as $T).gcd(&3), 1 as $T);
assert_eq!((0 as $T).gcd(&3), 3 as $T);
assert_eq!((3 as $T).gcd(&3), 3 as $T);
assert_eq!((56 as $T).gcd(&42), 14 as $T);
}
#[test]
fn test_lcm() {
assert_eq!((1 as $T).lcm(&0), 0 as $T);
assert_eq!((0 as $T).lcm(&1), 0 as $T);
assert_eq!((1 as $T).lcm(&1), 1 as $T);
assert_eq!((8 as $T).lcm(&9), 72 as $T);
assert_eq!((11 as $T).lcm(&5), 55 as $T);
assert_eq!((99 as $T).lcm(&17), 1683 as $T);
}
#[test]
fn test_divides() {
assert!((6 as $T).divides(&(6 as $T)));
assert!((6 as $T).divides(&(3 as $T)));
assert!((6 as $T).divides(&(1 as $T)));
}
#[test]
fn test_even() {
assert_eq!((0 as $T).is_even(), true);
assert_eq!((1 as $T).is_even(), false);
assert_eq!((2 as $T).is_even(), true);
assert_eq!((3 as $T).is_even(), false);
assert_eq!((4 as $T).is_even(), true);
}
#[test]
fn test_odd() {
assert_eq!((0 as $T).is_odd(), false);
assert_eq!((1 as $T).is_odd(), true);
assert_eq!((2 as $T).is_odd(), false);
assert_eq!((3 as $T).is_odd(), true);
assert_eq!((4 as $T).is_odd(), false);
}
}
)
}
impl_integer_for_uint!(u8, test_integer_u8)
impl_integer_for_uint!(u16, test_integer_u16)
impl_integer_for_uint!(u32, test_integer_u32)
impl_integer_for_uint!(u64, test_integer_u64)
impl_integer_for_uint!(uint, test_integer_uint)<|fim▁end|> |
macro_rules! impl_integer_for_uint { |
<|file_name|>test_models.py<|end_file_name|><|fim▁begin|>"""
Tests for the CourseMode model and its helpers in course_modes.
"""
import itertools
from datetime import timedelta
import ddt
from django.core.exceptions import ValidationError
from django.test import TestCase, override_settings
from django.utils.timezone import now
from mock import patch
from opaque_keys.edx.locator import CourseLocator
from course_modes.helpers import enrollment_mode_display
from course_modes.models import CourseMode, Mode, invalidate_course_mode_cache, get_cosmetic_display_price
from course_modes.tests.factories import CourseModeFactory
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import (
ModuleStoreTestCase,
)
@ddt.ddt
class CourseModeModelTest(TestCase):
"""
Tests for the CourseMode model
"""
NOW = 'now'
DATES = {
NOW: now(),
None: None,
}
def setUp(self):
super(CourseModeModelTest, self).setUp()
self.course_key = CourseLocator('Test', 'TestCourse', 'TestCourseRun')
CourseMode.objects.all().delete()
def tearDown(self):
invalidate_course_mode_cache(sender=None)
def create_mode(
self,
mode_slug,
mode_name,
min_price=0,
suggested_prices='',
currency='usd',
expiration_datetime=None,
):
"""
Create a new course mode
"""
return CourseMode.objects.get_or_create(
course_id=self.course_key,
mode_display_name=mode_name,
mode_slug=mode_slug,
min_price=min_price,
suggested_prices=suggested_prices,
currency=currency,
_expiration_datetime=expiration_datetime,
)
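    # Editor's sketch (illustrative helper, not used by the tests below): a
    # typical create_mode call building a paid verified mode that expires
    # tomorrow; everything past the slug and display name is optional.
    def _example_verified_mode(self):
        return self.create_mode(
            'verified',
            'Verified Certificate',
            min_price=10,
            expiration_datetime=now() + timedelta(days=1),
        )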
def test_save(self):
""" Verify currency is always lowercase. """
cm, __ = self.create_mode('honor', 'honor', 0, '', 'USD')
self.assertEqual(cm.currency, 'usd')
cm.currency = 'GHS'
cm.save()
self.assertEqual(cm.currency, 'ghs')
def test_modes_for_course_empty(self):
"""
If we can't find any modes, we should get back the default mode
"""
# shouldn't be able to find a corresponding course
modes = CourseMode.modes_for_course(self.course_key)
self.assertEqual([CourseMode.get_default_course_mode()], modes)
    def test_modes_for_course_single(self):
"""
Find the modes for a course with only one mode
"""
self.create_mode('verified', 'Verified Certificate', 10)
modes = CourseMode.modes_for_course(self.course_key)
mode = Mode(u'verified', u'Verified Certificate', 10, '', 'usd', None, None, None, None)
self.assertEqual([mode], modes)
modes_dict = CourseMode.modes_for_course_dict(self.course_key)
self.assertEqual(modes_dict['verified'], mode)
self.assertEqual(CourseMode.mode_for_course(self.course_key, 'verified'),
mode)
def test_modes_for_course_multiple(self):
"""
        Find the modes when there are multiple modes
"""
mode1 = Mode(u'honor', u'Honor Code Certificate', 0, '', 'usd', None, None, None, None)
mode2 = Mode(u'verified', u'Verified Certificate', 10, '', 'usd', None, None, None, None)
set_modes = [mode1, mode2]
for mode in set_modes:
self.create_mode(mode.slug, mode.name, mode.min_price, mode.suggested_prices)
modes = CourseMode.modes_for_course(self.course_key)
self.assertEqual(modes, set_modes)
self.assertEqual(mode1, CourseMode.mode_for_course(self.course_key, u'honor'))
self.assertEqual(mode2, CourseMode.mode_for_course(self.course_key, u'verified'))
self.assertIsNone(CourseMode.mode_for_course(self.course_key, 'DNE'))
def test_min_course_price_for_currency(self):
"""
Get the min course price for a course according to currency
"""
# no modes, should get 0
self.assertEqual(0, CourseMode.min_course_price_for_currency(self.course_key, 'usd'))
# create some modes
mode1 = Mode(u'honor', u'Honor Code Certificate', 10, '', 'usd', None, None, None, None)
mode2 = Mode(u'verified', u'Verified Certificate', 20, '', 'usd', None, None, None, None)
mode3 = Mode(u'honor', u'Honor Code Certificate', 80, '', 'cny', None, None, None, None)
set_modes = [mode1, mode2, mode3]
for mode in set_modes:
self.create_mode(mode.slug, mode.name, mode.min_price, mode.suggested_prices, mode.currency)
self.assertEqual(10, CourseMode.min_course_price_for_currency(self.course_key, 'usd'))
self.assertEqual(80, CourseMode.min_course_price_for_currency(self.course_key, 'cny'))
def test_modes_for_course_expired(self):
expired_mode, _status = self.create_mode('verified', 'Verified Certificate', 10)
expired_mode.expiration_datetime = now() + timedelta(days=-1)
expired_mode.save()
modes = CourseMode.modes_for_course(self.course_key)
self.assertEqual([CourseMode.DEFAULT_MODE], modes)
mode1 = Mode(u'honor', u'Honor Code Certificate', 0, '', 'usd', None, None, None, None)
self.create_mode(mode1.slug, mode1.name, mode1.min_price, mode1.suggested_prices)
modes = CourseMode.modes_for_course(self.course_key)
self.assertEqual([mode1], modes)
expiration_datetime = now() + timedelta(days=1)
expired_mode.expiration_datetime = expiration_datetime
expired_mode.save()
expired_mode_value = Mode(
u'verified',
u'Verified Certificate',
10,
'',
'usd',
expiration_datetime,
None,
None,
None
)
modes = CourseMode.modes_for_course(self.course_key)
self.assertEqual([expired_mode_value, mode1], modes)
<|fim▁hole|> self.assertEqual([CourseMode.DEFAULT_MODE], modes)
def test_verified_mode_for_course(self):
self.create_mode('verified', 'Verified Certificate', 10)
mode = CourseMode.verified_mode_for_course(self.course_key)
self.assertEqual(mode.slug, 'verified')
# verify that the professional mode is preferred
self.create_mode('professional', 'Professional Education Verified Certificate', 10)
mode = CourseMode.verified_mode_for_course(self.course_key)
self.assertEqual(mode.slug, 'professional')
def test_course_has_payment_options(self):
# Has no payment options.
honor, _ = self.create_mode('honor', 'Honor')
self.assertFalse(CourseMode.has_payment_options(self.course_key))
# Now we do have a payment option.
verified, _ = self.create_mode('verified', 'Verified', min_price=5)
self.assertTrue(CourseMode.has_payment_options(self.course_key))
# Remove the verified option.
verified.delete()
self.assertFalse(CourseMode.has_payment_options(self.course_key))
# Finally, give the honor mode payment options
honor.suggested_prices = '5, 10, 15'
honor.save()
self.assertTrue(CourseMode.has_payment_options(self.course_key))
def test_course_has_payment_options_with_no_id_professional(self):
# Has payment options.
self.create_mode('no-id-professional', 'no-id-professional', min_price=5)
self.assertTrue(CourseMode.has_payment_options(self.course_key))
@ddt.data(
([], True),
([("honor", 0), ("audit", 0), ("verified", 100)], True),
([("honor", 100)], False),
([("professional", 100)], False),
([("no-id-professional", 100)], False),
)
@ddt.unpack
def test_can_auto_enroll(self, modes_and_prices, can_auto_enroll):
# Create the modes and min prices
for mode_slug, min_price in modes_and_prices:
self.create_mode(mode_slug, mode_slug.capitalize(), min_price=min_price)
# Verify that we can or cannot auto enroll
self.assertEqual(CourseMode.can_auto_enroll(self.course_key), can_auto_enroll)
@ddt.data(
([], None),
(["honor", "audit", "verified"], "honor"),
(["honor", "audit"], "honor"),
(["audit", "verified"], "audit"),
(["professional"], None),
(["no-id-professional"], None),
(["credit", "audit", "verified"], "audit"),
(["credit"], None),
)
@ddt.unpack
def test_auto_enroll_mode(self, modes, result):
# Verify that the proper auto enroll mode is returned
self.assertEqual(CourseMode.auto_enroll_mode(self.course_key, modes), result)
def test_all_modes_for_courses(self):
now_dt = now()
future = now_dt + timedelta(days=1)
past = now_dt - timedelta(days=1)
# Unexpired, no expiration date
CourseModeFactory.create(
course_id=self.course_key,
mode_display_name="Honor No Expiration",
mode_slug="honor_no_expiration",
expiration_datetime=None
)
# Unexpired, expiration date in future
CourseModeFactory.create(
course_id=self.course_key,
mode_display_name="Honor Not Expired",
mode_slug="honor_not_expired",
expiration_datetime=future
)
# Expired
CourseModeFactory.create(
course_id=self.course_key,
mode_display_name="Verified Expired",
mode_slug="verified_expired",
expiration_datetime=past
)
# We should get all of these back when querying for *all* course modes,
# including ones that have expired.
other_course_key = CourseLocator(org="not", course="a", run="course")
all_modes = CourseMode.all_modes_for_courses([self.course_key, other_course_key])
self.assertEqual(len(all_modes[self.course_key]), 3)
self.assertEqual(all_modes[self.course_key][0].name, "Honor No Expiration")
self.assertEqual(all_modes[self.course_key][1].name, "Honor Not Expired")
self.assertEqual(all_modes[self.course_key][2].name, "Verified Expired")
# Check that we get a default mode for when no course mode is available
self.assertEqual(len(all_modes[other_course_key]), 1)
self.assertEqual(all_modes[other_course_key][0], CourseMode.get_default_course_mode())
@ddt.data('', 'no-id-professional', 'professional', 'verified')
def test_course_has_professional_mode(self, mode):
# check the professional mode.
self.create_mode(mode, 'course mode', 10)
modes_dict = CourseMode.modes_for_course_dict(self.course_key)
if mode in ['professional', 'no-id-professional']:
self.assertTrue(CourseMode.has_professional_mode(modes_dict))
else:
self.assertFalse(CourseMode.has_professional_mode(modes_dict))
@ddt.data('no-id-professional', 'professional', 'verified')
def test_course_is_professional_mode(self, mode):
# check that tuple has professional mode
course_mode, __ = self.create_mode(mode, 'course mode', 10)
if mode in ['professional', 'no-id-professional']:
self.assertTrue(CourseMode.is_professional_mode(course_mode.to_tuple()))
else:
self.assertFalse(CourseMode.is_professional_mode(course_mode.to_tuple()))
def test_course_is_professional_mode_with_invalid_tuple(self):
# check that tuple has professional mode with None
self.assertFalse(CourseMode.is_professional_mode(None))
@ddt.data(
('no-id-professional', False),
('professional', True),
('verified', True),
('honor', False),
('audit', False)
)
@ddt.unpack
def test_is_verified_slug(self, mode_slug, is_verified):
# check that mode slug is verified or not
if is_verified:
self.assertTrue(CourseMode.is_verified_slug(mode_slug))
else:
self.assertFalse(CourseMode.is_verified_slug(mode_slug))
@ddt.data(*itertools.product(
(
CourseMode.HONOR,
CourseMode.AUDIT,
CourseMode.VERIFIED,
CourseMode.PROFESSIONAL,
CourseMode.NO_ID_PROFESSIONAL_MODE
),
(NOW, None),
))
@ddt.unpack
def test_invalid_mode_expiration(self, mode_slug, exp_dt_name):
exp_dt = self.DATES[exp_dt_name]
is_error_expected = CourseMode.is_professional_slug(mode_slug) and exp_dt is not None
try:
self.create_mode(mode_slug=mode_slug, mode_name=mode_slug.title(), expiration_datetime=exp_dt, min_price=10)
self.assertFalse(is_error_expected, "Expected a ValidationError to be thrown.")
except ValidationError as exc:
self.assertTrue(is_error_expected, "Did not expect a ValidationError to be thrown.")
self.assertEqual(
exc.messages,
[u"Professional education modes are not allowed to have expiration_datetime set."],
)
@ddt.data(
("verified", "verify_need_to_verify"),
("verified", "verify_submitted"),
("verified", "verify_approved"),
("verified", 'dummy'),
("verified", None),
('honor', None),
('honor', 'dummy'),
('audit', None),
('professional', None),
('no-id-professional', None),
('no-id-professional', 'dummy')
)
@ddt.unpack
def test_enrollment_mode_display(self, mode, verification_status):
if mode == "verified":
self.assertEqual(
enrollment_mode_display(mode, verification_status, self.course_key),
self._enrollment_display_modes_dicts(verification_status)
)
self.assertEqual(
enrollment_mode_display(mode, verification_status, self.course_key),
self._enrollment_display_modes_dicts(verification_status)
)
self.assertEqual(
enrollment_mode_display(mode, verification_status, self.course_key),
self._enrollment_display_modes_dicts(verification_status)
)
elif mode == "honor":
self.assertEqual(
enrollment_mode_display(mode, verification_status, self.course_key),
self._enrollment_display_modes_dicts(mode)
)
elif mode == "audit":
self.assertEqual(
enrollment_mode_display(mode, verification_status, self.course_key),
self._enrollment_display_modes_dicts(mode)
)
elif mode == "professional":
self.assertEqual(
enrollment_mode_display(mode, verification_status, self.course_key),
self._enrollment_display_modes_dicts(mode)
)
@ddt.data(
(['honor', 'verified', 'credit'], ['honor', 'verified']),
(['professional', 'credit'], ['professional']),
)
@ddt.unpack
def test_hide_credit_modes(self, available_modes, expected_selectable_modes):
# Create the course modes
for mode in available_modes:
CourseModeFactory.create(
course_id=self.course_key,
mode_display_name=mode,
mode_slug=mode,
)
# Check the selectable modes, which should exclude credit
selectable_modes = CourseMode.modes_for_course_dict(self.course_key)
self.assertItemsEqual(selectable_modes.keys(), expected_selectable_modes)
# When we get all unexpired modes, we should see credit as well
all_modes = CourseMode.modes_for_course_dict(self.course_key, only_selectable=False)
self.assertItemsEqual(all_modes.keys(), available_modes)
def _enrollment_display_modes_dicts(self, dict_type):
"""
Helper function to generate the enrollment display mode dict.
"""
dict_keys = ['enrollment_title', 'enrollment_value', 'show_image', 'image_alt', 'display_mode']
display_values = {
"verify_need_to_verify": ["Your verification is pending", "Verified: Pending Verification", True,
'ID verification pending', 'verified'],
"verify_approved": ["You're enrolled as a verified student", "Verified", True, 'ID Verified Ribbon/Badge',
'verified'],
"verify_none": ["", "", False, '', 'audit'],
"honor": ["You're enrolled as an honor code student", "Honor Code", False, '', 'honor'],
"audit": ["", "", False, '', 'audit'],
"professional": ["You're enrolled as a professional education student", "Professional Ed", False, '',
'professional']
}
if dict_type in ['verify_need_to_verify', 'verify_submitted']:
return dict(zip(dict_keys, display_values.get('verify_need_to_verify')))
elif dict_type is None or dict_type == 'dummy':
return dict(zip(dict_keys, display_values.get('verify_none')))
else:
return dict(zip(dict_keys, display_values.get(dict_type)))
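    # Editor's sketch (illustrative): for dict_type='honor' the helper above
    # returns, keyed by dict_keys:
    #   {'enrollment_title': "You're enrolled as an honor code student",
    #    'enrollment_value': 'Honor Code', 'show_image': False,
    #    'image_alt': '', 'display_mode': 'honor'}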
def test_expiration_datetime_explicitly_set(self):
""" Verify that setting the expiration_date property sets the explicit flag. """
verified_mode, __ = self.create_mode('verified', 'Verified Certificate', 10)
now_dt = now()
verified_mode.expiration_datetime = now_dt
self.assertTrue(verified_mode.expiration_datetime_is_explicit)
self.assertEqual(verified_mode.expiration_datetime, now_dt)
def test_expiration_datetime_not_explicitly_set(self):
""" Verify that setting the _expiration_date property does not set the explicit flag. """
verified_mode, __ = self.create_mode('verified', 'Verified Certificate', 10)
now_dt = now()
verified_mode._expiration_datetime = now_dt # pylint: disable=protected-access
self.assertFalse(verified_mode.expiration_datetime_is_explicit)
self.assertEqual(verified_mode.expiration_datetime, now_dt)
def test_expiration_datetime_explicitly_set_to_none(self):
""" Verify that setting the _expiration_date property does not set the explicit flag. """
verified_mode, __ = self.create_mode('verified', 'Verified Certificate', 10)
self.assertFalse(verified_mode.expiration_datetime_is_explicit)
verified_mode.expiration_datetime = None
self.assertFalse(verified_mode.expiration_datetime_is_explicit)
self.assertIsNone(verified_mode.expiration_datetime)
@ddt.data(
(CourseMode.AUDIT, False),
(CourseMode.HONOR, False),
(CourseMode.VERIFIED, True),
(CourseMode.CREDIT_MODE, True),
(CourseMode.PROFESSIONAL, True),
(CourseMode.NO_ID_PROFESSIONAL_MODE, True),
)
@ddt.unpack
def test_eligible_for_cert(self, mode_slug, expected_eligibility):
"""Verify that non-audit modes are eligible for a cert."""
self.assertEqual(CourseMode.is_eligible_for_certificate(mode_slug), expected_eligibility)
@ddt.data(
(CourseMode.AUDIT, False),
(CourseMode.HONOR, False),
(CourseMode.VERIFIED, True),
(CourseMode.CREDIT_MODE, False),
(CourseMode.PROFESSIONAL, True),
(CourseMode.NO_ID_PROFESSIONAL_MODE, False),
)
@ddt.unpack
def test_verified_min_price(self, mode_slug, is_error_expected):
"""Verify that verified modes have a price."""
try:
self.create_mode(mode_slug=mode_slug, mode_name=mode_slug.title(), min_price=0)
except ValidationError:
self.assertTrue(is_error_expected, "Did not expect a ValidationError to be thrown.")
else:
self.assertFalse(is_error_expected, "Expected a ValidationError to be thrown.")
class TestDisplayPrices(ModuleStoreTestCase):
@override_settings(PAID_COURSE_REGISTRATION_CURRENCY=["USD", "$"])
def test_get_cosmetic_display_price(self):
"""
Check that get_cosmetic_display_price() returns the correct price given its inputs.
"""
course = CourseFactory.create()
registration_price = 99
course.cosmetic_display_price = 10
with patch('course_modes.models.CourseMode.min_course_price_for_currency', return_value=registration_price):
# Since registration_price is set, it overrides the cosmetic_display_price and should be returned
self.assertEqual(get_cosmetic_display_price(course), "$99")
registration_price = 0
with patch('course_modes.models.CourseMode.min_course_price_for_currency', return_value=registration_price):
# Since registration_price is not set, cosmetic_display_price should be returned
self.assertEqual(get_cosmetic_display_price(course), "$10")
course.cosmetic_display_price = 0
# Since both prices are not set, there is no price, thus "Free"
self.assertEqual(get_cosmetic_display_price(course), "Free")<|fim▁end|> | modes = CourseMode.modes_for_course(CourseLocator('TestOrg', 'TestCourse', 'TestRun')) |
<|file_name|>targeting_spec.js<|end_file_name|><|fim▁begin|>import { expect } from 'chai';
import { targeting as targetingInstance, filters, getHighestCpmBidsFromBidPool, sortByDealAndPriceBucketOrCpm } from 'src/targeting.js';
import { config } from 'src/config.js';
import { createBidReceived } from 'test/fixtures/fixtures.js';
import CONSTANTS from 'src/constants.json';
import { auctionManager } from 'src/auctionManager.js';
import * as utils from 'src/utils.js';
import {deepClone} from 'src/utils.js';
const bid1 = {
'bidderCode': 'rubicon',
'width': '300',
'height': '250',
'statusMessage': 'Bid available',
'adId': '148018fe5e',
'cpm': 0.537234,
'ad': 'markup',
'ad_id': '3163950',
'sizeId': '15',
'requestTimestamp': 1454535718610,
'responseTimestamp': 1454535724863,
'timeToRespond': 123,
'pbLg': '0.50',
'pbMg': '0.50',
'pbHg': '0.53',
'adUnitCode': '/123456/header-bid-tag-0',
'bidder': 'rubicon',
'size': '300x250',
'adserverTargeting': {
'foobar': '300x250',
[CONSTANTS.TARGETING_KEYS.BIDDER]: 'rubicon',
[CONSTANTS.TARGETING_KEYS.AD_ID]: '148018fe5e',
[CONSTANTS.TARGETING_KEYS.PRICE_BUCKET]: '0.53',
[CONSTANTS.TARGETING_KEYS.DEAL]: '1234'
},
'dealId': '1234',
'netRevenue': true,
'currency': 'USD',
'ttl': 300
};
const bid2 = {
'bidderCode': 'rubicon',
'width': '300',
'height': '250',
'statusMessage': 'Bid available',
'adId': '5454545',
'cpm': 0.25,
'ad': 'markup',
'ad_id': '3163950',
'sizeId': '15',
'requestTimestamp': 1454535718610,
'responseTimestamp': 1454535724863,
'timeToRespond': 123,
'pbLg': '0.25',
'pbMg': '0.25',
'pbHg': '0.25',
'adUnitCode': '/123456/header-bid-tag-0',
'bidder': 'rubicon',
'size': '300x250',
'adserverTargeting': {
'foobar': '300x250',
[CONSTANTS.TARGETING_KEYS.BIDDER]: 'rubicon',
[CONSTANTS.TARGETING_KEYS.AD_ID]: '5454545',
[CONSTANTS.TARGETING_KEYS.PRICE_BUCKET]: '0.25'
},
'netRevenue': true,
'currency': 'USD',
'ttl': 300
};
const bid3 = {
'bidderCode': 'rubicon',
'width': '300',
'height': '600',
'statusMessage': 'Bid available',
'adId': '48747745',
'cpm': 0.75,
'ad': 'markup',
'ad_id': '3163950',
'sizeId': '15',
'requestTimestamp': 1454535718610,
'responseTimestamp': 1454535724863,
'timeToRespond': 123,
'pbLg': '0.75',
'pbMg': '0.75',
'pbHg': '0.75',
'adUnitCode': '/123456/header-bid-tag-1',
'bidder': 'rubicon',
'size': '300x600',
'adserverTargeting': {
'foobar': '300x600',
[CONSTANTS.TARGETING_KEYS.BIDDER]: 'rubicon',
[CONSTANTS.TARGETING_KEYS.AD_ID]: '48747745',
[CONSTANTS.TARGETING_KEYS.PRICE_BUCKET]: '0.75'
},
'netRevenue': true,
'currency': 'USD',
'ttl': 300
};
const nativeBid1 = {
'bidderCode': 'appnexus',
'width': 0,
'height': 0,
'statusMessage': 'Bid available',
'adId': '591e7c9354b633',
'requestId': '24aae81e32d6f6',
'mediaType': 'native',
'source': 'client',
'cpm': 10,
'creativeId': 97494403,
'currency': 'USD',
'netRevenue': true,
'ttl': 300,
'adUnitCode': '/19968336/prebid_native_example_1',
'appnexus': {
'buyerMemberId': 9325
},
'meta': {
'advertiserId': 2529885
},
'native': {
'title': 'This is a Prebid Native Creative',
'body': 'This is a Prebid Native Creative. There are many like it, but this one is mine.',
'sponsoredBy': 'Prebid.org',
'clickUrl': 'http://prebid.org/dev-docs/show-native-ads.html',
'clickTrackers': ['http://www.clickUrl.com/404'],
'impressionTrackers': ['http://imp.trackerUrl.com/it1'],
'javascriptTrackers': '<script>//js script here</script>',<|fim▁hole|> 'width': 3000
},
'icon': {
'url': 'http://vcdn.adnxs.com/p/creative-image/bd/59/a6/c6/bd59a6c6-0851-411d-a16d-031475a51312.png',
'height': 83,
'width': 127
}
},
'auctionId': '72138a4a-b747-4192-9192-dcc41d675de8',
'responseTimestamp': 1565785219461,
'requestTimestamp': 1565785219405,
'bidder': 'appnexus',
'timeToRespond': 56,
'pbLg': '5.00',
'pbMg': '10.00',
'pbHg': '10.00',
'pbAg': '10.00',
'pbDg': '10.00',
'pbCg': '',
'size': '0x0',
'adserverTargeting': {
[CONSTANTS.TARGETING_KEYS.BIDDER]: 'appnexus',
[CONSTANTS.TARGETING_KEYS.AD_ID]: '591e7c9354b633',
[CONSTANTS.TARGETING_KEYS.PRICE_BUCKET]: '10.00',
[CONSTANTS.TARGETING_KEYS.SIZE]: '0x0',
[CONSTANTS.TARGETING_KEYS.SOURCE]: 'client',
[CONSTANTS.TARGETING_KEYS.FORMAT]: 'native',
[CONSTANTS.NATIVE_KEYS.title]: 'This is a Prebid Native Creative',
[CONSTANTS.NATIVE_KEYS.body]: 'This is a Prebid Native Creative. There are many like it, but this one is mine.',
[CONSTANTS.NATIVE_KEYS.sponsoredBy]: 'Prebid.org',
[CONSTANTS.NATIVE_KEYS.clickUrl]: 'http://prebid.org/dev-docs/show-native-ads.html',
[CONSTANTS.NATIVE_KEYS.image]: 'http://vcdn.adnxs.com/p/creative-image/94/22/cd/0f/9422cd0f-f400-45d3-80f5-2b92629d9257.jpg',
[CONSTANTS.NATIVE_KEYS.icon]: 'http://vcdn.adnxs.com/p/creative-image/bd/59/a6/c6/bd59a6c6-0851-411d-a16d-031475a51312.png'
}
};
const nativeBid2 = {
'bidderCode': 'dgads',
'width': 0,
'height': 0,
'statusMessage': 'Bid available',
'adId': '6e0aba55ed54e5',
'requestId': '4de26ec83d9661',
'mediaType': 'native',
'source': 'client',
'cpm': 1.90909091,
'creativeId': 'xuidx6c901261b0x2b2',
'currency': 'JPY',
'netRevenue': true,
'ttl': 60,
'referrer': 'http://test.localhost:9999/integrationExamples/gpt/demo_native.html?pbjs_debug=true',
'native': {
'image': {
'url': 'https://ads-tr.bigmining.com/img/300x250.png',
'width': 300,
'height': 250
},
'title': 'Test Title',
'body': 'Test Description',
'sponsoredBy': 'test.com',
'clickUrl': 'http://prebid.org/',
'clickTrackers': ['https://www.clickUrl.com/404'],
'impressionTrackers': [
'http://imp.trackerUrl.com/it2'
]
},
'auctionId': '72138a4a-b747-4192-9192-dcc41d675de8',
'responseTimestamp': 1565785219607,
'requestTimestamp': 1565785219409,
'bidder': 'dgads',
'adUnitCode': '/19968336/prebid_native_example_1',
'timeToRespond': 198,
'pbLg': '1.50',
'pbMg': '1.90',
'pbHg': '1.90',
'pbAg': '1.90',
'pbDg': '1.90',
'pbCg': '',
'size': '0x0',
'adserverTargeting': {
[CONSTANTS.TARGETING_KEYS.BIDDER]: 'dgads',
[CONSTANTS.TARGETING_KEYS.AD_ID]: '6e0aba55ed54e5',
[CONSTANTS.TARGETING_KEYS.PRICE_BUCKET]: '1.90',
[CONSTANTS.TARGETING_KEYS.SIZE]: '0x0',
[CONSTANTS.TARGETING_KEYS.SOURCE]: 'client',
[CONSTANTS.TARGETING_KEYS.FORMAT]: 'native',
[CONSTANTS.NATIVE_KEYS.image]: 'https://ads-tr.bigmining.com/img/300x250.png',
[CONSTANTS.NATIVE_KEYS.title]: 'Test Title',
[CONSTANTS.NATIVE_KEYS.body]: 'Test Description',
[CONSTANTS.NATIVE_KEYS.sponsoredBy]: 'test.com',
[CONSTANTS.NATIVE_KEYS.clickUrl]: 'http://prebid.org/'
}
};
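// Editor's sketch (illustrative, not referenced by the tests below): after the
// targeting module runs, a winning bid like bid1 is flattened into GPT-style
// key/values for its ad unit, roughly as follows.
const exampleTargetingForBid1 = {
  hb_bidder: 'rubicon',
  hb_adid: '148018fe5e',
  hb_pb: '0.53',
  hb_deal: '1234',
  foobar: '300x250'
};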
describe('targeting tests', function () {
let sandbox;
let enableSendAllBids = false;
let useBidCache;
beforeEach(function() {
sandbox = sinon.sandbox.create();
useBidCache = true;
let origGetConfig = config.getConfig;
sandbox.stub(config, 'getConfig').callsFake(function (key) {
if (key === 'enableSendAllBids') {
return enableSendAllBids;
}
if (key === 'useBidCache') {
return useBidCache;
}
return origGetConfig.apply(config, arguments);
});
});
afterEach(function () {
sandbox.restore();
});
describe('getAllTargeting', function () {
let amBidsReceivedStub;
let amGetAdUnitsStub;
let bidExpiryStub;
let logWarnStub;
let logErrorStub;
let bidsReceived;
beforeEach(function () {
bidsReceived = [bid1, bid2, bid3];
amBidsReceivedStub = sandbox.stub(auctionManager, 'getBidsReceived').callsFake(function() {
return bidsReceived;
});
amGetAdUnitsStub = sandbox.stub(auctionManager, 'getAdUnitCodes').callsFake(function() {
return ['/123456/header-bid-tag-0'];
});
bidExpiryStub = sandbox.stub(filters, 'isBidNotExpired').returns(true);
logWarnStub = sinon.stub(utils, 'logWarn');
logErrorStub = sinon.stub(utils, 'logError');
});
afterEach(function() {
config.resetConfig();
logWarnStub.restore();
logErrorStub.restore();
amBidsReceivedStub.restore();
amGetAdUnitsStub.restore();
bidExpiryStub.restore();
});
describe('when handling different adunit targeting value types', function () {
const adUnitCode = '/123456/header-bid-tag-0';
const adServerTargeting = {};
let getAdUnitsStub;
before(function() {
getAdUnitsStub = sandbox.stub(auctionManager, 'getAdUnits').callsFake(function() {
return [
{
'code': adUnitCode,
[CONSTANTS.JSON_MAPPING.ADSERVER_TARGETING]: adServerTargeting
}
];
});
});
after(function() {
getAdUnitsStub.restore();
});
afterEach(function() {
delete adServerTargeting.test_type;
});
const pairs = [
['string', '2.3', '2.3'],
['number', 2.3, '2.3'],
['boolean', true, 'true'],
['string-separated', '2.3, 4.5', '2.3,4.5'],
['array-of-string', ['2.3', '4.5'], '2.3,4.5'],
['array-of-number', [2.3, 4.5], '2.3,4.5'],
['array-of-boolean', [true, false], 'true,false']
];
pairs.forEach(([type, value, result]) => {
it(`accepts ${type}`, function() {
adServerTargeting.test_type = value;
const targeting = targetingInstance.getAllTargeting([adUnitCode]);
expect(targeting[adUnitCode].test_type).is.equal(result);
});
});
});
describe('when hb_deal is present in bid.adserverTargeting', function () {
let bid4;
beforeEach(function() {
bid4 = utils.deepClone(bid1);
bid4.adserverTargeting['hb_bidder'] = bid4.bidder = bid4.bidderCode = 'appnexus';
bid4.cpm = 0;
enableSendAllBids = true;
bidsReceived.push(bid4);
});
it('returns targeting with both hb_deal and hb_deal_{bidder_code}', function () {
const targeting = targetingInstance.getAllTargeting(['/123456/header-bid-tag-0']);
// We should add both keys rather than one or the other
expect(targeting['/123456/header-bid-tag-0']).to.contain.keys('hb_deal', `hb_deal_${bid1.bidderCode}`, `hb_deal_${bid4.bidderCode}`);
// We should assign both keys the same value
expect(targeting['/123456/header-bid-tag-0']['hb_deal']).to.deep.equal(targeting['/123456/header-bid-tag-0'][`hb_deal_${bid1.bidderCode}`]);
});
});
it('will enforce a limit on the number of auction keys when auctionKeyMaxChars setting is active', function () {
config.setConfig({
targetingControls: {
auctionKeyMaxChars: 150
}
});
const targeting = targetingInstance.getAllTargeting(['/123456/header-bid-tag-0', '/123456/header-bid-tag-1']);
expect(targeting['/123456/header-bid-tag-1']).to.deep.equal({});
expect(targeting['/123456/header-bid-tag-0']).to.contain.keys('hb_pb', 'hb_adid', 'hb_bidder', 'hb_deal');
expect(targeting['/123456/header-bid-tag-0']['hb_adid']).to.equal(bid1.adId);
expect(logWarnStub.calledOnce).to.be.true;
});
it('will return an error when auctionKeyMaxChars setting is set too low for any auction keys to be allowed', function () {
config.setConfig({
targetingControls: {
auctionKeyMaxChars: 50
}
});
const targeting = targetingInstance.getAllTargeting(['/123456/header-bid-tag-0', '/123456/header-bid-tag-1']);
expect(targeting['/123456/header-bid-tag-1']).to.deep.equal({});
expect(targeting['/123456/header-bid-tag-0']).to.deep.equal({});
expect(logWarnStub.calledTwice).to.be.true;
expect(logErrorStub.calledOnce).to.be.true;
});
describe('when bidLimit is present in setConfig', function () {
let bid4;
beforeEach(function() {
bid4 = utils.deepClone(bid1);
bid4.adserverTargeting['hb_bidder'] = bid4.bidder = bid4.bidderCode = 'appnexus';
bid4.cpm = 2.25;
bid4.adId = '8383838';
enableSendAllBids = true;
bidsReceived.push(bid4);
});
it('when sendBidsControl.bidLimit is set greater than 0 in getHighestCpmBidsFromBidPool', function () {
config.setConfig({
sendBidsControl: {
bidLimit: 2,
dealPrioritization: true
}
});
const bids = getHighestCpmBidsFromBidPool(bidsReceived, utils.getHighestCpm, 2);
expect(bids.length).to.equal(3);
expect(bids[0].adId).to.equal('8383838');
expect(bids[1].adId).to.equal('148018fe5e');
expect(bids[2].adId).to.equal('48747745');
});
      it('when sendBidsControl.bidLimit is set greater than 0 and deal prioritization is false in getHighestCpmBidsFromBidPool', function () {
config.setConfig({
sendBidsControl: {
bidLimit: 2,
dealPrioritization: false
}
});
const bids = getHighestCpmBidsFromBidPool(bidsReceived, utils.getHighestCpm, 2);
expect(bids.length).to.equal(3);
expect(bids[0].adId).to.equal('8383838');
expect(bids[1].adId).to.equal('148018fe5e');
expect(bids[2].adId).to.equal('48747745');
});
it('selects the top n number of bids when enableSendAllBids is true and bidLimit is set', function () {
config.setConfig({
sendBidsControl: {
bidLimit: 1
}
});
const targeting = targetingInstance.getAllTargeting(['/123456/header-bid-tag-0']);
let limitedBids = Object.keys(targeting['/123456/header-bid-tag-0']).filter(key => key.indexOf(CONSTANTS.TARGETING_KEYS.PRICE_BUCKET + '_') !== -1);
expect(limitedBids.length).to.equal(1);
});
it('sends all bids when enableSendAllBids is true and bidLimit is above the total number of bids received', function () {
config.setConfig({
sendBidsControl: {
bidLimit: 50
}
});
const targeting = targetingInstance.getAllTargeting(['/123456/header-bid-tag-0']);
let limitedBids = Object.keys(targeting['/123456/header-bid-tag-0']).filter(key => key.indexOf(CONSTANTS.TARGETING_KEYS.PRICE_BUCKET + '_') !== -1);
expect(limitedBids.length).to.equal(2);
});
it('sends all bids when enableSendAllBids is true and bidLimit is set to 0', function () {
config.setConfig({
sendBidsControl: {
bidLimit: 0
}
});
const targeting = targetingInstance.getAllTargeting(['/123456/header-bid-tag-0']);
let limitedBids = Object.keys(targeting['/123456/header-bid-tag-0']).filter(key => key.indexOf(CONSTANTS.TARGETING_KEYS.PRICE_BUCKET + '_') !== -1);
expect(limitedBids.length).to.equal(2);
});
});
describe('targetingControls.allowZeroCpmBids', function () {
let bid4;
let bidderSettingsStorage;
before(function() {
bidderSettingsStorage = $$PREBID_GLOBAL$$.bidderSettings;
});
beforeEach(function () {
bid4 = utils.deepClone(bid1);
bid4.adserverTargeting = {
hb_pb: '0.0',
hb_adid: '567891011',
hb_bidder: 'appnexus',
};
bid4.bidder = bid4.bidderCode = 'appnexus';
bid4.cpm = 0;
bidsReceived = [bid4];
});
after(function() {
bidsReceived = [bid1, bid2, bid3];
$$PREBID_GLOBAL$$.bidderSettings = bidderSettingsStorage;
})
it('targeting should not include a 0 cpm by default', function() {
bid4.adserverTargeting = {};
const targeting = targetingInstance.getAllTargeting(['/123456/header-bid-tag-0']);
expect(targeting['/123456/header-bid-tag-0']).to.deep.equal({});
});
it('targeting should allow a 0 cpm with targetingControls.allowZeroCpmBids set to true', function () {
$$PREBID_GLOBAL$$.bidderSettings = {
standard: {
allowZeroCpmBids: true
}
};
const targeting = targetingInstance.getAllTargeting(['/123456/header-bid-tag-0']);
expect(targeting['/123456/header-bid-tag-0']).to.include.all.keys('hb_pb', 'hb_bidder', 'hb_adid', 'hb_bidder_appnexus', 'hb_adid_appnexus', 'hb_pb_appnexus');
expect(targeting['/123456/header-bid-tag-0']['hb_pb']).to.equal('0.0')
});
});
describe('targetingControls.allowTargetingKeys', function () {
let bid4;
beforeEach(function() {
bid4 = utils.deepClone(bid1);
bid4.adserverTargeting = {
hb_deal: '4321',
hb_pb: '0.1',
hb_adid: '567891011',
hb_bidder: 'appnexus',
};
bid4.bidder = bid4.bidderCode = 'appnexus';
bid4.cpm = 0.1; // losing bid so not included if enableSendAllBids === false
bid4.dealId = '4321';
enableSendAllBids = true;
config.setConfig({
targetingControls: {
allowTargetingKeys: ['BIDDER', 'AD_ID', 'PRICE_BUCKET']
}
});
bidsReceived.push(bid4);
});
it('targeting should include custom keys', function () {
const targeting = targetingInstance.getAllTargeting(['/123456/header-bid-tag-0']);
expect(targeting['/123456/header-bid-tag-0']).to.include.all.keys('foobar');
});
it('targeting should include keys prefixed by allowed default targeting keys', function () {
const targeting = targetingInstance.getAllTargeting(['/123456/header-bid-tag-0']);
expect(targeting['/123456/header-bid-tag-0']).to.include.all.keys('hb_bidder_rubicon', 'hb_adid_rubicon', 'hb_pb_rubicon');
expect(targeting['/123456/header-bid-tag-0']).to.include.all.keys('hb_bidder_appnexus', 'hb_adid_appnexus', 'hb_pb_appnexus');
});
it('targeting should not include keys prefixed by disallowed default targeting keys', function () {
const targeting = targetingInstance.getAllTargeting(['/123456/header-bid-tag-0']);
expect(targeting['/123456/header-bid-tag-0']).to.not.have.all.keys(['hb_deal_appnexus', 'hb_deal_rubicon']);
});
});
describe('targetingControls.addTargetingKeys', function () {
let winningBid = null;
beforeEach(function () {
bidsReceived = [bid1, bid2, nativeBid1, nativeBid2].map(deepClone);
bidsReceived.forEach((bid) => {
bid.adserverTargeting[CONSTANTS.TARGETING_KEYS.SOURCE] = 'test-source';
bid.adUnitCode = 'adunit';
if (winningBid == null || bid.cpm > winningBid.cpm) {
winningBid = bid;
}
});
enableSendAllBids = true;
});
const expandKey = function (key) {
const keys = new Set();
if (winningBid.adserverTargeting[key] != null) {
keys.add(key);
}
bidsReceived
.filter((bid) => bid.adserverTargeting[key] != null)
.map((bid) => bid.bidderCode)
.forEach((code) => keys.add(`${key}_${code}`.substr(0, 20)));
return [...keys];
}
const targetingResult = function () {
return targetingInstance.getAllTargeting(['adunit'])['adunit'];
}
it('should include added keys', function () {
config.setConfig({
targetingControls: {
addTargetingKeys: ['SOURCE']
}
});
expect(targetingResult()).to.include.all.keys(...expandKey(CONSTANTS.TARGETING_KEYS.SOURCE));
});
it('should keep default and native keys', function() {
config.setConfig({
targetingControls: {
addTargetingKeys: ['SOURCE']
}
});
const defaultKeys = new Set(Object.values(CONSTANTS.DEFAULT_TARGETING_KEYS));
Object.values(CONSTANTS.NATIVE_KEYS).forEach((k) => defaultKeys.add(k));
const expectedKeys = new Set();
bidsReceived
.map((bid) => Object.keys(bid.adserverTargeting))
.reduce((left, right) => left.concat(right), [])
.filter((key) => defaultKeys.has(key))
.map(expandKey)
.reduce((left, right) => left.concat(right), [])
.forEach((k) => expectedKeys.add(k));
expect(targetingResult()).to.include.all.keys(...expectedKeys);
});
it('should not be allowed together with allowTargetingKeys', function () {
config.setConfig({
targetingControls: {
allowTargetingKeys: [CONSTANTS.TARGETING_KEYS.BIDDER],
addTargetingKeys: [CONSTANTS.TARGETING_KEYS.SOURCE]
}
});
expect(targetingResult).to.throw();
});
});
describe('targetingControls.allowSendAllBidsTargetingKeys', function () {
let bid4;
beforeEach(function() {
bid4 = utils.deepClone(bid1);
bid4.adserverTargeting = {
hb_deal: '4321',
hb_pb: '0.1',
hb_adid: '567891011',
hb_bidder: 'appnexus',
};
bid4.bidder = bid4.bidderCode = 'appnexus';
bid4.cpm = 0.1; // losing bid so not included if enableSendAllBids === false
bid4.dealId = '4321';
enableSendAllBids = true;
config.setConfig({
targetingControls: {
allowTargetingKeys: ['BIDDER', 'AD_ID', 'PRICE_BUCKET'],
allowSendAllBidsTargetingKeys: ['PRICE_BUCKET', 'AD_ID']
}
});
bidsReceived.push(bid4);
});
it('targeting should include custom keys', function () {
const targeting = targetingInstance.getAllTargeting(['/123456/header-bid-tag-0']);
expect(targeting['/123456/header-bid-tag-0']).to.include.all.keys('foobar');
});
it('targeting should only include keys prefixed by allowed default send all bids targeting keys and standard keys', function () {
const targeting = targetingInstance.getAllTargeting(['/123456/header-bid-tag-0']);
expect(targeting['/123456/header-bid-tag-0']).to.include.all.keys('hb_bidder', 'hb_adid', 'hb_pb');
expect(targeting['/123456/header-bid-tag-0']).to.include.all.keys('hb_adid_rubicon', 'hb_pb_rubicon');
expect(targeting['/123456/header-bid-tag-0']).to.include.all.keys('hb_bidder', 'hb_adid', 'hb_pb', 'hb_adid_appnexus', 'hb_pb_appnexus');
});
it('targeting should not include keys prefixed by disallowed default targeting keys and disallowed send all bid targeting keys', function () {
const targeting = targetingInstance.getAllTargeting(['/123456/header-bid-tag-0']);
expect(targeting['/123456/header-bid-tag-0']).to.not.have.all.keys(['hb_deal', 'hb_bidder_rubicon', 'hb_bidder_appnexus', 'hb_deal_appnexus', 'hb_deal_rubicon']);
});
});
describe('targetingControls.alwaysIncludeDeals', function () {
let bid4;
beforeEach(function() {
bid4 = utils.deepClone(bid1);
bid4.adserverTargeting = {
hb_deal: '4321',
hb_pb: '0.1',
hb_adid: '567891011',
hb_bidder: 'appnexus',
};
bid4.bidder = bid4.bidderCode = 'appnexus';
bid4.cpm = 0.1; // losing bid so not included if enableSendAllBids === false
bid4.dealId = '4321';
enableSendAllBids = false;
bidsReceived.push(bid4);
});
it('does not include losing deals when alwaysIncludeDeals not set', function () {
const targeting = targetingInstance.getAllTargeting(['/123456/header-bid-tag-0']);
// Rubicon wins the bid and has a deal, but alwaysIncludeDeals is not set, so only the top bid's keys (plus its deal id) are sent
// appnexus does not get sent since alwaysIncludeDeals is not defined
expect(targeting['/123456/header-bid-tag-0']).to.deep.equal({
'hb_deal_rubicon': '1234',
'hb_deal': '1234',
'hb_pb': '0.53',
'hb_adid': '148018fe5e',
'hb_bidder': 'rubicon',
'foobar': '300x250'
});
});
it('does not include losing deals when alwaysIncludeDeals set to false', function () {
config.setConfig({
targetingControls: {
alwaysIncludeDeals: false
}
});
const targeting = targetingInstance.getAllTargeting(['/123456/header-bid-tag-0']);
// Rubicon wins the bid and has a deal, but alwaysIncludeDeals is false, so only the top bid's keys (plus its deal id) are sent
// appnexus does not get sent since alwaysIncludeDeals is false
expect(targeting['/123456/header-bid-tag-0']).to.deep.equal({
'hb_deal_rubicon': '1234', // pre-existing behavior: the bidder-specific deal key is always added for the winner when the winning bid has a deal
'hb_deal': '1234',
'hb_pb': '0.53',
'hb_adid': '148018fe5e',
'hb_bidder': 'rubicon',
'foobar': '300x250'
});
});
it('includes losing deals when alwaysIncludeDeals set to true and also winning deals bidder KVPs', function () {
config.setConfig({
targetingControls: {
alwaysIncludeDeals: true
}
});
const targeting = targetingInstance.getAllTargeting(['/123456/header-bid-tag-0']);
// Rubicon wins the bid and has a deal, so all of its KVPs are passed (top-level plus bidder-specific)
// Appnexus also has a deal, so its keys are passed through
expect(targeting['/123456/header-bid-tag-0']).to.deep.equal({
'hb_deal_rubicon': '1234',
'hb_deal': '1234',
'hb_pb': '0.53',
'hb_adid': '148018fe5e',
'hb_bidder': 'rubicon',
'foobar': '300x250',
'hb_pb_rubicon': '0.53',
'hb_adid_rubicon': '148018fe5e',
'hb_bidder_rubicon': 'rubicon',
'hb_deal_appnexus': '4321',
'hb_pb_appnexus': '0.1',
'hb_adid_appnexus': '567891011',
'hb_bidder_appnexus': 'appnexus'
});
});
it('includes winning bid even when it is not a deal, plus other deal KVPs', function () {
config.setConfig({
targetingControls: {
alwaysIncludeDeals: true
}
});
let bid5 = utils.deepClone(bid4);
bid5.adserverTargeting = {
hb_pb: '3.0',
hb_adid: '111111',
hb_bidder: 'pubmatic',
};
bid5.bidder = bid5.bidderCode = 'pubmatic';
bid5.cpm = 3.0; // winning bid!
delete bid5.dealId; // no deal with winner
bidsReceived.push(bid5);
const targeting = targetingInstance.getAllTargeting(['/123456/header-bid-tag-0']);
// Pubmatic wins but has no deal, so only the top-level bid KVPs are sent for it
// Rubicon has a dealId, so its keys are passed through
// Appnexus has a dealId, so its keys are passed through
expect(targeting['/123456/header-bid-tag-0']).to.deep.equal({
'hb_bidder': 'pubmatic',
'hb_adid': '111111',
'hb_pb': '3.0',
'foobar': '300x250',
'hb_deal_rubicon': '1234',
'hb_pb_rubicon': '0.53',
'hb_adid_rubicon': '148018fe5e',
'hb_bidder_rubicon': 'rubicon',
'hb_deal_appnexus': '4321',
'hb_pb_appnexus': '0.1',
'hb_adid_appnexus': '567891011',
'hb_bidder_appnexus': 'appnexus'
});
});
});
it('selects the top bid when enableSendAllBids true', function () {
enableSendAllBids = true;
let targeting = targetingInstance.getAllTargeting(['/123456/header-bid-tag-0']);
// we should only get the targeting data for the one requested adunit
expect(Object.keys(targeting).length).to.equal(1);
let sendAllBidCpm = Object.keys(targeting['/123456/header-bid-tag-0']).filter(key => key.indexOf(CONSTANTS.TARGETING_KEYS.PRICE_BUCKET + '_') !== -1);
// we shouldn't get more than 1 key for hb_pb_${bidder}
expect(sendAllBidCpm.length).to.equal(1);
// expect the winning CPM to be equal to the sendAllBidCPM
expect(targeting['/123456/header-bid-tag-0'][CONSTANTS.TARGETING_KEYS.PRICE_BUCKET + '_rubicon']).to.deep.equal(targeting['/123456/header-bid-tag-0'][CONSTANTS.TARGETING_KEYS.PRICE_BUCKET]);
});
it('ensures keys are properly generated when enableSendAllBids is true and multiple bidders use native', function() {
const nativeAdUnitCode = '/19968336/prebid_native_example_1';
enableSendAllBids = true;
// update mocks for this test to return native bids
amBidsReceivedStub.callsFake(function() {
return [nativeBid1, nativeBid2];
});
amGetAdUnitsStub.callsFake(function() {
return [nativeAdUnitCode];
});
let targeting = targetingInstance.getAllTargeting([nativeAdUnitCode]);
expect(targeting[nativeAdUnitCode].hb_native_image).to.equal(nativeBid1.native.image.url);
expect(targeting[nativeAdUnitCode].hb_native_linkurl).to.equal(nativeBid1.native.clickUrl);
expect(targeting[nativeAdUnitCode].hb_native_title).to.equal(nativeBid1.native.title);
expect(targeting[nativeAdUnitCode].hb_native_image_dgad).to.exist.and.to.equal(nativeBid2.native.image.url);
expect(targeting[nativeAdUnitCode].hb_pb_dgads).to.exist.and.to.equal(nativeBid2.pbMg);
expect(targeting[nativeAdUnitCode].hb_native_body_appne).to.exist.and.to.equal(nativeBid1.native.body);
});
it('does not include adpod type bids in the getBidsReceived results', function () {
let adpodBid = utils.deepClone(bid1);
adpodBid.video = { context: 'adpod', durationSeconds: 15, durationBucket: 15 };
adpodBid.cpm = 5;
bidsReceived.push(adpodBid);
const targeting = targetingInstance.getAllTargeting(['/123456/header-bid-tag-0']);
expect(targeting['/123456/header-bid-tag-0']).to.contain.keys('hb_deal', 'hb_adid', 'hb_bidder');
expect(targeting['/123456/header-bid-tag-0']['hb_adid']).to.equal(bid1.adId);
});
}); // end getAllTargeting tests
describe('getAllTargeting without bids return empty object', function () {
let amBidsReceivedStub;
let amGetAdUnitsStub;
let bidExpiryStub;
beforeEach(function () {
amBidsReceivedStub = sandbox.stub(auctionManager, 'getBidsReceived').callsFake(function() {
return [];
});
amGetAdUnitsStub = sandbox.stub(auctionManager, 'getAdUnitCodes').callsFake(function() {
return ['/123456/header-bid-tag-0'];
});
bidExpiryStub = sandbox.stub(filters, 'isBidNotExpired').returns(true);
});
it('returns targetingSet correctly', function () {
let targeting = targetingInstance.getAllTargeting(['/123456/header-bid-tag-0']);
// the one requested ad unit should still be present in the result, even though it has no keys to set
expect(Object.keys(targeting).length).to.equal(1);
});
}); // end getAllTargeting without bids return empty object
describe('Targeting in concurrent auctions', function () {
describe('check getOldestBid', function () {
let bidExpiryStub;
let auctionManagerStub;
beforeEach(function () {
bidExpiryStub = sandbox.stub(filters, 'isBidNotExpired').returns(true);
auctionManagerStub = sandbox.stub(auctionManager, 'getBidsReceived');
});
it('should use bids from pool to get Winning Bid', function () {
let bidsReceived = [
createBidReceived({bidder: 'appnexus', cpm: 7, auctionId: 1, responseTimestamp: 100, adUnitCode: 'code-0', adId: 'adid-1'}),
createBidReceived({bidder: 'rubicon', cpm: 6, auctionId: 1, responseTimestamp: 101, adUnitCode: 'code-1', adId: 'adid-2'}),
createBidReceived({bidder: 'appnexus', cpm: 6, auctionId: 2, responseTimestamp: 102, adUnitCode: 'code-0', adId: 'adid-3'}),
createBidReceived({bidder: 'rubicon', cpm: 6, auctionId: 2, responseTimestamp: 103, adUnitCode: 'code-1', adId: 'adid-4'}),
];
let adUnitCodes = ['code-0', 'code-1'];
let bids = targetingInstance.getWinningBids(adUnitCodes, bidsReceived);
expect(bids.length).to.equal(2);
expect(bids[0].adId).to.equal('adid-1');
expect(bids[1].adId).to.equal('adid-2');
});
it('should honor useBidCache', function() {
useBidCache = true;
auctionManagerStub.returns([
createBidReceived({bidder: 'appnexus', cpm: 7, auctionId: 1, responseTimestamp: 100, adUnitCode: 'code-0', adId: 'adid-1'}),
createBidReceived({bidder: 'appnexus', cpm: 5, auctionId: 2, responseTimestamp: 102, adUnitCode: 'code-0', adId: 'adid-2'}),
]);
let adUnitCodes = ['code-0'];
targetingInstance.setLatestAuctionForAdUnit('code-0', 2);
let bids = targetingInstance.getWinningBids(adUnitCodes);
expect(bids.length).to.equal(1);
expect(bids[0].adId).to.equal('adid-1');
useBidCache = false;
bids = targetingInstance.getWinningBids(adUnitCodes);
expect(bids.length).to.equal(1);
expect(bids[0].adId).to.equal('adid-2');
});
it('should not use rendered bid to get winning bid', function () {
let bidsReceived = [
createBidReceived({bidder: 'appnexus', cpm: 8, auctionId: 1, responseTimestamp: 100, adUnitCode: 'code-0', adId: 'adid-1', status: 'rendered'}),
createBidReceived({bidder: 'rubicon', cpm: 6, auctionId: 1, responseTimestamp: 101, adUnitCode: 'code-1', adId: 'adid-2'}),
createBidReceived({bidder: 'appnexus', cpm: 7, auctionId: 2, responseTimestamp: 102, adUnitCode: 'code-0', adId: 'adid-3'}),
createBidReceived({bidder: 'rubicon', cpm: 6, auctionId: 2, responseTimestamp: 103, adUnitCode: 'code-1', adId: 'adid-4'}),
];
auctionManagerStub.returns(bidsReceived);
let adUnitCodes = ['code-0', 'code-1'];
let bids = targetingInstance.getWinningBids(adUnitCodes);
expect(bids.length).to.equal(2);
expect(bids[0].adId).to.equal('adid-2');
expect(bids[1].adId).to.equal('adid-3');
});
it('should use highest cpm bid from bid pool to get winning bid', function () {
// The pool has 4 bids from 2 auctions. Of the two rubicon bids, #2 has the highest cpm and will be selected to take part in the auction.
let bidsReceived = [
createBidReceived({bidder: 'appnexus', cpm: 8, auctionId: 1, responseTimestamp: 100, adUnitCode: 'code-0', adId: 'adid-1'}),
createBidReceived({bidder: 'rubicon', cpm: 9, auctionId: 1, responseTimestamp: 101, adUnitCode: 'code-0', adId: 'adid-2'}),
createBidReceived({bidder: 'appnexus', cpm: 7, auctionId: 2, responseTimestamp: 102, adUnitCode: 'code-0', adId: 'adid-3'}),
createBidReceived({bidder: 'rubicon', cpm: 8, auctionId: 2, responseTimestamp: 103, adUnitCode: 'code-0', adId: 'adid-4'}),
];
auctionManagerStub.returns(bidsReceived);
let adUnitCodes = ['code-0'];
let bids = targetingInstance.getWinningBids(adUnitCodes);
expect(bids.length).to.equal(1);
expect(bids[0].adId).to.equal('adid-2');
});
});
describe('check bidExpiry', function () {
let auctionManagerStub;
let timestampStub;
beforeEach(function () {
auctionManagerStub = sandbox.stub(auctionManager, 'getBidsReceived');
timestampStub = sandbox.stub(utils, 'timestamp');
});
it('should not include expired bids in the auction', function () {
timestampStub.returns(200000);
// The pool has 4 bids from 2 auctions. All bids except #3 are expired; only bid #3 passes the bidExpiry check.
let bidsReceived = [
createBidReceived({bidder: 'appnexus', cpm: 18, auctionId: 1, responseTimestamp: 100, adUnitCode: 'code-0', adId: 'adid-1', ttl: 150}),
createBidReceived({bidder: 'sampleBidder', cpm: 16, auctionId: 1, responseTimestamp: 101, adUnitCode: 'code-0', adId: 'adid-2', ttl: 100}),
createBidReceived({bidder: 'appnexus', cpm: 7, auctionId: 2, responseTimestamp: 102, adUnitCode: 'code-0', adId: 'adid-3', ttl: 300}),
createBidReceived({bidder: 'rubicon', cpm: 6, auctionId: 2, responseTimestamp: 103, adUnitCode: 'code-0', adId: 'adid-4', ttl: 50}),
];
auctionManagerStub.returns(bidsReceived);
let adUnitCodes = ['code-0', 'code-1'];
let bids = targetingInstance.getWinningBids(adUnitCodes);
expect(bids.length).to.equal(1);
expect(bids[0].adId).to.equal('adid-3');
});
});
});
describe('sortByDealAndPriceBucketOrCpm', function() {
it('will properly sort bids when some bids have deals and some do not', function () {
let bids = [{
adserverTargeting: {
hb_adid: 'abc',
hb_pb: '1.00',
hb_deal: '1234'
}
}, {
adserverTargeting: {
hb_adid: 'def',
hb_pb: '0.50',
}
}, {
adserverTargeting: {
hb_adid: 'ghi',
hb_pb: '20.00',
hb_deal: '4532'
}
}, {
adserverTargeting: {
hb_adid: 'jkl',
hb_pb: '9.00',
hb_deal: '9864'
}
}, {
adserverTargeting: {
hb_adid: 'mno',
hb_pb: '50.00',
}
}, {
adserverTargeting: {
hb_adid: 'pqr',
hb_pb: '100.00',
}
}];
bids.sort(sortByDealAndPriceBucketOrCpm());
expect(bids[0].adserverTargeting.hb_adid).to.equal('ghi');
expect(bids[1].adserverTargeting.hb_adid).to.equal('jkl');
expect(bids[2].adserverTargeting.hb_adid).to.equal('abc');
expect(bids[3].adserverTargeting.hb_adid).to.equal('pqr');
expect(bids[4].adserverTargeting.hb_adid).to.equal('mno');
expect(bids[5].adserverTargeting.hb_adid).to.equal('def');
});
it('will properly sort bids when all bids have deals', function () {
let bids = [{
adserverTargeting: {
hb_adid: 'abc',
hb_pb: '1.00',
hb_deal: '1234'
}
}, {
adserverTargeting: {
hb_adid: 'def',
hb_pb: '0.50',
hb_deal: '4321'
}
}, {
adserverTargeting: {
hb_adid: 'ghi',
hb_pb: '2.50',
hb_deal: '4532'
}
}, {
adserverTargeting: {
hb_adid: 'jkl',
hb_pb: '2.00',
hb_deal: '9864'
}
}];
bids.sort(sortByDealAndPriceBucketOrCpm());
expect(bids[0].adserverTargeting.hb_adid).to.equal('ghi');
expect(bids[1].adserverTargeting.hb_adid).to.equal('jkl');
expect(bids[2].adserverTargeting.hb_adid).to.equal('abc');
expect(bids[3].adserverTargeting.hb_adid).to.equal('def');
});
it('will properly sort bids when no bids have deals', function () {
let bids = [{
adserverTargeting: {
hb_adid: 'abc',
hb_pb: '1.00'
}
}, {
adserverTargeting: {
hb_adid: 'def',
hb_pb: '0.10'
}
}, {
adserverTargeting: {
hb_adid: 'ghi',
hb_pb: '10.00'
}
}, {
adserverTargeting: {
hb_adid: 'jkl',
hb_pb: '10.01'
}
}, {
adserverTargeting: {
hb_adid: 'mno',
hb_pb: '1.00'
}
}, {
adserverTargeting: {
hb_adid: 'pqr',
hb_pb: '100.00'
}
}];
bids.sort(sortByDealAndPriceBucketOrCpm());
expect(bids[0].adserverTargeting.hb_adid).to.equal('pqr');
expect(bids[1].adserverTargeting.hb_adid).to.equal('jkl');
expect(bids[2].adserverTargeting.hb_adid).to.equal('ghi');
expect(bids[3].adserverTargeting.hb_adid).to.equal('abc');
expect(bids[4].adserverTargeting.hb_adid).to.equal('mno');
expect(bids[5].adserverTargeting.hb_adid).to.equal('def');
});
it('will properly sort bids when some bids have deals and some do not and by cpm when flag is set to true', function () {
let bids = [{
cpm: 1.04,
adserverTargeting: {
hb_adid: 'abc',
hb_pb: '1.00',
hb_deal: '1234'
}
}, {
cpm: 0.50,
adserverTargeting: {
hb_adid: 'def',
hb_pb: '0.50',
hb_deal: '4532'
}
}, {
cpm: 0.53,
adserverTargeting: {
hb_adid: 'ghi',
hb_pb: '0.50',
hb_deal: '4532'
}
}, {
cpm: 9.04,
adserverTargeting: {
hb_adid: 'jkl',
hb_pb: '9.00',
hb_deal: '9864'
}
}, {
cpm: 50.00,
adserverTargeting: {
hb_adid: 'mno',
hb_pb: '50.00',
}
}, {
cpm: 100.00,
adserverTargeting: {
hb_adid: 'pqr',
hb_pb: '100.00',
}
}];
bids.sort(sortByDealAndPriceBucketOrCpm(true));
expect(bids[0].adserverTargeting.hb_adid).to.equal('jkl');
expect(bids[1].adserverTargeting.hb_adid).to.equal('abc');
expect(bids[2].adserverTargeting.hb_adid).to.equal('ghi');
expect(bids[3].adserverTargeting.hb_adid).to.equal('def');
expect(bids[4].adserverTargeting.hb_adid).to.equal('pqr');
expect(bids[5].adserverTargeting.hb_adid).to.equal('mno');
});
});
describe('setTargetingForAst', function () {
let sandbox,
apnTagStub;
beforeEach(function() {
sandbox = sinon.createSandbox();
sandbox.stub(targetingInstance, 'resetPresetTargetingAST');
apnTagStub = sandbox.stub(window.apntag, 'setKeywords');
});
afterEach(function () {
sandbox.restore();
});
it('should set a single adUnit code', function() {
let adUnitCode = 'testdiv1-abc-ad-123456-0';
sandbox.stub(targetingInstance, 'getAllTargeting').returns({
'testdiv1-abc-ad-123456-0': {hb_bidder: 'appnexus'}
});
targetingInstance.setTargetingForAst(adUnitCode);
expect(targetingInstance.getAllTargeting.called).to.equal(true);
expect(targetingInstance.resetPresetTargetingAST.called).to.equal(true);
expect(apnTagStub.callCount).to.equal(1);
expect(apnTagStub.getCall(0).args[0]).to.deep.equal('testdiv1-abc-ad-123456-0');
expect(apnTagStub.getCall(0).args[1]).to.deep.equal({HB_BIDDER: 'appnexus'});
});
it('should set an array of adUnit codes', function() {
let adUnitCodes = ['testdiv1-abc-ad-123456-0', 'testdiv2-abc-ad-123456-0']
sandbox.stub(targetingInstance, 'getAllTargeting').returns({
'testdiv1-abc-ad-123456-0': {hb_bidder: 'appnexus'},
'testdiv2-abc-ad-123456-0': {hb_bidder: 'appnexus'}
});
targetingInstance.setTargetingForAst(adUnitCodes);
expect(targetingInstance.getAllTargeting.called).to.equal(true);
expect(targetingInstance.resetPresetTargetingAST.called).to.equal(true);
expect(apnTagStub.callCount).to.equal(2);
expect(apnTagStub.getCall(1).args[0]).to.deep.equal('testdiv2-abc-ad-123456-0');
expect(apnTagStub.getCall(1).args[1]).to.deep.equal({HB_BIDDER: 'appnexus'});
});
});
});<|fim▁end|> | 'image': {
'url': 'http://vcdn.adnxs.com/p/creative-image/94/22/cd/0f/9422cd0f-f400-45d3-80f5-2b92629d9257.jpg',
'height': 2250, |
<|file_name|>Lexer.cpp<|end_file_name|><|fim▁begin|>//
// Lexer.cpp
// lut-lang
//
// Created by Mehdi Kitane on 13/03/2015.
// Copyright (c) 2015 H4314. All rights reserved.
//
#include "Lexer.h"
#include <string>
#include <regex>
#include <iostream>
#include "TokenType.h"
#include "ErrorHandler.h"<|fim▁hole|>using std::endl;
using std::smatch;
using std::string;
using std::regex_search;
// Regexs
const char keyword_str[] = "^(const |var |ecrire |lire )";
const char identifier_str[] = "^([a-zA-Z][a-zA-Z0-9]*)";
const char number_str[] = "^([0-9]*\\.?[0-9]+)";
const char single_operators_str[] = "^(\\+|-|\\*|\\/|\\(|\\)|;|=|,)";
const char affectation_str[] = "^(:=)";
const std::regex keyword(keyword_str);
const std::regex identifier(identifier_str);
const std::regex number(number_str);
const std::regex single_operators(single_operators_str);
const std::regex affectation(affectation_str);
int Lexer::find_first_not_of(string str) {
string::iterator it;
int index = 0;
for (it = str.begin(); it < str.end(); it++, index++) {
switch ( str.at(index) ) {
case ' ':
this->column++;
break;
case '\t':
break;
case '\n':
this->line++;
this->column = 0;
break;
case '\r':
break;
case '\f':
break;
case '\v':
break;
default:
return index;
}
}
return -1;
}
string& Lexer::ltrim(string& s) {
s.erase(0, find_first_not_of(s));
return s;
}
Lexer::Lexer(string inString) : inputString(inString) {
this->currentToken = new ASTTokenNode(TokenType::INVALID_SYMBOL);
this->line = 0;
this->column = 0;
this->column_next_incrementation = 0;
}
bool Lexer::has_next() {
// remove spaces before analyzing
// we remove left spaces and not right to handle cases like "const "
ltrim(inputString);
if ( inputString.length() <= 0 ) {
currentToken = new ASTTokenNode(TokenType::ENDOFFILE);
return false;
}
return true;
}
ASTTokenNode* Lexer::top() {
return currentToken;
}
void Lexer::shift() {
if ( !has_next() )
return;
this->column += this->column_next_incrementation;
std::smatch m;
if ( !analyze(inputString, m) ) {
ErrorHandler::getInstance().LexicalError(this->getLine(), this->getColumn(), inputString.at(0));
ErrorHandler::getInstance().outputErrors();
currentToken = new ASTTokenNode(TokenType::INVALID_SYMBOL);
inputString.erase(0, 1); // not sure
return;
}
this->column_next_incrementation = (int)m.length();
inputString = m.suffix().str();
}
bool Lexer::analyze(string s, smatch &m) {
if ( std::regex_search(inputString, m, keyword) ) {
std::string currentTokenValue = m.str();
switch (currentTokenValue[0]) {
case 'c':
currentToken = new ASTTokenNode(TokenType::CONST);
break;
case 'v':
currentToken = new ASTTokenNode(TokenType::VAR);
break;
case 'e':
currentToken = new ASTTokenNode(TokenType::WRITE);
break;
case 'l':
currentToken = new ASTTokenNode(TokenType::READ);
break;
default:
#warning "symbole non reconnu"
return false;
}
} else if ( std::regex_search(inputString, m, identifier) ) {
std::string currentTokenValue = m.str();
currentToken = new ASTTokenNode(TokenType::ID, currentTokenValue);
} else if ( std::regex_search(inputString, m, number) ) {
std::string currentTokenValue = m.str();
currentToken = new ASTTokenNode(TokenType::VAL, currentTokenValue);
} else if ( std::regex_search(inputString, m, single_operators) ) {
std::string currentTokenValue = m.str();
switch (currentTokenValue[0]) {
case '+':
currentToken = new ASTTokenNode(TokenType::ADD, "+");
break;
case '-':
currentToken = new ASTTokenNode(TokenType::SUB, "-");
break;
case '*':
currentToken = new ASTTokenNode(TokenType::MUL, "*");
break;
case '/':
currentToken = new ASTTokenNode(TokenType::DIV, "/");
break;
case '(':
currentToken = new ASTTokenNode(TokenType::PO);
break;
case ')':
currentToken = new ASTTokenNode(TokenType::PF);
break;
case ';':
currentToken = new ASTTokenNode(TokenType::PV);
break;
case '=':
currentToken = new ASTTokenNode(TokenType::EQ);
break;
case ',':
currentToken = new ASTTokenNode(TokenType::V);
break;
default:
#warning "symbole non reconnu"
return false;
}
} else if ( std::regex_search(inputString, m, affectation) ) {
currentToken = new ASTTokenNode(TokenType::AFF);
} else {
#warning "symbole non reconnu"
return false;
}
return true;
}
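/* Worked example (a sketch, not part of the class): for the input
   "var x := 2.5;" the analyze() dispatch above yields the token stream
       VAR -> ID("x") -> AFF -> VAL("2.5") -> PV
   The regexes are tried in declaration order (keyword, identifier, number,
   single operators, affectation), so ":=" is only matched once none of the
   earlier patterns apply. */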
int Lexer::getLine() {
return this->line+1;
}
int Lexer::getColumn() {
return this->column+1;
}<|fim▁end|> | using std::cout; |
<|file_name|>registrator.go<|end_file_name|><|fim▁begin|>package v1
import (
"github.com/ertgl/croncache"
)
func init() {
err := croncache.TaskManagerRepository().Register(MODULE_NAME, Generator)<|fim▁hole|> if err != nil {
croncache.HandleFatalError(err)
}
}<|fim▁end|> | |
<|file_name|>muParserDLL.cpp<|end_file_name|><|fim▁begin|>/*
_____ __ _____________ _______ ______ ___________<|fim▁hole|> Copyright (C) 2004 - 2020 Ingo Berg
Redistribution and use in source and binary forms, with or without modification, are permitted
provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of
conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of
conditions and the following disclaimer in the documentation and/or other materials provided
with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if defined(MUPARSER_DLL)
#if defined(_WIN32)
#define WIN32_LEAN_AND_MEAN
#define _CRT_SECURE_NO_WARNINGS
#define _CRT_SECURE_NO_DEPRECATE
#include <windows.h>
#endif
#include <cassert>
#include "muParserDLL.h"
#include "muParser.h"
#include "muParserInt.h"
#include "muParserError.h"
#if defined(_MSC_VER)
#pragma warning(push)
#pragma warning(disable : 26812)
#endif
#define MU_TRY \
try \
{
#define MU_CATCH \
} \
catch (muError_t &e) \
{ \
ParserTag *pTag = static_cast<ParserTag*>(a_hParser); \
pTag->exc = e; \
pTag->bError = true; \
if (pTag->errHandler) \
(pTag->errHandler)(a_hParser); \
} \
catch (...) \
{ \
ParserTag *pTag = static_cast<ParserTag*>(a_hParser); \
pTag->exc = muError_t(mu::ecINTERNAL_ERROR); \
pTag->bError = true; \
if (pTag->errHandler) \
(pTag->errHandler)(a_hParser); \
}
/** \file
\brief This file contains the implementation of the DLL interface of muparser.
*/
typedef mu::ParserBase::exception_type muError_t;
typedef mu::ParserBase muParser_t;
int g_nBulkSize;
class ParserTag
{
public:
ParserTag(int nType)
: pParser((nType == muBASETYPE_FLOAT)
? (mu::ParserBase*)new mu::Parser()
: (nType == muBASETYPE_INT) ? (mu::ParserBase*)new mu::ParserInt() : nullptr)
, exc()
, errHandler(nullptr)
, bError(false)
, m_nParserType(nType)
{}
~ParserTag()
{
delete pParser;
}
mu::ParserBase* pParser;
mu::ParserBase::exception_type exc;
muErrorHandler_t errHandler;
bool bError;
private:
ParserTag(const ParserTag& ref);
ParserTag& operator=(const ParserTag& ref);
int m_nParserType;
};
static muChar_t s_tmpOutBuf[2048];
//---------------------------------------------------------------------------
//
//
// unexported functions
//
//
//---------------------------------------------------------------------------
inline muParser_t* AsParser(muParserHandle_t a_hParser)
{
return static_cast<ParserTag*>(a_hParser)->pParser;
}
inline ParserTag* AsParserTag(muParserHandle_t a_hParser)
{
return static_cast<ParserTag*>(a_hParser);
}
#if defined(_WIN32)
BOOL APIENTRY DllMain(HANDLE /*hModule*/, DWORD ul_reason_for_call, LPVOID /*lpReserved*/)
{
switch (ul_reason_for_call)
{
case DLL_PROCESS_ATTACH:
break;
case DLL_THREAD_ATTACH:
case DLL_THREAD_DETACH:
case DLL_PROCESS_DETACH:
break;
}
return TRUE;
}
#endif
//---------------------------------------------------------------------------
//
//
// exported functions
//
//
//---------------------------------------------------------------------------
API_EXPORT(void) mupSetVarFactory(muParserHandle_t a_hParser, muFacFun_t a_pFactory, void* pUserData)
{
MU_TRY
muParser_t* p(AsParser(a_hParser));
p->SetVarFactory(a_pFactory, pUserData);
MU_CATCH
}
/** \brief Create a new Parser instance and return its handle. */
API_EXPORT(muParserHandle_t) mupCreate(int nBaseType)
{
switch (nBaseType)
{
case muBASETYPE_FLOAT: return (void*)(new ParserTag(muBASETYPE_FLOAT));
case muBASETYPE_INT: return (void*)(new ParserTag(muBASETYPE_INT));
default: return nullptr;
}
}
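/* A minimal lifecycle sketch (illustrative only; `hParser` is a hypothetical
   local, and every call below is exported by this file):

       muParserHandle_t hParser = mupCreate(muBASETYPE_FLOAT);
       mupSetExpr(hParser, _T("1+2*3"));
       muFloat_t fVal = mupEval(hParser);  // 7, unless an error was flagged
       mupRelease(hParser);
*/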
/** \brief Release the parser instance related with a parser handle. */
API_EXPORT(void) mupRelease(muParserHandle_t a_hParser)
{
MU_TRY
ParserTag* p = static_cast<ParserTag*>(a_hParser);
delete p;
MU_CATCH
}
API_EXPORT(const muChar_t*) mupGetVersion(muParserHandle_t a_hParser)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
#ifndef _UNICODE
sprintf(s_tmpOutBuf, "%s", p->GetVersion().c_str());
#else
wsprintf(s_tmpOutBuf, _T("%s"), p->GetVersion().c_str());
#endif
return s_tmpOutBuf;
MU_CATCH
return _T("");
}
/** \brief Evaluate the expression. */
API_EXPORT(muFloat_t) mupEval(muParserHandle_t a_hParser)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
return p->Eval();
MU_CATCH
return 0;
}
API_EXPORT(muFloat_t*) mupEvalMulti(muParserHandle_t a_hParser, int* nNum)
{
MU_TRY
if (nNum == nullptr)
throw std::runtime_error("Argument is null!");
muParser_t* const p(AsParser(a_hParser));
return p->Eval(*nNum);
MU_CATCH
return 0;
}
API_EXPORT(void) mupEvalBulk(muParserHandle_t a_hParser, muFloat_t* a_res, int nSize)
{
MU_TRY
muParser_t* p(AsParser(a_hParser));
p->Eval(a_res, nSize);
MU_CATCH
}
API_EXPORT(void) mupSetExpr(muParserHandle_t a_hParser, const muChar_t* a_szExpr)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->SetExpr(a_szExpr);
MU_CATCH
}
API_EXPORT(void) mupRemoveVar(muParserHandle_t a_hParser, const muChar_t* a_szName)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->RemoveVar(a_szName);
MU_CATCH
}
/** \brief Release all parser variables.
\param a_hParser Handle to the parser instance.
*/
API_EXPORT(void) mupClearVar(muParserHandle_t a_hParser)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->ClearVar();
MU_CATCH
}
/** \brief Release all parser constants.
\param a_hParser Handle to the parser instance.
*/
API_EXPORT(void) mupClearConst(muParserHandle_t a_hParser)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->ClearConst();
MU_CATCH
}
/** \brief Clear all user defined operators.
\param a_hParser Handle to the parser instance.
*/
API_EXPORT(void) mupClearOprt(muParserHandle_t a_hParser)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->ClearOprt();
MU_CATCH
}
API_EXPORT(void) mupClearFun(muParserHandle_t a_hParser)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->ClearFun();
MU_CATCH
}
API_EXPORT(void) mupDefineFun0(muParserHandle_t a_hParser,
const muChar_t* a_szName,
muFun0_t a_pFun,
muBool_t a_bAllowOpt)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, a_bAllowOpt != 0);
MU_CATCH
}
API_EXPORT(void) mupDefineFun1(muParserHandle_t a_hParser, const muChar_t* a_szName, muFun1_t a_pFun, muBool_t a_bAllowOpt)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, a_bAllowOpt != 0);
MU_CATCH
}
API_EXPORT(void) mupDefineFun2(muParserHandle_t a_hParser, const muChar_t* a_szName, muFun2_t a_pFun, muBool_t a_bAllowOpt)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, a_bAllowOpt != 0);
MU_CATCH
}
API_EXPORT(void) mupDefineFun3(muParserHandle_t a_hParser, const muChar_t* a_szName, muFun3_t a_pFun, muBool_t a_bAllowOpt)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, a_bAllowOpt != 0);
MU_CATCH
}
API_EXPORT(void) mupDefineFun4(muParserHandle_t a_hParser, const muChar_t* a_szName, muFun4_t a_pFun, muBool_t a_bAllowOpt)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, a_bAllowOpt != 0);
MU_CATCH
}
API_EXPORT(void) mupDefineFun5(muParserHandle_t a_hParser, const muChar_t* a_szName, muFun5_t a_pFun, muBool_t a_bAllowOpt)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, a_bAllowOpt != 0);
MU_CATCH
}
API_EXPORT(void) mupDefineFun6(muParserHandle_t a_hParser, const muChar_t* a_szName, muFun6_t a_pFun, muBool_t a_bAllowOpt)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, a_bAllowOpt != 0);
MU_CATCH
}
API_EXPORT(void) mupDefineFun7(muParserHandle_t a_hParser, const muChar_t* a_szName, muFun7_t a_pFun, muBool_t a_bAllowOpt)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, a_bAllowOpt != 0);
MU_CATCH
}
API_EXPORT(void) mupDefineFun8(muParserHandle_t a_hParser, const muChar_t* a_szName, muFun8_t a_pFun, muBool_t a_bAllowOpt)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, a_bAllowOpt != 0);
MU_CATCH
}
API_EXPORT(void) mupDefineFun9(muParserHandle_t a_hParser, const muChar_t* a_szName, muFun9_t a_pFun, muBool_t a_bAllowOpt)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, a_bAllowOpt != 0);
MU_CATCH
}
API_EXPORT(void) mupDefineFun10(muParserHandle_t a_hParser, const muChar_t* a_szName, muFun10_t a_pFun, muBool_t a_bAllowOpt)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, a_bAllowOpt != 0);
MU_CATCH
}
API_EXPORT(void) mupDefineBulkFun0(muParserHandle_t a_hParser, const muChar_t* a_szName, muBulkFun0_t a_pFun)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, false);
MU_CATCH
}
API_EXPORT(void) mupDefineBulkFun1(muParserHandle_t a_hParser, const muChar_t* a_szName, muBulkFun1_t a_pFun)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, false);
MU_CATCH
}
API_EXPORT(void) mupDefineBulkFun2(muParserHandle_t a_hParser, const muChar_t* a_szName, muBulkFun2_t a_pFun)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, false);
MU_CATCH
}
API_EXPORT(void) mupDefineBulkFun3(muParserHandle_t a_hParser, const muChar_t* a_szName, muBulkFun3_t a_pFun)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, false);
MU_CATCH
}
API_EXPORT(void) mupDefineBulkFun4(muParserHandle_t a_hParser, const muChar_t* a_szName, muBulkFun4_t a_pFun)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, false);
MU_CATCH
}
API_EXPORT(void) mupDefineBulkFun5(muParserHandle_t a_hParser, const muChar_t* a_szName, muBulkFun5_t a_pFun)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, false);
MU_CATCH
}
API_EXPORT(void) mupDefineBulkFun6(muParserHandle_t a_hParser, const muChar_t* a_szName, muBulkFun6_t a_pFun)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, false);
MU_CATCH
}
API_EXPORT(void) mupDefineBulkFun7(muParserHandle_t a_hParser, const muChar_t* a_szName, muBulkFun7_t a_pFun)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, false);
MU_CATCH
}
API_EXPORT(void) mupDefineBulkFun8(muParserHandle_t a_hParser, const muChar_t* a_szName, muBulkFun8_t a_pFun)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, false);
MU_CATCH
}
API_EXPORT(void) mupDefineBulkFun9(muParserHandle_t a_hParser, const muChar_t* a_szName, muBulkFun9_t a_pFun)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, false);
MU_CATCH
}
API_EXPORT(void) mupDefineBulkFun10(muParserHandle_t a_hParser, const muChar_t* a_szName, muBulkFun10_t a_pFun)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, false);
MU_CATCH
}
API_EXPORT(void) mupDefineStrFun1(muParserHandle_t a_hParser, const muChar_t* a_szName, muStrFun1_t a_pFun)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, false);
MU_CATCH
}
API_EXPORT(void) mupDefineStrFun2(muParserHandle_t a_hParser, const muChar_t* a_szName, muStrFun2_t a_pFun)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, false);
MU_CATCH
}
API_EXPORT(void) mupDefineStrFun3(muParserHandle_t a_hParser, const muChar_t* a_szName, muStrFun3_t a_pFun)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, false);
MU_CATCH
}
API_EXPORT(void) mupDefineMultFun(muParserHandle_t a_hParser, const muChar_t* a_szName, muMultFun_t a_pFun, muBool_t a_bAllowOpt)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineFun(a_szName, a_pFun, a_bAllowOpt != 0);
MU_CATCH
}
API_EXPORT(void) mupDefineOprt(muParserHandle_t a_hParser, const muChar_t* a_szName, muFun2_t a_pFun, muInt_t a_nPrec, muInt_t a_nOprtAsct, muBool_t a_bAllowOpt)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineOprt(a_szName, a_pFun, a_nPrec, (mu::EOprtAssociativity)a_nOprtAsct, a_bAllowOpt != 0);
MU_CATCH
}
API_EXPORT(void) mupDefineVar(muParserHandle_t a_hParser, const muChar_t* a_szName, muFloat_t* a_pVar)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineVar(a_szName, a_pVar);
MU_CATCH
}
API_EXPORT(void) mupDefineBulkVar(muParserHandle_t a_hParser, const muChar_t* a_szName, muFloat_t* a_pVar)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineVar(a_szName, a_pVar);
MU_CATCH
}
API_EXPORT(void) mupDefineConst(muParserHandle_t a_hParser, const muChar_t* a_szName, muFloat_t a_fVal)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineConst(a_szName, a_fVal);
MU_CATCH
}
API_EXPORT(void) mupDefineStrConst(muParserHandle_t a_hParser, const muChar_t* a_szName, const muChar_t* a_szVal)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineStrConst(a_szName, a_szVal);
MU_CATCH
}
API_EXPORT(const muChar_t*) mupGetExpr(muParserHandle_t a_hParser)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
// C# explodes when the string is returned directly. For some reason it can't
// access the memory where the string lies.
#ifndef _UNICODE
sprintf(s_tmpOutBuf, "%s", p->GetExpr().c_str());
#else
wsprintf(s_tmpOutBuf, _T("%s"), p->GetExpr().c_str());
#endif
return s_tmpOutBuf;
MU_CATCH
return _T("");
}
API_EXPORT(void) mupDefinePostfixOprt(muParserHandle_t a_hParser, const muChar_t* a_szName, muFun1_t a_pOprt, muBool_t a_bAllowOpt)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefinePostfixOprt(a_szName, a_pOprt, a_bAllowOpt != 0);
MU_CATCH
}
API_EXPORT(void) mupDefineInfixOprt(muParserHandle_t a_hParser, const muChar_t* a_szName, muFun1_t a_pOprt, muBool_t a_bAllowOpt)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->DefineInfixOprt(a_szName, a_pOprt, a_bAllowOpt != 0);
MU_CATCH
}
// Define character sets for identifiers
API_EXPORT(void) mupDefineNameChars(muParserHandle_t a_hParser, const muChar_t* a_szCharset)
{
muParser_t* const p(AsParser(a_hParser));
p->DefineNameChars(a_szCharset);
}
API_EXPORT(void) mupDefineOprtChars(muParserHandle_t a_hParser, const muChar_t* a_szCharset)
{
muParser_t* const p(AsParser(a_hParser));
p->DefineOprtChars(a_szCharset);
}
API_EXPORT(void) mupDefineInfixOprtChars(muParserHandle_t a_hParser, const muChar_t* a_szCharset)
{
muParser_t* const p(AsParser(a_hParser));
p->DefineInfixOprtChars(a_szCharset);
}
/** \brief Get the number of variables defined in the parser.
\param a_hParser [in] Must be a valid parser handle.
\return The number of used variables.
\sa mupGetExprVar
*/
API_EXPORT(int) mupGetVarNum(muParserHandle_t a_hParser)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
const mu::varmap_type VarMap = p->GetVar();
return (int)VarMap.size();
MU_CATCH
return 0; // never reached
}
/** \brief Return a variable defined in the parser.
\param a_hParser [in] A valid parser handle.
\param a_iVar [in] The index of the variable to return.
\param a_szName [out] Pointer to the variable name.
\param a_pVar [out] Pointer to the variable.
\throw nothrow
Prior to calling this function call mupGetVarNum in order to get the
number of variables defined in the parser. If the parameter a_iVar is greater
than the number of variables both a_szName and a_pVar will be set to zero.
*/
API_EXPORT(void) mupGetVar(muParserHandle_t a_hParser, unsigned a_iVar, const muChar_t** a_szName, muFloat_t** a_pVar)
{
// A static buffer is needed for the name since I can't return the
// pointer from the map.
static muChar_t szName[1024];
MU_TRY
muParser_t* const p(AsParser(a_hParser));
const mu::varmap_type VarMap = p->GetVar();
if (a_iVar >= VarMap.size())
{
*a_szName = 0;
*a_pVar = 0;
return;
}
mu::varmap_type::const_iterator item;
item = VarMap.begin();
for (unsigned i = 0; i < a_iVar; ++i)
++item;
#ifndef _UNICODE
strncpy(szName, item->first.c_str(), sizeof(szName));
#else
wcsncpy(szName, item->first.c_str(), sizeof(szName));
#endif
szName[sizeof(szName) - 1] = 0;
*a_szName = &szName[0];
*a_pVar = item->second;
return;
MU_CATCH
* a_szName = 0;
*a_pVar = 0;
}
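/* Enumeration sketch (illustrative only) following the contract documented
   above -- query the count first, then fetch each variable by index:

       int nVar = mupGetVarNum(hParser);
       for (int i = 0; i < nVar; ++i)
       {
           const muChar_t* szName = 0;
           muFloat_t* pVar = 0;
           mupGetVar(hParser, (unsigned)i, &szName, &pVar);
           // szName points at a static buffer that the next call overwrites,
           // so copy it if it must outlive this iteration.
       }
*/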
/** \brief Get the number of variables used in the expression currently set in the parser.
\param a_hParser [in] Must be a valid parser handle.
\return The number of used variables.
\sa mupGetExprVar
*/
API_EXPORT(int) mupGetExprVarNum(muParserHandle_t a_hParser)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
const mu::varmap_type VarMap = p->GetUsedVar();
return (int)VarMap.size();
MU_CATCH
return 0; // never reached
}
/** \brief Return a variable that is used in an expression.
Prior to calling this function call mupGetExprVarNum in order to get the
number of variables in the expression. If the parameter a_iVar is greater
than the number of variables both a_szName and a_pVar will be set to zero.
As a side effect this function will trigger an internal calculation of the
expression; undefined variables will be set to zero during this calculation.
During the calculation user-defined callback functions present in the expression
will be called; this is unavoidable.
\param a_hParser [in] A valid parser handle.
\param a_iVar [in] The index of the variable to return.
\param a_szName [out] Pointer to the variable name.
\param a_pVar [out] Pointer to the variable.
\throw nothrow
*/
API_EXPORT(void) mupGetExprVar(muParserHandle_t a_hParser, unsigned a_iVar, const muChar_t** a_szName, muFloat_t** a_pVar)
{
// A static buffer is needed for the name since I can't return the
// pointer from the map.
static muChar_t szName[1024];
MU_TRY
muParser_t* const p(AsParser(a_hParser));
const mu::varmap_type VarMap = p->GetUsedVar();
if (a_iVar >= VarMap.size())
{
*a_szName = 0;
*a_pVar = 0;
return;
}
mu::varmap_type::const_iterator item;
item = VarMap.begin();
for (unsigned i = 0; i < a_iVar; ++i)
++item;
#ifndef _UNICODE
strncpy(szName, item->first.c_str(), sizeof(szName));
#else
wcsncpy(szName, item->first.c_str(), sizeof(szName) / sizeof(szName[0]));
#endif
szName[sizeof(szName) - 1] = 0;
*a_szName = &szName[0];
*a_pVar = item->second;
return;
MU_CATCH
* a_szName = 0;
*a_pVar = 0;
}
/** \brief Return the number of constants defined in a parser. */
API_EXPORT(int) mupGetConstNum(muParserHandle_t a_hParser)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
const mu::valmap_type ValMap = p->GetConst();
return (int)ValMap.size();
MU_CATCH
return 0; // never reached
}
API_EXPORT(void) mupSetArgSep(muParserHandle_t a_hParser, const muChar_t cArgSep)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->SetArgSep(cArgSep);
MU_CATCH
}
API_EXPORT(void) mupResetLocale(muParserHandle_t a_hParser)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->ResetLocale();
MU_CATCH
}
API_EXPORT(void) mupSetDecSep(muParserHandle_t a_hParser, const muChar_t cDecSep)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->SetDecSep(cDecSep);
MU_CATCH
}
API_EXPORT(void) mupSetThousandsSep(muParserHandle_t a_hParser, const muChar_t cThousandsSep)
{
MU_TRY
muParser_t* const p(AsParser(a_hParser));
p->SetThousandsSep(cThousandsSep);
MU_CATCH
}
//---------------------------------------------------------------------------
/** \brief Retrieve name and value of a single parser constant.
\param a_hParser [in] a valid parser handle
\param a_iVar [in] Index of the constant to query
\param a_pszName [out] pointer to a null terminated string with the constant name
\param a_fVal [out] The constant value
*/
API_EXPORT(void) mupGetConst(muParserHandle_t a_hParser, unsigned a_iVar, const muChar_t** a_pszName, muFloat_t* a_fVal)
{
// A static buffer is needed for the name since I can't return the
// pointer from the map.
static muChar_t szName[1024];
MU_TRY
muParser_t* const p(AsParser(a_hParser));
const mu::valmap_type ValMap = p->GetConst();
if (a_iVar >= ValMap.size())
{
*a_pszName = 0;
*a_fVal = 0;
return;
}
mu::valmap_type::const_iterator item;
item = ValMap.begin();
for (unsigned i = 0; i < a_iVar; ++i)
++item;
#ifndef _UNICODE
strncpy(szName, item->first.c_str(), sizeof(szName));
#else
wcsncpy(szName, item->first.c_str(), sizeof(szName) / sizeof(szName[0]));
#endif
szName[sizeof(szName) - 1] = 0;
*a_pszName = &szName[0];
*a_fVal = item->second;
return;
MU_CATCH
* a_pszName = 0;
*a_fVal = 0;
}
/** \brief Add a custom value recognition function. */
API_EXPORT(void) mupAddValIdent(muParserHandle_t a_hParser, muIdentFun_t a_pFun)
{
MU_TRY
muParser_t* p(AsParser(a_hParser));
p->AddValIdent(a_pFun);
MU_CATCH
}
/** \brief Query if an error occurred.
After querying, the internal error bit will be reset, so a consecutive call
will return false.
*/
API_EXPORT(muBool_t) mupError(muParserHandle_t a_hParser)
{
bool bError(AsParserTag(a_hParser)->bError);
AsParserTag(a_hParser)->bError = false;
return bError;
}
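/* Error-handling sketch (illustrative only). Note that mupError() clears the
   error bit, so query it once per evaluation:

       muFloat_t fVal = mupEval(hParser);
       if (mupError(hParser))
       {
           const muChar_t* szMsg = mupGetErrorMsg(hParser);
           int nCode = mupGetErrorCode(hParser);
           int nPos = mupGetErrorPos(hParser);
           // szMsg lives in a shared static buffer and is overwritten by
           // subsequent API calls.
       }
*/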
/** \brief Reset the internal error flag. */
API_EXPORT(void) mupErrorReset(muParserHandle_t a_hParser)
{
AsParserTag(a_hParser)->bError = false;
}
API_EXPORT(void) mupSetErrorHandler(muParserHandle_t a_hParser, muErrorHandler_t a_pHandler)
{
AsParserTag(a_hParser)->errHandler = a_pHandler;
}
/** \brief Return the message associated with the last error. */
API_EXPORT(const muChar_t*) mupGetErrorMsg(muParserHandle_t a_hParser)
{
ParserTag* const p(AsParserTag(a_hParser));
const muChar_t* pMsg = p->exc.GetMsg().c_str();
// C# explodes when pMsg is returned directly. For some reason it can't access
// the memory where the message lies directly.
#ifndef _UNICODE
sprintf(s_tmpOutBuf, "%s", pMsg);
#else
wsprintf(s_tmpOutBuf, _T("%s"), pMsg);
#endif
return s_tmpOutBuf;
}
/** \brief Return the message associated with the last error. */
API_EXPORT(const muChar_t*) mupGetErrorToken(muParserHandle_t a_hParser)
{
ParserTag* const p(AsParserTag(a_hParser));
const muChar_t* pToken = p->exc.GetToken().c_str();
// C# explodes when pMsg is returned directly. For some reason it can't access
// the memory where the message lies directly.
#ifndef _UNICODE
sprintf(s_tmpOutBuf, "%s", pToken);
#else
wsprintf(s_tmpOutBuf, _T("%s"), pToken);
#endif
return s_tmpOutBuf;
}
/** \brief Return the code associated with the last error.
*/
API_EXPORT(int) mupGetErrorCode(muParserHandle_t a_hParser)
{
return AsParserTag(a_hParser)->exc.GetCode();
}
/** \brief Return the position associated with the last error. */
API_EXPORT(int) mupGetErrorPos(muParserHandle_t a_hParser)
{
return (int)AsParserTag(a_hParser)->exc.GetPos();
}
API_EXPORT(muFloat_t*) mupCreateVar()
{
return new muFloat_t(0);
}
API_EXPORT(void) mupReleaseVar(muFloat_t* ptr)
{
delete ptr;
}
#if defined(_MSC_VER)
#pragma warning(pop)
#endif
#endif // MUPARSER_DLL<|fim▁end|> | / \| | \____ \__ \\_ __ \/ ___// __ \_ __ \
| Y Y \ | / |_> > __ \| | \/\___ \\ ___/| | \/
|__|_| /____/| __(____ /__| /____ >\___ >__|
\/ |__| \/ \/ \/ |
<|file_name|>Quest.java<|end_file_name|><|fim▁begin|>package pixlepix.auracascade.data;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
/**
* Created by localmacaccount on 5/31/15.
*/
public class Quest {
//TODO QUEST
public static int nextId;
public final ItemStack target;
public final ItemStack result;
public final int id;
public String string;
public Quest(String string, ItemStack target, ItemStack result) {
this.target = target;
this.result = result;
this.string = string;
this.id = nextId;
nextId++;
}
public boolean hasCompleted(EntityPlayer player) {
// QuestData questData = (QuestData) player.getExtendedProperties(QuestData.EXT_PROP_NAME);
// return questData.completedQuests.contains(this);
return false;
}<|fim▁hole|> public void complete(EntityPlayer player) {
//QuestData questData = (QuestData) player.getExtendedProperties(QuestData.EXT_PROP_NAME);
// questData.completedQuests.add(this);
// AuraCascade.analytics.eventDesign("questComplete", id);
}
}<|fim▁end|> | |
<|file_name|>QuestionService.js<|end_file_name|><|fim▁begin|>'use strict';
<|fim▁hole|>import {Question} from 'common/models.js';
import {Injector} from 'kusema.js';
var I = new Injector('$http', '$q');
var QuestionService = function() {
BaseContentService.call(this, true);
I.init();
this.urlStem = 'api/questions';
}
QuestionService.prototype = Object.create(BaseContentService.prototype, {
model: {writable: false, enumerable: false, value: Question}
});
QuestionService.prototype.getNextTenQuestions = function (requestNumber, group) {
var groupURL = (group) ? ('/'+group) : '';
return I.$http.get(this.urlBase + '/tenMore' + groupURL + '/' + requestNumber)
.then(function(response) {
return this.createClientModels(response.data);
}.bind(this));
};
QuestionService.prototype.getFeed = function(requestNumber, group) {
var groupURL = (group) ? (group+'/') : '';
return I.$http.get(this.urlBase+'/feed/'+groupURL+requestNumber)
.then( (response) => {
if (response.status == 204) {
return I.$q.reject(new Error('No more questions'));
}
return this.createClientModels(response.data)
} );
}
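// Usage sketch (hypothetical caller; `page` and `groupId` are illustrative names):
// questionService.getFeed(page, groupId)
//     .then((questions) => { /* append to the feed */ })
//     .catch((err) => { /* err.message === 'No more questions' when the server returns 204 */ });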
import kusema from 'kusema.js';
kusema.service('questionService', QuestionService);<|fim▁end|> | import BaseContentService from './BaseContentService.js'; |
<|file_name|>currency.py<|end_file_name|><|fim▁begin|># Lookup Bitcoin value from exchanges
from exchanges.bitfinex import Bitfinex
import re<|fim▁hole|>
def bitcoinValue(msg):
val = Bitfinex().get_current_price()
formattedVal = "$" + "{:,.2f}".format(val)
if re.search(r"(?i)moon", msg):
return "To the moon! " + formattedVal
else:
return "Bitcoin: " + formattedVal<|fim▁end|> | |
<|file_name|>sanitizer.py<|end_file_name|><|fim▁begin|># Copyright 2010-2020 Kurt McKee <[email protected]>
# Copyright 2002-2008 Mark Pilgrim
# All rights reserved.
#
# This file is a part of feedparser.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 'AS IS'
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import unicode_literals
import re
from .html import _BaseHTMLProcessor
from .sgml import _SGML_AVAILABLE
from .urls import make_safe_absolute_uri
class _HTMLSanitizer(_BaseHTMLProcessor):
acceptable_elements = {
'a',
'abbr',
'acronym',
'address',
'area',
'article',
'aside',
'audio',
'b',
'big',
'blockquote',
'br',
'button',
'canvas',
'caption',
'center',
'cite',
'code',
'col',
'colgroup',
'command',
'datagrid',
'datalist',
'dd',
'del',
'details',
'dfn',
'dialog',
'dir',
'div',
'dl',
'dt',
'em',
'event-source',
'fieldset',
'figcaption',
'figure',
'font',
'footer',
'form',
'h1',
'h2',
'h3',
'h4',
'h5',
'h6',
'header',
'hr',
'i',
'img',
'input',
'ins',
'kbd',
'keygen',
'label',
'legend',
'li',
'm',
'map',
'menu',
'meter',
'multicol',
'nav',
'nextid',
'noscript',
'ol',
'optgroup',
'option',
'output',
'p',
'pre',
'progress',
'q',
's',
'samp',
'section',
'select',
'small',
'sound',
'source',
'spacer',
'span',
'strike',
'strong',
'sub',
'sup',
'table',
'tbody',
'td',
'textarea',
'tfoot',
'th',
'thead',
'time',
'tr',
'tt',
'u',
'ul',
'var',
'video',
}
acceptable_attributes = {
'abbr',
'accept',
'accept-charset',
'accesskey',
'action',
'align',
'alt',
'autocomplete',
'autofocus',
'axis',
'background',
'balance',
'bgcolor',
'bgproperties',
'border',
'bordercolor',
'bordercolordark',
'bordercolorlight',
'bottompadding',
'cellpadding',
'cellspacing',
'ch',
'challenge',
'char',
'charoff',
'charset',
'checked',
'choff',
'cite',
'class',
'clear',
'color',
'cols',
'colspan',
'compact',
'contenteditable',
'controls',
'coords',
'data',
'datafld',
'datapagesize',
'datasrc',
'datetime',
'default',
'delay',
'dir',
'disabled',
'draggable',
'dynsrc',
'enctype',
'end',
'face',
'for',
'form',
'frame',
'galleryimg',
'gutter',
'headers',
'height',
'hidden',
'hidefocus',
'high',
'href',
'hreflang',
'hspace',
'icon',
'id',
'inputmode',
'ismap',
'keytype',
'label',
'lang',
'leftspacing',
'list',
'longdesc',
'loop',
'loopcount',
'loopend',
'loopstart',
'low',
'lowsrc',
'max',
'maxlength',
'media',
'method',
'min',
'multiple',
'name',
'nohref',
'noshade',
'nowrap',
'open',
'optimum',
'pattern',
'ping',
'point-size',
'poster',
'pqg',
'preload',
'prompt',
'radiogroup',
'readonly',
'rel',
'repeat-max',
'repeat-min',
'replace',
'required',
'rev',
'rightspacing',
'rows',
'rowspan',
'rules',
'scope',
'selected',
'shape',
'size',
'span',
'src',
'start',
'step',
'style',
'summary',
'suppress',
'tabindex',
'target',
'template',
'title',
'toppadding',
'type',
'unselectable',
'urn',
'usemap',
'valign',
'value',
'variable',
'volume',
'vrml',
'vspace',
'width',
'wrap',
'xml:lang',
}
unacceptable_elements_with_end_tag = {
'applet',
'script',
'style',
}
acceptable_css_properties = {
'azimuth',
'background-color',
'border-bottom-color',
'border-collapse',
'border-color',
'border-left-color',
'border-right-color',
'border-top-color',
'clear',
'color',
'cursor',
'direction',
'display',
'elevation',
'float',
'font',
'font-family',
'font-size',
'font-style',
'font-variant',
'font-weight',
'height',
'letter-spacing',
'line-height',
'overflow',
'pause',
'pause-after',
'pause-before',
'pitch',
'pitch-range',
'richness',
'speak',
'speak-header',
'speak-numeral',
'speak-punctuation',
'speech-rate',
'stress',
'text-align',
'text-decoration',
'text-indent',
'unicode-bidi',
'vertical-align',
'voice-family',
'volume',
'white-space',
'width',
}
# survey of common keywords found in feeds
acceptable_css_keywords = {
'!important',
'aqua',
'auto',
'black',
'block',
'blue',
'bold',
'both',
'bottom',
'brown',
'center',
'collapse',
'dashed',
'dotted',
'fuchsia',
'gray',
'green',
'italic',
'left',
'lime',
'maroon',
'medium',
'navy',
'none',
'normal',
'nowrap',
'olive',
'pointer',
'purple',
'red',
'right',
'silver',
'solid',
'teal',
'top',
'transparent',
'underline',
'white',
'yellow',
}
valid_css_values = re.compile(
r'^('
r'#[0-9a-f]+' # Hex values
r'|rgb\(\d+%?,\d*%?,?\d*%?\)?' # RGB values
r'|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?' # Sizes/widths
r')$'
)
mathml_elements = {
'annotation',
'annotation-xml',
'maction',
'maligngroup',
'malignmark',
'math',
'menclose',
'merror',
'mfenced',
'mfrac',
'mglyph',
'mi',
'mlabeledtr',
'mlongdiv',
'mmultiscripts',
'mn',
'mo',
'mover',
'mpadded',
'mphantom',
'mprescripts',
'mroot',
'mrow',
'ms',
'mscarries',
'mscarry',
'msgroup',
'msline',
'mspace',
'msqrt',
'msrow',
'mstack',
'mstyle',
'msub',
'msubsup',
'msup',
'mtable',
'mtd',
'mtext',
'mtr',
'munder',
'munderover',
'none',
'semantics',
}
mathml_attributes = {
'accent',
'accentunder',
'actiontype',<|fim▁hole|> 'altimg',
'altimg-height',
'altimg-valign',
'altimg-width',
'alttext',
'bevelled',
'charalign',
'close',
'columnalign',
'columnlines',
'columnspacing',
'columnspan',
'columnwidth',
'crossout',
'decimalpoint',
'denomalign',
'depth',
'dir',
'display',
'displaystyle',
'edge',
'encoding',
'equalcolumns',
'equalrows',
'fence',
'fontstyle',
'fontweight',
'form',
'frame',
'framespacing',
'groupalign',
'height',
'href',
'id',
'indentalign',
'indentalignfirst',
'indentalignlast',
'indentshift',
'indentshiftfirst',
'indentshiftlast',
'indenttarget',
'infixlinebreakstyle',
'largeop',
'length',
'linebreak',
'linebreakmultchar',
'linebreakstyle',
'lineleading',
'linethickness',
'location',
'longdivstyle',
'lquote',
'lspace',
'mathbackground',
'mathcolor',
'mathsize',
'mathvariant',
'maxsize',
'minlabelspacing',
'minsize',
'movablelimits',
'notation',
'numalign',
'open',
'other',
'overflow',
'position',
'rowalign',
'rowlines',
'rowspacing',
'rowspan',
'rquote',
'rspace',
'scriptlevel',
'scriptminsize',
'scriptsizemultiplier',
'selection',
'separator',
'separators',
'shift',
'side',
'src',
'stackalign',
'stretchy',
'subscriptshift',
'superscriptshift',
'symmetric',
'voffset',
'width',
'xlink:href',
'xlink:show',
'xlink:type',
'xmlns',
'xmlns:xlink',
}
# svgtiny - foreignObject + linearGradient + radialGradient + stop
svg_elements = {
'a',
'animate',
'animateColor',
'animateMotion',
'animateTransform',
'circle',
'defs',
'desc',
'ellipse',
'font-face',
'font-face-name',
'font-face-src',
'foreignObject',
'g',
'glyph',
'hkern',
'line',
'linearGradient',
'marker',
'metadata',
'missing-glyph',
'mpath',
'path',
'polygon',
'polyline',
'radialGradient',
'rect',
'set',
'stop',
'svg',
'switch',
'text',
'title',
'tspan',
'use',
}
# svgtiny + class + opacity + offset + xmlns + xmlns:xlink
svg_attributes = {
'accent-height',
'accumulate',
'additive',
'alphabetic',
'arabic-form',
'ascent',
'attributeName',
'attributeType',
'baseProfile',
'bbox',
'begin',
'by',
'calcMode',
'cap-height',
'class',
'color',
'color-rendering',
'content',
'cx',
'cy',
'd',
'descent',
'display',
'dur',
'dx',
'dy',
'end',
'fill',
'fill-opacity',
'fill-rule',
'font-family',
'font-size',
'font-stretch',
'font-style',
'font-variant',
'font-weight',
'from',
'fx',
'fy',
'g1',
'g2',
'glyph-name',
'gradientUnits',
'hanging',
'height',
'horiz-adv-x',
'horiz-origin-x',
'id',
'ideographic',
'k',
'keyPoints',
'keySplines',
'keyTimes',
'lang',
'marker-end',
'marker-mid',
'marker-start',
'markerHeight',
'markerUnits',
'markerWidth',
'mathematical',
'max',
'min',
'name',
'offset',
'opacity',
'orient',
'origin',
'overline-position',
'overline-thickness',
'panose-1',
'path',
'pathLength',
'points',
'preserveAspectRatio',
'r',
'refX',
'refY',
'repeatCount',
'repeatDur',
'requiredExtensions',
'requiredFeatures',
'restart',
'rotate',
'rx',
'ry',
'slope',
'stemh',
'stemv',
'stop-color',
'stop-opacity',
'strikethrough-position',
'strikethrough-thickness',
'stroke',
'stroke-dasharray',
'stroke-dashoffset',
'stroke-linecap',
'stroke-linejoin',
'stroke-miterlimit',
'stroke-opacity',
'stroke-width',
'systemLanguage',
'target',
'text-anchor',
'to',
'transform',
'type',
'u1',
'u2',
'underline-position',
'underline-thickness',
'unicode',
'unicode-range',
'units-per-em',
'values',
'version',
'viewBox',
'visibility',
'width',
'widths',
'x',
'x-height',
'x1',
'x2',
'xlink:actuate',
'xlink:arcrole',
'xlink:href',
'xlink:role',
'xlink:show',
'xlink:title',
'xlink:type',
'xml:base',
'xml:lang',
'xml:space',
'xmlns',
'xmlns:xlink',
'y',
'y1',
'y2',
'zoomAndPan',
}
svg_attr_map = None
svg_elem_map = None
acceptable_svg_properties = {
'fill',
'fill-opacity',
'fill-rule',
'stroke',
'stroke-linecap',
'stroke-linejoin',
'stroke-opacity',
'stroke-width',
}
def __init__(self, encoding=None, _type='application/xhtml+xml'):
super(_HTMLSanitizer, self).__init__(encoding, _type)
self.unacceptablestack = 0
self.mathmlOK = 0
self.svgOK = 0
def reset(self):
super(_HTMLSanitizer, self).reset()
self.unacceptablestack = 0
self.mathmlOK = 0
self.svgOK = 0
def unknown_starttag(self, tag, attrs):
acceptable_attributes = self.acceptable_attributes
keymap = {}
if tag not in self.acceptable_elements or self.svgOK:
if tag in self.unacceptable_elements_with_end_tag:
self.unacceptablestack += 1
# add implicit namespaces to html5 inline svg/mathml
if self._type.endswith('html'):
if not dict(attrs).get('xmlns'):
if tag == 'svg':
attrs.append(('xmlns', 'http://www.w3.org/2000/svg'))
if tag == 'math':
attrs.append(('xmlns', 'http://www.w3.org/1998/Math/MathML'))
# not otherwise acceptable, perhaps it is MathML or SVG?
if tag == 'math' and ('xmlns', 'http://www.w3.org/1998/Math/MathML') in attrs:
self.mathmlOK += 1
if tag == 'svg' and ('xmlns', 'http://www.w3.org/2000/svg') in attrs:
self.svgOK += 1
        # choose acceptable attributes based on tag class, else bail
if self.mathmlOK and tag in self.mathml_elements:
acceptable_attributes = self.mathml_attributes
elif self.svgOK and tag in self.svg_elements:
# For most vocabularies, lowercasing is a good idea. Many
# svg elements, however, are camel case.
if not self.svg_attr_map:
lower = [attr.lower() for attr in self.svg_attributes]
mix = [a for a in self.svg_attributes if a not in lower]
self.svg_attributes = lower
self.svg_attr_map = {a.lower(): a for a in mix}
lower = [attr.lower() for attr in self.svg_elements]
mix = [a for a in self.svg_elements if a not in lower]
self.svg_elements = lower
self.svg_elem_map = {a.lower(): a for a in mix}
acceptable_attributes = self.svg_attributes
tag = self.svg_elem_map.get(tag, tag)
keymap = self.svg_attr_map
elif tag not in self.acceptable_elements:
return
# declare xlink namespace, if needed
if self.mathmlOK or self.svgOK:
if any((a for a in attrs if a[0].startswith('xlink:'))):
if not ('xmlns:xlink', 'http://www.w3.org/1999/xlink') in attrs:
attrs.append(('xmlns:xlink', 'http://www.w3.org/1999/xlink'))
clean_attrs = []
for key, value in self.normalize_attrs(attrs):
if key == 'style' and 'style' in acceptable_attributes:
clean_value = self.sanitize_style(value)
if clean_value:
clean_attrs.append((key, clean_value))
elif key in acceptable_attributes:
key = keymap.get(key, key)
# make sure the uri uses an acceptable uri scheme
if key == 'href':
value = make_safe_absolute_uri(value)
clean_attrs.append((key, value))
super(_HTMLSanitizer, self).unknown_starttag(tag, clean_attrs)
def unknown_endtag(self, tag):
if tag not in self.acceptable_elements:
if tag in self.unacceptable_elements_with_end_tag:
self.unacceptablestack -= 1
if self.mathmlOK and tag in self.mathml_elements:
if tag == 'math' and self.mathmlOK:
self.mathmlOK -= 1
elif self.svgOK and tag in self.svg_elements:
tag = self.svg_elem_map.get(tag, tag)
if tag == 'svg' and self.svgOK:
self.svgOK -= 1
else:
return
super(_HTMLSanitizer, self).unknown_endtag(tag)
def handle_pi(self, text):
pass
def handle_decl(self, text):
pass
def handle_data(self, text):
if not self.unacceptablestack:
super(_HTMLSanitizer, self).handle_data(text)
def sanitize_style(self, style):
# disallow urls
style = re.compile(r'url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style)
# gauntlet
if not re.match(r"""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style):
return ''
# This replaced a regexp that used re.match and was prone to
# pathological back-tracking.
if re.sub(r"\s*[-\w]+\s*:\s*[^:;]*;?", '', style).strip():
return ''
clean = []
for prop, value in re.findall(r"([-\w]+)\s*:\s*([^:;]*)", style):
if not value:
continue
if prop.lower() in self.acceptable_css_properties:
clean.append(prop + ': ' + value + ';')
elif prop.split('-')[0].lower() in ['background', 'border', 'margin', 'padding']:
for keyword in value.split():
if (
keyword not in self.acceptable_css_keywords
and not self.valid_css_values.match(keyword)
):
break
else:
clean.append(prop + ': ' + value + ';')
elif self.svgOK and prop.lower() in self.acceptable_svg_properties:
clean.append(prop + ': ' + value + ';')
return ' '.join(clean)
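    # Illustrative behaviour (an assumption for clarity, not a test case
    # from this file): sanitize_style('color: red; position: fixed')
    # returns 'color: red;', because 'position' is not listed in
    # acceptable_css_properties and has no whitelisted shorthand prefix.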
def parse_comment(self, i, report=1):
ret = super(_HTMLSanitizer, self).parse_comment(i, report)
if ret >= 0:
return ret
# if ret == -1, this may be a malicious attempt to circumvent
# sanitization, or a page-destroying unclosed comment
match = re.compile(r'--[^>]*>').search(self.rawdata, i+4)
if match:
return match.end()
# unclosed comment; deliberately fail to handle_data()
return len(self.rawdata)
def _sanitize_html(html_source, encoding, _type):
if not _SGML_AVAILABLE:
return html_source
p = _HTMLSanitizer(encoding, _type)
    html_source = html_source.replace('<![CDATA[', '&lt;![CDATA[')
p.feed(html_source)
data = p.output()
data = data.strip().replace('\r\n', '\n')
return data
# Match XML entity declarations.
# Example: <!ENTITY copyright "(C)">
RE_ENTITY_PATTERN = re.compile(br'^\s*<!ENTITY([^>]*?)>', re.MULTILINE)
# Match XML DOCTYPE declarations.
# Example: <!DOCTYPE feed [ ]>
RE_DOCTYPE_PATTERN = re.compile(br'^\s*<!DOCTYPE([^>]*?)>', re.MULTILINE)
# Match safe entity declarations.
# This will allow hexadecimal character references through,
# as well as text, but not arbitrary nested entities.
# Example: cubed "&#179;"
# Example: copyright "(C)"
# Forbidden: explode1 "&explode2;&explode2;"
RE_SAFE_ENTITY_PATTERN = re.compile(br'\s+(\w+)\s+"(&#\w+;|[^&"]*)"')
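# For instance (an illustrative case, not from the original comments): a feed
# that begins with
#   <!DOCTYPE rss SYSTEM "http://my.netscape.com/publish/formats/rss-0.91.dtd">
# yields version 'rss091n' from replace_doctype() below, because the
# lowercased DOCTYPE contains b'netscape'.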
def replace_doctype(data):
"""Strips and replaces the DOCTYPE, returns (rss_version, stripped_data)
rss_version may be 'rss091n' or None
stripped_data is the same XML document with a replaced DOCTYPE
"""
# Divide the document into two groups by finding the location
# of the first element that doesn't begin with '<?' or '<!'.
start = re.search(br'<\w', data)
start = start and start.start() or -1
head, data = data[:start+1], data[start+1:]
# Save and then remove all of the ENTITY declarations.
entity_results = RE_ENTITY_PATTERN.findall(head)
head = RE_ENTITY_PATTERN.sub(b'', head)
# Find the DOCTYPE declaration and check the feed type.
doctype_results = RE_DOCTYPE_PATTERN.findall(head)
doctype = doctype_results and doctype_results[0] or b''
if b'netscape' in doctype.lower():
version = 'rss091n'
else:
version = None
# Re-insert the safe ENTITY declarations if a DOCTYPE was found.
replacement = b''
if len(doctype_results) == 1 and entity_results:
safe_entities = [
e
for e in entity_results
if RE_SAFE_ENTITY_PATTERN.match(e)
]
if safe_entities:
replacement = b'<!DOCTYPE feed [\n<!ENTITY' \
+ b'>\n<!ENTITY '.join(safe_entities) \
+ b'>\n]>'
data = RE_DOCTYPE_PATTERN.sub(replacement, head) + data
# Precompute the safe entities for the loose parser.
safe_entities = {
k.decode('utf-8'): v.decode('utf-8')
for k, v in RE_SAFE_ENTITY_PATTERN.findall(replacement)
}
return version, data, safe_entities<|fim▁end|> | 'align',
'alignmentscope', |
<|file_name|>config.go<|end_file_name|><|fim▁begin|>// Copyright 2020 Red Hat, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package v3_4_experimental
import (
"github.com/coreos/ignition/v2/config/merge"
"github.com/coreos/ignition/v2/config/shared/errors"
"github.com/coreos/ignition/v2/config/util"
prev "github.com/coreos/ignition/v2/config/v3_3"
"github.com/coreos/ignition/v2/config/v3_4_experimental/translate"
"github.com/coreos/ignition/v2/config/v3_4_experimental/types"
"github.com/coreos/ignition/v2/config/validate"
"github.com/coreos/go-semver/semver"
"github.com/coreos/vcontext/report"
)
func Merge(parent, child types.Config) types.Config {
res, _ := merge.MergeStructTranscribe(parent, child)
return res.(types.Config)
}
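// A rough usage sketch (illustrative; variable names are assumed and error
// handling is elided):
//
//	parent, _, _ := Parse(parentBytes)
//	child, _, _ := Parse(childBytes)
//	merged := Merge(parent, child)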
// Parse parses the raw config into a types.Config struct and generates a report of any
// errors, warnings, info, and deprecations it encountered
func Parse(rawConfig []byte) (types.Config, report.Report, error) {
if len(rawConfig) == 0 {
return types.Config{}, report.Report{}, errors.ErrEmpty
}
var config types.Config
if rpt, err := util.HandleParseErrors(rawConfig, &config); err != nil {
return types.Config{}, rpt, err
}<|fim▁hole|> return types.Config{}, report.Report{}, errors.ErrUnknownVersion
}
rpt := validate.ValidateWithContext(config, rawConfig)
if rpt.IsFatal() {
return types.Config{}, rpt, errors.ErrInvalid
}
return config, rpt, nil
}
// ParseCompatibleVersion parses the raw config of version 3.4.0-experimental or
// lesser into a 3.4-exp types.Config struct and generates a report of any errors,
// warnings, info, and deprecations it encountered
func ParseCompatibleVersion(raw []byte) (types.Config, report.Report, error) {
version, rpt, err := util.GetConfigVersion(raw)
if err != nil {
return types.Config{}, rpt, err
}
if version == types.MaxVersion {
return Parse(raw)
}
prevCfg, r, err := prev.ParseCompatibleVersion(raw)
if err != nil {
return types.Config{}, r, err
}
return translate.Translate(prevCfg), r, nil
}<|fim▁end|> |
version, err := semver.NewVersion(config.Ignition.Version)
if err != nil || *version != types.MaxVersion { |
<|file_name|>hashmap.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Unordered containers, implemented as hash-tables (`HashSet` and `HashMap` types)
use std::container::{Container, Mutable, Map, MutableMap, Set, MutableSet};
use std::clone::Clone;
use std::cmp::{Eq, TotalEq, Equiv, max};
use std::default::Default;
use std::fmt;
use std::fmt::Show;
use std::hash::{Hash, Hasher, sip};
use std::iter;
use std::iter::{Iterator, FromIterator, Extendable};
use std::iter::{FilterMap, Chain, Repeat, Zip};
use std::iter::{range, range_inclusive};
use std::mem::replace;
use std::num;
use std::option::{Option, Some, None};
use rand;
use rand::Rng;
use std::result::{Ok, Err};
use std::slice::ImmutableVector;
mod table {
extern crate libc;
use std::clone::Clone;
use std::cmp::Eq;
use std::hash::{Hash, Hasher};
use std::kinds::marker;
use std::num::CheckedMul;
use std::option::{Option, Some, None};
use std::prelude::Drop;
use std::ptr;
use std::ptr::RawPtr;
use std::rt::global_heap;
use std::intrinsics::{size_of, transmute, move_val_init};
use std::iter::{Iterator, range_step_inclusive};
static EMPTY_BUCKET: u64 = 0u64;
/// The raw hashtable, providing safe-ish access to the unzipped and highly
/// optimized arrays of hashes, keys, and values.
///
/// This design uses less memory and is a lot faster than the naive
/// `~[Option<u64, K, V>]`, because we don't pay for the overhead of an
/// option on every element, and we get a generally more cache-aware design.
///
/// Key invariants of this structure:
///
/// - if hashes[i] == EMPTY_BUCKET, then keys[i] and vals[i] have
/// 'undefined' contents. Don't read from them. This invariant is
/// enforced outside this module with the [EmptyIndex], [FullIndex],
/// and [SafeHash] types/concepts.
///
/// - An `EmptyIndex` is only constructed for a bucket at an index with
/// a hash of EMPTY_BUCKET.
///
/// - A `FullIndex` is only constructed for a bucket at an index with a
/// non-EMPTY_BUCKET hash.
///
/// - A `SafeHash` is only constructed for non-`EMPTY_BUCKET` hash. We get
    /// around hashes of zero by changing them to 0x8000_0000_0000_0000, which will
/// likely hash to the same bucket, but not be represented as "empty".
///
/// - All three "arrays represented by pointers" are the same length:
/// `capacity`. This is set at creation and never changes. The arrays
/// are unzipped to save space (we don't have to pay for the padding
    /// between odd sized elements, such as in a map from u64 to u8), and to
/// be more cache aware (scanning through 8 hashes brings in 2 cache
/// lines, since they're all right beside each other).
///
/// You can kind of think of this module/data structure as a safe wrapper
/// around just the "table" part of the hashtable. It enforces some
/// invariants at the type level and employs some performance trickery,
/// but in general is just a tricked out `Vec<Option<u64, K, V>>`.
///
/// FIXME(cgaebel):
///
/// Feb 11, 2014: This hashtable was just implemented, and, hard as I tried,
/// isn't yet totally safe. There's a "known exploit" that you can create
/// multiple FullIndexes for a bucket, `take` one, and then still `take`
/// the other causing undefined behavior. Currently, there's no story
/// for how to protect against this statically. Therefore, there are asserts
/// on `take`, `get`, `get_mut`, and `put` which check the bucket state.
/// With time, and when we're confident this works correctly, they should
/// be removed. Also, the bounds check in `peek` is especially painful,
/// as that's called in the innermost loops of the hashtable and has the
/// potential to be a major performance drain. Remove this too.
///
/// Or, better than remove, only enable these checks for debug builds.
/// There's currently no "debug-only" asserts in rust, so if you're reading
/// this and going "what? of course there are debug-only asserts!", then
/// please make this use them!
pub struct RawTable<K, V> {
capacity: uint,
size: uint,
hashes: *mut u64,
keys: *mut K,
vals: *mut V,
}
/// Represents an index into a `RawTable` with no key or value in it.
pub struct EmptyIndex {
idx: int,
nocopy: marker::NoCopy,
}
/// Represents an index into a `RawTable` with a key, value, and hash
/// in it.
pub struct FullIndex {
idx: int,
hash: SafeHash,
nocopy: marker::NoCopy,
}
impl FullIndex {
/// Since we get the hash for free whenever we check the bucket state,
/// this function is provided for fast access, letting us avoid making
/// redundant trips back to the hashtable.
pub fn hash(&self) -> SafeHash { self.hash }
/// Same comment as with `hash`.
pub fn raw_index(&self) -> uint { self.idx as uint }
}
/// Represents the state of a bucket: it can either have a key/value
/// pair (be full) or not (be empty). You cannot `take` empty buckets,
/// and you cannot `put` into full buckets.
pub enum BucketState {
Empty(EmptyIndex),
Full(FullIndex),
}
/// A hash that is not zero, since we use that to represent empty buckets.
#[deriving(Eq)]
pub struct SafeHash {
hash: u64,
}
impl SafeHash {
/// Peek at the hash value, which is guaranteed to be non-zero.
pub fn inspect(&self) -> u64 { self.hash }
}
/// We need to remove hashes of 0. That's reserved for empty buckets.
/// This function wraps up `hash_keyed` to be the only way outside this
/// module to generate a SafeHash.
pub fn make_hash<T: Hash<S>, S, H: Hasher<S>>(hasher: &H, t: &T) -> SafeHash {
match hasher.hash(t) {
// This constant is exceedingly likely to hash to the same
// bucket, but it won't be counted as empty!
EMPTY_BUCKET => SafeHash { hash: 0x8000_0000_0000_0000 },
h => SafeHash { hash: h },
}
}
impl<K, V> RawTable<K, V> {
/// Does not initialize the buckets. The caller should ensure they,
/// at the very least, set every hash to EMPTY_BUCKET.
unsafe fn new_uninitialized(capacity: uint) -> RawTable<K, V> {
let hashes_size =
capacity.checked_mul(&size_of::<u64>()).expect("capacity overflow");
let keys_size =
capacity.checked_mul(&size_of::< K >()).expect("capacity overflow");
let vals_size =
capacity.checked_mul(&size_of::< V >()).expect("capacity overflow");
/*
The following code was my first pass at making RawTable only
allocate a single buffer, since that's all it needs. There's
no logical reason for this to require three calls to malloc.
However, I'm not convinced the code below is correct. If you
want to take a stab at it, please do! The alignment is
especially tricky to get right, especially if you need more
alignment than malloc guarantees.
let hashes_offset = 0;
let keys_offset = align_size(hashes_offset + hashes_size, keys_align);
let vals_offset = align_size(keys_offset + keys_size, vals_align);
let end = vals_offset + vals_size;
let buffer = global_heap::malloc_raw(end);
let hashes = buffer.offset(hashes_offset) as *mut u64;
let keys = buffer.offset(keys_offset) as *mut K;
let vals = buffer.offset(vals_offset) as *mut V;
*/
let hashes = global_heap::malloc_raw(hashes_size) as *mut u64;
let keys = global_heap::malloc_raw(keys_size) as *mut K;
let vals = global_heap::malloc_raw(vals_size) as *mut V;
RawTable {
capacity: capacity,
size: 0,
hashes: hashes,
keys: keys,
vals: vals,
}
}
/// Creates a new raw table from a given capacity. All buckets are
/// initially empty.
pub fn new(capacity: uint) -> RawTable<K, V> {
unsafe {
let ret = RawTable::new_uninitialized(capacity);
for i in range(0, ret.capacity() as int) {
*ret.hashes.offset(i) = EMPTY_BUCKET;
}
ret
}
}
/// Reads a bucket at a given index, returning an enum indicating whether
/// there's anything there or not. You need to match on this enum to get
/// the appropriate types to pass on to most of the rest of the functions
/// in this module.
pub fn peek(&self, index: uint) -> BucketState {
// FIXME #12049
if cfg!(test) { assert!(index < self.capacity) }
let idx = index as int;
let hash = unsafe { *self.hashes.offset(idx) };
let nocopy = marker::NoCopy;
match hash {
EMPTY_BUCKET =>
Empty(EmptyIndex {
idx: idx,
nocopy: nocopy
}),
full_hash =>
Full(FullIndex {
idx: idx,
hash: SafeHash { hash: full_hash },
nocopy: nocopy,
})
}
}
/// Gets references to the key and value at a given index.
pub fn read<'a>(&'a self, index: &FullIndex) -> (&'a K, &'a V) {
let idx = index.idx;
unsafe {
// FIXME #12049
if cfg!(test) { assert!(*self.hashes.offset(idx) != EMPTY_BUCKET) }
(&'a *self.keys.offset(idx),
&'a *self.vals.offset(idx))
}
}
/// Gets references to the key and value at a given index, with the
/// value's reference being mutable.
pub fn read_mut<'a>(&'a mut self, index: &FullIndex) -> (&'a K, &'a mut V) {
let idx = index.idx;
unsafe {
// FIXME #12049
if cfg!(test) { assert!(*self.hashes.offset(idx) != EMPTY_BUCKET) }
(&'a *self.keys.offset(idx),
&'a mut *self.vals.offset(idx))
}
}
/// Read everything, mutably.
pub fn read_all_mut<'a>(&'a mut self, index: &FullIndex)
-> (&'a mut SafeHash, &'a mut K, &'a mut V) {
let idx = index.idx;
// I'm totally abusing the fact that a pointer to any u64 in the
// hashtable at a full index is a safe hash. Thanks to `SafeHash`
// just being a wrapper around u64, this is true. It's just really
// really really really unsafe. However, the exposed API is now
// impossible to get wrong. You cannot insert an empty hash into
// this slot now.
unsafe {
// FIXME #12049
if cfg!(test) { assert!(*self.hashes.offset(idx) != EMPTY_BUCKET) }
(transmute(self.hashes.offset(idx)),
&'a mut *self.keys.offset(idx),
&'a mut *self.vals.offset(idx))
}
}
/// Puts a key and value pair, along with the key's hash, into a given
/// index in the hashtable. Note how the `EmptyIndex` is 'moved' into this
/// function, because that slot will no longer be empty when we return!
/// Because we know this, a FullIndex is returned for later use, pointing
/// to the newly-filled slot in the hashtable.
///
/// Use `make_hash` to construct a `SafeHash` to pass to this function.
pub fn put(&mut self, index: EmptyIndex, hash: SafeHash, k: K, v: V) -> FullIndex {
let idx = index.idx;
unsafe {
// FIXME #12049
if cfg!(test) { assert!(*self.hashes.offset(idx) == EMPTY_BUCKET) }
*self.hashes.offset(idx) = hash.inspect();
move_val_init(&mut *self.keys.offset(idx), k);
move_val_init(&mut *self.vals.offset(idx), v);
}
self.size += 1;
FullIndex { idx: idx, hash: hash, nocopy: marker::NoCopy }
}
/// Removes a key and value from the hashtable.
///
/// This works similarly to `put`, building an `EmptyIndex` out of the
/// taken FullIndex.
pub fn take(&mut self, index: FullIndex) -> (EmptyIndex, K, V) {
let idx = index.idx;
unsafe {
// FIXME #12049
if cfg!(test) { assert!(*self.hashes.offset(idx) != EMPTY_BUCKET) }
let hash_ptr = self.hashes.offset(idx);
*hash_ptr = EMPTY_BUCKET;
// Drop the mutable constraint.
let keys = self.keys as *K;
let vals = self.vals as *V;
let k = ptr::read(keys.offset(idx));
let v = ptr::read(vals.offset(idx));
self.size -= 1;
(EmptyIndex { idx: idx, nocopy: marker::NoCopy }, k, v)
}
}
/// The hashtable's capacity, similar to a vector's.
pub fn capacity(&self) -> uint {
self.capacity
}
/// The number of elements ever `put` in the hashtable, minus the number
/// of elements ever `take`n.
pub fn size(&self) -> uint {
self.size
}
pub fn iter<'a>(&'a self) -> Entries<'a, K, V> {
Entries { table: self, idx: 0 }
}
pub fn mut_iter<'a>(&'a mut self) -> MutEntries<'a, K, V> {
MutEntries { table: self, idx: 0 }
}
pub fn move_iter(self) -> MoveEntries<K, V> {
MoveEntries { table: self, idx: 0 }
}
}
pub struct Entries<'a, K, V> {
table: &'a RawTable<K, V>,
idx: uint,
}
pub struct MutEntries<'a, K, V> {
table: &'a mut RawTable<K, V>,
idx: uint,
}
pub struct MoveEntries<K, V> {
table: RawTable<K, V>,
idx: uint,
}
impl<'a, K, V> Iterator<(&'a K, &'a V)> for Entries<'a, K, V> {
fn next(&mut self) -> Option<(&'a K, &'a V)> {
while self.idx < self.table.capacity() {
let i = self.idx;
self.idx += 1;
match self.table.peek(i) {
Empty(_) => {},
Full(idx) => return Some(self.table.read(&idx))
}
}
None
}
fn size_hint(&self) -> (uint, Option<uint>) {
let size = self.table.size() - self.idx;
(size, Some(size))
}
}
impl<'a, K, V> Iterator<(&'a K, &'a mut V)> for MutEntries<'a, K, V> {
fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
while self.idx < self.table.capacity() {
let i = self.idx;
self.idx += 1;
match self.table.peek(i) {
Empty(_) => {},
// the transmute here fixes:
// error: lifetime of `self` is too short to guarantee its contents
// can be safely reborrowed
Full(idx) => unsafe {
return Some(transmute(self.table.read_mut(&idx)))
}
}
}
None
}
fn size_hint(&self) -> (uint, Option<uint>) {
let size = self.table.size() - self.idx;
(size, Some(size))
}
}
impl<K, V> Iterator<(SafeHash, K, V)> for MoveEntries<K, V> {
fn next(&mut self) -> Option<(SafeHash, K, V)> {
while self.idx < self.table.capacity() {
let i = self.idx;
self.idx += 1;
match self.table.peek(i) {
Empty(_) => {},
Full(idx) => {
let h = idx.hash();
let (_, k, v) = self.table.take(idx);
return Some((h, k, v));
}
}
}
None
}
fn size_hint(&self) -> (uint, Option<uint>) {
let size = self.table.size();
(size, Some(size))
}
}
impl<K: Clone, V: Clone> Clone for RawTable<K, V> {
fn clone(&self) -> RawTable<K, V> {
unsafe {
let mut new_ht = RawTable::new_uninitialized(self.capacity());
for i in range(0, self.capacity()) {
match self.peek(i) {
Empty(_) => {
*new_ht.hashes.offset(i as int) = EMPTY_BUCKET;
},
Full(idx) => {
let hash = idx.hash().inspect();
let (k, v) = self.read(&idx);
*new_ht.hashes.offset(i as int) = hash;
move_val_init(&mut *new_ht.keys.offset(i as int), (*k).clone());
move_val_init(&mut *new_ht.vals.offset(i as int), (*v).clone());
}
}
}
new_ht.size = self.size();
new_ht
}
}
}
#[unsafe_destructor]
impl<K, V> Drop for RawTable<K, V> {
fn drop(&mut self) {
// Ideally, this should be in reverse, since we're likely to have
// partially taken some elements out with `.move_iter()` from the
// front.
for i in range_step_inclusive(self.capacity as int - 1, 0, -1) {
// Check if the size is 0, so we don't do a useless scan when
// dropping empty tables such as on resize.
if self.size == 0 { break }
match self.peek(i as uint) {
Empty(_) => {},
Full(idx) => { self.take(idx); }
}
}
assert!(self.size == 0);
unsafe {
libc::free(self.vals as *mut libc::c_void);
libc::free(self.keys as *mut libc::c_void);
libc::free(self.hashes as *mut libc::c_void);
}
}
}
}
// We use this type for the load factor, to avoid floating point operations
// which might not be supported efficiently on some hardware.
//
// We use small u16s here to save space in the hashtable. They get upcasted
// to u64s when we actually use them.
type Fraction = (u16, u16); // (numerator, denominator)
// multiplication by a fraction, in a way that won't generally overflow for
// array sizes outside a factor of 10 of U64_MAX.
fn fraction_mul(lhs: uint, (num, den): Fraction) -> uint {
(((lhs as u64) * (num as u64)) / (den as u64)) as uint
}
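// For example (illustrative arithmetic, not from the original comments):
// fraction_mul(32, (9, 10)) == 28, so a table with capacity 32 is due to
// grow once it holds 28 elements.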
static INITIAL_LOG2_CAP: uint = 5;
static INITIAL_CAPACITY: uint = 1 << INITIAL_LOG2_CAP; // 2^5
static INITIAL_LOAD_FACTOR: Fraction = (9, 10);
// The main performance trick in this hashmap is called Robin Hood Hashing.
// It gains its excellent performance from one key invariant:
//
// If an insertion collides with an existing element, and that element's
// "probe distance" (how far away the element is from its ideal location)
// is higher than how far we've already probed, swap the elements.
//
// This massively lowers variance in probe distance, and allows us to get very
// high load factors with good performance. The 90% load factor I use is rather
// conservative.
//
// > Why a load factor of 90%?
//
// In general, all the distances to initial buckets will converge on the mean.
// At a load factor of α, the odds of finding the target bucket after k
// probes are approximately 1-α^k. If we set this equal to 50% (since we converge
// on the mean) and set k=8 (64-byte cache line / 8-byte hash), α=0.92. I round
// this down to 0.90 to make the math easier on the CPU and avoid its FPU.
// Since on average we start the probing in the middle of a cache line, this
// strategy pulls in two cache lines of hashes on every lookup. I think that's
// pretty good, but if you want to trade off some space, it could go down to one
// cache line on average with an α of 0.84.
//
// > Wait, what? Where did you get 1-α^k from?
//
// On the first probe, your odds of a collision with an existing element is α.
// The odds of doing this twice in a row are approximately α^2. For three times,
// α^3, etc. Therefore, the odds of colliding k times are α^k. The odds of NOT
// colliding after k tries are 1-α^k.
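//
// As a quick numeric check (illustrative figures): with α = 0.90 and k = 8,
// α^8 ≈ 0.43, so roughly 1 - 0.43 ≈ 57% of lookups resolve within the first
// cache line of 8 hashes, just above the 50% target described above.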
//
// Future Improvements (FIXME!)
// ============================
//
// Allow the load factor to be changed dynamically and/or at initialization.
// I'm having trouble figuring out a sane API for this without exporting my
// hackish fraction type, while still avoiding floating point.
//
// Also, would it be possible for us to reuse storage when growing the
// underlying table? This is exactly the use case for 'realloc', and may
// be worth exploring.
//
// Future Optimizations (FIXME!)
// =============================
//
// The paper cited below mentions an implementation which keeps track of the
// distance-to-initial-bucket histogram. I'm suspicious of this approach because
// it requires maintaining an internal map. If this map were replaced with a
// hashmap, it would be faster, but now our data structure is self-referential
// and blows up. Also, this allows very good first guesses, but array accesses
// are no longer linear and in one direction, as we have now. There is also
// memory and cache pressure that this map would entail that would be very
// difficult to properly see in a microbenchmark.
//
// Another possible design choice that I made without any real reason is
// parameterizing the raw table over keys and values. Technically, all we need
// is the size and alignment of keys and values, and the code should be just as
// efficient (well, we might need one for power-of-two size and one for not...).
// This has the potential to reduce code bloat in rust executables, without
// really losing anything except 4 words (key size, key alignment, val size,
// val alignment) which can be passed in to every call of a `RawTable` function.
// This would definitely be an avenue worth exploring if people start complaining
// about the size of rust executables.
//
// There's also two optimizations that have been omitted regarding how the
// hashtable allocates. The first is that a hashtable which never has an element
// inserted should not allocate. I'm suspicious of this one, because supporting
// that internally gains no performance over just using an
// `Option<HashMap<K, V>>`, and is significantly more complicated.
//
// The second omitted allocation optimization is that right now we allocate three
// arrays to back the hashtable. This is wasteful. In theory, we only need one
// array, and each of the three original arrays can just be slices of it. This
// would reduce the pressure on the allocator, and will play much nicer with the
// rest of the system. An initial implementation is commented out in
// `table::RawTable::new`, but I'm not confident it works for all sane alignments,
// especially if a type needs more alignment than `malloc` provides.
/// A hash map implementation which uses linear probing with Robin
/// Hood bucket stealing.
///
/// The hashes are all keyed by the task-local random number generator
/// on creation by default; this means the ordering of the keys is
/// randomized, but makes the tables more resistant to
/// denial-of-service attacks (Hash DoS). This behaviour can be
/// overridden with one of the constructors.
///
/// It is required that the keys implement the `Eq` and `Hash` traits, although
/// this can frequently be achieved by using `#[deriving(Eq, Hash)]`.
///
/// Relevant papers/articles:
///
/// 1. Pedro Celis. ["Robin Hood Hashing"](https://cs.uwaterloo.ca/research/tr/1986/CS-86-14.pdf)
/// 2. Emmanuel Goossaert. ["Robin Hood
/// hashing"](http://codecapsule.com/2013/11/11/robin-hood-hashing/)
/// 3. Emmanuel Goossaert. ["Robin Hood hashing: backward shift
/// deletion"](http://codecapsule.com/2013/11/17/robin-hood-hashing-backward-shift-deletion/)
///
/// # Example
///
/// ```rust
/// use collections::HashMap;
///
/// // type inference lets us omit an explicit type signature (which
/// // would be `HashMap<&str, &str>` in this example).
/// let mut book_reviews = HashMap::new();
///
/// // review some books.
/// book_reviews.insert("Adventures of Hucklebury Fin", "My favorite book.");
/// book_reviews.insert("Grimms' Fairy Tales", "Masterpiece.");
/// book_reviews.insert("Pride and Prejudice", "Very enjoyable.");
/// book_reviews.insert("The Adventures of Sherlock Holmes", "Eye lyked it alot.");
///
/// // check for a specific one.
/// if !book_reviews.contains_key(& &"Les Misérables") {
/// println!("We've got {} reviews, but Les Misérables ain't one.",
/// book_reviews.len());
/// }
///
/// // oops, this review has a lot of spelling mistakes, let's delete it.
/// book_reviews.remove(& &"The Adventures of Sherlock Holmes");
///
/// // look up the values associated with some keys.
/// let to_find = ["Pride and Prejudice", "Alice's Adventure in Wonderland"];
/// for book in to_find.iter() {
/// match book_reviews.find(book) {
/// Some(review) => println!("{}: {}", *book, *review),
/// None => println!("{} is unreviewed.", *book)
/// }
/// }
///
/// // iterate over everything.
/// for (book, review) in book_reviews.iter() {
/// println!("{}: \"{}\"", *book, *review);
/// }
/// ```
#[deriving(Clone)]
pub struct HashMap<K, V, H = sip::SipHasher> {
// All hashes are keyed on these values, to prevent hash collision attacks.
hasher: H,
// When size == grow_at, we double the capacity.
grow_at: uint,
// The capacity must never drop below this.
minimum_capacity: uint,
table: table::RawTable<K, V>,
// We keep this at the end since it's 4-bytes, unlike everything else
// in this struct. Might as well save a word of padding!
load_factor: Fraction,
}
/// Get the number of elements which will force the capacity to grow.
fn grow_at(capacity: uint, load_factor: Fraction) -> uint {
fraction_mul(capacity, load_factor)
}
impl<K: TotalEq + Hash<S>, V, S, H: Hasher<S>> HashMap<K, V, H> {
/// Get the number of elements which will force the capacity to shrink.
/// When size == self.shrink_at(), we halve the capacity.
fn shrink_at(&self) -> uint {
self.table.capacity() >> 2
}
// Probe the `idx`th bucket for a given hash, returning the index of the
// target bucket.
//
// This exploits the power-of-two size of the hashtable. As long as this
// is always true, we can use a bitmask of cap-1 to do modular arithmetic.
//
// Prefer to use this with increasing values of `idx` rather than repeatedly
// calling `probe_next`. This reduces data-dependencies between loops, which
// can help the optimizer, and certainly won't hurt it. `probe_next` is
// simply for convenience, and is no more efficient than `probe`.
fn probe(&self, hash: &table::SafeHash, idx: uint) -> uint {
let hash_mask = self.table.capacity() - 1;
// So I heard a rumor that unsigned overflow is safe in rust..
((hash.inspect() as uint) + idx) & hash_mask
}
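    // Worked example (illustrative values): with capacity 8 the mask is
    // 0b111, so a hash of 13 probed at idx 2 inspects bucket
    // (13 + 2) & 7 == 7, and the probe after that wraps around to bucket 0.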
// Generate the next probe in a sequence. Prefer to use 'probe' by itself,
// but this can sometimes be useful.
fn probe_next(&self, probe: uint) -> uint {
let hash_mask = self.table.capacity() - 1;
(probe + 1) & hash_mask
}
fn make_hash<X: Hash<S>>(&self, x: &X) -> table::SafeHash {
table::make_hash(&self.hasher, x)
}
/// Get the distance of the bucket at the given index that it lies
/// from its 'ideal' location.
///
/// In the cited blog posts above, this is called the "distance to
    /// initial bucket", or DIB.
fn bucket_distance(&self, index_of_elem: &table::FullIndex) -> uint {
// where the hash of the element that happens to reside at
// `index_of_elem` tried to place itself first.
let first_probe_index = self.probe(&index_of_elem.hash(), 0);
let raw_index = index_of_elem.raw_index();
if first_probe_index <= raw_index {
// probe just went forward
raw_index - first_probe_index
} else {
// probe wrapped around the hashtable
raw_index + (self.table.capacity() - first_probe_index)
}
}
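    // For instance (illustrative values): in a table of capacity 8, an
    // element whose hash first probed bucket 6 but which now sits at raw
    // index 1 has wrapped around, giving a distance of 1 + (8 - 6) == 3.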
/// Search for a pre-hashed key.
fn search_hashed_generic(&self, hash: &table::SafeHash, is_match: |&K| -> bool)
-> Option<table::FullIndex> {
for num_probes in range(0u, self.table.size()) {
let probe = self.probe(hash, num_probes);
let idx = match self.table.peek(probe) {
table::Empty(_) => return None, // hit an empty bucket
table::Full(idx) => idx
};
// We can finish the search early if we hit any bucket
// with a lower distance to initial bucket than we've probed.
if self.bucket_distance(&idx) < num_probes { return None }
// If the hash doesn't match, it can't be this one..
if hash != &idx.hash() { continue }
let (k, _) = self.table.read(&idx);
// If the key doesn't match, it can't be this one..
if !is_match(k) { continue }
return Some(idx);
}
return None
}
fn search_hashed(&self, hash: &table::SafeHash, k: &K) -> Option<table::FullIndex> {
self.search_hashed_generic(hash, |k_| *k == *k_)
}
fn search_equiv<Q: Hash<S> + Equiv<K>>(&self, q: &Q) -> Option<table::FullIndex> {
self.search_hashed_generic(&self.make_hash(q), |k| q.equiv(k))
}
/// Search for a key, yielding the index if it's found in the hashtable.
/// If you already have the hash for the key lying around, use
/// search_hashed.
fn search(&self, k: &K) -> Option<table::FullIndex> {
self.search_hashed(&self.make_hash(k), k)
}
fn pop_internal(&mut self, starting_index: table::FullIndex) -> Option<V> {
let starting_probe = starting_index.raw_index();
let ending_probe = {
let mut probe = self.probe_next(starting_probe);
for _ in range(0u, self.table.size()) {
match self.table.peek(probe) {
table::Empty(_) => {}, // empty bucket. this is the end of our shifting.
table::Full(idx) => {
// Bucket that isn't us, which has a non-zero probe distance.
// This isn't the ending index, so keep searching.
if self.bucket_distance(&idx) != 0 {
probe = self.probe_next(probe);
continue;
}
// if we do have a bucket_distance of zero, we're at the end
// of what we need to shift.
}
}
break;
}
probe
};
let (_, _, retval) = self.table.take(starting_index);
let mut probe = starting_probe;
let mut next_probe = self.probe_next(probe);
// backwards-shift all the elements after our newly-deleted one.
while next_probe != ending_probe {
match self.table.peek(next_probe) {
table::Empty(_) => {
// nothing to shift in. just empty it out.
match self.table.peek(probe) {
table::Empty(_) => {},
table::Full(idx) => { self.table.take(idx); }
}
},
table::Full(next_idx) => {
// something to shift. move it over!
let next_hash = next_idx.hash();
let (_, next_key, next_val) = self.table.take(next_idx);
match self.table.peek(probe) {
table::Empty(idx) => {
self.table.put(idx, next_hash, next_key, next_val);
},
table::Full(idx) => {
let (emptyidx, _, _) = self.table.take(idx);
self.table.put(emptyidx, next_hash, next_key, next_val);
}
}
}
}
probe = next_probe;
next_probe = self.probe_next(next_probe);
}
// Done the backwards shift, but there's still an element left!
// Empty it out.
match self.table.peek(probe) {
table::Empty(_) => {},
table::Full(idx) => { self.table.take(idx); }
}
// Now we're done all our shifting. Return the value we grabbed
// earlier.
return Some(retval);
}
/// Like `pop`, but can operate on any type that is equivalent to a key.
#[experimental]
pub fn pop_equiv<Q:Hash<S> + Equiv<K>>(&mut self, k: &Q) -> Option<V> {
if self.table.size() == 0 {
return None
}
let potential_new_size = self.table.size() - 1;
self.make_some_room(potential_new_size);
let starting_index = match self.search_equiv(k) {
Some(idx) => idx,
None => return None,
};
self.pop_internal(starting_index)
}
}
impl<K: TotalEq + Hash<S>, V, S, H: Hasher<S>> Container for HashMap<K, V, H> {
/// Return the number of elements in the map
fn len(&self) -> uint { self.table.size() }
}
impl<K: TotalEq + Hash<S>, V, S, H: Hasher<S>> Mutable for HashMap<K, V, H> {
/// Clear the map, removing all key-value pairs.
fn clear(&mut self) {
self.minimum_capacity = self.table.size();
for i in range(0, self.table.capacity()) {
match self.table.peek(i) {
table::Empty(_) => {},
table::Full(idx) => { self.table.take(idx); }
}
}
}
}
impl<K: TotalEq + Hash<S>, V, S, H: Hasher<S>> Map<K, V> for HashMap<K, V, H> {
fn find<'a>(&'a self, k: &K) -> Option<&'a V> {
self.search(k).map(|idx| {
let (_, v) = self.table.read(&idx);
v
})
}
fn contains_key(&self, k: &K) -> bool {
self.search(k).is_some()
}
}
impl<K: TotalEq + Hash<S>, V, S, H: Hasher<S>> MutableMap<K, V> for HashMap<K, V, H> {
fn find_mut<'a>(&'a mut self, k: &K) -> Option<&'a mut V> {
match self.search(k) {
None => None,
Some(idx) => {
let (_, v) = self.table.read_mut(&idx);
Some(v)
}
}
}
fn swap(&mut self, k: K, v: V) -> Option<V> {
let hash = self.make_hash(&k);
let potential_new_size = self.table.size() + 1;
self.make_some_room(potential_new_size);
for dib in range_inclusive(0u, self.table.size()) {
let probe = self.probe(&hash, dib);
let idx = match self.table.peek(probe) {
table::Empty(idx) => {
// Found a hole!
self.table.put(idx, hash, k, v);
return None;
},
table::Full(idx) => idx
};
if idx.hash() == hash {
let (bucket_k, bucket_v) = self.table.read_mut(&idx);
if k == *bucket_k {
// Found an existing value.
return Some(replace(bucket_v, v));
}
}
let probe_dib = self.bucket_distance(&idx);
if probe_dib < dib {
// Found a luckier bucket. This implies that the key does not
// already exist in the hashtable. Just do a robin hood
// insertion, then.
self.robin_hood(idx, probe_dib, hash, k, v);
return None;
}
}
// We really shouldn't be here.
fail!("Internal HashMap error: Out of space.");
}
fn pop(&mut self, k: &K) -> Option<V> {
if self.table.size() == 0 {
return None
}
let potential_new_size = self.table.size() - 1;
self.make_some_room(potential_new_size);
let starting_index = match self.search(k) {
Some(idx) => idx,
None => return None,
};
self.pop_internal(starting_index)
}
}
impl<K: Hash + TotalEq, V> HashMap<K, V, sip::SipHasher> {
/// Create an empty HashMap.
pub fn new() -> HashMap<K, V, sip::SipHasher> {
HashMap::with_capacity(INITIAL_CAPACITY)
}
pub fn with_capacity(capacity: uint) -> HashMap<K, V, sip::SipHasher> {
let mut r = rand::task_rng();
let r0 = r.gen();
let r1 = r.gen();
let hasher = sip::SipHasher::new_with_keys(r0, r1);
HashMap::with_capacity_and_hasher(capacity, hasher)
}
}
impl<K: TotalEq + Hash<S>, V, S, H: Hasher<S>> HashMap<K, V, H> {
pub fn with_hasher(hasher: H) -> HashMap<K, V, H> {
HashMap::with_capacity_and_hasher(INITIAL_CAPACITY, hasher)
}
/// Create an empty HashMap with space for at least `capacity`
/// elements, using `hasher` to hash the keys.
///
/// Warning: `hasher` is normally randomly generated, and
/// is designed to allow HashMaps to be resistant to attacks that
/// cause many collisions and very poor performance. Setting it
/// manually using this function can expose a DoS attack vector.
pub fn with_capacity_and_hasher(capacity: uint, hasher: H) -> HashMap<K, V, H> {
let cap = num::next_power_of_two(max(INITIAL_CAPACITY, capacity));
HashMap {
hasher: hasher,
load_factor: INITIAL_LOAD_FACTOR,
grow_at: grow_at(cap, INITIAL_LOAD_FACTOR),
minimum_capacity: cap,
table: table::RawTable::new(cap),
}
}
/// The hashtable will never try to shrink below this size. You can use
/// this function to reduce reallocations if your hashtable frequently
/// grows and shrinks by large amounts.
///
/// This function has no effect on the operational semantics of the
/// hashtable, only on performance.
pub fn reserve(&mut self, new_minimum_capacity: uint) {
let cap = num::next_power_of_two(
max(INITIAL_CAPACITY, new_minimum_capacity));
self.minimum_capacity = cap;
if self.table.capacity() < cap {
self.resize(cap);
}
}
/// Resizes the internal vectors to a new capacity. It's your responsibility to:
/// 1) Make sure the new capacity is enough for all the elements, accounting
/// for the load factor.
/// 2) Ensure new_capacity is a power of two.
fn resize(&mut self, new_capacity: uint) {
assert!(self.table.size() <= new_capacity);
assert!((new_capacity - 1) & new_capacity == 0);
self.grow_at = grow_at(new_capacity, self.load_factor);
let old_table = replace(&mut self.table, table::RawTable::new(new_capacity));
let old_size = old_table.size();
for (h, k, v) in old_table.move_iter() {
self.manual_insert_hashed_nocheck(h, k, v);
}
assert_eq!(self.table.size(), old_size);
}
/// Performs any necessary resize operations, such that there's space for
/// new_size elements.
fn make_some_room(&mut self, new_size: uint) {
let should_shrink = new_size <= self.shrink_at();
let should_grow = self.grow_at <= new_size;
if should_grow {
let new_capacity = self.table.capacity() << 1;
self.resize(new_capacity);
} else if should_shrink {
let new_capacity = self.table.capacity() >> 1;
// Never shrink below the minimum capacity
if self.minimum_capacity <= new_capacity {
self.resize(new_capacity);
}
}
}
/// Perform robin hood bucket stealing at the given 'index'. You must
/// also pass that probe's "distance to initial bucket" so we don't have
/// to recalculate it, as well as the total number of probes already done
/// so we have some sort of upper bound on the number of probes to do.
///
/// 'hash', 'k', and 'v' are the elements to robin hood into the hashtable.
fn robin_hood(&mut self, mut index: table::FullIndex, mut dib_param: uint,
mut hash: table::SafeHash, mut k: K, mut v: V) {
'outer: loop {
let (old_hash, old_key, old_val) = {
let (old_hash_ref, old_key_ref, old_val_ref) =
self.table.read_all_mut(&index);
let old_hash = replace(old_hash_ref, hash);
let old_key = replace(old_key_ref, k);
let old_val = replace(old_val_ref, v);
(old_hash, old_key, old_val)
};
let mut probe = self.probe_next(index.raw_index());
for dib in range(dib_param + 1, self.table.size()) {
let full_index = match self.table.peek(probe) {
table::Empty(idx) => {
// Finally. A hole!
self.table.put(idx, old_hash, old_key, old_val);
return;
},
table::Full(idx) => idx
};
let probe_dib = self.bucket_distance(&full_index);
// Robin hood! Steal the spot.
if probe_dib < dib {
index = full_index;
dib_param = probe_dib;
hash = old_hash;
k = old_key;
v = old_val;
continue 'outer;
}
probe = self.probe_next(probe);
}
fail!("HashMap fatal error: 100% load factor?");
}
}
/// Manually insert a pre-hashed key-value pair, without first checking
/// that there's enough room in the buckets. Returns a reference to the
    /// newly inserted value.
///
/// If the key already exists, the hashtable will be returned untouched
/// and a reference to the existing element will be returned.
fn manual_insert_hashed_nocheck<'a>(
&'a mut self, hash: table::SafeHash, k: K, v: V) -> &'a mut V {
for dib in range_inclusive(0u, self.table.size()) {
let probe = self.probe(&hash, dib);
let idx = match self.table.peek(probe) {
table::Empty(idx) => {
// Found a hole!
let fullidx = self.table.put(idx, hash, k, v);
let (_, val) = self.table.read_mut(&fullidx);
return val;
},
table::Full(idx) => idx
};
if idx.hash() == hash {
let (bucket_k, bucket_v) = self.table.read_mut(&idx);
// FIXME #12147 the conditional return confuses
// borrowck if we return bucket_v directly
let bv: *mut V = bucket_v;
if k == *bucket_k {
// Key already exists. Get its reference.
return unsafe {&mut *bv};
}
}
let probe_dib = self.bucket_distance(&idx);
if probe_dib < dib {
// Found a luckier bucket than me. Better steal his spot.
self.robin_hood(idx, probe_dib, hash, k, v);
// Now that it's stolen, just read the value's pointer
// right out of the table!
match self.table.peek(probe) {
table::Empty(_) => fail!("Just stole a spot, but now that spot's empty."),
table::Full(idx) => {
let (_, v) = self.table.read_mut(&idx);
return v;
}
}
}
}
// We really shouldn't be here.
fail!("Internal HashMap error: Out of space.");
}
fn manual_insert_hashed<'a>(&'a mut self, hash: table::SafeHash, k: K, v: V) -> &'a mut V {
let potential_new_size = self.table.size() + 1;
self.make_some_room(potential_new_size);
self.manual_insert_hashed_nocheck(hash, k, v)
}
/// Inserts an element, returning a reference to that element inside the
/// hashtable.
fn manual_insert<'a>(&'a mut self, k: K, v: V) -> &'a mut V {
let hash = self.make_hash(&k);
self.manual_insert_hashed(hash, k, v)
}
/// Return the value corresponding to the key in the map, or insert
/// and return the value if it doesn't exist.
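    ///
    /// A minimal sketch (illustrative; the key and value are arbitrary):
    ///
    /// ```rust
    /// let mut counts = HashMap::new();
    /// *counts.find_or_insert("sample", 0) += 1;
    /// ```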
pub fn find_or_insert<'a>(&'a mut self, k: K, v: V) -> &'a mut V {
match self.search(&k) {
Some(idx) => {
let (_, v_ref) = self.table.read_mut(&idx);
v_ref
},
None => self.manual_insert(k, v)
}
}
/// Return the value corresponding to the key in the map, or create,
/// insert, and return a new value if it doesn't exist.
pub fn find_or_insert_with<'a>(&'a mut self, k: K, f: |&K| -> V)
-> &'a mut V {
match self.search(&k) {
Some(idx) => {
let (_, v_ref) = self.table.read_mut(&idx);
v_ref
},
None => {
let v = f(&k);
self.manual_insert(k, v)
}
}
}
/// Insert a key-value pair into the map if the key is not already present.
/// Otherwise, modify the existing value for the key.
/// Returns the new or modified value for the key.
pub fn insert_or_update_with<'a>(
&'a mut self,
k: K,
v: V,
f: |&K, &mut V|)
-> &'a mut V {
match self.search(&k) {
None => self.manual_insert(k, v),
Some(idx) => {
let (_, v_ref) = self.table.read_mut(&idx);
f(&k, v_ref);
v_ref
}
}
}
/// Retrieves a value for the given key, failing if the key is not present.
pub fn get<'a>(&'a self, k: &K) -> &'a V {
match self.find(k) {
Some(v) => v,
None => fail!("No entry found for key: {:?}", k)
}
}
/// Retrieves a (mutable) value for the given key, failing if the key is not present.
pub fn get_mut<'a>(&'a mut self, k: &K) -> &'a mut V {
match self.find_mut(k) {
Some(v) => v,
None => fail!("No entry found for key: {:?}", k)
}
}
/// Return true if the map contains a value for the specified key,
/// using equivalence.
pub fn contains_key_equiv<Q: Hash<S> + Equiv<K>>(&self, key: &Q) -> bool {
self.search_equiv(key).is_some()
}
/// Return the value corresponding to the key in the map, using
/// equivalence.
pub fn find_equiv<'a, Q: Hash<S> + Equiv<K>>(&'a self, k: &Q) -> Option<&'a V> {
match self.search_equiv(k) {
None => None,
Some(idx) => {
let (_, v_ref) = self.table.read(&idx);
Some(v_ref)
}
}<|fim▁hole|>
/// An iterator visiting all keys in arbitrary order.
/// Iterator element type is &'a K.
pub fn keys<'a>(&'a self) -> Keys<'a, K, V> {
self.iter().map(|(k, _v)| k)
}
/// An iterator visiting all values in arbitrary order.
/// Iterator element type is &'a V.
pub fn values<'a>(&'a self) -> Values<'a, K, V> {
self.iter().map(|(_k, v)| v)
}
/// An iterator visiting all key-value pairs in arbitrary order.
/// Iterator element type is (&'a K, &'a V).
pub fn iter<'a>(&'a self) -> Entries<'a, K, V> {
self.table.iter()
}
/// An iterator visiting all key-value pairs in arbitrary order,
/// with mutable references to the values.
/// Iterator element type is (&'a K, &'a mut V).
pub fn mut_iter<'a>(&'a mut self) -> MutEntries<'a, K, V> {
self.table.mut_iter()
}
/// Creates a consuming iterator, that is, one that moves each key-value
/// pair out of the map in arbitrary order. The map cannot be used after
/// calling this.
pub fn move_iter(self) -> MoveEntries<K, V> {
self.table.move_iter().map(|(_, k, v)| (k, v))
}
}
impl<K: TotalEq + Hash<S>, V: Clone, S, H: Hasher<S>> HashMap<K, V, H> {
/// Like `find`, but returns a copy of the value.
pub fn find_copy(&self, k: &K) -> Option<V> {
self.find(k).map(|v| (*v).clone())
}
/// Like `get`, but returns a copy of the value.
pub fn get_copy(&self, k: &K) -> V {
(*self.get(k)).clone()
}
}
impl<K: TotalEq + Hash<S>, V: Eq, S, H: Hasher<S>> Eq for HashMap<K, V, H> {
fn eq(&self, other: &HashMap<K, V, H>) -> bool {
if self.len() != other.len() { return false; }
self.iter().all(|(key, value)| {
match other.find(key) {
None => false,
Some(v) => *value == *v
}
})
}
}
impl<K: TotalEq + Hash<S> + Show, V: Show, S, H: Hasher<S>> Show for HashMap<K, V, H> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f.buf, r"\{"));
for (i, (k, v)) in self.iter().enumerate() {
if i != 0 { try!(write!(f.buf, ", ")); }
try!(write!(f.buf, "{}: {}", *k, *v));
}
write!(f.buf, r"\}")
}
}
impl<K: TotalEq + Hash<S>, V, S, H: Hasher<S> + Default> Default for HashMap<K, V, H> {
fn default() -> HashMap<K, V, H> {
HashMap::with_capacity_and_hasher(INITIAL_CAPACITY, Default::default())
}
}
/// HashMap iterator
pub type Entries<'a, K, V> = table::Entries<'a, K, V>;
/// HashMap mutable values iterator
pub type MutEntries<'a, K, V> = table::MutEntries<'a, K, V>;
/// HashMap move iterator
pub type MoveEntries<K, V> =
iter::Map<'static, (table::SafeHash, K, V), (K, V), table::MoveEntries<K, V>>;
/// HashMap keys iterator
pub type Keys<'a, K, V> =
iter::Map<'static, (&'a K, &'a V), &'a K, Entries<'a, K, V>>;
/// HashMap values iterator
pub type Values<'a, K, V> =
iter::Map<'static, (&'a K, &'a V), &'a V, Entries<'a, K, V>>;
impl<K: TotalEq + Hash<S>, V, S, H: Hasher<S> + Default> FromIterator<(K, V)> for HashMap<K, V, H> {
fn from_iter<T: Iterator<(K, V)>>(iter: T) -> HashMap<K, V, H> {
let (lower, _) = iter.size_hint();
let mut map = HashMap::with_capacity_and_hasher(lower, Default::default());
map.extend(iter);
map
}
}
impl<K: TotalEq + Hash<S>, V, S, H: Hasher<S> + Default> Extendable<(K, V)> for HashMap<K, V, H> {
fn extend<T: Iterator<(K, V)>>(&mut self, mut iter: T) {
for (k, v) in iter {
self.insert(k, v);
}
}
}
/// HashSet iterator
pub type SetItems<'a, K> =
iter::Map<'static, (&'a K, &'a ()), &'a K, Entries<'a, K, ()>>;
/// HashSet move iterator
pub type SetMoveItems<K> =
iter::Map<'static, (K, ()), K, MoveEntries<K, ()>>;
/// An implementation of a hash set using the underlying representation of a
/// HashMap where the value is (). As with the `HashMap` type, a `HashSet`
/// requires that the elements implement the `Eq` and `Hash` traits.
#[deriving(Clone)]
pub struct HashSet<T, H = sip::SipHasher> {
map: HashMap<T, (), H>
}
impl<T: TotalEq + Hash<S>, S, H: Hasher<S>> Eq for HashSet<T, H> {
// FIXME #11998: Since the value is a (), and `find` returns a Some(&()),
// we trigger #11998 when matching on it. I've fallen back to manual
// iteration until this is fixed.
fn eq(&self, other: &HashSet<T, H>) -> bool {
if self.len() != other.len() { return false; }
self.iter().all(|key| other.map.contains_key(key))
}
}
impl<T: TotalEq + Hash<S>, S, H: Hasher<S>> Container for HashSet<T, H> {
/// Return the number of elements in the set
fn len(&self) -> uint { self.map.len() }
}
impl<T: TotalEq + Hash<S>, S, H: Hasher<S>> Mutable for HashSet<T, H> {
/// Clear the set, removing all values.
fn clear(&mut self) { self.map.clear() }
}
impl<T: TotalEq + Hash<S>, S, H: Hasher<S>> Set<T> for HashSet<T, H> {
/// Return true if the set contains a value
fn contains(&self, value: &T) -> bool { self.map.search(value).is_some() }
/// Return true if the set has no elements in common with `other`.
/// This is equivalent to checking for an empty intersection.
fn is_disjoint(&self, other: &HashSet<T, H>) -> bool {
self.iter().all(|v| !other.contains(v))
}
/// Return true if the set is a subset of another
fn is_subset(&self, other: &HashSet<T, H>) -> bool {
self.iter().all(|v| other.contains(v))
}
/// Return true if the set is a superset of another
fn is_superset(&self, other: &HashSet<T, H>) -> bool {
other.is_subset(self)
}
}
impl<T: TotalEq + Hash<S>, S, H: Hasher<S>> MutableSet<T> for HashSet<T, H> {
/// Add a value to the set. Return true if the value was not already
/// present in the set.
fn insert(&mut self, value: T) -> bool { self.map.insert(value, ()) }
/// Remove a value from the set. Return true if the value was
/// present in the set.
fn remove(&mut self, value: &T) -> bool { self.map.remove(value) }
}
impl<T: Hash + TotalEq> HashSet<T, sip::SipHasher> {
/// Create an empty HashSet
pub fn new() -> HashSet<T, sip::SipHasher> {
HashSet::with_capacity(INITIAL_CAPACITY)
}
/// Create an empty HashSet with space for at least `n` elements in
/// the hash table.
pub fn with_capacity(capacity: uint) -> HashSet<T, sip::SipHasher> {
HashSet { map: HashMap::with_capacity(capacity) }
}
}
impl<T: TotalEq + Hash<S>, S, H: Hasher<S>> HashSet<T, H> {
pub fn with_hasher(hasher: H) -> HashSet<T, H> {
HashSet::with_capacity_and_hasher(INITIAL_CAPACITY, hasher)
}
/// Create an empty HashSet with space for at least `capacity`
/// elements in the hash table, using `hasher` to hash the keys.
///
/// Warning: `hasher` is normally randomly generated, and
/// is designed to allow `HashSet`s to be resistant to attacks that
/// cause many collisions and very poor performance. Setting it
/// manually using this function can expose a DoS attack vector.
pub fn with_capacity_and_hasher(capacity: uint, hasher: H) -> HashSet<T, H> {
HashSet { map: HashMap::with_capacity_and_hasher(capacity, hasher) }
}
/// Reserve space for at least `n` elements in the hash table.
pub fn reserve(&mut self, n: uint) {
self.map.reserve(n)
}
/// Returns true if the hash set contains a value equivalent to the
/// given query value.
pub fn contains_equiv<Q: Hash<S> + Equiv<T>>(&self, value: &Q) -> bool {
self.map.contains_key_equiv(value)
}
/// An iterator visiting all elements in arbitrary order.
/// Iterator element type is &'a T.
pub fn iter<'a>(&'a self) -> SetItems<'a, T> {
self.map.keys()
}
/// Creates a consuming iterator, that is, one that moves each value out
/// of the set in arbitrary order. The set cannot be used after calling
/// this.
pub fn move_iter(self) -> SetMoveItems<T> {
self.map.move_iter().map(|(k, _)| k)
}
/// Visit the values representing the difference
pub fn difference<'a>(&'a self, other: &'a HashSet<T, H>) -> SetAlgebraItems<'a, T, H> {
Repeat::new(other)
.zip(self.iter())
.filter_map(|(other, elt)| {
if !other.contains(elt) { Some(elt) } else { None }
})
}
/// Visit the values representing the symmetric difference
pub fn symmetric_difference<'a>(&'a self, other: &'a HashSet<T, H>)
-> Chain<SetAlgebraItems<'a, T, H>, SetAlgebraItems<'a, T, H>> {
self.difference(other).chain(other.difference(self))
}
/// Visit the values representing the intersection
pub fn intersection<'a>(&'a self, other: &'a HashSet<T, H>)
-> SetAlgebraItems<'a, T, H> {
Repeat::new(other)
.zip(self.iter())
.filter_map(|(other, elt)| {
if other.contains(elt) { Some(elt) } else { None }
})
}
/// Visit the values representing the union
pub fn union<'a>(&'a self, other: &'a HashSet<T, H>)
-> Chain<SetItems<'a, T>, SetAlgebraItems<'a, T, H>> {
self.iter().chain(other.difference(self))
}
}
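// Illustrative sketch (assumption, not from the original source): the set
// algebra methods above return lazy iterators, so results are collected
// explicitly, e.g.
//
//     let a: HashSet<int> = vec!(1, 2, 3).move_iter().collect();
//     let b: HashSet<int> = vec!(2, 3, 4).move_iter().collect();
//     let diff: Vec<int> = a.difference(&b).map(|&x| x).collect();
//     assert_eq!(diff, vec!(1));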
impl<T: TotalEq + Hash<S> + fmt::Show, S, H: Hasher<S>> fmt::Show for HashSet<T, H> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f.buf, r"\{"));
for (i, x) in self.iter().enumerate() {
if i != 0 { try!(write!(f.buf, ", ")); }
try!(write!(f.buf, "{}", *x));
}
write!(f.buf, r"\}")
}
}
impl<T: TotalEq + Hash<S>, S, H: Hasher<S> + Default> FromIterator<T> for HashSet<T, H> {
fn from_iter<I: Iterator<T>>(iter: I) -> HashSet<T, H> {
let (lower, _) = iter.size_hint();
let mut set = HashSet::with_capacity_and_hasher(lower, Default::default());
set.extend(iter);
set
}
}
impl<T: TotalEq + Hash<S>, S, H: Hasher<S> + Default> Extendable<T> for HashSet<T, H> {
fn extend<I: Iterator<T>>(&mut self, mut iter: I) {
for k in iter {
self.insert(k);
}
}
}
impl<T: TotalEq + Hash> Default for HashSet<T, sip::SipHasher> {
fn default() -> HashSet<T> { HashSet::new() }
}
// `Repeat` is used to feed the filter closure an explicit capture
// of a reference to the other set
/// Set operations iterator
pub type SetAlgebraItems<'a, T, H> =
FilterMap<'static, (&'a HashSet<T, H>, &'a T), &'a T,
Zip<Repeat<&'a HashSet<T, H>>, SetItems<'a, T>>>;
#[cfg(test)]
mod test_map {
use super::HashMap;
use std::cmp::Equiv;
use std::hash::Hash;
use std::iter::{Iterator,range_inclusive,range_step_inclusive};
use std::local_data;
use std::vec;
struct KindaIntLike(int);
impl Equiv<int> for KindaIntLike {
fn equiv(&self, other: &int) -> bool {
let KindaIntLike(this) = *self;
this == *other
}
}
impl<S: Writer> Hash<S> for KindaIntLike {
fn hash(&self, state: &mut S) {
let KindaIntLike(this) = *self;
this.hash(state)
}
}
#[test]
fn test_create_capacity_zero() {
let mut m = HashMap::with_capacity(0);
assert!(m.insert(1, 1));
assert!(m.contains_key(&1));
assert!(!m.contains_key(&0));
}
#[test]
fn test_insert() {
let mut m = HashMap::new();
assert_eq!(m.len(), 0);
assert!(m.insert(1, 2));
assert_eq!(m.len(), 1);
assert!(m.insert(2, 4));
assert_eq!(m.len(), 2);
assert_eq!(*m.find(&1).unwrap(), 2);
assert_eq!(*m.find(&2).unwrap(), 4);
}
local_data_key!(drop_vector: vec::Vec<int>)
#[deriving(Hash, Eq, TotalEq)]
struct Dropable {
k: uint
}
impl Dropable {
fn new(k: uint) -> Dropable {
local_data::get_mut(drop_vector,
|v| { v.unwrap().as_mut_slice()[k] += 1; });
Dropable { k: k }
}
}
impl Drop for Dropable {
fn drop(&mut self) {
local_data::get_mut(drop_vector, |v|
{ v.unwrap().as_mut_slice()[self.k] -= 1; });
}
}
#[test]
fn test_drops() {
local_data::set(drop_vector, vec::Vec::from_elem(200, 0));
{
let mut m = HashMap::new();
local_data::get(drop_vector, |v| {
for i in range(0u, 200) {
assert_eq!(v.unwrap().as_slice()[i], 0);
}
});
for i in range(0u, 100) {
let d1 = Dropable::new(i);
let d2 = Dropable::new(i+100);
m.insert(d1, d2);
}
local_data::get(drop_vector, |v| {
for i in range(0u, 200) {
assert_eq!(v.unwrap().as_slice()[i], 1);
}
});
for i in range(0u, 50) {
let k = Dropable::new(i);
let v = m.pop(&k);
assert!(v.is_some());
local_data::get(drop_vector, |v| {
assert_eq!(v.unwrap().as_slice()[i], 1);
assert_eq!(v.unwrap().as_slice()[i+100], 1);
});
}
local_data::get(drop_vector, |v| {
for i in range(0u, 50) {
assert_eq!(v.unwrap().as_slice()[i], 0);
assert_eq!(v.unwrap().as_slice()[i+100], 0);
}
for i in range(50u, 100) {
assert_eq!(v.unwrap().as_slice()[i], 1);
assert_eq!(v.unwrap().as_slice()[i+100], 1);
}
});
}
local_data::get(drop_vector, |v| {
for i in range(0u, 200) {
assert_eq!(v.unwrap().as_slice()[i], 0);
}
});
}
#[test]
fn test_empty_pop() {
let mut m: HashMap<int, bool> = HashMap::new();
assert_eq!(m.pop(&0), None);
}
#[test]
fn test_lots_of_insertions() {
let mut m = HashMap::new();
// Try this a few times to make sure we never screw up the hashmap's
// internal state.
for _ in range(0, 10) {
assert!(m.is_empty());
for i in range_inclusive(1, 1000) {
assert!(m.insert(i, i));
for j in range_inclusive(1, i) {
let r = m.find(&j);
assert_eq!(r, Some(&j));
}
for j in range_inclusive(i+1, 1000) {
let r = m.find(&j);
assert_eq!(r, None);
}
}
for i in range_inclusive(1001, 2000) {
assert!(!m.contains_key(&i));
}
// remove forwards
for i in range_inclusive(1, 1000) {
assert!(m.remove(&i));
for j in range_inclusive(1, i) {
assert!(!m.contains_key(&j));
}
for j in range_inclusive(i+1, 1000) {
assert!(m.contains_key(&j));
}
}
for i in range_inclusive(1, 1000) {
assert!(!m.contains_key(&i));
}
for i in range_inclusive(1, 1000) {
assert!(m.insert(i, i));
}
// remove backwards
for i in range_step_inclusive(1000, 1, -1) {
assert!(m.remove(&i));
for j in range_inclusive(i, 1000) {
assert!(!m.contains_key(&j));
}
for j in range_inclusive(1, i-1) {
assert!(m.contains_key(&j));
}
}
}
}
#[test]
fn test_find_mut() {
let mut m = HashMap::new();
assert!(m.insert(1, 12));
assert!(m.insert(2, 8));
assert!(m.insert(5, 14));
let new = 100;
match m.find_mut(&5) {
None => fail!(), Some(x) => *x = new
}
assert_eq!(m.find(&5), Some(&new));
}
#[test]
fn test_insert_overwrite() {
let mut m = HashMap::new();
assert!(m.insert(1, 2));
assert_eq!(*m.find(&1).unwrap(), 2);
assert!(!m.insert(1, 3));
assert_eq!(*m.find(&1).unwrap(), 3);
}
#[test]
fn test_insert_conflicts() {
let mut m = HashMap::with_capacity(4);
assert!(m.insert(1, 2));
assert!(m.insert(5, 3));
assert!(m.insert(9, 4));
assert_eq!(*m.find(&9).unwrap(), 4);
assert_eq!(*m.find(&5).unwrap(), 3);
assert_eq!(*m.find(&1).unwrap(), 2);
}
#[test]
fn test_conflict_remove() {
let mut m = HashMap::with_capacity(4);
assert!(m.insert(1, 2));
assert_eq!(*m.find(&1).unwrap(), 2);
assert!(m.insert(5, 3));
assert_eq!(*m.find(&1).unwrap(), 2);
assert_eq!(*m.find(&5).unwrap(), 3);
assert!(m.insert(9, 4));
assert_eq!(*m.find(&1).unwrap(), 2);
assert_eq!(*m.find(&5).unwrap(), 3);
assert_eq!(*m.find(&9).unwrap(), 4);
assert!(m.remove(&1));
assert_eq!(*m.find(&9).unwrap(), 4);
assert_eq!(*m.find(&5).unwrap(), 3);
}
#[test]
fn test_is_empty() {
let mut m = HashMap::with_capacity(4);
assert!(m.insert(1, 2));
assert!(!m.is_empty());
assert!(m.remove(&1));
assert!(m.is_empty());
}
#[test]
fn test_pop() {
let mut m = HashMap::new();
m.insert(1, 2);
assert_eq!(m.pop(&1), Some(2));
assert_eq!(m.pop(&1), None);
}
#[test]
#[allow(experimental)]
fn test_pop_equiv() {
let mut m = HashMap::new();
m.insert(1, 2);
assert_eq!(m.pop_equiv(&KindaIntLike(1)), Some(2));
assert_eq!(m.pop_equiv(&KindaIntLike(1)), None);
}
#[test]
fn test_swap() {
let mut m = HashMap::new();
assert_eq!(m.swap(1, 2), None);
assert_eq!(m.swap(1, 3), Some(2));
assert_eq!(m.swap(1, 4), Some(3));
}
#[test]
fn test_move_iter() {
let hm = {
let mut hm = HashMap::new();
hm.insert('a', 1);
hm.insert('b', 2);
hm
};
let v = hm.move_iter().collect::<Vec<(char, int)>>();
assert!([('a', 1), ('b', 2)] == v.as_slice() || [('b', 2), ('a', 1)] == v.as_slice());
}
#[test]
fn test_iterate() {
let mut m = HashMap::with_capacity(4);
for i in range(0u, 32) {
assert!(m.insert(i, i*2));
}
assert_eq!(m.len(), 32);
let mut observed = 0;
for (k, v) in m.iter() {
assert_eq!(*v, *k * 2);
observed |= 1 << *k;
}
assert_eq!(observed, 0xFFFF_FFFF);
}
#[test]
fn test_keys() {
let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
let map = vec.move_iter().collect::<HashMap<int, char>>();
let keys = map.keys().map(|&k| k).collect::<Vec<int>>();
assert_eq!(keys.len(), 3);
assert!(keys.contains(&1));
assert!(keys.contains(&2));
assert!(keys.contains(&3));
}
#[test]
fn test_values() {
let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
let map = vec.move_iter().collect::<HashMap<int, char>>();
let values = map.values().map(|&v| v).collect::<Vec<char>>();
assert_eq!(values.len(), 3);
assert!(values.contains(&'a'));
assert!(values.contains(&'b'));
assert!(values.contains(&'c'));
}
#[test]
fn test_find() {
let mut m = HashMap::new();
assert!(m.find(&1).is_none());
m.insert(1, 2);
match m.find(&1) {
None => fail!(),
Some(v) => assert!(*v == 2)
}
}
#[test]
fn test_eq() {
let mut m1 = HashMap::new();
m1.insert(1, 2);
m1.insert(2, 3);
m1.insert(3, 4);
let mut m2 = HashMap::new();
m2.insert(1, 2);
m2.insert(2, 3);
assert!(m1 != m2);
m2.insert(3, 4);
assert_eq!(m1, m2);
}
#[test]
fn test_expand() {
let mut m = HashMap::new();
assert_eq!(m.len(), 0);
assert!(m.is_empty());
let mut i = 0u;
let old_resize_at = m.grow_at;
while old_resize_at == m.grow_at {
m.insert(i, i);
i += 1;
}
assert_eq!(m.len(), i);
assert!(!m.is_empty());
}
#[test]
fn test_find_equiv() {
let mut m = HashMap::new();
let (foo, bar, baz) = (1,2,3);
m.insert(~"foo", foo);
m.insert(~"bar", bar);
m.insert(~"baz", baz);
assert_eq!(m.find_equiv(&("foo")), Some(&foo));
assert_eq!(m.find_equiv(&("bar")), Some(&bar));
assert_eq!(m.find_equiv(&("baz")), Some(&baz));
assert_eq!(m.find_equiv(&("qux")), None);
}
#[test]
fn test_from_iter() {
let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let map: HashMap<int, int> = xs.iter().map(|&x| x).collect();
for &(k, v) in xs.iter() {
assert_eq!(map.find(&k), Some(&v));
}
}
}
#[cfg(test)]
mod test_set {
use super::HashSet;
use std::container::Container;
use std::slice::ImmutableEqVector;
#[test]
fn test_disjoint() {
let mut xs = HashSet::new();
let mut ys = HashSet::new();
assert!(xs.is_disjoint(&ys));
assert!(ys.is_disjoint(&xs));
assert!(xs.insert(5));
assert!(ys.insert(11));
assert!(xs.is_disjoint(&ys));
assert!(ys.is_disjoint(&xs));
assert!(xs.insert(7));
assert!(xs.insert(19));
assert!(xs.insert(4));
assert!(ys.insert(2));
assert!(ys.insert(-11));
assert!(xs.is_disjoint(&ys));
assert!(ys.is_disjoint(&xs));
assert!(ys.insert(7));
assert!(!xs.is_disjoint(&ys));
assert!(!ys.is_disjoint(&xs));
}
#[test]
fn test_subset_and_superset() {
let mut a = HashSet::new();
assert!(a.insert(0));
assert!(a.insert(5));
assert!(a.insert(11));
assert!(a.insert(7));
let mut b = HashSet::new();
assert!(b.insert(0));
assert!(b.insert(7));
assert!(b.insert(19));
assert!(b.insert(250));
assert!(b.insert(11));
assert!(b.insert(200));
assert!(!a.is_subset(&b));
assert!(!a.is_superset(&b));
assert!(!b.is_subset(&a));
assert!(!b.is_superset(&a));
assert!(b.insert(5));
assert!(a.is_subset(&b));
assert!(!a.is_superset(&b));
assert!(!b.is_subset(&a));
assert!(b.is_superset(&a));
}
#[test]
fn test_iterate() {
let mut a = HashSet::new();
for i in range(0u, 32) {
assert!(a.insert(i));
}
let mut observed = 0;
for k in a.iter() {
observed |= 1 << *k;
}
assert_eq!(observed, 0xFFFF_FFFF);
}
#[test]
fn test_intersection() {
let mut a = HashSet::new();
let mut b = HashSet::new();
assert!(a.insert(11));
assert!(a.insert(1));
assert!(a.insert(3));
assert!(a.insert(77));
assert!(a.insert(103));
assert!(a.insert(5));
assert!(a.insert(-5));
assert!(b.insert(2));
assert!(b.insert(11));
assert!(b.insert(77));
assert!(b.insert(-9));
assert!(b.insert(-42));
assert!(b.insert(5));
assert!(b.insert(3));
let mut i = 0;
let expected = [3, 5, 11, 77];
for x in a.intersection(&b) {
assert!(expected.contains(x));
i += 1
}
assert_eq!(i, expected.len());
}
#[test]
fn test_difference() {
let mut a = HashSet::new();
let mut b = HashSet::new();
assert!(a.insert(1));
assert!(a.insert(3));
assert!(a.insert(5));
assert!(a.insert(9));
assert!(a.insert(11));
assert!(b.insert(3));
assert!(b.insert(9));
let mut i = 0;
let expected = [1, 5, 11];
for x in a.difference(&b) {
assert!(expected.contains(x));
i += 1
}
assert_eq!(i, expected.len());
}
#[test]
fn test_symmetric_difference() {
let mut a = HashSet::new();
let mut b = HashSet::new();
assert!(a.insert(1));
assert!(a.insert(3));
assert!(a.insert(5));
assert!(a.insert(9));
assert!(a.insert(11));
assert!(b.insert(-2));
assert!(b.insert(3));
assert!(b.insert(9));
assert!(b.insert(14));
assert!(b.insert(22));
let mut i = 0;
let expected = [-2, 1, 5, 11, 14, 22];
for x in a.symmetric_difference(&b) {
assert!(expected.contains(x));
i += 1
}
assert_eq!(i, expected.len());
}
#[test]
fn test_union() {
let mut a = HashSet::new();
let mut b = HashSet::new();
assert!(a.insert(1));
assert!(a.insert(3));
assert!(a.insert(5));
assert!(a.insert(9));
assert!(a.insert(11));
assert!(a.insert(16));
assert!(a.insert(19));
assert!(a.insert(24));
assert!(b.insert(-2));
assert!(b.insert(1));
assert!(b.insert(5));
assert!(b.insert(9));
assert!(b.insert(13));
assert!(b.insert(19));
let mut i = 0;
let expected = [-2, 1, 3, 5, 9, 11, 13, 16, 19, 24];
for x in a.union(&b) {
assert!(expected.contains(x));
i += 1
}
assert_eq!(i, expected.len());
}
#[test]
fn test_from_iter() {
let xs = [1, 2, 3, 4, 5, 6, 7, 8, 9];
let set: HashSet<int> = xs.iter().map(|&x| x).collect();
for x in xs.iter() {
assert!(set.contains(x));
}
}
#[test]
fn test_move_iter() {
let hs = {
let mut hs = HashSet::new();
hs.insert('a');
hs.insert('b');
hs
};
let v = hs.move_iter().collect::<Vec<char>>();
assert!(['a', 'b'] == v.as_slice() || ['b', 'a'] == v.as_slice());
}
#[test]
fn test_eq() {
// These constants once happened to expose a bug in insert().
// I'm keeping them around to prevent a regression.
let mut s1 = HashSet::new();
s1.insert(1);
s1.insert(2);
s1.insert(3);
let mut s2 = HashSet::new();
s2.insert(1);
s2.insert(2);
assert!(s1 != s2);
s2.insert(3);
assert_eq!(s1, s2);
}
#[test]
fn test_show() {
let mut set: HashSet<int> = HashSet::new();
let empty: HashSet<int> = HashSet::new();
set.insert(1);
set.insert(2);
let set_str = format!("{}", set);
assert!(set_str == ~"{1, 2}" || set_str == ~"{2, 1}");
assert_eq!(format!("{}", empty), ~"{}");
}
}
#[cfg(test)]
mod bench {
extern crate test;
use self::test::Bencher;
use std::iter::{range_inclusive};
#[bench]
fn insert(b: &mut Bencher) {
use super::HashMap;
let mut m = HashMap::new();
for i in range_inclusive(1, 1000) {
m.insert(i, i);
}
let mut k = 1001;
b.iter(|| {
m.insert(k, k);
k += 1;
});
}
#[bench]
fn find_existing(b: &mut Bencher) {
use super::HashMap;
let mut m = HashMap::new();
for i in range_inclusive(1, 1000) {
m.insert(i, i);
}
b.iter(|| {
m.contains_key(&412);
});
}
#[bench]
fn find_nonexisting(b: &mut Bencher) {
use super::HashMap;
let mut m = HashMap::new();
for i in range_inclusive(1, 1000) {
m.insert(i, i);
}
b.iter(|| {
m.contains_key(&2048);
});
}
#[bench]
fn hashmap_as_queue(b: &mut Bencher) {
use super::HashMap;
let mut m = HashMap::new();
for i in range_inclusive(1, 1000) {
m.insert(i, i);
}
let mut k = 1;
b.iter(|| {
m.pop(&k);
m.insert(k + 1000, k + 1000);
k += 1;
});
}
#[bench]
fn find_pop_insert(b: &mut Bencher) {
use super::HashMap;
let mut m = HashMap::new();
for i in range_inclusive(1, 1000) {
m.insert(i, i);
}
let mut k = 1;
b.iter(|| {
m.find(&(k + 400));
m.find(&(k + 2000));
m.pop(&k);
m.insert(k + 1000, k + 1000);
k += 1;
})
}
}<|fim▁end|> | } |
<|file_name|>ExecExecutor.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.redis.internal.executor.transactions;
import io.netty.buffer.ByteBuf;
import java.util.Queue;
<|fim▁hole|>import org.apache.geode.cache.TransactionId;
import org.apache.geode.redis.internal.Coder;
import org.apache.geode.redis.internal.Command;
import org.apache.geode.redis.internal.ExecutionHandlerContext;
import org.apache.geode.redis.internal.RedisConstants;
public class ExecExecutor extends TransactionExecutor {
@Override
public void executeCommand(Command command, ExecutionHandlerContext context) {
CacheTransactionManager txm = context.getCacheTransactionManager();
if (!context.hasTransaction()) {
command.setResponse(Coder.getNilResponse(context.getByteBufAllocator()));
return;
}
TransactionId transactionId = context.getTransactionID();
txm.resume(transactionId);
boolean hasError = hasError(context.getTransactionQueue());
if (hasError)
txm.rollback();
else {
try {
txm.commit();
} catch (CommitConflictException e) {
command.setResponse(Coder.getErrorResponse(context.getByteBufAllocator(),
RedisConstants.ERROR_COMMIT_CONFLICT));
context.clearTransaction();
return;
}
}
ByteBuf response = constructResponseExec(context);
command.setResponse(response);
context.clearTransaction();
}
private ByteBuf constructResponseExec(ExecutionHandlerContext context) {
Queue<Command> cQ = context.getTransactionQueue();
ByteBuf response = context.getByteBufAllocator().buffer();
response.writeByte(Coder.ARRAY_ID);
response.writeBytes(Coder.intToBytes(cQ.size()));
response.writeBytes(Coder.CRLFar);
for (Command c : cQ) {
ByteBuf r = c.getResponse();
response.writeBytes(r);
}
return response;
}
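  // Illustrative note (assumption): for two queued commands this produces a
  // RESP array header such as "*2\r\n" followed by each command's
  // already-encoded reply, mirroring how Redis itself answers EXEC.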
private boolean hasError(Queue<Command> queue) {
for (Command c : queue) {
if (c.hasError())
return true;
}
return false;
}
}<|fim▁end|> | import org.apache.geode.cache.CacheTransactionManager;
import org.apache.geode.cache.CommitConflictException; |
<|file_name|>cast.py<|end_file_name|><|fim▁begin|>"""
Provide functionality to interact with Cast devices on the network.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.cast/
"""
# pylint: disable=import-error
import logging
import voluptuous as vol
from homeassistant.components.media_player import (
MEDIA_TYPE_MUSIC, MEDIA_TYPE_TVSHOW, MEDIA_TYPE_VIDEO, SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK,
SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET,
SUPPORT_STOP, MediaPlayerDevice, PLATFORM_SCHEMA)
from homeassistant.const import (
CONF_HOST, STATE_IDLE, STATE_OFF, STATE_PAUSED, STATE_PLAYING,
STATE_UNKNOWN)
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['pychromecast==0.7.4']
_LOGGER = logging.getLogger(__name__)
CONF_IGNORE_CEC = 'ignore_cec'
CAST_SPLASH = 'https://home-assistant.io/images/cast/splash.png'
DEFAULT_PORT = 8009
SUPPORT_CAST = SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | \
SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_PREVIOUS_TRACK | \
SUPPORT_NEXT_TRACK | SUPPORT_PLAY_MEDIA | SUPPORT_STOP
KNOWN_HOSTS = []
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_HOST): cv.string,
})
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the cast platform."""
import pychromecast
# import CEC IGNORE attributes
ignore_cec = config.get(CONF_IGNORE_CEC, [])
if isinstance(ignore_cec, list):
pychromecast.IGNORE_CEC += ignore_cec
else:
_LOGGER.error('CEC config "%s" must be a list.', CONF_IGNORE_CEC)
hosts = []
if discovery_info and discovery_info in KNOWN_HOSTS:
return
elif discovery_info:
hosts = [discovery_info]
elif CONF_HOST in config:
hosts = [(config.get(CONF_HOST), DEFAULT_PORT)]
else:
hosts = [tuple(dev[:2]) for dev in pychromecast.discover_chromecasts()
if tuple(dev[:2]) not in KNOWN_HOSTS]
casts = []
for host in hosts:
try:
casts.append(CastDevice(*host))
KNOWN_HOSTS.append(host)
except pychromecast.ChromecastConnectionError:
pass
add_devices(casts)
class CastDevice(MediaPlayerDevice):
"""Representation of a Cast device on the network."""
# pylint: disable=abstract-method
# pylint: disable=too-many-public-methods
def __init__(self, host, port):
"""Initialize the Cast device."""
import pychromecast
self.cast = pychromecast.Chromecast(host, port)
self.cast.socket_client.receiver_controller.register_status_listener(
self)
self.cast.socket_client.media_controller.register_status_listener(self)
self.cast_status = self.cast.status
self.media_status = self.cast.media_controller.status
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the device."""
return self.cast.device.friendly_name
# MediaPlayerDevice properties and methods
@property
def state(self):
"""Return the state of the player."""
if self.media_status is None:
return STATE_UNKNOWN
elif self.media_status.player_is_playing:
return STATE_PLAYING
elif self.media_status.player_is_paused:
return STATE_PAUSED
elif self.media_status.player_is_idle:
return STATE_IDLE
elif self.cast.is_idle:
return STATE_OFF
else:
return STATE_UNKNOWN
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self.cast_status.volume_level if self.cast_status else None
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self.cast_status.volume_muted if self.cast_status else None
@property
def media_content_id(self):
"""Content ID of current playing media."""
return self.media_status.content_id if self.media_status else None
@property
def media_content_type(self):
"""Content type of current playing media."""
if self.media_status is None:
return None
elif self.media_status.media_is_tvshow:
return MEDIA_TYPE_TVSHOW
elif self.media_status.media_is_movie:
return MEDIA_TYPE_VIDEO
elif self.media_status.media_is_musictrack:
return MEDIA_TYPE_MUSIC
return None
@property
def media_duration(self):
"""Duration of current playing media in seconds."""
return self.media_status.duration if self.media_status else None
@property
def media_image_url(self):
"""Image url of current playing media."""
if self.media_status is None:
return None
images = self.media_status.images
return images[0].url if images else None
@property
def media_title(self):
"""Title of current playing media."""
return self.media_status.title if self.media_status else None
@property
def media_artist(self):
"""Artist of current playing media (Music track only)."""
return self.media_status.artist if self.media_status else None
@property
def media_album(self):
"""Album of current playing media (Music track only)."""
return self.media_status.album_name if self.media_status else None
@property
def media_album_artist(self):
"""Album arist of current playing media (Music track only)."""
return self.media_status.album_artist if self.media_status else None
@property
def media_track(self):
"""Track number of current playing media (Music track only)."""
return self.media_status.track if self.media_status else None
@property
def media_series_title(self):
"""The title of the series of current playing media (TV Show only)."""
return self.media_status.series_title if self.media_status else None
@property
def media_season(self):
"""Season of current playing media (TV Show only)."""
return self.media_status.season if self.media_status else None
@property
def media_episode(self):
"""Episode of current playing media (TV Show only)."""
return self.media_status.episode if self.media_status else None
@property
def app_id(self):
"""Return the ID of the current running app."""
return self.cast.app_id
@property
def app_name(self):
"""Name of the current running app."""
return self.cast.app_display_name
@property
def supported_media_commands(self):
"""Flag of media commands that are supported."""
return SUPPORT_CAST
def turn_on(self):
"""Turn on the ChromeCast."""
# The only way we can turn the Chromecast is on is by launching an app
if not self.cast.status or not self.cast.status.is_active_input:
import pychromecast
if self.cast.app_id:
self.cast.quit_app()
self.cast.play_media(
CAST_SPLASH, pychromecast.STREAM_TYPE_BUFFERED)
<|fim▁hole|>
def mute_volume(self, mute):
"""Mute the volume."""
self.cast.set_volume_muted(mute)
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
self.cast.set_volume(volume)
def media_play(self):
"""Send play commmand."""
self.cast.media_controller.play()
def media_pause(self):
"""Send pause command."""
self.cast.media_controller.pause()
def media_stop(self):
"""Send stop command."""
self.cast.media_controller.stop()
def media_previous_track(self):
"""Send previous track command."""
self.cast.media_controller.rewind()
def media_next_track(self):
"""Send next track command."""
self.cast.media_controller.skip()
def media_seek(self, position):
"""Seek the media to a specific location."""
self.cast.media_controller.seek(position)
def play_media(self, media_type, media_id, **kwargs):
"""Play media from a URL."""
self.cast.media_controller.play_media(media_id, media_type)
# Implementation of chromecast status_listener methods
def new_cast_status(self, status):
"""Called when a new cast status is received."""
self.cast_status = status
self.update_ha_state()
def new_media_status(self, status):
"""Called when a new media status is received."""
self.media_status = status
self.update_ha_state()<|fim▁end|> | def turn_off(self):
"""Turn Chromecast off."""
self.cast.quit_app() |
<|file_name|>categorical.rs<|end_file_name|><|fim▁begin|>use distribution;
use random;
/// A categorical distribution.
#[derive(Clone)]
pub struct Categorical {
k: usize,
p: Vec<f64>,
cumsum: Vec<f64>,
}
impl Categorical {
/// Create a categorical distribution with success probability `p`.
///
/// It should hold that `p[i] >= 0`, `p[i] <= 1`, and `sum(p) == 1`.
pub fn new(p: &[f64]) -> Categorical {
should!(is_probability_vector(p), {
const EPSILON: f64 = 1e-12;
p.iter().all(|&p| p >= 0.0 && p <= 1.0) &&
(p.iter().fold(0.0, |sum, &p| sum + p) - 1.0).abs() < EPSILON
});
let k = p.len();
let mut cumsum = p.to_vec();
for i in 1..(k - 1) {
cumsum[i] += cumsum[i - 1];
}
cumsum[k - 1] = 1.0;
Categorical { k: k, p: p.to_vec(), cumsum: cumsum }
}
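    // Illustrative usage sketch (assumption, in the spirit of the tests
    // below): `Categorical::new(&[0.25, 0.5, 0.25])` yields `k() == 3` and
    // an internal cumsum of `[0.25, 0.75, 1.0]`.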
/// Return the number of categories.
#[inline(always)]
pub fn k(&self) -> usize { self.k }
/// Return the event probabilities.
#[inline(always)]
pub fn p(&self) -> &[f64] { &self.p }
}
impl distribution::Distribution for Categorical {
type Value = usize;
fn cdf(&self, x: f64) -> f64 {
if x < 0.0 {
return 0.0;
}
let x = x as usize;
if x >= self.k {
return 1.0;
}
self.cumsum[x]
}
}
impl distribution::Discrete for Categorical {
#[inline]
fn pmf(&self, x: usize) -> f64 {
should!(x < self.k);
self.p[x]
}
}
impl distribution::Entropy for Categorical {
fn entropy(&self) -> f64 {
-self.p.iter().fold(0.0, |sum, p| sum + p * p.ln())
}
}
impl distribution::Inverse for Categorical {
fn inv_cdf(&self, p: f64) -> usize {
should!(0.0 <= p && p <= 1.0);
self.cumsum.iter().position(|&sum| sum > 0.0 && sum >= p).unwrap_or_else(|| {
self.p.iter().rposition(|&p| p > 0.0).unwrap()
})
}
}
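// Worked example (assumption, not from the original source): for
// p = [0.0, 0.75, 0.25, 0.0] the cumulative sums are [0.0, 0.75, 1.0, 1.0],
// so `inv_cdf(0.5)` returns 1 (first positive cumsum >= 0.5) and
// `inv_cdf(1.0)` returns 2 (the last category with nonzero mass).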
impl distribution::Kurtosis for Categorical {
fn kurtosis(&self) -> f64 {
use distribution::{Mean, Variance};
let (mean, variance) = (self.mean(), self.variance());
let kurt = self.p.iter().enumerate().fold(0.0, |sum, (i, p)| {
sum + (i as f64 - mean).powi(4) * p
});
kurt / variance.powi(2) - 3.0
}
}
impl distribution::Mean for Categorical {
fn mean(&self) -> f64 {
self.p.iter().enumerate().fold(0.0, |sum, (i, p)| sum + i as f64 * p)
}
}
impl distribution::Median for Categorical {
fn median(&self) -> f64 {
if self.p[0] > 0.5 {
return 0.0;
}
if self.p[0] == 0.5 {
return 0.5;
}
for (i, &sum) in self.cumsum.iter().enumerate() {
if sum == 0.5 {
return (2 * i - 1) as f64 / 2.0;
} else if sum > 0.5 {
return i as f64;
}
}
unreachable!()
}
}
impl distribution::Modes for Categorical {
fn modes(&self) -> Vec<usize> {
let mut modes = Vec::new();
let mut max = 0.0;
for (i, &p) in self.p.iter().enumerate() {
if p == max {
modes.push(i);
}
if p > max {
max = p;
modes = vec![i];
}
}
modes
}
}
impl distribution::Sample for Categorical {
#[inline]
fn sample<S>(&self, source: &mut S) -> usize where S: random::Source {
use distribution::Inverse;
self.inv_cdf(source.read::<f64>())
}
}
impl distribution::Skewness for Categorical {
fn skewness(&self) -> f64 {
use distribution::{Mean, Variance};
let (mean, variance) = (self.mean(), self.variance());
let skew = self.p.iter().enumerate().fold(0.0, |sum, (i, p)| {
sum + (i as f64 - mean).powi(3) * p
});
skew / (variance * variance.sqrt())
}
}
impl distribution::Variance for Categorical {
fn variance(&self) -> f64 {
use distribution::Mean;
let mean = self.mean();
self.p.iter().enumerate().fold(0.0, |sum, (i, p)| {
sum + (i as f64 - mean).powi(2) * p
})
}
}
#[cfg(test)]
mod tests {
use prelude::*;
macro_rules! new(
(equal $k:expr) => { Categorical::new(&[1.0 / $k as f64; $k]) };
($p:expr) => { Categorical::new(&$p); }
);
#[test]
fn cdf() {
let d = new!([0.0, 0.75, 0.25, 0.0]);
let p = vec![0.0, 0.0, 0.75, 1.0, 1.0];
let x = (-1..4).map(|x| d.cdf(x as f64)).collect::<Vec<_>>();
assert_eq!(&x, &p);
let x = (-1..4).map(|x| d.cdf(x as f64 + 0.5)).collect::<Vec<_>>();
assert_eq!(&x, &p);
let d = new!(equal 3);
let p = vec![0.0, 1.0 / 3.0, 2.0 / 3.0, 1.0];
let x = (-1..3).map(|x| d.cdf(x as f64)).collect::<Vec<_>>();
assert_eq!(&x, &p);
let x = (-1..3).map(|x| d.cdf(x as f64 + 0.5)).collect::<Vec<_>>();
assert_eq!(&x, &p);
}
#[test]
fn pmf() {
let p = [0.0, 0.75, 0.25, 0.0];
let d = new!(p);
assert_eq!(&(0..4).map(|x| d.pmf(x)).collect::<Vec<_>>(), &p.to_vec());
let d = new!(equal 3);
assert_eq!(&(0..3).map(|x| d.pmf(x)).collect::<Vec<_>>(), &vec![1.0 / 3.0; 3])
}
#[test]
fn entropy() {
use std::f64::consts::LN_2;
assert_eq!(new!(equal 2).entropy(), LN_2);
assert_eq!(new!([0.1, 0.2, 0.3, 0.4]).entropy(), 1.2798542258336676);
}
#[test]
fn inv_cdf() {
let d = new!([0.0, 0.75, 0.25, 0.0]);
let p = vec![0.0, 0.75, 0.7500001, 1.0];
assert_eq!(&p.iter().map(|&p| d.inv_cdf(p)).collect::<Vec<_>>(), &vec![1, 1, 2, 2]);
let d = new!(equal 3);
let p = vec![0.0, 0.5, 0.75, 1.0];
assert_eq!(&p.iter().map(|&p| d.inv_cdf(p)).collect::<Vec<_>>(), &vec![0, 1, 2, 2]);
}
#[test]
fn kurtosis() {
assert_eq!(new!(equal 2).kurtosis(), -2.0);
assert_eq!(new!([0.1, 0.2, 0.3, 0.4]).kurtosis(), -0.7999999999999998);
}
#[test]
fn mean() {
assert_eq!(new!(equal 3).mean(), 1.0);<|fim▁hole|> #[test]
fn median() {
assert_eq!(new!([0.6, 0.2, 0.2]).median(), 0.0);
assert_eq!(new!(equal 2).median(), 0.5);
assert_eq!(new!([0.1, 0.2, 0.3, 0.4]).median(), 2.0);
assert_eq!(new!([1.0 / 6.0, 1.0 / 3.0, 1.0 / 3.0, 1.0 / 6.0]).median(), 0.5);
}
#[test]
fn modes() {
assert_eq!(new!([0.6, 0.2, 0.2]).modes(), vec![0]);
assert_eq!(new!(equal 2).modes(), vec![0, 1]);
assert_eq!(new!(equal 3).modes(), vec![0, 1, 2]);
assert_eq!(new!([0.4, 0.2, 0.4]).modes(), vec![0, 2]);
assert_eq!(new!([1.0 / 6.0, 1.0 / 3.0, 1.0 / 3.0, 1.0 / 6.0]).modes(), vec![1, 2]);
}
#[test]
fn sample() {
let mut source = random::default();
let sum = Independent(&new!([0.0, 0.5, 0.5]), &mut source).take(100).fold(0, |a, b| a + b);
assert!(100 <= sum && sum <= 200);
let p = (0..11).map(|i| if i % 2 != 0 { 0.2 } else { 0.0 }).collect::<Vec<_>>();
assert!(Independent(&new!(p), &mut source).take(1000).all(|x| x % 2 != 0));
}
#[test]
fn skewness() {
assert_eq!(new!(equal 6).skewness(), 0.0);
assert_eq!(new!([1.0 / 6.0, 1.0 / 3.0, 1.0 / 3.0, 1.0 / 6.0]).skewness(), 0.0);
assert_eq!(new!([0.1, 0.2, 0.3, 0.4]).skewness(), -0.6);
}
#[test]
fn variance() {
assert_eq!(new!(equal 3).variance(), 2.0 / 3.0);
assert_eq!(new!([1.0 / 6.0, 1.0 / 3.0, 1.0 / 3.0, 1.0 / 6.0]).variance(), 11.0 / 12.0);
}
}<|fim▁end|> | assert_eq!(new!([0.3, 0.3, 0.4]).mean(), 1.1);
assert_eq!(new!([1.0 / 6.0, 1.0 / 3.0, 1.0 / 3.0, 1.0 / 6.0]).mean(), 1.5);
}
|
<|file_name|>http_server.rs<|end_file_name|><|fim▁begin|>use std::path::Path;
use iron::Iron;
use iron::Listening;
use staticfile::Static;
use mount::Mount;
pub fn start_http(ip_port: &str) -> Listening {
println!("Serving HTTP on: {}", ip_port);<|fim▁hole|>}
fn get_mount() -> Mount {
// let views_handler = views::get_handler();
//
// let mut rest_chain = Chain::new(rest_api::rest_router());
// rest_chain.link_before(rest_api::AuthToken);
let mut mount = Mount::new();
mount
.mount(
"/static",
Static::new(Path::new("./static"))
);
// .mount("/gui-api/", gui_api::get_router())
// .mount("/api", rest_chain)
// .mount("/", views_handler);
mount
}<|fim▁end|> | Iron::new(get_mount())
.http(ip_port)
.expect("starting HTTP server FAILED") |
<|file_name|>serializer.py<|end_file_name|><|fim▁begin|># This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
class Serializer(object):
schemaless = True
encapsulate = True
registry = {}
def __init__(self, query_params, pretty=False, **kwargs):
self.pretty = pretty
self._query_params = query_params
self._fileName = None
self._lastModified = None
self._extra_args = kwargs
@classmethod
def register(cls, tag, serializer):
cls.registry[tag] = serializer
@classmethod
def getAllFormats(cls):
return list(cls.registry)
@classmethod
def create(cls, dformat, query_params=None, **kwargs):
"""
A serializer factory
"""
query_params = query_params or {}
serializer = cls.registry.get(dformat)
if serializer:
return serializer(query_params, **kwargs)
else:
raise Exception("Serializer for '%s' does not exist!" % dformat)
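    # Illustrative sketch (hypothetical format tag; real tags depend on the
    # serializers registered at import time below):
    #
    #     serializer = Serializer.create('json', {'pretty': 'yes'})
    #     body = serializer(result)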
def getMIMEType(self):
return self._mime
def set_headers(self, response):
response.content_type = self.getMIMEType()
def __call__(self, obj, *args, **kwargs):
self._obj = obj
self._data = self._execute(obj, *args, **kwargs)
return self._data<|fim▁hole|>
from indico.web.http_api.metadata.json import JSONSerializer
from indico.web.http_api.metadata.xml import XMLSerializer<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | __author__ = 'elijahethun' |
<|file_name|>is-js.js<|end_file_name|><|fim▁begin|>export default function isJavascript(scriptTag) {
// TODO: add a console warning if the script tag doesn't have an attribute?
// seems like it's required for some parts of ember consumption<|fim▁hole|><|fim▁end|> | let type = scriptTag.attributes.type ? scriptTag.attributes.type.value : 'text/javascript';
return /(?:application|text)\/javascript/i.test(type);
} |
<|file_name|>pulse_channel.rs<|end_file_name|><|fim▁begin|>use apu::envelope::Envelope;
use apu::timer::{Timer, TimerCycle};
use apu::length_counter::LengthCounter;
use apu::sweep::{Sweep, Complement, SweepCycle};
use memory::Memory;
/* duty cycles for the square wave
for example, duty cycle 1 generates the following square wave:
_ _ _ _
| | | |
| | | |
_| |_ _ _ _ _ _| |_ _ _ _ _
0 1 2 3 4 5 6 7 0 1 2 3 4 5 6 7 .......
*/
static DUTY_CYCLES: [[u8; 8]; 4] = [
[0, 1, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 1, 0, 0, 0],
[1, 0, 0, 1, 1, 1, 1, 1],
];
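// Worked example (assumption): DUTY_CYCLES[1] == [0, 1, 1, 0, 0, 0, 0, 0] is
// the 25% duty pattern; `Duty::cycle` below steps `duty_position` backwards
// (7, 6, ..., 0) through one of these rows each time the timer reaches zero
// (see `cycle_timer` below).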
// selected duty cycle and the current position
struct Duty {
duty_cycle: usize,
duty_position: usize,
}
impl Duty {
fn new() -> Duty {
Duty { duty_cycle: 0, duty_position: 0 }
}
fn cycle(&mut self) {
if self.duty_position > 0 {
self.duty_position -= 1;
} else {
self.duty_position = 7;
}
}
}
pub struct PulseChannel {
duty: Duty,
length_counter: LengthCounter,
timer: Timer,
pub envelope: Envelope,
pub sweep: Sweep,
enabled: bool,
}
impl Memory for PulseChannel {
fn read(&mut self, address: u16) -> u8 {
panic!("Invalid read attempt of pulse channel register {}", address);
}
fn write(&mut self, address: u16, value: u8) {
if address == 0x4000 || address == 0x4004 {
let duty_cycle = (0b1100_0000 & value) >> 6;
let length_counter_halt = (0b0010_0000 & value) != 0;
let constant_volume_envelope_flag = (0b0001_0000 & value) != 0;
            let volume_envelope_divider_period = 0b0000_1111 & value;
self.duty.duty_cycle = duty_cycle as usize;
self.length_counter.halt(length_counter_halt);
self.envelope.loop_flag(length_counter_halt);
self.envelope.set_constant_volume(constant_volume_envelope_flag);
self.envelope.set_constant_volume_or_envelope_period(
volume_envelope_divider_period);
} else if address == 0x4001 || address == 0x4005 {
let sweep_enable = (0b1000_0000 & value) != 0;
let divider_period = (0b0111_0000 & value) >> 4;
let negate_flag = (0b0000_1000 & value) != 0;
            let shift_count = 0b0000_0111 & value;
self.sweep.enabled = sweep_enable;
self.sweep.length = divider_period + 1;
self.sweep.negate = negate_flag;
self.sweep.shift = shift_count;
self.sweep.reload = true;
        } else if address == 0x4002 || address == 0x4006 {
            let timer_low_bits = value;
            self.timer.set_low_bits(timer_low_bits);
} else if address == 0x4003 || address == 0x4007 {
let length_counter_load = (0b1111_1000 & value) >> 3;
            let timer_high_bits = 0b0000_0111 & value;
self.length_counter.load(length_counter_load);
self.envelope.restart_envelope();<|fim▁hole|> } else {
panic!("Invald write to pulse channel address {:0x}",
address);
}
}
}
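// Illustrative sketch (assumption, not from the original source): writing
// 0b0101_0011 to register 0x4000 selects duty cycle 1, leaves the length
// counter running, and sets a constant volume of 3.
//
//     let mut ch = PulseChannel::new(Complement::Two);
//     ch.write(0x4000, 0b0101_0011);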
impl PulseChannel {
pub fn new(complement: Complement) -> PulseChannel {
PulseChannel {
duty: Duty::new(),
length_counter: LengthCounter::new(),
timer: Timer::new(),
envelope: Envelope::new(),
sweep: Sweep::new(complement),
enabled: false,
}
}
pub fn enable_channel(&mut self, enabled: bool) {
self.enabled = enabled;
self.length_counter.enable(enabled);
}
pub fn cycle_timer(&mut self) {
if self.timer.cycle() == TimerCycle::ZeroCycle {
self.duty.cycle();
}
}
pub fn cycle_envelope(&mut self) {
self.envelope.cycle();
}
pub fn cycle_length_counter(&mut self) {
self.length_counter.cycle();
}
pub fn cycle_sweep_unit(&mut self) {
if self.sweep.cycle() == SweepCycle::ZeroCycle {
let change = self.sweep.sweep_amount(self.timer.length);
if change > 2047 {
return;
}
self.timer.length = (self.timer.length as i16 + change) as u16
& 0b0000_0111_1111_1111;
}
}
pub fn length_counter_nonzero(&self) -> bool {
self.length_counter.counter > 0
}
pub fn output(&self) -> f64 {
if !self.enabled
|| self.length_counter.silenced()
|| self.timer.length < 8
|| self.sweep.last_change > 2047 {
return 0.0;
}
let volume = self.envelope.volume() as f64;
let duty_val = DUTY_CYCLES[self.duty.duty_cycle][self.duty.duty_position];
volume * duty_val as f64
}
}
#[cfg(test)]
mod tests {
use super::*;
fn create_test_channel() -> PulseChannel {
let mut channel = PulseChannel::new(Complement::Two);
channel.duty = Duty::new();
channel.duty.duty_cycle = 0;
channel.duty.duty_position = 1;
channel.timer.set_period(20);
channel.enable_channel(true);
channel.envelope.set_constant_volume(true);
channel.envelope.set_constant_volume_or_envelope_period(5);
assert_eq!(
DUTY_CYCLES[channel.duty.duty_cycle][channel.duty.duty_position],
1);
channel
}
#[test]
fn output_is_zero_if_length_counter_silences_channel() {
let mut channel = create_test_channel();
        channel.length_counter.length = 5;
channel.length_counter.counter = 0;
assert_eq!(channel.output(), 0.0);
}
#[test]
fn output_is_envelope_value_if_length_counter_does_not_silence_channel() {
let mut channel = create_test_channel();
        channel.length_counter.length = 5;
channel.length_counter.counter = 2;
assert_eq!(channel.output(), 5.0);
}
#[test]
fn writing_to_0x4003_loads_length_counter() {
let mut channel = create_test_channel();
let val = (6 & 0b0001_1111) << 3;
channel.write(0x4003, val);
assert_eq!(channel.length_counter.length, 80);
assert_eq!(channel.length_counter.counter, 80);
}
#[test]
fn writing_to_0x4007_loads_length_counter() {
let mut channel = create_test_channel();
let val = (8 & 0b0001_1111) << 3;
channel.write(0x4007, val);
assert_eq!(channel.length_counter.length, 160);
assert_eq!(channel.length_counter.counter, 160);
}
#[test]
fn writing_to_0x4000_sets_length_counter_halt_flag() {
let mut channel = create_test_channel();
assert!(!channel.length_counter.halted());
let val = 0b0010_0000;
channel.write(0x4000, val);
assert!(channel.length_counter.halted());
}
#[test]
fn writing_to_0x4004_sets_length_counter_halt_flag() {
let mut channel = create_test_channel();
assert!(!channel.length_counter.halted());
let val = 0b0010_0000;
channel.write(0x4004, val);
assert!(channel.length_counter.halted());
}
#[test]
fn enabling_channel_enables_length_counter() {
let mut channel = create_test_channel();
channel.enable_channel(false);
assert!(!channel.length_counter.enabled());
channel.enable_channel(true);
assert!(channel.length_counter.enabled());
}
#[test]
fn disabling_channel_disables_length_counter() {
let mut channel = create_test_channel();
channel.enable_channel(true);
assert!(channel.length_counter.enabled());
channel.enable_channel(false);
assert!(!channel.length_counter.enabled());
}
#[test]
fn cycle_length_counter_method_actually_cycles_length_counter() {
let mut channel = create_test_channel();
channel.length_counter.halt(false);
channel.length_counter.length = 4;
channel.length_counter.counter = 4;
channel.cycle_length_counter();
assert_eq!(channel.length_counter.counter, 3);
}
}<|fim▁end|> | self.timer.set_high_bits(timer_high_bits);
self.duty.duty_position = 0;
|
<|file_name|>soln.py<|end_file_name|><|fim▁begin|>class Solution:
def minPathSum(self, grid: List[List[int]]) -> int:
row = len(grid)
col = len(grid[0])
dp = [[0]*col for i in range(row)]
return self.findPath(grid, row-1, col-1, dp)
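    # Worked example (assumption): for grid [[1,3,1],[1,5,1],[4,2,1]] the
    # minimal path 1->3->1->1->1 sums to 7; findPath memoizes
    # min(top, left) + grid[i][j] at each cell.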
def findPath(self, grid, i, j, dp):
if dp[i][j]:
return dp[i][j]
if i == 0 and j == 0:
dp[i][j] = grid[i][j]
return grid[i][j]<|fim▁hole|> elif j == 0:
            dp[i][j] = self.findPath(grid, max(0,i-1), j, dp) + grid[i][j] #element on top
            return dp[i][j]
else:
a = self.findPath(grid, i,max(0,j-1), dp)+grid[i][j] #element to left
b = self.findPath(grid, max(0,i-1),j, dp)+grid[i][j] #element on top
dp[i][j] = min(a,b)
return dp[i][j]<|fim▁end|> | elif i == 0:
dp[i][j] = grid[i][j] + self.findPath(grid, i,max(0,j-1), dp)
return dp[i][j] #element to left |
<|file_name|>ui.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
<%namespace name="helpers" file="/helpers.mako.rs" />
<% from data import Method %>
// CSS Basic User Interface Module Level 1
// https://drafts.csswg.org/css-ui-3/
<% data.new_style_struct("UI", inherited=False, gecko_name="UIReset") %>
// TODO spec says that UAs should not support this
// we should probably remove from gecko (https://bugzilla.mozilla.org/show_bug.cgi?id=1328331)
${helpers.single_keyword("ime-mode", "auto normal active disabled inactive",
products="gecko", gecko_ffi_name="mIMEMode",
animation_value_type="discrete",
spec="https://drafts.csswg.org/css-ui/#input-method-editor")}
${helpers.single_keyword("-moz-user-select", "auto text none all element elements" +
" toggle tri-state -moz-all -moz-text",
products="gecko",
alias="-webkit-user-select",
gecko_ffi_name="mUserSelect",
gecko_enum_prefix="StyleUserSelect",
gecko_strip_moz_prefix=False,
aliases="-moz-none=none",
animation_value_type="discrete",
spec="https://drafts.csswg.org/css-ui-4/#propdef-user-select")}
${helpers.single_keyword("-moz-window-dragging", "default drag no-drag", products="gecko",
gecko_ffi_name="mWindowDragging",
gecko_enum_prefix="StyleWindowDragging",
animation_value_type="discrete",
spec="None (Nonstandard Firefox-only property)")}
${helpers.single_keyword("-moz-window-shadow", "none default menu tooltip sheet", products="gecko",
gecko_ffi_name="mWindowShadow",
gecko_constant_prefix="NS_STYLE_WINDOW_SHADOW",
animation_value_type="discrete",
internal=True,
spec="None (Nonstandard internal property)")}
<%helpers:longhand name="-moz-force-broken-image-icon"
products="gecko"
animation_value_type="discrete"
spec="None (Nonstandard Firefox-only property)">
use std::fmt;
use style_traits::ToCss;
pub mod computed_value {
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, ToComputedValue)]
pub struct T(pub bool);
}
pub use self::computed_value::T as SpecifiedValue;
impl ToCss for computed_value::T {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
dest.write_str(if self.0 { "1" } else { "0" })
}
}
#[inline]
pub fn get_initial_value() -> computed_value::T {
computed_value::T(false)
}
#[inline]
pub fn get_initial_specified_value() -> SpecifiedValue {
computed_value::T(false)
}
pub fn parse<'i, 't>(_context: &ParserContext, input: &mut Parser<'i, 't>)
-> Result<SpecifiedValue, ParseError<'i>> {
match input.expect_integer()? {
0 => Ok(computed_value::T(false)),
1 => Ok(computed_value::T(true)),<|fim▁hole|>
impl From<u8> for SpecifiedValue {
fn from(bits: u8) -> SpecifiedValue {
SpecifiedValue(bits == 1)
}
}
impl From<SpecifiedValue> for u8 {
fn from(v: SpecifiedValue) -> u8 {
match v.0 {
true => 1u8,
false => 0u8,
}
}
}
</%helpers:longhand><|fim▁end|> | _ => Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError)),
}
} |
<|file_name|>bots_ordereddict.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
<|fim▁hole|># Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
from UserDict import DictMixin
class OrderedDict(dict, DictMixin):
def __init__(self, *args, **kwds):
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__end
except AttributeError:
self.clear()
self.update(*args, **kwds)
def clear(self):
self.__end = end = []
end += [None, end, end] # sentinel node for doubly linked list
self.__map = {} # key --> [key, prev, next]
dict.clear(self)
def __setitem__(self, key, value):
if key not in self:
end = self.__end
curr = end[1]
curr[2] = end[1] = self.__map[key] = [key, curr, end]
dict.__setitem__(self, key, value)
def __delitem__(self, key):
dict.__delitem__(self, key)
key, prev, next = self.__map.pop(key)
prev[2] = next
next[1] = prev
def __iter__(self):
end = self.__end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
def __reversed__(self):
end = self.__end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
def popitem(self, last=True):
if not self:
raise KeyError('dictionary is empty')
if last:
key = reversed(self).next()
else:
key = iter(self).next()
value = self.pop(key)
return key, value
def __reduce__(self):
items = [[k, self[k]] for k in self]
tmp = self.__map, self.__end
del self.__map, self.__end
inst_dict = vars(self).copy()
self.__map, self.__end = tmp
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def keys(self):
return list(self)
setdefault = DictMixin.setdefault
update = DictMixin.update
pop = DictMixin.pop
values = DictMixin.values
items = DictMixin.items
iterkeys = DictMixin.iterkeys
itervalues = DictMixin.itervalues
iteritems = DictMixin.iteritems
def __repr__(self):
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
def copy(self):
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
if isinstance(other, OrderedDict):
if len(self) != len(other):
return False
for p, q in zip(self.items(), other.items()):
if p != q:
return False
return True
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other<|fim▁end|> | # Copyright (c) 2009 Raymond Hettinger
#
|
<|file_name|>kontrabant.py<|end_file_name|><|fim▁begin|>## Unquill: Copyright (C) 2003 Janez Demsar
##
## During development I peeked a lot at Unquill from John Elliott, 1996-2000.
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
import pickle
import time
from PyQt5 import QtCore, QtWidgets
from random import randint
class Quill:
class Event:
NIL, LOC, MSG, OBJ, SWAP, PLC = tuple(range(100, 106))
cond_ops = [("AT", "data.location_no == param1"),
("NOT AT", "data.location_no != param1"),
("AT GT", "data.location_no > param1"),
("AT LT", "data.location_no < param1"),
("PRESENT",
"data.objects[param1].location == data.location_no"),
("ABSENT",
"data.objects[param1].location != data.location_no"),
("WORN",
"data.objects[param1].location == data.Object.WORN"),
("NOT WORN",
"data.objects[param1].location != data.Object.WORN"),
("CARRIED",
"data.objects[param1].location == data.Object.CARRIED"),
("NOT CARR",
"data.objects[param1].location != data.Object.CARRIED"),
("CHANCE", "param1 < randint(1, 100)"),
("ZERO", "not data.flags[param1]"),
("NOT ZERO", "data.flags[param1]"),
("EQ", "data.flags[param1]==param2"),
("GT", "data.flags[param1]>param2"),
("LT", "data.flags[param1]<param2")]
ptas = {
0: (["INVEN", "DESC", "QUIT", "END", "DONE", "OK",
"ANYKEY", "SAVE", "LOAD", "TURNS", "SCORE",
"PAUSE", "GOTO", "MESSAGE", "REMOVE", "GET",
"DROP", "WEAR", "DESTROY", "CREATE", "SWAP",
"SET", "CLEAR", "PLUS", "MINUS", "LET", "BEEP"],
[0] * 11 + [1] * 9 + [2, 1, 1] + [2]*16,
[NIL] * 12 + [LOC, MSG] + [OBJ] * 6 + [SWAP] + [NIL] * 18),
5: (["INVEN", "DESC", "QUIT", "END", "DONE", "OK",
"ANYKEY", "SAVE", "LOAD", "TURNS", "SCORE",
"CLS", "DROPALL", "PAUSE", "PAPER", "INK",
"BORDER", "GOTO", "MESSAGE", "REMOVE", "GET",
"DROP", "WEAR", "DESTROY", "CREATE", "SWAP",
"PLACE", "SET", "CLEAR", "PLUS", "MINUS",
"LET", "BEEP"],
[0] * 13 + [1] * 12 + [2, 2, 1, 1] + [2] * 10,
[NIL] * 17 + [LOC, MSG] + [OBJ] * 6 + [SWAP, PLC] + [NIL]*12),
7: (["INVEN", "DESC", "QUIT", "END", "DONE", "OK",
"ANYKEY", "SAVE", "LOAD", "TURNS", "SCORE",
"CLS", "DROPALL", "AUTOG", "AUTOD", "AUTOW",
"AUTOR", "PAUSE", "PAPER", "INK", "BORDER",
"GOTO", "MESSAGE", "REMOVE", "GET", "DROP",
"WEAR", "DESTROY", "CREATE", "SWAP", "PLACE",
"SET", "CLEAR", "PLUS", "MINUS", "LET", "BEEP"],
[0] * 17 + [1] * 12 + [2, 2, 1] + [2] * 7,
[NIL] * 21 + [LOC, MSG] + [OBJ] * 6 + [SWAP, PLC] + [NIL] * 8)}
def __init__(self, sna, ptr, dbver=0):
self.act_ops, self.nparams, self.types = self.ptas[dbver]
self.word1 = sna[ptr]
self.word2 = sna[ptr + 1]
p = sna[ptr + 2] + 256 * sna[ptr + 3]
self.conditions = []
while sna[p] != 0xff:
opcode = sna[p]
param1 = sna[p + 1]
if opcode > 12:
param2 = sna[p + 2]
p += 3
else:
param2 = None
p += 2
self.conditions.append((opcode, param1, param2))
p += 1
self.actions = []
while sna[p] != 0xff:
opcode = sna[p]
nparams = self.nparams[opcode]
params = tuple(sna[p + 1:p + 1 + nparams])
self.actions.append((opcode, params))
p += 1 + nparams
# returns: -1 for error,
# 0 for not matching,
# 1 for matching and done (no further processing),
# 2 for matching, but process further
def __call__(self, data, system, word1, word2):
def match(w, sw):
return w == sw or (not w and sw == 255)
if system or match(word1, self.word1) and match(word2, self.word2):
for op, param1, param2 in self.conditions:
if not eval(self.cond_ops[op][1]):
return 0
for action in self.actions:
meth = getattr(data,
"do_" + self.act_ops[action[0]].lower())
res = meth(*action[1])
if res:
return res
return 2
class Location:
def __init__(self, description, conn=None):
self.description = description
self.connections = conn or {}
class Object:
INVALID, CARRIED, WORN, NOT_CREATED = 0xff, 0xfe, 0xfd, 0xfc
def __init__(self, description, initial=NOT_CREATED):
self.description = description
self.initial = self.location = initial
#######################################
# Actions
def do_get(self, param1):
loc = self.objects[param1].location
if loc == self.Object.WORN or loc == self.Object.CARRIED:
self.printout("To vendar že nosim!")
return -1
elif loc != self.location_no:
self.printout("Saj ni tukaj.")
return -1
elif self.flags[1] == self.nobjects_carry:
return -1
else:
self.objects[param1].location = self.Object.CARRIED
self.flags[1] += 1
def do_wear(self, param1):
loc = self.objects[param1].location
if loc == self.Object.WORN:
self.printout("To vendar že nosim!")
return -1
elif loc != self.Object.CARRIED:
self.printout("Tega sploh nimam!")
return -1
else:
self.objects[param1].location = self.Object.WORN
def do_drop(self, param1):
loc = self.objects[param1].location
if (loc == self.Object.WORN) or (loc == self.Object.CARRIED):
self.objects[param1].location = self.location_no
else:
self.printout("Tega sploh nimam.")
return -1
def do_remove(self, param1):
loc = self.objects[param1].location
if loc != self.Object.WORN:
self.printout("Tega sploh ne nosim!")
return -1
else:
self.objects[param1].location = self.Object.CARRIED
def do_dropall(self):
for obj in self.objects:
if obj.location == self.Object.WORN or \
obj.location == self.Object.CARRIED:
obj.location = self.location_no
self.flags[1] = 0
def do_goto(self, locno):
self.location = self.locations[locno]
self.location_no = locno
self.flags[2] = locno
def do_create(self, objno):
loc = self.objects[objno].location
if loc == self.Object.WORN or loc == self.Object.CARRIED:
self.flags[1] -= 1
self.objects[objno].location = self.location_no
def do_destroy(self, objno):
loc = self.objects[objno].location
if loc == self.Object.WORN or loc == self.Object.CARRIED:
self.flags[1] -= 1
self.objects[objno].location = self.Object.NOT_CREATED
def do_place(self, objno, locno):
loc = self.objects[objno].location
if loc == self.Object.WORN or loc == self.Object.CARRIED:
self.flags[1] -= 1
self.objects[objno].location = locno
	def do_print(self, flagno):
		if flagno > 47:
			self.printout(str(self.flags[flagno] + 256 * self.flags[flagno + 1]))
		else:
			self.printout(str(self.flags[flagno]))
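	# Flags with index > 47 behave as 16-bit little-endian counters: flag[n]
	# holds the low byte and flag[n + 1] the high byte, hence the carry and
	# borrow handling in do_plus/do_minus below.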
def do_plus(self, flagno, no):
self.flags[flagno] += no
if self.flags[flagno] > 255:
if flagno > 47:
self.flags[flagno] -= 256
self.flags[flagno + 1] = (self.flags[flagno + 1] + 1) % 256
else:
self.flags[flagno] = 255
def do_minus(self, flagno, no):
self.flags[flagno] -= no
if self.flags[flagno] < 0:
if flagno > 47:
self.flags[flagno] += 256
self.flags[flagno + 1] -= 1
if self.flags[flagno] == -1:
self.flags[flagno] = 0
else:
self.flags[flagno] = 0
def do_inven(self):
inv = ""
for obj in self.objects:
if obj.location == Quill.Object.CARRIED:
inv += "<LI>%s</LI>" % obj.description
elif obj.location == Quill.Object.WORN:
inv += "<LI>%s (nosim)</LI>" % obj.description
if inv:
inv = "Prenašam pa tole:<UL>"+inv+"</UL"
else:
inv = "Prenašam pa tole:<UL>pravzaprav nič</UL"
self.printout(inv)
def do_message(self, msgno):
self.printout(self.messages[msgno])
do_mes = do_message
def do_set(self, flagno):
self.flags[flagno] = 255
def do_clear(self, flagno):
self.flags[flagno] = 0
def do_let(self, flagno, no):
self.flags[flagno] = no
def do_add(self, flg1, flg2):
return self.do_plus(flg1, self.flags[flg2])
def do_sum(self, flg1, flg2):
return self.do_minus(flg1, self.flags[flg2])
def do_swap(self, obj1, obj2):
self.objects[obj1].location, self.objects[obj2].location = \
self.objects[obj2].location, self.objects[obj1].location
def do_desc(self):
self.update_location()
<|fim▁hole|>
def do_end(self):
self.anykey()
self.reset()
self.update_location()
def do_ok(self):
self.printout("OK")
return 1
@staticmethod
def do_done():
return 1
def do_anykey(self):
self.anykey()
def do_save(self):
self.printout("Shranjevati pa še ne znam ...")
def do_load(self):
self.printout("Nalagati pa znam ...")
def do_star(self, _):
self.printout("'STAR' ni implementiran")
def do_jsr(self, *_):
self.printout("'JSR' ni implementiran")
	def do_sound(self, lsb, msb):
		pass
	def do_beep(self, lsb, msb):
		pass
def do_turns(self):
self.printout("Ukazov dal si %4i zares<br>" % self.turns)
def do_score(self):
self.printout("Nabral si %i odstotkov<br>" % self.flags[30])
@staticmethod
def do_pause(s50):
time.sleep(s50/50)
def do_cls(self):
pass
#######################################
# Initialization from an .sna file
def __init__(self, name="kontra.sna", dbver=0):
def single_string(ptr):
# TODO: Simplify
s = ""
while sna[ptr] != 0xe0:
s += chr(255 - sna[ptr])
ptr += 1
return s
def word(ptr):
return sna[ptr] + 256 * sna[ptr + 1]
def get_sign_ptr():
sign_ptr = -1
while True:
sign_ptr = sna.find(b"\x10", sign_ptr + 1)
if sign_ptr == -1:
raise ValueError("Quill signature not found")
if sna[sign_ptr+2:sign_ptr+12:2] == b"\x11\x12\x13\x14\x15":
return sign_ptr
def read_vocabulary():
vocabulary = {}
index_to_word = []
pv = self.pvocabulary
while sna[pv]:
index = sna[pv + 4]
w = "".join(chr(255 - x) for x in sna[pv:pv + 4]).strip()
vocabulary[w] = index
if index >= len(index_to_word):
index_to_word += [None] * (index - len(index_to_word) + 1)
if not index_to_word[index]:
index_to_word[index] = w
pv += 5
return vocabulary, index_to_word
def get_cond_table(ptr):
events = []
while sna[ptr]:
events.append(self.Event(sna, ptr))
ptr += 4
return events
colors = ["#000000", "#0000ff", "#ff0000", "#ff00ff", "#00ff00",
"#00ffff", "#ffff00", "#ffffff"]
replacs = {"&": "&", "<": "<", ">": ">", "\x60": "£",
"\x7f": "©", "\x95": "č", "\x94": "š", "\xa0": "ž",
"\x92": "Č", "\xa2": "Š", "\x90": "Ž"}
# How would these codes be reset?
# codes = {"\x12": "<big>", "\x13": "<b>", "\x14": "<i>", "\x15": "<u>"}
def get_items(ptr, n):
items = []
for i in range(n):
s = ""
xpos = 0
while 1:
c = chr(255 - sna[ptr])
ptr += 1
if c in replacs:
s += replacs[c]
xpos += 1
elif c >= ' ':
s += c
xpos += 1
elif c == "\x1f":
break
elif c == "\x06":
if 255 - sna[ptr] == 6:
s += "<P>"
xpos = 0
ptr += 1
else:
s += " "
xpos = 0
elif c == "\x10": # INK
cl = 255 - sna[ptr]
ptr += 1
if cl < 8:
s += "<FONT COLOR=%s>" % colors[cl]
elif c == "\x11": # PAPER
ptr += 1
# elif c in codes:
# if sna[ptr] != 255:
# s += "<%s>" % codes[c]
# else:
# s += "</%s>" % codes[c]
# ptr += 1
if xpos == 32:
					if chr(255 - sna[ptr]) != ' ':  # sna stores complemented bytes
s += " "
xpos = 0
items.append(s)
return items
def read_connections():
ptr = word(self.pconnections)
for location in self.locations:
while sna[ptr] != 0xff:
location.connections[sna[ptr]] = sna[ptr + 1]
ptr += 2
ptr += 1
def read_object_positions():
ptr = self.pobject_locations
for i in range(len(self.objects)):
self.objects[i].initial = sna[ptr + i]
sna = b"\x00" * (16384 - 27) + open(name, "rb").read()
ptr = get_sign_ptr() + 13
self.nobjects_carry = sna[ptr]
self.nobjects = sna[ptr+1]
self.nlocations = sna[ptr+2]
self.nmessages = sna[ptr+3]
if dbver:
ptr += 1
self.nsystem_messages = sna[ptr+3]
self.pdictionary = ptr + 29
self.presponse = word(ptr+4)
self.pprocess = word(ptr+6)
self.pobjects = word(ptr+8)
self.plocations = word(ptr+10)
self.pmessages = word(ptr+12)
off = 2 if dbver else 0
self.pconnections = word(ptr + 14 + off)
self.pvocabulary = word(ptr+16 + off)
self.pobject_locations = word(ptr+18 + off)
if dbver:
psystem_messages = word(ptr+14)
self.system_messages = \
get_items(word(psystem_messages), self.nsystem_messages)
self.pobject_map = word(ptr+22)
else:
self.system_messages = [single_string(ptr) for ptr in [
27132, 27152, 27175, 27209, 27238, 27260, 27317, 27349, 27368,
27390, 27397, 27451, 27492, 27525, 27551, 27568, 27573, 27584,
27590, 27613, 27645, 27666, 27681, 27707, 27726]]
self.pobject_map = None
self.vocabulary, self.index_to_word = read_vocabulary()
self.dir_codes = [self.vocabulary[i]
for i in ["SZ", "S", "SV", "Z", "V", "JZ", "J", "JV",
"NOTE", "VEN", "GOR", "DOL"]]
self.responses = get_cond_table(self.presponse)
self.process = get_cond_table(self.pprocess)
self.objects = [Quill.Object(x)
for x in get_items(word(self.pobjects), self.nobjects)]
read_object_positions()
self.locations = [Quill.Location(x)
for x in get_items(word(self.plocations),
self.nlocations)]
read_connections()
self.messages = get_items(word(self.pmessages), self.nmessages)
self.location = self.locations[1]
self.location_no = 1
self.flags = [0]*64
self.flags[1] = 255
self.flags[2] = self.location_no
self.cheat_locations = {}
self.turns = 0
self.izpisano = ""
self.dlg = self.izpis = self.ukazna = None
self.setup_ui()
self.goljufija_const()
self.reset()
#######################################
# Processing
def reset(self):
self.flags[2] = self.location_no = 0
self.location = self.locations[self.location_no]
self.turns = 0
for obj in self.objects:
obj.location = obj.initial
self.update_location()
self.process_events(self.process, 1)
self.goljufija()
def update_location(self):
self.izpisano = ""
if self.flags[0]:
self.set_location_description(
"Temno je kot v rogu. Nič ne vidim.", (0,) * 12)
return
desc = self.location.description
inv = [obj.description for obj in self.objects
if obj.location == self.location_no]
if len(inv) == 1:
desc += "<br>Vidim tudi " + inv[0] + "<br>"
elif inv:
desc += "<br>Vidim tudi: " + "".join("<br>- %s" % i for i in inv)
self.set_location_description(
desc, [direct in self.location.connections
for direct in self.dir_codes])
#######################################
# GUI
def setup_ui(self):
goljufam = True
dlg = self.dlg = QtWidgets.QWidget()
dlg.setWindowTitle("Kontrabant")
dlg.setEnabled(True)
dlg.resize(1024 if goljufam else 544, 380)
dlg.setLayout(QtWidgets.QHBoxLayout())
vbox1 = QtWidgets.QWidget()
vbox1.setFixedWidth(350)
vbox1.setLayout(QtWidgets.QVBoxLayout())
dlg.layout().addWidget(vbox1)
self.izpis = QtWidgets.QTextEdit()
self.izpis.setReadOnly(True)
self.izpis.setMinimumHeight(290)
self.izpis.setFocusPolicy(QtCore.Qt.NoFocus)
self.izpis.setStyleSheet(
"font-family: Arial; font-size: 14; color: white; background: blue")
self.izpisano = ""
self.ukazna = QtWidgets.QLineEdit()
self.ukazna.setFocus()
self.ukazna.returnPressed.connect(self.user_command)
vbox1.layout().addWidget(self.izpis)
vbox1.layout().addWidget(self.ukazna)
dlg.show()
tabs = QtWidgets.QTabWidget()
tabs.setMinimumSize(350, 290)
dlg.layout().addWidget(tabs)
self.g_lokacija = QtWidgets.QTreeWidget()
tabs.addTab(self.g_lokacija, "Lokacija")
self.g_lokacija.setHeaderHidden(True)
self.g_predmeti = QtWidgets.QTreeWidget()
tabs.addTab(self.g_predmeti, "Predmeti")
self.g_predmeti.setColumnCount(3)
# GPredmeti->setColumnAlignment(1, AlignHCenter);
# GPredmeti->setColumnAlignment(2, AlignHCenter);
self.g_predmeti.setColumnWidth(0, 340)
# self.g_predmeti.setColumnWidthMode(0, QListView::Manual);
self.g_predmeti.setSortingEnabled(True)
self.g_dogodki = QtWidgets.QTreeWidget()
tabs.addTab(self.g_dogodki, "Dogodki")
self.g_dogodki.setColumnCount(1)
self.g_dogodki.setHeaderHidden(True)
self.g_lokacije = QtWidgets.QTreeWidget()
tabs.addTab(self.g_lokacije, "Lokacije")
		self.g_lokacije.setHeaderHidden(True)
self.g_zastavice = QtWidgets.QTreeWidget()
tabs.addTab(self.g_zastavice, "Zastavice")
self.g_zastavice.setColumnCount(1)
self.g_zastavice.setHeaderHidden(True)
self.g_sporocila = QtWidgets.QTreeWidget()
tabs.addTab(self.g_sporocila, "Ukazi")
self.g_sporocila.setColumnCount(1)
self.g_predmeti.setColumnWidth(0, 100)
self.g_sporocila.setHeaderHidden(True)
#######################################
# Controller
def process_events(self, table, system, word1=None, word2=None):
match = 0
for event in table:
res = event(self, system, word1, word2)
if res in [-1, 1]:
return res
elif res:
match = 1
return match
def user_command(self):
command = self.ukazna.text().upper()
if not command:
return
self.ukazna.setText("")
self.printout('<font color="yellow">> %s</font>' % command)
self.turns += 1
commsplit = command.split()
if commsplit and (commsplit[0] in ["SHRA", "SAVE"]):
			# NOTE: self.save() was undefined; route to save_position with an
			# assumed default save file name.
			self.save_position("kontrabant.sav")
return
if commsplit and (commsplit[0] in ["NALO", "LOAD"]):
			# NOTE: self.load() was undefined; same assumed default file as above.
			self.load_position("kontrabant.sav")
self.goljufija()
return
trans = []
for w in commsplit:
t = self.vocabulary.get(w[:4], None)
if t:
trans.append(t)
if not len(trans):
self.printout("Tega sploh ne razumem. "
"Poskusi povedati kako drugače.")
elif len(trans) == 1 and trans[0] in self.location.connections:
self.flags[2] = self.location_no = \
self.location.connections[trans[0]]
self.location = self.locations[self.location_no]
self.update_location()
else:
if len(trans) == 1:
m = self.process_events(self.responses, 0, trans[0])
else:
m = self.process_events(self.responses, 0, trans[0], trans[1])
if m == 0:
if len(trans) == 1 and trans[0] < 16:
self.printout("Mar ne vidiš, da v to smer ni poti?")
else:
self.printout("Tega pa ne morem.")
self.process_events(self.process, 1)
self.goljufija()
def save_position(self, fname):
f = open(fname, "wb")
pickle.dump(self.flags, f, 1)
pickle.dump([o.location for o in self.objects], f, 1)
def load_position(self, fname):
f = open(fname, "rb")
self.flags = pickle.load(f)
object_locations = pickle.load(f)
self.location_no = self.flags[2]
self.location = self.locations[self.location_no]
for r in range(len(object_locations)):
self.objects[r].location = object_locations[r]
self.update_location()
def printout(self, msg):
self.izpisano += msg + "<br>"
self.izpis.setHtml(self.izpisano)
self.izpis.scrollContentsBy(0, 30000)
def anykey(self):
return
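		# NOTE: the early return above intentionally disables the modal dialog
		# below; remove it to restore the 'press OK to continue' prompt.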
QtWidgets.QMessageBox.information(
None, "Čakam...", "Pritisni OK, pa bova nadaljevala")
def set_location_description(self, msg, dirs):
self.printout(msg)
#######################################
# Cheating
def ldesc(self, n):
return self.locations[n].description[:40]
def ldesci(self, n):
return self.ldesc(n), n
def lidesc(self, n):
return n, self.ldesc(n)
def repr_action(self, event, system, skipat=0, adddict=""):
ldesci = self.ldesci
lidesc = self.lidesc
if not system:
if event.word2 != 255:
tc = " ".join((self.index_to_word[event.word1],
self.index_to_word[event.word2], adddict))
elif event.word1 != 255:
tc = " ".join((self.index_to_word[event.word1], adddict))
else:
tc = adddict
else:
tc = adddict
ta = []
for op, param1, param2 in event.conditions:
if self.Event.cond_ops[op][0] == "AT":
if skipat:
continue
else:
if tc:
tc += " [AT %s (%i)]" % ldesci(param1)
else:
tc = "AT %s (%i)" % ldesci(param1)
else:
s = "--> %s " % self.Event.cond_ops[op][0]
if param1:
if op < 4:
s += "%i (%s...) " % lidesc(param1)
elif op < 10:
s += "%i (%s) " % (param1,
self.objects[param1].description)
elif op < 13:
s += "%i " % param1
else:
s += "%i %i " % (param1, param2)
ta.append(s)
for action in event.actions:
tt = event.act_ops[action[0]]
atype = event.types[action[0]]
param1, param2 = (action[1] + (None, None))[:2]
if atype == self.Event.LOC:
tt += " %i (%s...)" % lidesc(param1)
elif atype == self.Event.MSG:
tt += " '%s'" % self.messages[param1]
elif atype == self.Event.OBJ:
tt += " '%s' (%i)" % (
self.objects[param1].description, param1)
elif atype == self.Event.SWAP:
tt += " '%s' (%i) '%s' (%i)" % (
self.objects[param1].description, param1,
self.objects[param2].description, param2)
elif event.nparams[action[0]] == 1:
tt += " %i" % param1
elif event.nparams[action[0]] == 2:
tt += " %i %i" % (param1, param2)
ta.append(tt)
return tc, ta, not tc
@staticmethod
def parse_tree(tree_widget, tree):
tree_widget.clear()
for state, events in tree:
it = QtWidgets.QTreeWidgetItem(state)
tree_widget.addTopLevelItem(it)
for event in events:
text, subnodes, is_open = (event + (None, None))[:3]
if isinstance(text, str):
it2 = QtWidgets.QTreeWidgetItem([text])
it.addChild(it2)
if subnodes:
it2.addChildren([QtWidgets.QTreeWidgetItem([i])
for i in subnodes])
it2.setExpanded(True)
else:
it.addChildren(QtWidgets.QTreeWidgetItem([i]) for i in text)
def goljufija_const(self):
repr_act = self.repr_action
ldesci = self.ldesci
def getlocations():
def process_events(loc, table, system):
acts, spec_exits, spec_approaches = [], [], []
for event in table:
for op, param1, param2 in event.conditions:
if op <= 1 and param1 == loc:
for action in event.actions:
if event.act_ops[action[0]] == "GOTO":
if action[1][0] != loc:
spec_exits.append(
repr_act(event, system, 1,
"-> %s (%i)"
% ldesci(action[1][0])))
else:
spec_approaches.append(
repr_act(event, system, 1,
"<- %s (%i)"
% ldesci(param1)))
break
else:
# It is not an exit
acts.append(repr_act(event, system, 0))
break
else:
# There is no 'AT location';
# check whether this can be a special approach
for action in event.actions:
if event.act_ops[action[0]] == "GOTO" and \
action[1][0] == loc:
spec_approaches.append(repr_act(event, system))
break
					# (events with an 'AT location' condition were already
					# classified as exits or approaches in the branch above)
return acts, spec_exits, spec_approaches
def process_exits(loc):
return ["%s -> %s (%i)" %
((self.index_to_word[d],) + ldesci(n))
for d, n in self.locations[loc].connections.items()]
def process_approaches(loc):
app = []
for src, location in enumerate(self.locations):
if loc in list(location.connections.values()):
for d, n in location.connections.items():
if n == loc:
app.append("%s (%i) -> %s" %
(ldesci(src) +
(self.index_to_word[d], )))
return app
self.cheat_locations = {}
for i in range(len(self.locations)):
exits = process_exits(i)
approaches = process_approaches(i)
responses, se, sa = process_events(i, self.responses, 0)
exits += se
approaches += sa
processes, se, sa = process_events(i, self.process, 1)
exits += se
approaches += sa
self.cheat_locations[i] = (responses, processes)
it = QtWidgets.QTreeWidgetItem(
["%s (%i)" % (self.locations[i].description, i)])
self.g_lokacije.addTopLevelItem(it)
for name, content in (
("Vhodi", approaches), ("Izhodi", exits),
("Ukazi", responses), ("Dogodki", processes)):
if not content:
continue
it2 = QtWidgets.QTreeWidgetItem([name])
it.addChild(it2)
for con in content:
if isinstance(con, str):
it3 = QtWidgets.QTreeWidgetItem([con])
else:
it3 = QtWidgets.QTreeWidgetItem([con[0]])
it3.addChildren([QtWidgets.QTreeWidgetItem([i])
for i in con[1]])
it3.setExpanded(True)
it2.addChild(it3)
it2.setExpanded(True)
def getmessages():
def process_events(msg_no, table, system):
acts = []
for event in table:
for action in event.actions:
if event.act_ops[action[0]][:3] == "MES" and \
action[1][0] == msg_no:
break
else:
continue
acts.append(repr_act(event, system))
return acts
return [("%s (%i)" % (self.messages[i], i),
process_events(i, self.responses, 0) +
process_events(i, self.process, 1))
for i in range(len(self.messages))]
			def add_event_to_tree(tree, event, system=0):
				# the second positional argument of repr_action is 'system', not 'skipat'
				tc, ta, isopen = repr_act(event, system)
it = QtWidgets.QTreeWidgetItem([tc])
tree.addTopLevelItem(it)
it.addChildren([QtWidgets.QTreeWidgetItem([i]) for i in ta])
def get_responses():
acts = []
trivial = {self.vocabulary["DAJ"]: "DROP",
self.vocabulary["VZEM"]: "GET",
self.vocabulary["OBLE"]: "WEAR",
self.vocabulary["SLEC"]: "REMOVE"}
for event in self.responses:
if (not event.conditions and len(event.actions) == 2 and
event.act_ops[event.actions[1][0]] in ["OK", "DONE"] and
trivial.get(event.word1, None) ==
event.act_ops[event.actions[0][0]]):
continue
if event.word1 < 16:
for op, param1, param2 in event.conditions:
if not op:
break
else:
self.g_sporocila.addTopLevelItem(
						QtWidgets.QTreeWidgetItem([repr_act(event, 0)[0]]))
continue
add_event_to_tree(self.g_sporocila, event)
def get_process():
for event in self.process:
add_event_to_tree(self.g_dogodki, event, 1)
return (getlocations(), getmessages(),
get_responses(), get_process(), None)
def goljufija(self):
repr_act = self.repr_action
def getlocation():
self.g_lokacija.clear()
conn = list(self.location.connections.items())
if conn:
it = QtWidgets.QTreeWidgetItem(["Izhodi"])
self.g_lokacija.addTopLevelItem(it)
it.addChildren([QtWidgets.QTreeWidgetItem(
["%s: %s (%i)" % (
self.index_to_word[dire],
self.locations[loc].description[:40], loc)])
for dire, loc in conn])
it.setExpanded(True)
responses, processes = self.cheat_locations[self.location_no]
if responses:
it = QtWidgets.QTreeWidgetItem(["Ukazi"])
self.g_lokacija.addTopLevelItem(it)
for content in responses:
it2 = QtWidgets.QTreeWidgetItem([content[0]])
it.addChild(it2)
it2.addChildren([QtWidgets.QTreeWidgetItem([i])
for i in content[1]])
it2.setExpanded(True)
it.setExpanded(True)
if processes:
it = QtWidgets.QTreeWidgetItem(["Dogodki"])
self.g_lokacija.addTopLevelItem(it)
for content in processes:
it2 = QtWidgets.QTreeWidgetItem([content[0]])
it.addChild(it2)
it2.addChildren([QtWidgets.QTreeWidgetItem([i])
for i in content[1]])
it2.setExpanded(True)
it.setExpanded(True)
objlocs = {self.Object.CARRIED: "imam",
self.Object.WORN: "nosim",
self.Object.NOT_CREATED: "ne obstaja",
self.Object.INVALID: "ne obstaja"}
def getobjects():
def process_events(object_no, table, system):
acts = []
trivial = {self.vocabulary["DAJ"]: "DROP",
self.vocabulary["VZEM"]: "GET",
self.vocabulary["OBLE"]: "WEAR",
self.vocabulary["SLEC"]: "REMOVE"}
for event in table:
if not system and not event.conditions and \
len(event.actions) == 2 and \
event.act_ops[event.actions[1][0]] in ["OK",
"DONE"] \
and trivial.get(event.word1, None) == \
event.act_ops[event.actions[0][0]]:
continue
for op, param1, param2 in event.conditions:
if 4 <= op <= 9 and param1 == object_no:
break
else:
for action in event.actions:
atype = event.types[action[0]]
if (atype in [event.OBJ, event.SWAP] and
action[1][0] == object_no or
atype == self.Event.SWAP and
action[1][1] == object_no):
break
else:
							continue  # skip: object_no is mentioned neither
							          # in the conditions nor in the actions
acts.append(repr_act(event, system))
return acts
def objloc(objno):
loc = self.objects[objno].location
if loc < 0xfc:
return str(loc)
else:
return objlocs[loc]
if not hasattr(self, "cheatobjects"):
self.cheatobjects = [([self.objects[i].description, str(i),
objloc(i)],
process_events(i, self.responses, 0) +
process_events(i, self.process, 1))
for i in range(len(self.objects))]
else:
for i in range(len(self.objects)):
self.cheatobjects[i][0][2] = objloc(i)
return self.cheatobjects
def getflags():
flops = [Quill.Event.ptas[0][0].index(x)
for x in ["PLUS", "MINUS", "SET", "CLEAR", "LET"]]
def process_events(flag_no, table, system):
acts = []
for event in table:
for op, param1, param2 in event.conditions:
if op >= 11 and param1 == flag_no:
break
else:
for action in event.actions:
if action[0] in flops and flag_no == action[1][0]:
break
else:
							continue  # skip: the flag is mentioned neither
							          # in the conditions nor in the actions
acts.append(repr_act(event, system))
return acts
if not hasattr(self, "cheatflags"):
self.cheatflags = [(["%i = %i" % (i, self.flags[i])],
process_events(i, self.responses, 0) +
process_events(i, self.process, 1))
for i in range(len(self.flags))]
else:
self.cheatflags = [(["%i = %i" % (i, self.flags[i])],
self.cheatflags[i][1])
for i in range(len(self.flags))]
return self.cheatflags[:3] + [x for x in self.cheatflags[3:]
if x[1]]
getlocation()
self.parse_tree(self.g_zastavice, getflags())
self.parse_tree(self.g_predmeti, getobjects())
app = QtWidgets.QApplication([])
q = Quill()
app.exec()<|fim▁end|> | def do_quit(self):
self.reset()
self.update_location() |
<|file_name|>installForTest.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
__author__ = 'CGS'
import os, shutil, sys, distutils.core, distutils.dir_util, subprocess
# Some configuration needed for this file
apps_directory = ""
apps = {"variants": "apps/variants"}
PRODUCTION = False
# TODO: better management of errors
# Some basic checks
if os.getuid() != 0:
sys.exit("This program requires super user privileges.")
if len(sys.argv) <= 1:
sys.exit("Please, give the name of the app you want to install. Choose among the followings: " +
str(apps.keys()))
if sys.argv[0] != "installCGSapps.py" and "/" in sys.argv[0]:
# If the script was not launch in the current directory, we have to make some modifications
tmp = sys.argv[0].split("/")
script_name = tmp.pop()
app_directory_prefix = sys.argv[0].replace("/"+script_name,"/")
else:
app_directory_prefix = ""
# We take the folder where hue is installed
try:
hue_directory = subprocess.Popen("whereis hue", stdin=False, shell=True, stdout=subprocess.PIPE)
hue_directory = str(hue_directory.communicate()[0]).split(" ")[2].strip()
except:
hue_directory = "/usr/lib/hue"
if not os.path.exists(hue_directory) and "HUE_DIRECTORY" in os.environ:
hue_directory = os.environ["HUE_DIRECTORY"]
if os.path.exists(hue_directory) and not os.path.exists(hue_directory+"/myapps"):
try:
os.makedirs(hue_directory+"/myapps")
except:
sys.exit("Impossible to create the folder 'myapps' in '"+hue_directory+"'.")
apps_directory = hue_directory + "/myapps"
# Some basic checks first
if not os.path.exists(hue_directory):
sys.exit("This installation file did not find the hue directory, please create a HUE_DIRECTORY environment"
"variable.")
# We install each application
aborted = 0
for i in xrange(1, len(sys.argv)):
app_name = sys.argv[i]
if not app_name in apps:
sys.exit("Invalid app name. Choose among the followings: "+str(apps.keys()))
if not os.path.exists(app_directory_prefix+apps[app_name]):
sys.exit("It seems the source of the app '"+app_name+"' is missing from the uncompressed zip.")
<|fim▁hole|> if os.path.exists(app_directory):
		if PRODUCTION:
reinstall = raw_input("It seems the '"+app_name+"' already exists. Do you want to reinstall it [Y/n]?")
else:
reinstall = "Y"
if reinstall != "Y" and reinstall != "y":
print("Installation of '"+app_name+"' aborted.")
aborted += 1
continue
else:
try:
shutil.rmtree(app_directory)
except Exception as e:
print(e.message)
sys.exit("Impossible to delete the folder "+app_directory+". Check the access rights.")
# We create the app
# TODO: we do not catch correctly the errors of 'subprocess'
try:
print("Creating the app '"+app_name+"'...")
app_install = subprocess.Popen("cd " + apps_directory + " && " + hue_directory +
"/build/env/bin/hue create_desktop_app " + app_name,
stdin=False, shell=True, stdout=subprocess.PIPE)
app_install.communicate()
app_install = subprocess.Popen("cd " + apps_directory + " && python " + hue_directory +
"/tools/app_reg/app_reg.py --install " + app_name,
stdin=False, shell=True, stdout=subprocess.PIPE)
app_install.communicate()
except Exception as e:
print(e.message)
sys.exit("Error while creating the app...")
"""
# We copy the content of the application to the new directory
app_src = app_directory_prefix+apps[app_name]
try:
print("Copying source code to app folder...")
distutils.dir_util.copy_tree(app_src, app_directory)
except:
sys.exit("Impossible to copy data from '"+app_src+"' to '"+app_directory+"'.")
# We restart hue
try:
app_install = subprocess.Popen("service hue restart", stdin=False, shell=True, stdout=subprocess.PIPE)
app_install.communicate()
except Exception as e:
print(e.message)
sys.exit("Error while restarting hue.")
# The happy end
if aborted == 0:
print("Installation successful.")
elif aborted != len(sys.argv) - 1:
print("Installation of the 'non-aborted' apps successful.")<|fim▁end|> | app_directory = apps_directory+"/"+app_name
"""
# We try to delete the eventual old folder |
<|file_name|>shadertool.js<|end_file_name|><|fim▁begin|>var ShaderTool = new (function ShaderTool(){
function catchReady(fn) {
var L = 'loading';
if (document.readyState != L){
fn();
} else if (document.addEventListener) {
document.addEventListener('DOMContentLoaded', fn);
} else {
document.attachEvent('onreadystatechange', function() {
if (document.readyState != L){
fn();
}
});
}
}
this.VERSION = '0.01';
this.modules = {};
this.helpers = {};
this.classes = {};
var self = this;
catchReady(function(){
self.modules.GUIHelper.init();
self.modules.UniformControls.init();
self.modules.Editor.init();
self.modules.Rendering.init();
self.modules.SaveController.init();
self.modules.PopupManager.init();
document.documentElement.className = '_ready';
});
})();
// Utils
ShaderTool.Utils = {
trim: function( string ){
return string.replace(/^\s+|\s+$/g, '');
},
isSet: function( object ){
return typeof object != 'undefined' && object != null
},
isArray: function( object ){
var str = Object.prototype.toString.call(object);
return str == '[object Array]' || str == '[object Float32Array]';
// return Object.prototype.toString.call(object) === '[object Array]';
},
	isArrayLike: function( object ){
		if(this.isArray(object)){ return true; }
		if(this.isObject(object) && this.isNumber(object.length) ){ return true; }
		return false;
	},
isNumber: function( object ){
return typeof object == 'number' && !isNaN(object);
},
isFunction: function( object ){
return typeof object == 'function';
},
isObject: function( object ){
return typeof object == 'object';
},
isString: function( object ){
return typeof object == 'string';
},
createNamedObject: function( name, props ){
return internals.createNamedObject( name, props );
},
testCallback: function( callback, applyArguments, context ){
if(this.isFunction(callback)){
return callback.apply(context, applyArguments || []);
}
return null;
},
copy: function( from, to ){
for(var i in from){ to[i] = from[i]; }
return to;
},
delegate: function( context, method ){
return function delegated(){
for(var argumentsLength = arguments.length, args = new Array(argumentsLength), k=0; k<argumentsLength; k++){
args[k] = arguments[k];
}
return method.apply( context, args );
}
},
debounce: function(func, wait, immediate) {
var timeout;
return function() {
var context = this, args = arguments;
var later = function() {
timeout = null;
if (!immediate){
func.apply(context, args);
}
};
var callNow = immediate && !timeout;
clearTimeout(timeout);
timeout = setTimeout(later, wait);
if (callNow){
func.apply(context, args);
}
};
},
throttle: function(func, ms) {
var isThrottled = false, savedArgs, savedThis;
function wrapper() {
if (isThrottled) {
savedArgs = arguments;
savedThis = this;
return;
}
func.apply(this, arguments);
isThrottled = true;
setTimeout(function() {
isThrottled = false;
if (savedArgs) {
wrapper.apply(savedThis, savedArgs);
savedArgs = savedThis = null;
}
}, ms);
}
return wrapper;
},
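	// Usage sketch (assumed): var onMove = ShaderTool.Utils.throttle(handler, 100)
	// yields a wrapper that fires at most once per 100 ms and replays the last
	// skipped call afterwards; debounce above instead waits for a quiet period.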
now: function(){
var P = 'performance';
if (window[P] && window[P]['now']) {
this.now = function(){ return window.performance.now() }
} else {
this.now = function(){ return +(new Date()) }
}
return this.now();
},
	isNumberKey: function(e){
		var charCode = (e.which) ? e.which : e.keyCode;
		if (charCode == 46) {
			// Check if the text already contains the '.' character.
			// NOTE: 'txt' was an undefined global in the original; assume the
			// event target is the input being validated.
			var txt = e.target || e.srcElement;
			return txt.value.indexOf('.') === -1;
		}
		// Allow control keys (charCode <= 31), digits (48-57) and numpad digits (96-105).
		if (charCode > 31 && (charCode < 48 || charCode > 57)) {
			if (charCode < 96 || charCode > 105) {
				return false;
			}
		}
		return true;
	},
toDecimalString: function( string ){
if(this.isNumber(string)){
return string;
}
if(string.substr(0,1) == '0'){
if(string.substr(1,1) != '.'){
string = '0.' + string.substr(1, string.length);
}
}
return string == '0.' ? '0' : string;
},
/*
hexToRgb: function(hex) {
var result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
return result ? [
r: parseInt(result[1], 16),
g: parseInt(result[2], 16),
b: parseInt(result[3], 16)
] : [];
}
*/
};
// Callback (Signal?)
ShaderTool.Utils.Callback = (function(){
// Callback == Signal ?
function Callback() {
this._handlers = [];
var self = this;
this.callShim = function(){
self.call.apply(self, arguments);
}
}
Callback.prototype = {
_throwError: function() {
throw new TypeError('Callback handler must be function!');
},
add: function(handler, context) {
if (typeof handler != 'function') {
this._throwError();
return;
}
this.remove(handler);
this._handlers.push({handler:handler, context: context});
},
remove: function(handler) {
if (typeof handler != 'function') {
this._throwError();
return;
}
var totalHandlers = this._handlers.length;
for (var k = 0; k < totalHandlers; k++) {
if (handler === this._handlers[k].handler) {
this._handlers.splice(k, 1);
return;
}
}
},
call: function() {
var totalHandlers = this._handlers.length;
for (var k = 0; k < totalHandlers; k++) {
var handlerData = this._handlers[k];
handlerData.handler.apply(handlerData.context || null, arguments);
}
}
};
return Callback;
})();
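// Example (assumed usage):
//   var cb = new ShaderTool.Utils.Callback();
//   function onTick(delta){ /* ... */ }
//   cb.add(onTick);   // duplicate adds are ignored (add() removes the handler first)
//   cb.call(16);      // invokes every handler with the given arguments
//   cb.remove(onTick);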
ShaderTool.Utils.Float32Array = (function(){
return typeof Float32Array === 'function' ? Float32Array : Array;
})();
ShaderTool.Utils.DOMUtils = (function(){
function addSingleEventListener(element, eventName, handler){
if (element.addEventListener) {
element.addEventListener(eventName, handler);
} else {
element.attachEvent('on' + eventName, function(e){
handler.apply(element,[e]);
});
}
}
	var tempDiv = document.createElement('div');
	var SPACE = ' '; // referenced by addClass/removeClass below but never defined
function DOMUtils(){};
DOMUtils.prototype = {
addEventListener : function(element, eventName, handler){
if(ShaderTool.Utils.isArrayLike(element)){
var totalElements = element.length;
for(var k=0; k<totalElements; k++){
this.addEventListener(element[k], eventName, handler);
}
} else {
var eventName = ShaderTool.Utils.isArray(eventName) ? eventName : eventName.split(' ').join('|').split(',').join('|').split('|');
if(eventName.length > 1){
var totalEvents = eventName.length;
for(var k=0; k<totalEvents; k++){
addSingleEventListener(element, eventName[k], handler );
}
} else {
addSingleEventListener(element, eventName[0], handler);
}
}
},
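		// Accepts a single element or an array-like of elements, and one or
		// several event names in one string, e.g. addEventListener(el, 'input change', fn).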
addClass : function(element, className){
if (element.classList){
element.classList.add(className);
} else {
element.className += SPACE + className;
}
},
removeClass : function(element, className){
if (element.classList){
element.classList.remove(className);
} else{
element.className = element.className.replace(new RegExp('(^|\\b)' + className.split(SPACE).join('|') + '(\\b|$)', 'gi'), SPACE);
}
},
injectCSS: function( cssText ){
try{
var styleElement = document.createElement('style');
styleElement.type = 'text/css';
if (styleElement.styleSheet) {
styleElement.styleSheet.cssText = cssText;
} else {
styleElement.appendChild(document.createTextNode(cssText));
}
document.getElementsByTagName('head')[0].appendChild(styleElement);
return true;
} catch( e ){
return false;
}
},
createFromHTML: function( html ){
tempDiv.innerHTML = html.trim();
var result = tempDiv.childNodes;
if(result.length > 1){
				tempDiv.innerHTML = '<div>' + html.trim() + '</div>';
result = tempDiv.childNodes;
}
return result[0];
}
}
return new DOMUtils();
})();
// Helpers
// LSHelper
ShaderTool.helpers.LSHelper = (function(){
var ALLOW_WORK = window.localStorage != null || window.sessionStorage != null;
function LSHelper(){
this._storage = window.localStorage || window.sessionStorage;
}
LSHelper.prototype = {
setItem: function( key, data ){
if( !ALLOW_WORK ){ return null }
var json = JSON.stringify(data)
this._storage.setItem( key, json );
return json;
},
getItem: function( key ){
if( !ALLOW_WORK ){ return null }
return JSON.parse(this._storage.getItem( key ))
},
clearItem: function( key ){
if( !ALLOW_WORK ){ return null }
this._storage.removeItem( key )
}
}
return new LSHelper();
})();
// FSHelper
ShaderTool.helpers.FSHelper = (function(){
function FSHelper(){};
FSHelper.prototype = {
request: function( element ){
if (element.requestFullscreen) {
element.requestFullscreen();
} else if (element.mozRequestFullScreen) {
element.mozRequestFullScreen();
} else if (element.webkitRequestFullScreen) {
element.webkitRequestFullScreen(Element.ALLOW_KEYBOARD_INPUT);
}
},
exit: function(){
if (document.cancelFullScreen) {
document.cancelFullScreen();
} else if (document.mozCancelFullScreen) {
document.mozCancelFullScreen();
} else if (document.webkitCancelFullScreen) {
document.webkitCancelFullScreen();
}
}
}
return new FSHelper();
})();
// Ticker
ShaderTool.helpers.Ticker = (function(){
var raf;
var lastTime = 0;
var vendors = ['ms', 'moz', 'webkit', 'o'];
for(var x = 0; x < vendors.length && !window.requestAnimationFrame; ++x) {
window.requestAnimationFrame = window[vendors[x]+'RequestAnimationFrame'];
window.cancelAnimationFrame = window[vendors[x]+'CancelAnimationFrame'] || window[vendors[x]+'CancelRequestAnimationFrame'];
}
if (!window.requestAnimationFrame){
raf = function( callback ) {
			var currTime = ShaderTool.Utils.now();
var timeToCall = Math.max(0, 16 - (currTime - lastTime));
var id = window.setTimeout( function(){
callback(currTime + timeToCall);
}, timeToCall);
lastTime = currTime + timeToCall;
return id;
};
} else {
raf = function( callback ){
return window.requestAnimationFrame( callback );
}
}
function Ticker(){
this.onTick = new ShaderTool.Utils.Callback();
var activeState = true;
var applyArgs = [];
var listeners = [];
var prevTime = ShaderTool.Utils.now();
var elapsedTime = 0;
var timeScale = 1;
var self = this;
var skippedFrames = 0;
var maxSkipFrames = 3;
this.stop = this.pause = this.sleep = function(){
activeState = false;
return this;
}
this.start = this.wake = function(){
activeState = true;
return this;
}
this.reset = function(){
elapsedTime = 0;
}
this.timeScale = function( value ){
if(ShaderTool.Utils.isSet(value)){ timeScale = value; }
return timeScale;
}
this.toggle = function(){
return (activeState ? this.stop() : this.start());
}
this.isActive = function(){
return activeState;
}
this.getTime = function(){
return elapsedTime;
}
function tickHandler( nowTime ){
var delta = (nowTime - prevTime) * timeScale;
prevTime = nowTime;
if(skippedFrames < maxSkipFrames){
skippedFrames++;
} else {
if(activeState){
elapsedTime += delta;
self.onTick.call(delta, elapsedTime)
}
}<|fim▁hole|> };
return new Ticker();
})();
// Modules
// Future module
ShaderTool.modules.GUIHelper = (function(){
function GUIHelper(){}
GUIHelper.prototype = {
init: function(){
console.log('ShaderTool.modules.GUIHelper.init')
},
showError: function( message ){
console.error('GUIHelper: ' + message)
}
}
return new GUIHelper();
})();
// Editor
ShaderTool.modules.Editor = (function(){
function Editor(){}
Editor.prototype = {
init: function(){
console.log('ShaderTool.modules.Editor.init');
this._container = document.getElementById('st-editor');
this._editor = ace.edit(this._container);
this._editor.getSession().setMode('ace/mode/glsl');
// https://ace.c9.io/build/kitchen-sink.html
// this._editor.getSession().setTheme();
this._editor.$blockScrolling = Infinity;
this.onChange = new ShaderTool.Utils.Callback();
var self = this;
//this._editor.on('change', function(){
//self.onChange.call();
//});
this._editor.on('change', ShaderTool.Utils.throttle(function(){
if(!self._skipCallChange){
self.onChange.call();
}
}, 1000 / 60 * 10));
},
getData: function(){
return this._editor.getSession().getValue();
},
setData: function( value, skipCallChangeFlag ){
this._skipCallChange = skipCallChangeFlag;
this._editor.getSession().setValue( value );
this._skipCallChange = false;
if(!skipCallChangeFlag){
this.onChange.call();
}
},
clear: function(){
this.setValue('');
},
// future methods:
//lock: function(){},
//unlock: function(){},
//load: function( url ){}
}
return new Editor();
})();
ShaderTool.modules.Rendering = (function(){
var VERTEX_SOURCE = 'attribute vec2 av2_vtx;varying vec2 vv2_v;void main(){vv2_v = av2_vtx;gl_Position = vec4(av2_vtx, 0., 1.);}';
function Rendering(){}
Rendering.prototype = {
init: function(){
console.log('ShaderTool.modules.Rendering.init');
this._canvas = document.getElementById('st-canvas');
this._context = D3.createContextOnCanvas(this._canvas);
this._initSceneControls();
this.onChange = new ShaderTool.Utils.Callback();
// this._sourceChanged = true;
var fragmentSource = 'precision mediump float;\n';
fragmentSource += 'uniform sampler2D us2_source;\n';
fragmentSource += 'uniform float uf_time;\n';
fragmentSource += 'uniform vec2 uv2_resolution;\n';
fragmentSource += 'void main() {\n';
fragmentSource += '\tgl_FragColor = \n';
// fragmentSource += 'vec4(gl_FragCoord.xy / uv2_resolution, sin(uf_time), 1.);\n';
fragmentSource += '\t\ttexture2D(us2_source, gl_FragCoord.xy / uv2_resolution);\n';
fragmentSource += '}\n';
this._program = this._context.createProgram({
vertex: VERTEX_SOURCE,
fragment: fragmentSource
});
this._buffer = this._context.createVertexBuffer().upload(new ShaderTool.Utils.Float32Array([1,-1,1,1,-1,-1,-1,1]));
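			// Corners of a fullscreen quad in clip space, drawn below as a
			// two-triangle strip (mode: TriangleStrip, count: 4).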
this._resolution = null;
this._texture = null;
this._framebuffer = null;
this._writePosition = 0;
this._source = {
program: this._program,
attributes: {
'av2_vtx': {
buffer: this._buffer,
size: 2,
type: this._context.AttribType.Float,
offset: 0
}
},
uniforms: {
'us2_source': this._context.UniformSampler(this._texture)
},
mode: this._context.Primitive.TriangleStrip,
count: 4
};
this._rasterizers = [];
this._rasterizers.push(new ShaderTool.classes.Rasterizer( this._context ));
// this._updateSource();
ShaderTool.modules.Editor.onChange.add(this._updateSource, this);
ShaderTool.modules.UniformControls.onChangeUniformList.add(this._updateSource, this);
ShaderTool.modules.UniformControls.onChangeUniformValue.add(this._updateUniforms, this);
ShaderTool.helpers.Ticker.onTick.add(this._render, this);
},
_updateSource: function( skipCallChangeFlag ){
var uniformSource = ShaderTool.modules.UniformControls.getUniformsCode();
var shaderSource = ShaderTool.modules.Editor.getData();
var fullSource = 'precision mediump float;\n\n' + uniformSource + '\n\n\n' + shaderSource;
var totalRasterizers = this._rasterizers.length;
for(var k=0; k<totalRasterizers; k++){
var rasterizer = this._rasterizers[k];
rasterizer.updateSource(fullSource);
}
this._updateUniforms( skipCallChangeFlag );
},
_updateUniforms: function( skipCallChangeFlag ){
var uniforms = ShaderTool.modules.UniformControls.getUniformsData( this._context );
var totalRasterizers = this._rasterizers.length;
for(var k=0; k<totalRasterizers; k++){
var rasterizer = this._rasterizers[k];
rasterizer.updateUniforms(uniforms);
}
if(!skipCallChangeFlag){
this.onChange.call();
}
},
_setResolution: function (width, height) {
if (!this._resolution) {
this._texture = [
this._context.createTexture().uploadEmpty(this._context.TextureFormat.RGBA_8, width, height),
this._context.createTexture().uploadEmpty(this._context.TextureFormat.RGBA_8, width, height)
];
				this._framebuffer = [
this._context.createFramebuffer().attachColor(this._texture[1]),
this._context.createFramebuffer().attachColor(this._texture[0])
];
} else if (this._resolution[0] !== width || this._resolution[1] !== height) {
this._texture[0].uploadEmpty(this._context.TextureFormat.RGBA_8, width, height);
this._texture[1].uploadEmpty(this._context.TextureFormat.RGBA_8, width, height);
}
this._resolution = [width, height];
},
_initSceneControls: function(){
var self = this;
this.dom = {};
this.dom.playButton = document.getElementById('st-play');
this.dom.pauseButton = document.getElementById('st-pause');
this.dom.rewindButton = document.getElementById('st-rewind');
this.dom.fullscreenButton = document.getElementById('st-fullscreen');
this.dom.timescaleRange = document.getElementById('st-timescale');
this.dom.renderWidthLabel = document.getElementById('st-renderwidth');
this.dom.renderHeightLabel = document.getElementById('st-renderheight');
this.dom.sceneTimeLabel = document.getElementById('st-scenetime');
function setPlayingState( state ){
if(state){
ShaderTool.helpers.Ticker.start();
self.dom.playButton.style.display = 'none';
self.dom.pauseButton.style.display = '';
} else {
ShaderTool.helpers.Ticker.stop();
self.dom.playButton.style.display = '';
self.dom.pauseButton.style.display = 'none';
}
}
ShaderTool.Utils.DOMUtils.addEventListener(this.dom.playButton, 'mousedown', function( e ){
e.preventDefault();
setPlayingState( true );
});
ShaderTool.Utils.DOMUtils.addEventListener(this.dom.pauseButton, 'mousedown', function( e ){
e.preventDefault();
setPlayingState( false );
});
ShaderTool.Utils.DOMUtils.addEventListener(this.dom.rewindButton, 'mousedown', function( e ){
e.preventDefault();
ShaderTool.helpers.Ticker.reset();
});
ShaderTool.Utils.DOMUtils.addEventListener(this.dom.fullscreenButton, 'mousedown', function( e ){
e.preventDefault();
ShaderTool.helpers.FSHelper.request(self._canvas);
});
ShaderTool.Utils.DOMUtils.addEventListener(this._canvas, 'dblclick', function( e ){
e.preventDefault();
ShaderTool.helpers.FSHelper.exit();
});
this.dom.timescaleRange.setAttribute('step', '0.001');
this.dom.timescaleRange.setAttribute('min', '0.001');
this.dom.timescaleRange.setAttribute('max', '10');
this.dom.timescaleRange.setAttribute('value', '1');
ShaderTool.Utils.DOMUtils.addEventListener(this.dom.timescaleRange, 'input change', function( e ){
ShaderTool.helpers.Ticker.timeScale( parseFloat(self.dom.timescaleRange.value) )
});
setPlayingState( true );
},
_render: function( delta, elapsedTime ){
// To seconds:
delta = delta * 0.001;
elapsedTime = elapsedTime * 0.001;
			this.dom.sceneTimeLabel.innerHTML = elapsedTime.toFixed(2);
if (this._canvas.clientWidth !== this._canvas.width ||
this._canvas.clientHeight !== this._canvas.height) {
var pixelRatio = window.devicePixelRatio || 1;
var cWidth = this._canvas.width = this._canvas.clientWidth * pixelRatio;
var cHeight = this._canvas.height = this._canvas.clientHeight * pixelRatio;
this._setResolution(cWidth, cHeight);
this.dom.renderWidthLabel.innerHTML = cWidth + 'px';
this.dom.renderHeightLabel.innerHTML = cHeight + 'px';
}
var previosFrame = this._texture[this._writePosition];
var resolution = this._resolution;
			var destination = { framebuffer: this._framebuffer[this._writePosition] };
var totalRasterizers = this._rasterizers.length;
for(var k=0; k<totalRasterizers; k++){
var rasterizer = this._rasterizers[k];
rasterizer.render(elapsedTime, previosFrame, resolution, destination);
}
if (!this._resolution) {
return;
}
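			// Ping-pong: flip the write target so the texture rendered by the
			// rasterizers this frame is sampled by the on-screen pass below and
			// becomes 'previosFrame' on the next frame.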
this._writePosition = (this._writePosition + 1) & 1;
this._source.uniforms['uf_time'] = this._context.UniformFloat( elapsedTime );
this._source.uniforms['uv2_resolution'] = this._context.UniformVec2( this._resolution );
this._source.uniforms['us2_source'] = this._context.UniformSampler( this._texture[this._writePosition] );
this._context.rasterize(this._source);
},
getData: function(){
return {
uniforms: ShaderTool.modules.UniformControls.getData(),
source: ShaderTool.modules.Editor.getData()
}
},
setData: function( data, skipCallChangeFlag ){
ShaderTool.modules.UniformControls.setData( data.uniforms, true );
ShaderTool.modules.Editor.setData( data.source, true );
this._updateSource( skipCallChangeFlag );
ShaderTool.helpers.Ticker.reset();
if(!skipCallChangeFlag){
this.onChange.call();
}
}
}
return new Rendering();
})();
// Controls
ShaderTool.modules.UniformControls = (function(){
function UniformControls(){}
UniformControls.prototype = {
init: function(){
console.log('ShaderTool.modules.UniformControls.init');
this.onChangeUniformList = new ShaderTool.Utils.Callback();
this.onChangeUniformValue = new ShaderTool.Utils.Callback();
this._changed = true;
this._callChangeUniformList = function(){
this._changed = true;
this.onChangeUniformList.call();
}
this._callChangeUniformValue = function(){
this._changed = true;
this.onChangeUniformValue.call();
}
this._container = document.getElementById('st-uniforms-container');
this._controls = [];
this._uniforms = {};
this._createMethods = {};
this._createMethods[UniformControls.FLOAT] = this._createFloat;
this._createMethods[UniformControls.VEC2] = this._createVec2;
this._createMethods[UniformControls.VEC3] = this._createVec3;
this._createMethods[UniformControls.VEC4] = this._createVec4;
this._createMethods[UniformControls.COLOR3] = this._createColor3;
this._createMethods[UniformControls.COLOR4] = this._createColor4;
// Templates:
this._templates = {};
var totalTypes = UniformControls.TYPES.length;
for(var k=0; k<totalTypes; k++){
var type = UniformControls.TYPES[k]
var templateElement = document.getElementById('st-template-control-' + type);
if(templateElement){
this._templates[type] = templateElement.innerHTML;
templateElement.parentNode.removeChild(templateElement);
} else {
console.warn('No template html for ' + type + ' type!');
}
}
this._container.innerHTML = ''; // Clear container
// Tests:
/*
for(var k=0; k<totalTypes; k++){
this._createControl('myControl' + (k+1), UniformControls.TYPES[k], null, true );
}
//uniform float slide;
//uniform vec3 color1;
this._createControl('slide', UniformControls.FLOAT, [{max: 10, value: 10}], true );
// this._createControl('color1', UniformControls.COLOR3, null, true );
this._createControl('color1', UniformControls.VEC3, [{value:1},{},{}], true );
this._createControl('test', UniformControls.FLOAT, null, true );
this._createControl('test2', UniformControls.FLOAT, [{value: 1}], true );
this._createControl('test3', UniformControls.FLOAT, [{ value: 1 }], true );
//
//this._callChangeUniformList();
//this._callChangeUniformValue();
*/
this._initCreateControls();
},
/* Public methods */
getUniformsCode: function(){
var result = [];
var totalControls = this._controls.length;
for(var k=0; k<totalControls; k++){
result.push(this._controls[k].code);
}
return result.join('\n');
},
getUniformsData: function( context ){
if(!this._changed){
return this._uniforms;
}
this._changed = false;
this._uniforms = {};
var totalControls = this._controls.length;
for(var k=0; k<totalControls; k++){
var control = this._controls[k];
var value = control.getUniformValue();
if(control.type == UniformControls.FLOAT){
this._uniforms[control.name] = context.UniformFloat(value);
} else if(control.type == UniformControls.VEC2){
this._uniforms[control.name] = context.UniformVec2(value);
} else if(control.type == UniformControls.VEC3 || control.type == UniformControls.COLOR3){
this._uniforms[control.name] = context.UniformVec3(value);
} else if(control.type == UniformControls.VEC4 || control.type == UniformControls.COLOR4){
this._uniforms[control.name] = context.UniformVec4(value);
}
}
return this._uniforms;
},
getData: function(){
var uniforms = [];
var totalControls = this._controls.length;
for(var k=0; k<totalControls; k++){
var control = this._controls[k];
uniforms.push({
name: control.name,
type: control.type,
data: control.data
})
}
return uniforms;
},
setData: function( uniforms, skipCallChangeFlag){
this._clearControls( skipCallChangeFlag );
// TODO;
var totalUniforms = uniforms.length;
for(var k=0; k<totalUniforms; k++){
var uniformData = uniforms[k];
this._createControl(uniformData.name, uniformData.type, uniformData.data, true)
}
if(!skipCallChangeFlag){
this._callChangeUniformList();
}
},
/* Private methods */
_checkNewUniformName: function( name ){
// TODO;
return name != '';
},
_initCreateControls: function(){
var addUniformNameInput = document.getElementById('st-add-uniform-name');
var addUniformTypeSelect = document.getElementById('st-add-uniform-type');
var addUniformSubmit = document.getElementById('st-add-uniform-submit');
var self = this;
ShaderTool.Utils.DOMUtils.addEventListener(addUniformSubmit, 'click', function( e ){
e.preventDefault();
var name = addUniformNameInput.value;
if( !self._checkNewUniformName(name) ){
// TODO: Show info about incorrect uniforn name?
addUniformNameInput.focus();
} else {
var type = addUniformTypeSelect.value;
self._createControl( name, type, null, false );
addUniformNameInput.value = '';
}
});
},
_createControl: function( name, type, initialData, skipCallChangeFlag ){
this._changed = true;
var self = this;
var control;
var elementTemplate = this._templates[type];
if( typeof elementTemplate == 'undefined' ){
console.error('No control template for type ' + type);
return;
}
var element = ShaderTool.Utils.DOMUtils.createFromHTML(elementTemplate);
var createMethod = this._createMethods[type];
if( createMethod ){
initialData = ShaderTool.Utils.isArray(initialData) ? initialData : [];
control = createMethod.apply(this, [name, element, initialData] );
} else {
throw new ShaderTool.Exception('Unknown uniform control type: ' + type);
return null;
}
control.name = name;
control.type = type;
control.element = element;
this._controls.push(control);
this._container.appendChild(element);
// name element
var nameElement = element.querySelector('[data-uniform-name]');
if(nameElement){
nameElement.setAttribute('title', 'Uniform ' + name + ' settings');
nameElement.innerHTML = name;
ShaderTool.Utils.DOMUtils.addEventListener(nameElement, 'dblclick', function( e ){
e.preventDefault();
alert('Show uniform rename dialog?')
});
}
// delete element
var deleteElement = element.querySelector('[data-uniform-delete]');
if(deleteElement){
ShaderTool.Utils.DOMUtils.addEventListener(deleteElement, 'click', function( e ){
e.preventDefault();
if (confirm('Delete uniform?')) {
self._removeControl( control );
}
});
}
if(!skipCallChangeFlag){
this._callChangeUniformList();
}
},
_removeControl: function( control, skipCallChangeFlag ){
var totalControls = this._controls.length;
for(var k=0; k<totalControls; k++){
if(this._controls[k] === control){
this._controls.splice(k, 1);
control.element.parentNode.removeChild( control.element );
break;
}
}
if(!skipCallChangeFlag){
this._callChangeUniformList();
}
},
_clearControls: function(skipCallChangeFlag){
var c = 0;
for(var k=0;k<this._controls.length; k++){
c++;
if(c > 100){
return;
}
this._removeControl( this._controls[k], true );
k--;
}
if(!skipCallChangeFlag){
this._callChangeUniformList();
}
},
_createFloat: function( name, element, initialData ){
var self = this;
var saveData = [ this._prepareRangeData( initialData[0]) ];
var uniformValue = saveData[0].value;
this._initRangeElementGroup(element, '1', saveData[0], function(){
uniformValue = saveData[0].value;
self._callChangeUniformValue();
});
return {
code: 'uniform float ' + name + ';',
data: saveData,
getUniformValue: function(){
return uniformValue;
}
}
},
_createVec2: function( name, element, initialData ){
var self = this;
var saveData = [
this._prepareRangeData( initialData[0] ),
this._prepareRangeData( initialData[1] )
];
var uniformValue = [saveData[0].value, saveData[1].value];
this._initRangeElementGroup(element, '1', saveData[0], function(){
uniformValue[0] = saveData[0].value;
self._callChangeUniformValue();
});
this._initRangeElementGroup(element, '2', saveData[1], function(){
uniformValue[1] = saveData[1].value;
self._callChangeUniformValue();
});
return {
code: 'uniform vec2 ' + name + ';',
data: saveData,
getUniformValue: function(){
return uniformValue;
}
}
},
_createVec3: function( name, element, initialData ){
var self = this;
var saveData = [
this._prepareRangeData( initialData[0] ),
this._prepareRangeData( initialData[1] ),
this._prepareRangeData( initialData[2] )
];
var uniformValue = [saveData[0].value, saveData[1].value, saveData[2].value];
this._initRangeElementGroup(element, '1', saveData[0], function(){
uniformValue[0] = saveData[0].value;
self._callChangeUniformValue();
});
this._initRangeElementGroup(element, '2', saveData[1], function(){
uniformValue[1] = saveData[1].value;
self._callChangeUniformValue();
});
this._initRangeElementGroup(element, '3', saveData[2], function(){
uniformValue[2] = saveData[2].value;
self._callChangeUniformValue();
});
return {
code: 'uniform vec3 ' + name + ';',
data: saveData,
getUniformValue: function(){
return uniformValue;
}
}
},
_createVec4: function( name, element, initialData ){
var self = this;
var saveData = [
this._prepareRangeData( initialData[0] ),
this._prepareRangeData( initialData[1] ),
this._prepareRangeData( initialData[2] ),
this._prepareRangeData( initialData[3] )
];
var uniformValue = [saveData[0].value, saveData[1].value, saveData[2].value, saveData[3].value];
this._initRangeElementGroup(element, '1', saveData[0], function(){
uniformValue[0] = saveData[0].value;
self._callChangeUniformValue();
});
this._initRangeElementGroup(element, '2', saveData[1], function(){
uniformValue[1] = saveData[1].value;
self._callChangeUniformValue();
});
this._initRangeElementGroup(element, '3', saveData[2], function(){
uniformValue[2] = saveData[2].value;
self._callChangeUniformValue();
});
this._initRangeElementGroup(element, '4', saveData[3], function(){
uniformValue[3] = saveData[3].value;
self._callChangeUniformValue();
});
return {
code: 'uniform vec4 ' + name + ';',
data: saveData,
getUniformValue: function(){
return uniformValue;
}
}
},
_createColor3: function( name, element, initialData ){
var self = this;
var saveData = this._prepareColorData(initialData, false);
this._initColorSelectElementGroup( element, false, saveData, function(){
self._callChangeUniformValue();
});
return {
code: 'uniform vec3 ' + name + ';',
data: saveData,
getUniformValue: function(){
return saveData;
}
}
},
_createColor4: function( name, element, initialData ){
var self = this;
var saveData = this._prepareColorData(initialData, true);
this._initColorSelectElementGroup( element, true, saveData, function(){
self._callChangeUniformValue();
});
return {
code: 'uniform vec4 ' + name + ';',
data: saveData,
getUniformValue: function(){
return saveData;
}
}
},
_prepareColorData: function( inputData, vec4Format ){
inputData = ShaderTool.Utils.isArray( inputData ) ? inputData : [];
var resultData = vec4Format ? [0,0,0,1] : [0,0,0];
var counter = vec4Format ? 4 : 3;
for(var k=0; k<counter;k++){
var inputComponent = inputData[k];
if( typeof inputComponent != 'undefined' ){
resultData[k] = inputComponent;
}
}
return resultData;
},
_prepareRangeData: function( inputData ){
inputData = typeof inputData == 'undefined' ? {} : inputData;
var resultData = { value: 0, min: 0, max: 1 };
for(var i in resultData){
if(typeof inputData[i] != 'undefined'){
resultData[i] = inputData[i];
}
}
return resultData;
},
_componentToHex: function(c){
var hex = c.toString(16);
return hex.length == 1 ? '0' + hex : hex;
},
_hexFromRGB: function(r, g, b){
return '#' + this._componentToHex(r) + this._componentToHex(g) + this._componentToHex(b);
},
_initColorSelectElementGroup: function( element, useAlpha, initialData, changeHandler ){
var colorElement = element.querySelector('[data-color]');
// Scale by 255 so a component of 1.0 maps to 0xff (0x100 is not a valid byte).
colorElement.value = this._hexFromRGB(
Math.round(initialData[0] * 255),
Math.round(initialData[1] * 255),
Math.round(initialData[2] * 255)
);
ShaderTool.Utils.DOMUtils.addEventListener(colorElement, 'change', function( e ){
var result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(colorElement.value);
if(!result){
return; // ignore malformed color strings
}
initialData[0] = parseInt( result[1], 16 ) / 255;
initialData[1] = parseInt( result[2], 16 ) / 255;
initialData[2] = parseInt( result[3], 16 ) / 255;
changeHandler();
});
if(useAlpha){
var rangeElement = element.querySelector('[data-range]');
rangeElement.setAttribute('min', '0');
rangeElement.setAttribute('max', '1');
rangeElement.setAttribute('step', '0.001');
rangeElement.setAttribute('value', initialData[3] );
ShaderTool.Utils.DOMUtils.addEventListener(rangeElement, 'input', function( e ){
initialData[3] = parseFloat(rangeElement.value);
changeHandler();
});
}
},
_initRangeElementGroup: function( element, attrIndex, initialData, changeHandler, stepValue ){
var minValue = initialData.min;
var maxValue = initialData.max;
var minElement = element.querySelector('[data-range-min-' + attrIndex + ']');// || document.createElement('input');
var maxElement = element.querySelector('[data-range-max-' + attrIndex + ']');// || document.createElement('input');
var rangeElement = element.querySelector('[data-range-' + attrIndex + ']');
var valueElement = element.querySelector('[data-range-value-' + attrIndex + ']') || document.createElement('div');
rangeElement.setAttribute('step', typeof stepValue != 'undefined' ? stepValue : '0.0001');
var prevMinValue;
var prevMaxValue;
minElement.setAttribute('title', 'Minimum value');
maxElement.setAttribute('title', 'Maximum value');
prevMinValue = minElement.value = valueElement.innerHTML = minValue;
prevMaxValue = maxElement.value = maxValue;
rangeElement.value = initialData.value;
ShaderTool.Utils.DOMUtils.addEventListener(rangeElement, 'input', function( e ){
// Restore the last valid bounds if either bound field was left empty.
if(minElement.value == ''){
minElement.value = prevMinValue;
}
if(maxElement.value == ''){
maxElement.value = prevMaxValue;
}
if(minValue > maxValue){
// Keep the variables and the visible inputs consistent when the bounds invert.
var swap = minValue;
minValue = maxValue;
maxValue = swap;
prevMinValue = minElement.value = minValue;
prevMaxValue = maxElement.value = maxValue;
}
valueElement.innerHTML = rangeElement.value;
initialData.min = minValue;
initialData.max = maxValue;
initialData.value = parseFloat(rangeElement.value);
changeHandler( initialData );
});
function updateRangeSettings(){
if(minElement.value == '' || maxElement.value == ''){
return;
}
prevMinValue = minElement.value;
prevMaxValue = maxElement.value;
var min = parseFloat(ShaderTool.Utils.toDecimalString(minElement.value));
var max = parseFloat(ShaderTool.Utils.toDecimalString(maxElement.value));
if(min > max){
var swap = min;
min = max;
max = swap;
}
minValue = min;
maxValue = max;
rangeElement.setAttribute('min', min);
rangeElement.setAttribute('max', max);
initialData.min = min;
initialData.max = max;
}
ShaderTool.Utils.DOMUtils.addEventListener([minElement, maxElement], 'keydown input change', function( e ){
if(!ShaderTool.Utils.isNumberKey( e )){
e.preventDefault();
return false;
}
updateRangeSettings();
});
updateRangeSettings();
}
}
UniformControls.FLOAT = 'float';
UniformControls.VEC2 = 'vec2';
UniformControls.VEC3 = 'vec3';
UniformControls.VEC4 = 'vec4';
UniformControls.COLOR3 = 'color3';
UniformControls.COLOR4 = 'color4';
UniformControls.TYPES = [UniformControls.FLOAT, UniformControls.VEC2, UniformControls.VEC3, UniformControls.VEC4, UniformControls.COLOR3, UniformControls.COLOR4];
return new UniformControls();
})();
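// Usage sketch for the module above (assuming it is exposed as
// ShaderTool.modules.UniformControls, matching the sibling modules below;
// the variable names are illustrative):
//
//   var controls = ShaderTool.modules.UniformControls;
//   controls.setData([
//       { name: 'bgcolor', type: 'color3', data: [1, 0.8, 0.56] }
//   ], true);                          // rebuild controls, suppressing the change event
//   var uniforms = controls.getData(); // -> the same serializable [{name, type, data}] list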
// SaveController
ShaderTool.modules.SaveController = (function(){
var DEFAULT_CODE = '{"uniforms":[{"name":"bgcolor","type":"color3","data":[0.99609375,0.8046875,0.56640625]}],"source":"void main() {\\n gl_FragColor = vec4(bgcolor, 1.);\\n}"}';
function SaveController(){}
SaveController.prototype = {
init: function(){
console.log('ShaderTool.modules.SaveController.init');
var savedData = ShaderTool.helpers.LSHelper.getItem('lastShaderData');
if(savedData){
ShaderTool.modules.Rendering.setData(savedData, true);
} else {
ShaderTool.modules.Rendering.setData(JSON.parse(DEFAULT_CODE), true);
}
this._initSaveDialogs();
ShaderTool.modules.Rendering.onChange.add( this._saveLocalState, this);
this._saveLocalState();
},
_initSaveDialogs: function(){
this.dom = {};
this.dom.setCodeInput = document.getElementById('st-set-code-input');
this.dom.setCodeSubmit = document.getElementById('st-set-code-submit');
this.dom.getCodeInput = document.getElementById('st-get-code-input');
var self = this;
ShaderTool.Utils.DOMUtils.addEventListener(this.dom.setCodeSubmit, 'click', function( e ){
var code = self.dom.setCodeInput.value;
if(code != ''){
try {
ShaderTool.modules.Rendering.setData(JSON.parse(code), true);
} catch( parseError ){
console.warn('Could not parse pasted shader code: ' + parseError.message);
}
}
});
},
_saveLocalState: function(){
var saveData = ShaderTool.modules.Rendering.getData();
ShaderTool.helpers.LSHelper.setItem('lastShaderData', saveData);
this.dom.getCodeInput.value = JSON.stringify(saveData);
}
}
return new SaveController();
})();
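// Shape of the persisted data, as seen in DEFAULT_CODE above:
//
//   {
//       "uniforms": [ { "name": "bgcolor", "type": "color3", "data": [r, g, b] } ],
//       "source":   "void main() { ... }"
//   }
//
// The same object round-trips through Rendering.getData()/setData() and the
// LSHelper store under the 'lastShaderData' key.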
ShaderTool.modules.PopupManager = (function(){
var OPENED_CLASS_NAME = '_opened';
function PopupManager(){}
PopupManager.prototype = {
init: function(){
console.log('ShaderTool.modules.PopupManager.init');
this.dom = {};
this.dom.overlay = document.getElementById('st-popup-overlay');
this._opened = false;
var self = this;
ShaderTool.Utils.DOMUtils.addEventListener(this.dom.overlay, 'mousedown', function( e ){
if( e.target === self.dom.overlay ){
self.close();
}
});
var openers = document.querySelectorAll('[data-popup-opener]');
ShaderTool.Utils.DOMUtils.addEventListener(openers, 'click', function( e ){
self.open( this.getAttribute('data-popup-opener') );
});
},
open: function( popupName ){
this.close();
var popup = this.dom.overlay.querySelector(popupName);
if( popup ){
this._opened = true;
this._currentPopup = popup;
ShaderTool.Utils.DOMUtils.addClass(this._currentPopup, OPENED_CLASS_NAME);
ShaderTool.Utils.DOMUtils.addClass(this.dom.overlay, OPENED_CLASS_NAME);
} else {
// TODO;
}
},
close: function(){
if(!this._opened){
return;
}
this._opened = false;
ShaderTool.Utils.DOMUtils.removeClass(this.dom.overlay, OPENED_CLASS_NAME);
ShaderTool.Utils.DOMUtils.removeClass(this._currentPopup, OPENED_CLASS_NAME);
}
}
return new PopupManager();
})();
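// Markup contract inferred from the queries above: the overlay element has the
// id "st-popup-overlay", each popup lives inside it, and an opener references
// its popup with a CSS selector (illustrative markup):
//
//   <button data-popup-opener=".st-popup-save">Save</button>
//   <div id="st-popup-overlay"><div class="st-popup-save">...</div></div>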
// classes
ShaderTool.classes.Rasterizer = (function(){
var VERTEX_SOURCE = 'attribute vec2 av2_vtx;varying vec2 vv2_v;void main(){vv2_v = av2_vtx;gl_Position = vec4(av2_vtx, 0., 1.);}';
function Rasterizer( context ){
this._context = context;
this._program = null;
this._prevProgram = null;
this._buffer = this._context.createVertexBuffer().upload(new ShaderTool.Utils.Float32Array([1,-1,1,1,-1,-1,-1,1]));
this._source = {
program: this._program,
attributes: {
'av2_vtx': {
buffer: this._buffer,
size: 2,
type: this._context.AttribType.Float,
offset: 0
}
},
uniforms: {},
mode: this._context.Primitive.TriangleStrip,
count: 4
};
}
Rasterizer.prototype = {
updateSource: function (fragmentSource) {
var savePrevProgramFlag = true;
try{
var newProgram = this._context.createProgram({
vertex: VERTEX_SOURCE,
fragment: fragmentSource
});
this._source.program = newProgram;
} catch( e ){
console.warn('Error updating Rasterizer fragmentSource: ' + e.message);
savePrevProgramFlag = false;
if(this._prevProgram){
this._source.program = this._prevProgram;
}
}
if(savePrevProgramFlag){
this._prevProgram = newProgram;
}
},
updateUniforms: function(uniforms){
this._source.uniforms = uniforms;
},
render: function ( elapsedTime, frame, resolution, destination ) {
this._source.uniforms['us2_frame'] = this._context.UniformSampler( frame );
this._source.uniforms['uv2_resolution'] = this._context.UniformVec2( resolution );
this._source.uniforms['uf_time'] = this._context.UniformFloat( elapsedTime);
this._context.rasterize(this._source, null, destination);
}
}
return Rasterizer;
})();<|fim▁end|> |
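// Minimal usage sketch for the class above (`context`, `frameTexture` and the
// timing values are illustrative; the context is whatever WebGL wrapper the
// tool creates elsewhere):
//
//   var rasterizer = new ShaderTool.classes.Rasterizer( context );
//   rasterizer.updateSource('void main(){ gl_FragColor = vec4(1.); }');
//   rasterizer.updateUniforms({});
//   rasterizer.render( elapsedTime, frameTexture, [width, height], null );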
// funnel3d.src.js
/**
* @license Highcharts JS v8.2.2 (2020-10-22)
* @module highcharts/modules/funnel3d
* @requires highcharts
* @requires highcharts/highcharts-3d
* @requires highcharts/modules/cylinder
*
* Highcharts funnel module
*
* (c) 2010-2019 Kacper Madej
 *
 * License: www.highcharts.com/license
*/
'use strict';
import '../../Series/Funnel3DSeries.js'; |