| prompt (large_string, lengths 70 – 991k) | completion (large_string, lengths 0 – 1.02k) |
|---|---|
<|file_name|>shannon.rs<|end_file_name|><|fim▁begin|>use super::Entropy;

/// Implementation of Shannon entropy.
pub struct Shannon {
    frequency: Vec<f64>,
    probabilities: Vec<f64>,
    entropy: Option<f64>,
    data_len: usize,
}

impl Shannon {
    pub fn new() -> Self {
        Self {
            frequency: vec![0.0; 256],
            probabilities: vec![0.0; 256],
            entropy: None,
            data_len: 0,
        }
    }

    /// Convenience method that immediately returns the entropy of a blob of data.
    ///
    /// # Examples
    ///
    /// ```
    /// use entropy_rs::Shannon;
    /// let data = vec![0, 1, 2, 3, 4, 5];
    /// assert_eq!(2.584962500721156, Shannon::quick(&data));
    /// ```
    pub fn quick<T: AsRef<[u8]>>(data: T) -> f64 {
        let mut quick = Self::new();
        quick.input(data);
        quick.calculate()
    }
}
impl Entropy for Shannon {<|fim▁hole|>        });
    }

    fn calculate(&mut self) -> f64 {
        if let Some(entropy) = self.entropy {
            entropy
        } else {
            let mut entropy = 0.0;
            for i in 0..256 {
                if self.frequency[i] != 0.0 {
                    self.probabilities[i] = self.frequency[i] / self.data_len as f64;
                    entropy += self.probabilities[i] * self.probabilities[i].log2();
                }
            }
            entropy *= -1.0;
            self.entropy = Some(entropy);
            entropy
        }
    }

    fn reset(&mut self) {
        *self = Shannon::new();
    }
}

#[cfg(test)]
mod tests {
    #[test]
    fn shannon_test_quick() {
        use crate::Shannon;
        assert_eq!(Shannon::quick(vec![0, 1, 2, 3, 4, 5]) as f32, 2.5849626_f32);
    }
}<|fim▁end|>
|
    fn input<T: AsRef<[u8]>>(&mut self, data: T) {
        // New data invalidates any previously cached entropy value.
        self.entropy = None;
        self.data_len += data.as_ref().len();
        data.as_ref().iter().for_each(|v| {
            self.frequency[*v as usize] += 1.0_f64;
|
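Spliced back together, the prefix, the completion (`fn input`), and the suffix above form one `impl` block. The reading aid below shows the reassembled method; the `Entropy` trait lives in `super` and is not part of this row, so its sketch here is an assumption inferred from the three methods implemented.

```rust
// Assumed shape of `super::Entropy` (not shown in this row).
pub trait Entropy {
    fn input<T: AsRef<[u8]>>(&mut self, data: T);
    fn calculate(&mut self) -> f64;
    fn reset(&mut self);
}

// The completion slots into the FIM hole, yielding the usual Shannon
// entropy pipeline: H = -sum(p_i * log2(p_i)) over 256 byte frequencies,
// so six equally likely bytes give log2(6) ≈ 2.585 bits.
impl Entropy for Shannon {
    fn input<T: AsRef<[u8]>>(&mut self, data: T) {
        self.entropy = None; // new data invalidates the cached result
        self.data_len += data.as_ref().len();
        data.as_ref().iter().for_each(|v| {
            self.frequency[*v as usize] += 1.0_f64;
        });
    }
    // calculate() and reset() follow as in the suffix above.
}
```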
<|file_name|>16.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
export { Error16 as default } from "../../";
|
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>// Just re-export everything from the other files
export * from './base-table-name';
export * from './compound-constraint';
export * from './constraint';
export * from './constraint-type';
export * from './default-value';
export * from './error-response';
export * from './filter-operation';
export * from './filter';
export * from './master-table-name';<|fim▁hole|>export * from './raw-constraint';
export * from './paginated-response';
export * from './session-ping';
export * from './special-default-value';
export * from './sql-row';
export * from './table-data-type';
export * from './table-header';
export * from './table-insert';
export * from './table-meta';
export * from './table-tier';
export * from './transformed-name';<|fim▁end|>
| |
<|file_name|>vision-gen.go<|end_file_name|><|fim▁begin|>// Package vision provides access to the Google Cloud Vision API.
//
// See https://cloud.google.com/vision/
//
// Usage example:
//
// import "google.golang.org/api/vision/v1"
// ...
// visionService, err := vision.New(oauthHttpClient)
package vision // import "google.golang.org/api/vision/v1"
import (
"bytes"
"encoding/json"
"errors"
"fmt"
context "golang.org/x/net/context"
ctxhttp "golang.org/x/net/context/ctxhttp"
gensupport "google.golang.org/api/gensupport"
googleapi "google.golang.org/api/googleapi"
"io"
"net/http"
"net/url"
"strconv"
"strings"
)
// Always reference these packages, just in case the auto-generated code
// below doesn't.
var _ = bytes.NewBuffer
var _ = strconv.Itoa
var _ = fmt.Sprintf
var _ = json.NewDecoder
var _ = io.Copy
var _ = url.Parse
var _ = gensupport.MarshalJSON
var _ = googleapi.Version
var _ = errors.New
var _ = strings.Replace
var _ = context.Canceled
var _ = ctxhttp.Do
const apiId = "vision:v1"
const apiName = "vision"
const apiVersion = "v1"
const basePath = "https://vision.googleapis.com/"
// OAuth2 scopes used by this API.
const (<|fim▁hole|>
func New(client *http.Client) (*Service, error) {
if client == nil {
return nil, errors.New("client is nil")
}
s := &Service{client: client, BasePath: basePath}
s.Images = NewImagesService(s)
return s, nil
}
type Service struct {
client *http.Client
BasePath string // API endpoint base URL
UserAgent string // optional additional User-Agent fragment
Images *ImagesService
}
func (s *Service) userAgent() string {
if s.UserAgent == "" {
return googleapi.UserAgent
}
return googleapi.UserAgent + " " + s.UserAgent
}
func NewImagesService(s *Service) *ImagesService {
rs := &ImagesService{s: s}
return rs
}
type ImagesService struct {
s *Service
}
// AnnotateImageRequest: Request for performing Google Cloud Vision API
// tasks over a user-provided
// image, with user-requested features.
type AnnotateImageRequest struct {
// Features: Requested features.
Features []*Feature `json:"features,omitempty"`
// Image: The image to be processed.
Image *Image `json:"image,omitempty"`
// ImageContext: Additional context that may accompany the image.
ImageContext *ImageContext `json:"imageContext,omitempty"`
// ForceSendFields is a list of field names (e.g. "Features") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Features") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *AnnotateImageRequest) MarshalJSON() ([]byte, error) {
type noMethod AnnotateImageRequest
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// AnnotateImageResponse: Response to an image annotation request.
type AnnotateImageResponse struct {
// CropHintsAnnotation: If present, crop hints have completed
// successfully.
CropHintsAnnotation *CropHintsAnnotation `json:"cropHintsAnnotation,omitempty"`
// Error: If set, represents the error message for the operation.
// Note that filled-in image annotations are guaranteed to be
// correct, even when `error` is set.
Error *Status `json:"error,omitempty"`
// FaceAnnotations: If present, face detection has completed
// successfully.
FaceAnnotations []*FaceAnnotation `json:"faceAnnotations,omitempty"`
// FullTextAnnotation: If present, text (OCR) detection or document
// (OCR) text detection has
// completed successfully.
// This annotation provides the structural hierarchy for the OCR
// detected
// text.
FullTextAnnotation *TextAnnotation `json:"fullTextAnnotation,omitempty"`
// ImagePropertiesAnnotation: If present, image properties were
// extracted successfully.
ImagePropertiesAnnotation *ImageProperties `json:"imagePropertiesAnnotation,omitempty"`
// LabelAnnotations: If present, label detection has completed
// successfully.
LabelAnnotations []*EntityAnnotation `json:"labelAnnotations,omitempty"`
// LandmarkAnnotations: If present, landmark detection has completed
// successfully.
LandmarkAnnotations []*EntityAnnotation `json:"landmarkAnnotations,omitempty"`
// LogoAnnotations: If present, logo detection has completed
// successfully.
LogoAnnotations []*EntityAnnotation `json:"logoAnnotations,omitempty"`
// SafeSearchAnnotation: If present, safe-search annotation has
// completed successfully.
SafeSearchAnnotation *SafeSearchAnnotation `json:"safeSearchAnnotation,omitempty"`
// TextAnnotations: If present, text (OCR) detection has completed
// successfully.
TextAnnotations []*EntityAnnotation `json:"textAnnotations,omitempty"`
// WebDetection: If present, web detection has completed successfully.
WebDetection *WebDetection `json:"webDetection,omitempty"`
// ForceSendFields is a list of field names (e.g. "CropHintsAnnotation")
// to unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "CropHintsAnnotation") to
// include in API requests with the JSON null value. By default, fields
// with empty values are omitted from API requests. However, any field
// with an empty value appearing in NullFields will be sent to the
// server as null. It is an error if a field in this list has a
// non-empty value. This may be used to include null fields in Patch
// requests.
NullFields []string `json:"-"`
}
func (s *AnnotateImageResponse) MarshalJSON() ([]byte, error) {
type noMethod AnnotateImageResponse
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// BatchAnnotateImagesRequest: Multiple image annotation requests are
// batched into a single service call.
type BatchAnnotateImagesRequest struct {
// Requests: Individual image annotation requests for this batch.
Requests []*AnnotateImageRequest `json:"requests,omitempty"`
// ForceSendFields is a list of field names (e.g. "Requests") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Requests") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *BatchAnnotateImagesRequest) MarshalJSON() ([]byte, error) {
type noMethod BatchAnnotateImagesRequest
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// BatchAnnotateImagesResponse: Response to a batch image annotation
// request.
type BatchAnnotateImagesResponse struct {
// Responses: Individual responses to image annotation requests within
// the batch.
Responses []*AnnotateImageResponse `json:"responses,omitempty"`
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
// ForceSendFields is a list of field names (e.g. "Responses") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Responses") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *BatchAnnotateImagesResponse) MarshalJSON() ([]byte, error) {
type noMethod BatchAnnotateImagesResponse
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
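Since this request/response pair is the entry point for the whole API, a minimal usage sketch may help; it is not part of the generated file. It assumes the `svc.Images.Annotate(req).Do()` call that the generator emits further down in vision-gen.go, an already-authorized `oauthHttpClient` (as in the package comment above), and a hypothetical `gs://my-bucket/photo.jpg` object.

```go
package example

import (
	"fmt"
	"net/http"

	vision "google.golang.org/api/vision/v1"
)

// labelImage sketches one round trip through the batch API. The caller
// supplies an OAuth2-authorized client; Annotate(...).Do() is assumed
// from the generated code later in this file.
func labelImage(oauthHttpClient *http.Client) error {
	svc, err := vision.New(oauthHttpClient)
	if err != nil {
		return err
	}
	req := &vision.BatchAnnotateImagesRequest{
		Requests: []*vision.AnnotateImageRequest{{
			Image: &vision.Image{
				// Hypothetical bucket/object, for illustration only.
				Source: &vision.ImageSource{ImageUri: "gs://my-bucket/photo.jpg"},
			},
			Features: []*vision.Feature{{Type: "LABEL_DETECTION", MaxResults: 5}},
		}},
	}
	resp, err := svc.Images.Annotate(req).Do()
	if err != nil {
		return err
	}
	for _, r := range resp.Responses {
		for _, label := range r.LabelAnnotations {
			fmt.Printf("%s (score %.2f)\n", label.Description, label.Score)
		}
	}
	return nil
}
```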
// Block: Logical element on the page.
type Block struct {
// BlockType: Detected block type (text, image etc) for this block.
//
// Possible values:
// "UNKNOWN" - Unknown block type.
// "TEXT" - Regular text block.
// "TABLE" - Table block.
// "PICTURE" - Image block.
// "RULER" - Horizontal/vertical line box.
// "BARCODE" - Barcode block.
BlockType string `json:"blockType,omitempty"`
// BoundingBox: The bounding box for the block.
// The vertices are in the order of top-left, top-right,
// bottom-right,
// bottom-left. When a rotation of the bounding box is detected the
// rotation
// is represented as around the top-left corner as defined when the text
// is
// read in the 'natural' orientation.
// For example:
// * when the text is horizontal it might look like:
//
//        0----1
//        |    |
//        3----2
//
// * when it's rotated 180 degrees around the top-left corner it
// becomes:
//
//        2----3
//        |    |
//        1----0
//
// and the vertex order will still be (0, 1, 2, 3).
BoundingBox *BoundingPoly `json:"boundingBox,omitempty"`
// Paragraphs: List of paragraphs in this block (if this block is of
// type text).
Paragraphs []*Paragraph `json:"paragraphs,omitempty"`
// Property: Additional information detected for the block.
Property *TextProperty `json:"property,omitempty"`
// ForceSendFields is a list of field names (e.g. "BlockType") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "BlockType") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Block) MarshalJSON() ([]byte, error) {
type noMethod Block
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// BoundingPoly: A bounding polygon for the detected image annotation.
type BoundingPoly struct {
// Vertices: The bounding polygon vertices.
Vertices []*Vertex `json:"vertices,omitempty"`
// ForceSendFields is a list of field names (e.g. "Vertices") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Vertices") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *BoundingPoly) MarshalJSON() ([]byte, error) {
type noMethod BoundingPoly
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Color: Represents a color in the RGBA color space. This
// representation is designed
// for simplicity of conversion to/from color representations in
// various
// languages over compactness; for example, the fields of this
// representation
// can be trivially provided to the constructor of "java.awt.Color" in
// Java; it
// can also be trivially provided to UIColor's
// "+colorWithRed:green:blue:alpha"
// method in iOS; and, with just a little work, it can be easily
// formatted into
// a CSS "rgba()" string in JavaScript, as well. Here are some
// examples:
//
// Example (Java):
//
// import com.google.type.Color;
//
// // ...
// public static java.awt.Color fromProto(Color protocolor) {
// float alpha = protocolor.hasAlpha()
// ? protocolor.getAlpha().getValue()
// : 1.0f;
//
// return new java.awt.Color(
// protocolor.getRed(),
// protocolor.getGreen(),
// protocolor.getBlue(),
// alpha);
// }
//
// public static Color toProto(java.awt.Color color) {
// float red = (float) color.getRed();
// float green = (float) color.getGreen();
// float blue = (float) color.getBlue();
// float denominator = 255.0f;
// Color.Builder resultBuilder =
// Color
// .newBuilder()
// .setRed(red / denominator)
// .setGreen(green / denominator)
// .setBlue(blue / denominator);
// int alpha = color.getAlpha();
// if (alpha != 255) {
// resultBuilder.setAlpha(
// FloatValue
// .newBuilder()
// .setValue(((float) alpha) / denominator)
// .build());
// }
// return resultBuilder.build();
// }
// // ...
//
// Example (iOS / Obj-C):
//
// // ...
// static UIColor* fromProto(Color* protocolor) {
// float red = [protocolor red];
// float green = [protocolor green];
// float blue = [protocolor blue];
// FloatValue* alpha_wrapper = [protocolor alpha];
// float alpha = 1.0;
// if (alpha_wrapper != nil) {
// alpha = [alpha_wrapper value];
// }
// return [UIColor colorWithRed:red green:green blue:blue
// alpha:alpha];
// }
//
// static Color* toProto(UIColor* color) {
// CGFloat red, green, blue, alpha;
// if (![color getRed:&red green:&green blue:&blue
// alpha:&alpha]) {
// return nil;
// }
// Color* result = [[Color alloc] init];
// [result setRed:red];
// [result setGreen:green];
// [result setBlue:blue];
// if (alpha <= 0.9999) {
// [result setAlpha:floatWrapperWithValue(alpha)];
// }
// [result autorelease];
// return result;
// }
// // ...
//
// Example (JavaScript):
//
// // ...
//
// var protoToCssColor = function(rgb_color) {
// var redFrac = rgb_color.red || 0.0;
// var greenFrac = rgb_color.green || 0.0;
// var blueFrac = rgb_color.blue || 0.0;
// var red = Math.floor(redFrac * 255);
// var green = Math.floor(greenFrac * 255);
// var blue = Math.floor(blueFrac * 255);
//
// if (!('alpha' in rgb_color)) {
// return rgbToCssColor_(red, green, blue);
// }
//
// var alphaFrac = rgb_color.alpha.value || 0.0;
// var rgbParams = [red, green, blue].join(',');
// return ['rgba(', rgbParams, ',', alphaFrac, ')'].join('');
// };
//
// var rgbToCssColor_ = function(red, green, blue) {
// var rgbNumber = new Number((red << 16) | (green << 8) | blue);
// var hexString = rgbNumber.toString(16);
// var missingZeros = 6 - hexString.length;
// var resultBuilder = ['#'];
// for (var i = 0; i < missingZeros; i++) {
// resultBuilder.push('0');
// }
// resultBuilder.push(hexString);
// return resultBuilder.join('');
// };
//
// // ...
type Color struct {
// Alpha: The fraction of this color that should be applied to the
// pixel. That is,
// the final pixel color is defined by the equation:
//
// pixel color = alpha * (this color) + (1.0 - alpha) * (background
// color)
//
// This means that a value of 1.0 corresponds to a solid color,
// whereas
// a value of 0.0 corresponds to a completely transparent color.
// This
// uses a wrapper message rather than a simple float scalar so that it
// is
// possible to distinguish between a default value and the value being
// unset.
// If omitted, this color object is to be rendered as a solid color
// (as if the alpha value had been explicitly given with a value of
// 1.0).
Alpha float64 `json:"alpha,omitempty"`
// Blue: The amount of blue in the color as a value in the interval [0,
// 1].
Blue float64 `json:"blue,omitempty"`
// Green: The amount of green in the color as a value in the interval
// [0, 1].
Green float64 `json:"green,omitempty"`
// Red: The amount of red in the color as a value in the interval [0,
// 1].
Red float64 `json:"red,omitempty"`
// ForceSendFields is a list of field names (e.g. "Alpha") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Alpha") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Color) MarshalJSON() ([]byte, error) {
type noMethod Color
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
func (s *Color) UnmarshalJSON(data []byte) error {
type noMethod Color
var s1 struct {
Alpha gensupport.JSONFloat64 `json:"alpha"`
Blue gensupport.JSONFloat64 `json:"blue"`
Green gensupport.JSONFloat64 `json:"green"`
Red gensupport.JSONFloat64 `json:"red"`
*noMethod
}
s1.noMethod = (*noMethod)(s)
if err := json.Unmarshal(data, &s1); err != nil {
return err
}
s.Alpha = float64(s1.Alpha)
s.Blue = float64(s1.Blue)
s.Green = float64(s1.Green)
s.Red = float64(s1.Red)
return nil
}
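The custom `UnmarshalJSON` above (and the identical ones on the structs that follow) exists because the API can encode special float values such as `NaN` or `Infinity` as JSON strings; `gensupport.JSONFloat64` accepts either form. A minimal standalone sketch of that idea, not the real gensupport implementation:

```go
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// jsonFloat64 imitates what gensupport.JSONFloat64 is used for here:
// decoding a field that may arrive as a JSON number or a quoted string.
type jsonFloat64 float64

func (f *jsonFloat64) UnmarshalJSON(data []byte) error {
	if len(data) > 0 && data[0] == '"' {
		var s string
		if err := json.Unmarshal(data, &s); err != nil {
			return err
		}
		v, err := strconv.ParseFloat(s, 64) // accepts "NaN", "Infinity", ...
		if err != nil {
			return err
		}
		*f = jsonFloat64(v)
		return nil
	}
	var v float64
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	*f = jsonFloat64(v)
	return nil
}

func main() {
	var a, b jsonFloat64
	_ = json.Unmarshal([]byte(`0.75`), &a)
	_ = json.Unmarshal([]byte(`"NaN"`), &b)
	fmt.Println(a, b) // 0.75 NaN
}
```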
// ColorInfo: Color information consists of RGB channels, score, and the
// fraction of
// the image that the color occupies in the image.
type ColorInfo struct {
// Color: RGB components of the color.
Color *Color `json:"color,omitempty"`
// PixelFraction: The fraction of pixels the color occupies in the
// image.
// Value in range [0, 1].
PixelFraction float64 `json:"pixelFraction,omitempty"`
// Score: Image-specific score for this color. Value in range [0, 1].
Score float64 `json:"score,omitempty"`
// ForceSendFields is a list of field names (e.g. "Color") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Color") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *ColorInfo) MarshalJSON() ([]byte, error) {
type noMethod ColorInfo
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
func (s *ColorInfo) UnmarshalJSON(data []byte) error {
type noMethod ColorInfo
var s1 struct {
PixelFraction gensupport.JSONFloat64 `json:"pixelFraction"`
Score gensupport.JSONFloat64 `json:"score"`
*noMethod
}
s1.noMethod = (*noMethod)(s)
if err := json.Unmarshal(data, &s1); err != nil {
return err
}
s.PixelFraction = float64(s1.PixelFraction)
s.Score = float64(s1.Score)
return nil
}
// CropHint: Single crop hint that is used to generate a new crop when
// serving an image.
type CropHint struct {
// BoundingPoly: The bounding polygon for the crop region. The
// coordinates of the bounding
// box are in the original image's scale, as returned in `ImageParams`.
BoundingPoly *BoundingPoly `json:"boundingPoly,omitempty"`
// Confidence: Confidence of this being a salient region. Range [0, 1].
Confidence float64 `json:"confidence,omitempty"`
// ImportanceFraction: Fraction of importance of this salient region
// with respect to the original
// image.
ImportanceFraction float64 `json:"importanceFraction,omitempty"`
// ForceSendFields is a list of field names (e.g. "BoundingPoly") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "BoundingPoly") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *CropHint) MarshalJSON() ([]byte, error) {
type noMethod CropHint
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
func (s *CropHint) UnmarshalJSON(data []byte) error {
type noMethod CropHint
var s1 struct {
Confidence gensupport.JSONFloat64 `json:"confidence"`
ImportanceFraction gensupport.JSONFloat64 `json:"importanceFraction"`
*noMethod
}
s1.noMethod = (*noMethod)(s)
if err := json.Unmarshal(data, &s1); err != nil {
return err
}
s.Confidence = float64(s1.Confidence)
s.ImportanceFraction = float64(s1.ImportanceFraction)
return nil
}
// CropHintsAnnotation: Set of crop hints that are used to generate new
// crops when serving images.
type CropHintsAnnotation struct {
// CropHints: Crop hint results.
CropHints []*CropHint `json:"cropHints,omitempty"`
// ForceSendFields is a list of field names (e.g. "CropHints") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "CropHints") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *CropHintsAnnotation) MarshalJSON() ([]byte, error) {
type noMethod CropHintsAnnotation
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// CropHintsParams: Parameters for crop hints annotation request.
type CropHintsParams struct {
// AspectRatios: Aspect ratios in floats, representing the ratio of the
// width to the height
// of the image. For example, if the desired aspect ratio is 4/3,
// the
// corresponding float value should be 1.33333. If not specified,
// the
// best possible crop is returned. The number of provided aspect ratios
// is
// limited to a maximum of 16; any aspect ratios provided after the 16th
// are
// ignored.
AspectRatios []float64 `json:"aspectRatios,omitempty"`
// ForceSendFields is a list of field names (e.g. "AspectRatios") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "AspectRatios") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *CropHintsParams) MarshalJSON() ([]byte, error) {
type noMethod CropHintsParams
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
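To make the aspect-ratio convention above concrete, a small sketch (the ratios chosen are illustrative):

```go
package example

import vision "google.golang.org/api/vision/v1"

// cropHintsContext asks for 4:3 and 16:9 crop hints; 4.0/3.0 ≈ 1.33333,
// matching the float convention documented on AspectRatios. Anything
// past the 16th ratio would be ignored.
func cropHintsContext() *vision.ImageContext {
	return &vision.ImageContext{
		CropHintsParams: &vision.CropHintsParams{
			AspectRatios: []float64{4.0 / 3.0, 16.0 / 9.0},
		},
	}
}
```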
// DetectedBreak: Detected start or end of a structural component.
type DetectedBreak struct {
// IsPrefix: True if break prepends the element.
IsPrefix bool `json:"isPrefix,omitempty"`
// Type: Detected break type.
//
// Possible values:
// "UNKNOWN" - Unknown break label type.
// "SPACE" - Regular space.
// "SURE_SPACE" - Sure space (very wide).
// "EOL_SURE_SPACE" - Line-wrapping break.
// "HYPHEN" - End-line hyphen that is not present in text; does not
// co-occur with
// `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`.
// "LINE_BREAK" - Line break that ends a paragraph.
Type string `json:"type,omitempty"`
// ForceSendFields is a list of field names (e.g. "IsPrefix") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "IsPrefix") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *DetectedBreak) MarshalJSON() ([]byte, error) {
type noMethod DetectedBreak
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// DetectedLanguage: Detected language for a structural component.
type DetectedLanguage struct {
// Confidence: Confidence of detected language. Range [0, 1].
Confidence float64 `json:"confidence,omitempty"`
// LanguageCode: The BCP-47 language code, such as "en-US" or "sr-Latn".
// For more
// information,
// see
// http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
LanguageCode string `json:"languageCode,omitempty"`
// ForceSendFields is a list of field names (e.g. "Confidence") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Confidence") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *DetectedLanguage) MarshalJSON() ([]byte, error) {
type noMethod DetectedLanguage
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
func (s *DetectedLanguage) UnmarshalJSON(data []byte) error {
type noMethod DetectedLanguage
var s1 struct {
Confidence gensupport.JSONFloat64 `json:"confidence"`
*noMethod
}
s1.noMethod = (*noMethod)(s)
if err := json.Unmarshal(data, &s1); err != nil {
return err
}
s.Confidence = float64(s1.Confidence)
return nil
}
// DominantColorsAnnotation: Set of dominant colors and their
// corresponding scores.
type DominantColorsAnnotation struct {
// Colors: RGB color values with their score and pixel fraction.
Colors []*ColorInfo `json:"colors,omitempty"`
// ForceSendFields is a list of field names (e.g. "Colors") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Colors") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *DominantColorsAnnotation) MarshalJSON() ([]byte, error) {
type noMethod DominantColorsAnnotation
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// EntityAnnotation: Set of detected entity features.
type EntityAnnotation struct {
// BoundingPoly: Image region to which this entity belongs. Currently
// not produced
// for `LABEL_DETECTION` features. For `TEXT_DETECTION` (OCR),
// `boundingPoly`s
// are produced for the entire text detected in an image region,
// followed by
// `boundingPoly`s for each word within the detected text.
BoundingPoly *BoundingPoly `json:"boundingPoly,omitempty"`
// Confidence: The accuracy of the entity detection in an image.
// For example, for an image in which the "Eiffel Tower" entity is
// detected,
// this field represents the confidence that there is a tower in the
// query
// image. Range [0, 1].
Confidence float64 `json:"confidence,omitempty"`
// Description: Entity textual description, expressed in its `locale`
// language.
Description string `json:"description,omitempty"`
// Locale: The language code for the locale in which the entity
// textual
// `description` is expressed.
Locale string `json:"locale,omitempty"`
// Locations: The location information for the detected entity.
// Multiple
// `LocationInfo` elements can be present because one location
// may
// indicate the location of the scene in the image, and another
// location
// may indicate the location of the place where the image was
// taken.
// Location information is usually present for landmarks.
Locations []*LocationInfo `json:"locations,omitempty"`
// Mid: Opaque entity ID. Some IDs may be available in
// [Google Knowledge Graph Search
// API](https://developers.google.com/knowledge-graph/).
Mid string `json:"mid,omitempty"`
// Properties: Some entities may have optional user-supplied `Property`
// (name/value)
// fields, such as a score or string that qualifies the entity.
Properties []*Property `json:"properties,omitempty"`
// Score: Overall score of the result. Range [0, 1].
Score float64 `json:"score,omitempty"`
// Topicality: The relevancy of the ICA (Image Content Annotation) label
// to the
// image. For example, the relevancy of "tower" is likely higher to an
// image
// containing the detected "Eiffel Tower" than to an image containing
// a
// detected distant towering building, even though the confidence
// that
// there is a tower in each image may be the same. Range [0, 1].
Topicality float64 `json:"topicality,omitempty"`
// ForceSendFields is a list of field names (e.g. "BoundingPoly") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "BoundingPoly") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *EntityAnnotation) MarshalJSON() ([]byte, error) {
type noMethod EntityAnnotation
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
func (s *EntityAnnotation) UnmarshalJSON(data []byte) error {
type noMethod EntityAnnotation
var s1 struct {
Confidence gensupport.JSONFloat64 `json:"confidence"`
Score gensupport.JSONFloat64 `json:"score"`
Topicality gensupport.JSONFloat64 `json:"topicality"`
*noMethod
}
s1.noMethod = (*noMethod)(s)
if err := json.Unmarshal(data, &s1); err != nil {
return err
}
s.Confidence = float64(s1.Confidence)
s.Score = float64(s1.Score)
s.Topicality = float64(s1.Topicality)
return nil
}
// FaceAnnotation: A face annotation object contains the results of face
// detection.
type FaceAnnotation struct {
// AngerLikelihood: Anger likelihood.
//
// Possible values:
// "UNKNOWN" - Unknown likelihood.
// "VERY_UNLIKELY" - It is very unlikely that the image belongs to the
// specified vertical.
// "UNLIKELY" - It is unlikely that the image belongs to the specified
// vertical.
// "POSSIBLE" - It is possible that the image belongs to the specified
// vertical.
// "LIKELY" - It is likely that the image belongs to the specified
// vertical.
// "VERY_LIKELY" - It is very likely that the image belongs to the
// specified vertical.
AngerLikelihood string `json:"angerLikelihood,omitempty"`
// BlurredLikelihood: Blurred likelihood.
//
// Possible values:
// "UNKNOWN" - Unknown likelihood.
// "VERY_UNLIKELY" - It is very unlikely that the image belongs to the
// specified vertical.
// "UNLIKELY" - It is unlikely that the image belongs to the specified
// vertical.
// "POSSIBLE" - It is possible that the image belongs to the specified
// vertical.
// "LIKELY" - It is likely that the image belongs to the specified
// vertical.
// "VERY_LIKELY" - It is very likely that the image belongs to the
// specified vertical.
BlurredLikelihood string `json:"blurredLikelihood,omitempty"`
// BoundingPoly: The bounding polygon around the face. The coordinates
// of the bounding box
// are in the original image's scale, as returned in `ImageParams`.
// The bounding box is computed to "frame" the face in accordance with
// human
// expectations. It is based on the landmarker results.
// Note that one or more x and/or y coordinates may not be generated in
// the
// `BoundingPoly` (the polygon will be unbounded) if only a partial
// face
// appears in the image to be annotated.
BoundingPoly *BoundingPoly `json:"boundingPoly,omitempty"`
// DetectionConfidence: Detection confidence. Range [0, 1].
DetectionConfidence float64 `json:"detectionConfidence,omitempty"`
// FdBoundingPoly: The `fd_bounding_poly` bounding polygon is tighter
// than the
// `boundingPoly`, and encloses only the skin part of the face.
// Typically, it
// is used to eliminate the face from any image analysis that detects
// the
// "amount of skin" visible in an image. It is not based on
// the
// landmarker results, only on the initial face detection, hence
// the <code>fd</code> (face detection) prefix.
FdBoundingPoly *BoundingPoly `json:"fdBoundingPoly,omitempty"`
// HeadwearLikelihood: Headwear likelihood.
//
// Possible values:
// "UNKNOWN" - Unknown likelihood.
// "VERY_UNLIKELY" - It is very unlikely that the image belongs to the
// specified vertical.
// "UNLIKELY" - It is unlikely that the image belongs to the specified
// vertical.
// "POSSIBLE" - It is possible that the image belongs to the specified
// vertical.
// "LIKELY" - It is likely that the image belongs to the specified
// vertical.
// "VERY_LIKELY" - It is very likely that the image belongs to the
// specified vertical.
HeadwearLikelihood string `json:"headwearLikelihood,omitempty"`
// JoyLikelihood: Joy likelihood.
//
// Possible values:
// "UNKNOWN" - Unknown likelihood.
// "VERY_UNLIKELY" - It is very unlikely that the image belongs to the
// specified vertical.
// "UNLIKELY" - It is unlikely that the image belongs to the specified
// vertical.
// "POSSIBLE" - It is possible that the image belongs to the specified
// vertical.
// "LIKELY" - It is likely that the image belongs to the specified
// vertical.
// "VERY_LIKELY" - It is very likely that the image belongs to the
// specified vertical.
JoyLikelihood string `json:"joyLikelihood,omitempty"`
// LandmarkingConfidence: Face landmarking confidence. Range [0, 1].
LandmarkingConfidence float64 `json:"landmarkingConfidence,omitempty"`
// Landmarks: Detected face landmarks.
Landmarks []*Landmark `json:"landmarks,omitempty"`
// PanAngle: Yaw angle, which indicates the leftward/rightward angle
// that the face is
// pointing relative to the vertical plane perpendicular to the image.
// Range
// [-180,180].
PanAngle float64 `json:"panAngle,omitempty"`
// RollAngle: Roll angle, which indicates the amount of
// clockwise/anti-clockwise rotation
// of the face relative to the image vertical about the axis
// perpendicular to
// the face. Range [-180,180].
RollAngle float64 `json:"rollAngle,omitempty"`
// SorrowLikelihood: Sorrow likelihood.
//
// Possible values:
// "UNKNOWN" - Unknown likelihood.
// "VERY_UNLIKELY" - It is very unlikely that the image belongs to the
// specified vertical.
// "UNLIKELY" - It is unlikely that the image belongs to the specified
// vertical.
// "POSSIBLE" - It is possible that the image belongs to the specified
// vertical.
// "LIKELY" - It is likely that the image belongs to the specified
// vertical.
// "VERY_LIKELY" - It is very likely that the image belongs to the
// specified vertical.
SorrowLikelihood string `json:"sorrowLikelihood,omitempty"`
// SurpriseLikelihood: Surprise likelihood.
//
// Possible values:
// "UNKNOWN" - Unknown likelihood.
// "VERY_UNLIKELY" - It is very unlikely that the image belongs to the
// specified vertical.
// "UNLIKELY" - It is unlikely that the image belongs to the specified
// vertical.
// "POSSIBLE" - It is possible that the image belongs to the specified
// vertical.
// "LIKELY" - It is likely that the image belongs to the specified
// vertical.
// "VERY_LIKELY" - It is very likely that the image belongs to the
// specified vertical.
SurpriseLikelihood string `json:"surpriseLikelihood,omitempty"`
// TiltAngle: Pitch angle, which indicates the upwards/downwards angle
// that the face is
// pointing relative to the image's horizontal plane. Range [-180,180].
TiltAngle float64 `json:"tiltAngle,omitempty"`
// UnderExposedLikelihood: Under-exposed likelihood.
//
// Possible values:
// "UNKNOWN" - Unknown likelihood.
// "VERY_UNLIKELY" - It is very unlikely that the image belongs to the
// specified vertical.
// "UNLIKELY" - It is unlikely that the image belongs to the specified
// vertical.
// "POSSIBLE" - It is possible that the image belongs to the specified
// vertical.
// "LIKELY" - It is likely that the image belongs to the specified
// vertical.
// "VERY_LIKELY" - It is very likely that the image belongs to the
// specified vertical.
UnderExposedLikelihood string `json:"underExposedLikelihood,omitempty"`
// ForceSendFields is a list of field names (e.g. "AngerLikelihood") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "AngerLikelihood") to
// include in API requests with the JSON null value. By default, fields
// with empty values are omitted from API requests. However, any field
// with an empty value appearing in NullFields will be sent to the
// server as null. It is an error if a field in this list has a
// non-empty value. This may be used to include null fields in Patch
// requests.
NullFields []string `json:"-"`
}
func (s *FaceAnnotation) MarshalJSON() ([]byte, error) {
type noMethod FaceAnnotation
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
func (s *FaceAnnotation) UnmarshalJSON(data []byte) error {
type noMethod FaceAnnotation
var s1 struct {
DetectionConfidence gensupport.JSONFloat64 `json:"detectionConfidence"`
LandmarkingConfidence gensupport.JSONFloat64 `json:"landmarkingConfidence"`
PanAngle gensupport.JSONFloat64 `json:"panAngle"`
RollAngle gensupport.JSONFloat64 `json:"rollAngle"`
TiltAngle gensupport.JSONFloat64 `json:"tiltAngle"`
*noMethod
}
s1.noMethod = (*noMethod)(s)
if err := json.Unmarshal(data, &s1); err != nil {
return err
}
s.DetectionConfidence = float64(s1.DetectionConfidence)
s.LandmarkingConfidence = float64(s1.LandmarkingConfidence)
s.PanAngle = float64(s1.PanAngle)
s.RollAngle = float64(s1.RollAngle)
s.TiltAngle = float64(s1.TiltAngle)
return nil
}
// Feature: Users describe the type of Google Cloud Vision API tasks to
// perform over
// images by using *Feature*s. Each Feature indicates a type of
// image
// detection task to perform. Features encode the Cloud Vision
// API
// vertical to operate on and the number of top-scoring results to
// return.
type Feature struct {
// MaxResults: Maximum number of results of this type.
MaxResults int64 `json:"maxResults,omitempty"`
// Type: The feature type.
//
// Possible values:
// "TYPE_UNSPECIFIED" - Unspecified feature type.
// "FACE_DETECTION" - Run face detection.
// "LANDMARK_DETECTION" - Run landmark detection.
// "LOGO_DETECTION" - Run logo detection.
// "LABEL_DETECTION" - Run label detection.
// "TEXT_DETECTION" - Run OCR.
// "DOCUMENT_TEXT_DETECTION" - Run dense text document OCR. Takes
// precedence when both
// DOCUMENT_TEXT_DETECTION and TEXT_DETECTION are present.
// "SAFE_SEARCH_DETECTION" - Run computer vision models to compute
// image safe-search properties.
// "IMAGE_PROPERTIES" - Compute a set of image properties, such as the
// image's dominant colors.
// "CROP_HINTS" - Run crop hints.
// "WEB_DETECTION" - Run web detection.
Type string `json:"type,omitempty"`
// ForceSendFields is a list of field names (e.g. "MaxResults") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "MaxResults") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Feature) MarshalJSON() ([]byte, error) {
type noMethod Feature
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
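The `ForceSendFields`/`NullFields` boilerplate repeated on every struct in this file is easiest to see on a scalar field such as `MaxResults`. A hedged sketch follows; the JSON key order is assumed from struct declaration order, and the omit/force behavior from gensupport's documented contract:

```go
package example

import (
	"fmt"

	vision "google.golang.org/api/vision/v1"
)

// forceZeroMaxResults shows how a zero-valued int64, normally omitted
// from the wire, is kept when listed in ForceSendFields by Go field name.
func forceZeroMaxResults() {
	feat := &vision.Feature{
		Type:            "LABEL_DETECTION",
		MaxResults:      0,
		ForceSendFields: []string{"MaxResults"},
	}
	b, _ := feat.MarshalJSON()
	fmt.Println(string(b)) // expected: {"maxResults":0,"type":"LABEL_DETECTION"}
}
```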
// Image: Client image to perform Google Cloud Vision API tasks over.
type Image struct {
// Content: Image content, represented as a stream of bytes.
// Note: as with all `bytes` fields, protobuffers use a pure
// binary
// representation, whereas JSON representations use base64.
Content string `json:"content,omitempty"`
// Source: Google Cloud Storage image location. If both `content` and
// `source`
// are provided for an image, `content` takes precedence and is
// used to perform the image annotation request.
Source *ImageSource `json:"source,omitempty"`
// ForceSendFields is a list of field names (e.g. "Content") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Content") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Image) MarshalJSON() ([]byte, error) {
type noMethod Image
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// ImageContext: Image context and/or feature-specific parameters.
type ImageContext struct {
// CropHintsParams: Parameters for crop hints annotation request.
CropHintsParams *CropHintsParams `json:"cropHintsParams,omitempty"`
// LanguageHints: List of languages to use for TEXT_DETECTION. In most
// cases, an empty value
// yields the best results since it enables automatic language
// detection. For
// languages based on the Latin alphabet, setting `language_hints` is
// not
// needed. In rare cases, when the language of the text in the image is
// known,
// setting a hint will help get better results (although it will be
// a
// significant hindrance if the hint is wrong). Text detection returns
// an
// error if one or more of the specified languages is not one of
// the
// [supported languages](/vision/docs/languages).
LanguageHints []string `json:"languageHints,omitempty"`
// LatLongRect: lat/long rectangle that specifies the location of the
// image.
LatLongRect *LatLongRect `json:"latLongRect,omitempty"`
// ForceSendFields is a list of field names (e.g. "CropHintsParams") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "CropHintsParams") to
// include in API requests with the JSON null value. By default, fields
// with empty values are omitted from API requests. However, any field
// with an empty value appearing in NullFields will be sent to the
// server as null. It is an error if a field in this list has a
// non-empty value. This may be used to include null fields in Patch
// requests.
NullFields []string `json:"-"`
}
func (s *ImageContext) MarshalJSON() ([]byte, error) {
type noMethod ImageContext
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// ImageProperties: Stores image properties, such as dominant colors.
type ImageProperties struct {
// DominantColors: If present, dominant colors completed successfully.
DominantColors *DominantColorsAnnotation `json:"dominantColors,omitempty"`
// ForceSendFields is a list of field names (e.g. "DominantColors") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "DominantColors") to
// include in API requests with the JSON null value. By default, fields
// with empty values are omitted from API requests. However, any field
// with an empty value appearing in NullFields will be sent to the
// server as null. It is an error if a field in this list has a
// non-empty value. This may be used to include null fields in Patch
// requests.
NullFields []string `json:"-"`
}
func (s *ImageProperties) MarshalJSON() ([]byte, error) {
type noMethod ImageProperties
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// ImageSource: External image source (Google Cloud Storage image
// location).
type ImageSource struct {
// GcsImageUri: NOTE: For new code `image_uri` below is
// preferred.
// Google Cloud Storage image URI, which must be in the following
// form:
// `gs://bucket_name/object_name` (for details, see
// [Google Cloud Storage
// Request
// URIs](https://cloud.google.com/storage/docs/reference-uris)).
//
// NOTE: Cloud Storage object versioning is not supported.
GcsImageUri string `json:"gcsImageUri,omitempty"`
// ImageUri: Image URI which supports:
// 1) Google Cloud Storage image URI, which must be in the following
// form:
// `gs://bucket_name/object_name` (for details, see
// [Google Cloud Storage
// Request
// URIs](https://cloud.google.com/storage/docs/reference-uris)).
//
// NOTE: Cloud Storage object versioning is not supported.
// 2) Publicly accessible image HTTP/HTTPS URL.
// This is preferred over the legacy `gcs_image_uri` above. When
// both
// `gcs_image_uri` and `image_uri` are specified, `image_uri`
// takes
// precedence.
ImageUri string `json:"imageUri,omitempty"`
// ForceSendFields is a list of field names (e.g. "GcsImageUri") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "GcsImageUri") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *ImageSource) MarshalJSON() ([]byte, error) {
type noMethod ImageSource
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Landmark: A face-specific landmark (for example, a face
// feature).
// Landmark positions may fall outside the bounds of the image
// if the face is near one or more edges of the image.
// Therefore it is NOT guaranteed that `0 <= x < width` or
// `0 <= y < height`.
type Landmark struct {
// Position: Face landmark position.
Position *Position `json:"position,omitempty"`
// Type: Face landmark type.
//
// Possible values:
// "UNKNOWN_LANDMARK" - Unknown face landmark detected. Should not be
// filled.
// "LEFT_EYE" - Left eye.
// "RIGHT_EYE" - Right eye.
// "LEFT_OF_LEFT_EYEBROW" - Left of left eyebrow.
// "RIGHT_OF_LEFT_EYEBROW" - Right of left eyebrow.
// "LEFT_OF_RIGHT_EYEBROW" - Left of right eyebrow.
// "RIGHT_OF_RIGHT_EYEBROW" - Right of right eyebrow.
// "MIDPOINT_BETWEEN_EYES" - Midpoint between eyes.
// "NOSE_TIP" - Nose tip.
// "UPPER_LIP" - Upper lip.
// "LOWER_LIP" - Lower lip.
// "MOUTH_LEFT" - Mouth left.
// "MOUTH_RIGHT" - Mouth right.
// "MOUTH_CENTER" - Mouth center.
// "NOSE_BOTTOM_RIGHT" - Nose, bottom right.
// "NOSE_BOTTOM_LEFT" - Nose, bottom left.
// "NOSE_BOTTOM_CENTER" - Nose, bottom center.
// "LEFT_EYE_TOP_BOUNDARY" - Left eye, top boundary.
// "LEFT_EYE_RIGHT_CORNER" - Left eye, right corner.
// "LEFT_EYE_BOTTOM_BOUNDARY" - Left eye, bottom boundary.
// "LEFT_EYE_LEFT_CORNER" - Left eye, left corner.
// "RIGHT_EYE_TOP_BOUNDARY" - Right eye, top boundary.
// "RIGHT_EYE_RIGHT_CORNER" - Right eye, right corner.
// "RIGHT_EYE_BOTTOM_BOUNDARY" - Right eye, bottom boundary.
// "RIGHT_EYE_LEFT_CORNER" - Right eye, left corner.
// "LEFT_EYEBROW_UPPER_MIDPOINT" - Left eyebrow, upper midpoint.
// "RIGHT_EYEBROW_UPPER_MIDPOINT" - Right eyebrow, upper midpoint.
// "LEFT_EAR_TRAGION" - Left ear tragion.
// "RIGHT_EAR_TRAGION" - Right ear tragion.
// "LEFT_EYE_PUPIL" - Left eye pupil.
// "RIGHT_EYE_PUPIL" - Right eye pupil.
// "FOREHEAD_GLABELLA" - Forehead glabella.
// "CHIN_GNATHION" - Chin gnathion.
// "CHIN_LEFT_GONION" - Chin left gonion.
// "CHIN_RIGHT_GONION" - Chin right gonion.
Type string `json:"type,omitempty"`
// ForceSendFields is a list of field names (e.g. "Position") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Position") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Landmark) MarshalJSON() ([]byte, error) {
type noMethod Landmark
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// LatLng: An object representing a latitude/longitude pair. This is
// expressed as a pair
// of doubles representing degrees latitude and degrees longitude.
// Unless
// specified otherwise, this must conform to the
// <a
// href="http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf">WGS84
// standard</a>. Values must be within normalized ranges.
//
// Example of normalization code in Python:
//
// def NormalizeLongitude(longitude):
// """Wraps decimal degrees longitude to [-180.0, 180.0]."""
// q, r = divmod(longitude, 360.0)
// if r > 180.0 or (r == 180.0 and q <= -1.0):
// return r - 360.0
// return r
//
// def NormalizeLatLng(latitude, longitude):
// """Wraps decimal degrees latitude and longitude to
// [-90.0, 90.0] and [-180.0, 180.0], respectively."""
// r = latitude % 360.0
// if r <= 90.0:
// return r, NormalizeLongitude(longitude)
// elif r >= 270.0:
// return r - 360, NormalizeLongitude(longitude)
// else:
// return 180 - r, NormalizeLongitude(longitude + 180.0)
//
// assert 180.0 == NormalizeLongitude(180.0)
// assert -180.0 == NormalizeLongitude(-180.0)
// assert -179.0 == NormalizeLongitude(181.0)
// assert (0.0, 0.0) == NormalizeLatLng(360.0, 0.0)
// assert (0.0, 0.0) == NormalizeLatLng(-360.0, 0.0)
// assert (85.0, 180.0) == NormalizeLatLng(95.0, 0.0)
// assert (-85.0, -170.0) == NormalizeLatLng(-95.0, 10.0)
// assert (90.0, 10.0) == NormalizeLatLng(90.0, 10.0)
// assert (-90.0, -10.0) == NormalizeLatLng(-90.0, -10.0)
// assert (0.0, -170.0) == NormalizeLatLng(-180.0, 10.0)
// assert (0.0, -170.0) == NormalizeLatLng(180.0, 10.0)
// assert (-90.0, 10.0) == NormalizeLatLng(270.0, 10.0)
// assert (90.0, 10.0) == NormalizeLatLng(-270.0, 10.0)
//
// The code in logs/storage/validator/logs_validator_traits.cc treats
// this type
// as if it were annotated as ST_LOCATION.
type LatLng struct {
// Latitude: The latitude in degrees. It must be in the range [-90.0,
// +90.0].
Latitude float64 `json:"latitude,omitempty"`
// Longitude: The longitude in degrees. It must be in the range [-180.0,
// +180.0].
Longitude float64 `json:"longitude,omitempty"`
// ForceSendFields is a list of field names (e.g. "Latitude") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Latitude") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *LatLng) MarshalJSON() ([]byte, error) {
type noMethod LatLng
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
func (s *LatLng) UnmarshalJSON(data []byte) error {
type noMethod LatLng
var s1 struct {
Latitude gensupport.JSONFloat64 `json:"latitude"`
Longitude gensupport.JSONFloat64 `json:"longitude"`
*noMethod
}
s1.noMethod = (*noMethod)(s)
if err := json.Unmarshal(data, &s1); err != nil {
return err
}
s.Latitude = float64(s1.Latitude)
s.Longitude = float64(s1.Longitude)
return nil
}
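// Illustrative sketch (an assumption, not part of the generated surface):
// because Latitude and Longitude decode through gensupport.JSONFloat64, a
// LatLng accepts both JSON numbers and string-encoded numbers:
//
//	var ll LatLng
//	_ = json.Unmarshal([]byte(`{"latitude": "37.42", "longitude": -122.08}`), &ll)
//	// ll.Latitude == 37.42, ll.Longitude == -122.08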
// LatLongRect: Rectangle determined by min and max `LatLng` pairs.
type LatLongRect struct {
// MaxLatLng: Max lat/long pair.
MaxLatLng *LatLng `json:"maxLatLng,omitempty"`
// MinLatLng: Min lat/long pair.
MinLatLng *LatLng `json:"minLatLng,omitempty"`
// ForceSendFields is a list of field names (e.g. "MaxLatLng") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "MaxLatLng") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *LatLongRect) MarshalJSON() ([]byte, error) {
type noMethod LatLongRect
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// LocationInfo: Detected entity location information.
type LocationInfo struct {
// LatLng: lat/long location coordinates.
LatLng *LatLng `json:"latLng,omitempty"`
// ForceSendFields is a list of field names (e.g. "LatLng") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "LatLng") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *LocationInfo) MarshalJSON() ([]byte, error) {
type noMethod LocationInfo
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Page: Detected page from OCR.
type Page struct {
// Blocks: List of blocks of text, images etc on this page.
Blocks []*Block `json:"blocks,omitempty"`
// Height: Page height in pixels.
Height int64 `json:"height,omitempty"`
// Property: Additional information detected on the page.
Property *TextProperty `json:"property,omitempty"`
// Width: Page width in pixels.
Width int64 `json:"width,omitempty"`
// ForceSendFields is a list of field names (e.g. "Blocks") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Blocks") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Page) MarshalJSON() ([]byte, error) {
type noMethod Page
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Paragraph: Structural unit of text representing a number of words in
// certain order.
type Paragraph struct {
// BoundingBox: The bounding box for the paragraph.
// The vertices are in the order of top-left, top-right,
// bottom-right,
// bottom-left. When a rotation of the bounding box is detected the
// rotation
// is represented as around the top-left corner as defined when the text
// is
// read in the 'natural' orientation.
// For example:
// * when the text is horizontal it might look like:
// 0----1
// | |
// 3----2
// * when it's rotated 180 degrees around the top-left corner it
// becomes:
// 2----3
// | |
// 1----0
	// and the vertex order will still be (0, 1, 2, 3).
BoundingBox *BoundingPoly `json:"boundingBox,omitempty"`
// Property: Additional information detected for the paragraph.
Property *TextProperty `json:"property,omitempty"`
// Words: List of words in this paragraph.
Words []*Word `json:"words,omitempty"`
// ForceSendFields is a list of field names (e.g. "BoundingBox") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "BoundingBox") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Paragraph) MarshalJSON() ([]byte, error) {
type noMethod Paragraph
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Position: A 3D position in the image, used primarily for Face
// detection landmarks.
// A valid Position must have both x and y coordinates.
// The position coordinates are in the same scale as the original image.
type Position struct {
// X: X coordinate.
X float64 `json:"x,omitempty"`
// Y: Y coordinate.
Y float64 `json:"y,omitempty"`
// Z: Z coordinate (or depth).
Z float64 `json:"z,omitempty"`
// ForceSendFields is a list of field names (e.g. "X") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "X") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Position) MarshalJSON() ([]byte, error) {
type noMethod Position
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
func (s *Position) UnmarshalJSON(data []byte) error {
type noMethod Position
var s1 struct {
X gensupport.JSONFloat64 `json:"x"`
Y gensupport.JSONFloat64 `json:"y"`
Z gensupport.JSONFloat64 `json:"z"`
*noMethod
}
s1.noMethod = (*noMethod)(s)
if err := json.Unmarshal(data, &s1); err != nil {
return err
}
s.X = float64(s1.X)
s.Y = float64(s1.Y)
s.Z = float64(s1.Z)
return nil
}
// Property: A `Property` consists of a user-supplied name/value pair.
type Property struct {
// Name: Name of the property.
Name string `json:"name,omitempty"`
// Uint64Value: Value of numeric properties.
Uint64Value uint64 `json:"uint64Value,omitempty,string"`
// Value: Value of the property.
Value string `json:"value,omitempty"`
// ForceSendFields is a list of field names (e.g. "Name") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Name") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Property) MarshalJSON() ([]byte, error) {
type noMethod Property
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// SafeSearchAnnotation: Set of features pertaining to the image,
// computed by computer vision
// methods over safe-search verticals (for example, adult, spoof,
// medical,
// violence).
type SafeSearchAnnotation struct {
// Adult: Represents the adult content likelihood for the image.
//
// Possible values:
// "UNKNOWN" - Unknown likelihood.
// "VERY_UNLIKELY" - It is very unlikely that the image belongs to the
// specified vertical.
// "UNLIKELY" - It is unlikely that the image belongs to the specified
// vertical.
// "POSSIBLE" - It is possible that the image belongs to the specified
// vertical.
// "LIKELY" - It is likely that the image belongs to the specified
// vertical.
// "VERY_LIKELY" - It is very likely that the image belongs to the
// specified vertical.
Adult string `json:"adult,omitempty"`
// Medical: Likelihood that this is a medical image.
//
// Possible values:
// "UNKNOWN" - Unknown likelihood.
// "VERY_UNLIKELY" - It is very unlikely that the image belongs to the
// specified vertical.
// "UNLIKELY" - It is unlikely that the image belongs to the specified
// vertical.
// "POSSIBLE" - It is possible that the image belongs to the specified
// vertical.
// "LIKELY" - It is likely that the image belongs to the specified
// vertical.
// "VERY_LIKELY" - It is very likely that the image belongs to the
// specified vertical.
Medical string `json:"medical,omitempty"`
	// Spoof: Spoof likelihood. The likelihood that a modification
// was made to the image's canonical version to make it appear
// funny or offensive.
//
// Possible values:
// "UNKNOWN" - Unknown likelihood.
// "VERY_UNLIKELY" - It is very unlikely that the image belongs to the
// specified vertical.
// "UNLIKELY" - It is unlikely that the image belongs to the specified
// vertical.
// "POSSIBLE" - It is possible that the image belongs to the specified
// vertical.
// "LIKELY" - It is likely that the image belongs to the specified
// vertical.
// "VERY_LIKELY" - It is very likely that the image belongs to the
// specified vertical.
Spoof string `json:"spoof,omitempty"`
// Violence: Violence likelihood.
//
// Possible values:
// "UNKNOWN" - Unknown likelihood.
// "VERY_UNLIKELY" - It is very unlikely that the image belongs to the
// specified vertical.
// "UNLIKELY" - It is unlikely that the image belongs to the specified
// vertical.
// "POSSIBLE" - It is possible that the image belongs to the specified
// vertical.
// "LIKELY" - It is likely that the image belongs to the specified
// vertical.
// "VERY_LIKELY" - It is very likely that the image belongs to the
// specified vertical.
Violence string `json:"violence,omitempty"`
// ForceSendFields is a list of field names (e.g. "Adult") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Adult") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *SafeSearchAnnotation) MarshalJSON() ([]byte, error) {
type noMethod SafeSearchAnnotation
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
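// isLikelyUnsafe is a hypothetical helper (its name and threshold are
// assumptions, not generated code) showing how the string likelihood buckets
// above are typically consumed: callers compare against the enum values
// rather than numeric scores.
func isLikelyUnsafe(s *SafeSearchAnnotation) bool {
	likely := func(v string) bool { return v == "LIKELY" || v == "VERY_LIKELY" }
	// Treat an image as unsafe when any vertical of interest is at least LIKELY.
	return likely(s.Adult) || likely(s.Violence)
}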
// Status: The `Status` type defines a logical error model that is
// suitable for different
// programming environments, including REST APIs and RPC APIs. It is
// used by
// [gRPC](https://github.com/grpc). The error model is designed to
// be:
//
// - Simple to use and understand for most users
// - Flexible enough to meet unexpected needs
//
// # Overview
//
// The `Status` message contains three pieces of data: error code, error
// message,
// and error details. The error code should be an enum value
// of
// google.rpc.Code, but it may accept additional error codes if needed.
// The
// error message should be a developer-facing English message that
// helps
// developers *understand* and *resolve* the error. If a localized
// user-facing
// error message is needed, put the localized message in the error
// details or
// localize it in the client. The optional error details may contain
// arbitrary
// information about the error. There is a predefined set of error
// detail types
// in the package `google.rpc` which can be used for common error
// conditions.
//
// # Language mapping
//
// The `Status` message is the logical representation of the error
// model, but it
// is not necessarily the actual wire format. When the `Status` message
// is
// exposed in different client libraries and different wire protocols,
// it can be
// mapped differently. For example, it will likely be mapped to some
// exceptions
// in Java, but more likely mapped to some error codes in C.
//
// # Other uses
//
// The error model and the `Status` message can be used in a variety
// of
// environments, either with or without APIs, to provide a
// consistent developer experience across different
// environments.
//
// Example uses of this error model include:
//
// - Partial errors. If a service needs to return partial errors to the
// client,
// it may embed the `Status` in the normal response to indicate the
// partial
// errors.
//
// - Workflow errors. A typical workflow has multiple steps. Each step
// may
// have a `Status` message for error reporting purpose.
//
// - Batch operations. If a client uses batch request and batch
// response, the
// `Status` message should be used directly inside batch response,
// one for
// each error sub-response.
//
// - Asynchronous operations. If an API call embeds asynchronous
// operation
// results in its response, the status of those operations should
// be
// represented directly using the `Status` message.
//
// - Logging. If some API errors are stored in logs, the message
// `Status` could
// be used directly after any stripping needed for security/privacy
// reasons.
type Status struct {
// Code: The status code, which should be an enum value of
// google.rpc.Code.
Code int64 `json:"code,omitempty"`
// Details: A list of messages that carry the error details. There will
// be a
// common set of message types for APIs to use.
Details []googleapi.RawMessage `json:"details,omitempty"`
// Message: A developer-facing error message, which should be in
// English. Any
// user-facing error message should be localized and sent in
// the
// google.rpc.Status.details field, or localized by the client.
Message string `json:"message,omitempty"`
// ForceSendFields is a list of field names (e.g. "Code") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Code") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Status) MarshalJSON() ([]byte, error) {
type noMethod Status
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
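// Sketch (assumption): in this API a Status usually surfaces as the Error
// field of a per-image AnnotateImageResponse, so one batch can mix successes
// and failures. A caller might inspect it like this:
//
//	for _, r := range resp.Responses { // resp is a *BatchAnnotateImagesResponse
//		if r.Error != nil {
//			// r.Error.Code is a google.rpc.Code value; r.Error.Message is
//			// a developer-facing English message.
//		}
//	}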
// Symbol: A single symbol representation.
type Symbol struct {
// BoundingBox: The bounding box for the symbol.
// The vertices are in the order of top-left, top-right,
// bottom-right,
// bottom-left. When a rotation of the bounding box is detected the
// rotation
// is represented as around the top-left corner as defined when the text
// is
// read in the 'natural' orientation.
// For example:
// * when the text is horizontal it might look like:
// 0----1
// | |
// 3----2
// * when it's rotated 180 degrees around the top-left corner it
// becomes:
// 2----3
// | |
// 1----0
	// and the vertex order will still be (0, 1, 2, 3).
BoundingBox *BoundingPoly `json:"boundingBox,omitempty"`
// Property: Additional information detected for the symbol.
Property *TextProperty `json:"property,omitempty"`
// Text: The actual UTF-8 representation of the symbol.
Text string `json:"text,omitempty"`
// ForceSendFields is a list of field names (e.g. "BoundingBox") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "BoundingBox") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Symbol) MarshalJSON() ([]byte, error) {
type noMethod Symbol
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// TextAnnotation: TextAnnotation contains a structured representation
// of OCR extracted text.
// The hierarchy of an OCR extracted text structure is like this:
// TextAnnotation -> Page -> Block -> Paragraph -> Word ->
// Symbol
// Each structural component, starting from Page, may further have their
// own
// properties. Properties describe detected languages, breaks, etc.
// Please
// refer to the google.cloud.vision.v1.TextAnnotation.TextProperty
// message
// definition below for more detail.
type TextAnnotation struct {
// Pages: List of pages detected by OCR.
Pages []*Page `json:"pages,omitempty"`
// Text: UTF-8 text detected on the pages.
Text string `json:"text,omitempty"`
// ForceSendFields is a list of field names (e.g. "Pages") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Pages") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *TextAnnotation) MarshalJSON() ([]byte, error) {
type noMethod TextAnnotation
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
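// collectWords is a hypothetical traversal (not part of the generated
// surface) that walks the OCR hierarchy documented above, TextAnnotation ->
// Page -> Block -> Paragraph -> Word -> Symbol, reassembling each word from
// its UTF-8 symbols.
func collectWords(ta *TextAnnotation) []string {
	var words []string
	for _, page := range ta.Pages {
		for _, block := range page.Blocks {
			for _, para := range block.Paragraphs {
				for _, word := range para.Words {
					w := ""
					for _, sym := range word.Symbols {
						w += sym.Text // concatenate symbols in natural reading order
					}
					words = append(words, w)
				}
			}
		}
	}
	return words
}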
// TextProperty: Additional information detected on the structural
// component.
type TextProperty struct {
// DetectedBreak: Detected start or end of a text segment.
DetectedBreak *DetectedBreak `json:"detectedBreak,omitempty"`
// DetectedLanguages: A list of detected languages together with
// confidence.
DetectedLanguages []*DetectedLanguage `json:"detectedLanguages,omitempty"`
// ForceSendFields is a list of field names (e.g. "DetectedBreak") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "DetectedBreak") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *TextProperty) MarshalJSON() ([]byte, error) {
type noMethod TextProperty
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Vertex: A vertex represents a 2D point in the image.
// NOTE: the vertex coordinates are in the same scale as the original
// image.
type Vertex struct {
// X: X coordinate.
X int64 `json:"x,omitempty"`
// Y: Y coordinate.
Y int64 `json:"y,omitempty"`
// ForceSendFields is a list of field names (e.g. "X") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "X") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Vertex) MarshalJSON() ([]byte, error) {
type noMethod Vertex
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// WebDetection: Relevant information for the image from the Internet.
type WebDetection struct {
// FullMatchingImages: Fully matching images from the Internet.
// Can include resized copies of the query image.
FullMatchingImages []*WebImage `json:"fullMatchingImages,omitempty"`
// PagesWithMatchingImages: Web pages containing the matching images
// from the Internet.
PagesWithMatchingImages []*WebPage `json:"pagesWithMatchingImages,omitempty"`
// PartialMatchingImages: Partial matching images from the
// Internet.
// Those images are similar enough to share some key-point features.
// For
	// example, an original image will likely have partial matching for its
// crops.
PartialMatchingImages []*WebImage `json:"partialMatchingImages,omitempty"`
// VisuallySimilarImages: The visually similar image results.
VisuallySimilarImages []*WebImage `json:"visuallySimilarImages,omitempty"`
// WebEntities: Deduced entities from similar images on the Internet.
WebEntities []*WebEntity `json:"webEntities,omitempty"`
// ForceSendFields is a list of field names (e.g. "FullMatchingImages")
// to unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "FullMatchingImages") to
// include in API requests with the JSON null value. By default, fields
// with empty values are omitted from API requests. However, any field
// with an empty value appearing in NullFields will be sent to the
// server as null. It is an error if a field in this list has a
// non-empty value. This may be used to include null fields in Patch
// requests.
NullFields []string `json:"-"`
}
func (s *WebDetection) MarshalJSON() ([]byte, error) {
type noMethod WebDetection
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
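// Sketch (assumption): scores in a WebDetection are not normalized across
// queries, so they are only meaningful for ranking within a single response:
//
//	for _, e := range wd.WebEntities { // wd is a *WebDetection
//		fmt.Printf("%s (score %.2f)\n", e.Description, e.Score)
//	}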
// WebEntity: Entity deduced from similar images on the Internet.
type WebEntity struct {
// Description: Canonical description of the entity, in English.
Description string `json:"description,omitempty"`
// EntityId: Opaque entity ID.
EntityId string `json:"entityId,omitempty"`
// Score: Overall relevancy score for the entity.
// Not normalized and not comparable across different image queries.
Score float64 `json:"score,omitempty"`
// ForceSendFields is a list of field names (e.g. "Description") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Description") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *WebEntity) MarshalJSON() ([]byte, error) {
type noMethod WebEntity
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
func (s *WebEntity) UnmarshalJSON(data []byte) error {
type noMethod WebEntity
var s1 struct {
Score gensupport.JSONFloat64 `json:"score"`
*noMethod
}
s1.noMethod = (*noMethod)(s)
if err := json.Unmarshal(data, &s1); err != nil {
return err
}
s.Score = float64(s1.Score)
return nil
}
// WebImage: Metadata for online images.
type WebImage struct {
// Score: Overall relevancy score for the image.
// Not normalized and not comparable across different image queries.
Score float64 `json:"score,omitempty"`
// Url: The result image URL.
Url string `json:"url,omitempty"`
// ForceSendFields is a list of field names (e.g. "Score") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Score") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *WebImage) MarshalJSON() ([]byte, error) {
type noMethod WebImage
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
func (s *WebImage) UnmarshalJSON(data []byte) error {
type noMethod WebImage
var s1 struct {
Score gensupport.JSONFloat64 `json:"score"`
*noMethod
}
s1.noMethod = (*noMethod)(s)
if err := json.Unmarshal(data, &s1); err != nil {
return err
}
s.Score = float64(s1.Score)
return nil
}
// WebPage: Metadata for web pages.
type WebPage struct {
// Score: Overall relevancy score for the web page.
// Not normalized and not comparable across different image queries.
Score float64 `json:"score,omitempty"`
// Url: The result web page URL.
Url string `json:"url,omitempty"`
// ForceSendFields is a list of field names (e.g. "Score") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Score") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *WebPage) MarshalJSON() ([]byte, error) {
type noMethod WebPage
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
func (s *WebPage) UnmarshalJSON(data []byte) error {
type noMethod WebPage
var s1 struct {
Score gensupport.JSONFloat64 `json:"score"`
*noMethod
}
s1.noMethod = (*noMethod)(s)
if err := json.Unmarshal(data, &s1); err != nil {
return err
}
s.Score = float64(s1.Score)
return nil
}
// Word: A word representation.
type Word struct {
// BoundingBox: The bounding box for the word.
// The vertices are in the order of top-left, top-right,
// bottom-right,
// bottom-left. When a rotation of the bounding box is detected the
// rotation
// is represented as around the top-left corner as defined when the text
// is
// read in the 'natural' orientation.
// For example:
// * when the text is horizontal it might look like:
// 0----1
// | |
// 3----2
// * when it's rotated 180 degrees around the top-left corner it
// becomes:
// 2----3
// | |
// 1----0
	// and the vertex order will still be (0, 1, 2, 3).
BoundingBox *BoundingPoly `json:"boundingBox,omitempty"`
// Property: Additional information detected for the word.
Property *TextProperty `json:"property,omitempty"`
// Symbols: List of symbols in the word.
// The order of the symbols follows the natural reading order.
Symbols []*Symbol `json:"symbols,omitempty"`
// ForceSendFields is a list of field names (e.g. "BoundingBox") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "BoundingBox") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Word) MarshalJSON() ([]byte, error) {
type noMethod Word
raw := noMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// method id "vision.images.annotate":
type ImagesAnnotateCall struct {
s *Service
batchannotateimagesrequest *BatchAnnotateImagesRequest
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// Annotate: Run image detection and annotation for a batch of images.
func (r *ImagesService) Annotate(batchannotateimagesrequest *BatchAnnotateImagesRequest) *ImagesAnnotateCall {
c := &ImagesAnnotateCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.batchannotateimagesrequest = batchannotateimagesrequest
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ImagesAnnotateCall) Fields(s ...googleapi.Field) *ImagesAnnotateCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ImagesAnnotateCall) Context(ctx context.Context) *ImagesAnnotateCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ImagesAnnotateCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ImagesAnnotateCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.batchannotateimagesrequest)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
urls := googleapi.ResolveRelative(c.s.BasePath, "v1/images:annotate")
urls += "?" + c.urlParams_.Encode()
req, _ := http.NewRequest("POST", urls, body)
req.Header = reqHeaders
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "vision.images.annotate" call.
// Exactly one of *BatchAnnotateImagesResponse or error will be non-nil.
// Any non-2xx status code is an error. Response headers are in either
// *BatchAnnotateImagesResponse.ServerResponse.Header or (if a response
// was returned at all) in error.(*googleapi.Error).Header. Use
// googleapi.IsNotModified to check whether the returned error was
// because http.StatusNotModified was returned.
func (c *ImagesAnnotateCall) Do(opts ...googleapi.CallOption) (*BatchAnnotateImagesResponse, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &BatchAnnotateImagesResponse{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := json.NewDecoder(res.Body).Decode(target); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Run image detection and annotation for a batch of images.",
// "flatPath": "v1/images:annotate",
// "httpMethod": "POST",
// "id": "vision.images.annotate",
// "parameterOrder": [],
// "parameters": {},
// "path": "v1/images:annotate",
// "request": {
// "$ref": "BatchAnnotateImagesRequest"
// },
// "response": {
// "$ref": "BatchAnnotateImagesResponse"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}<|fim▁end|>
|
// View and manage your data across Google Cloud Platform services
CloudPlatformScope = "https://www.googleapis.com/auth/cloud-platform"
)
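// annotateImageFromGCS is a hypothetical end-to-end helper (its name, feature
// type and result limit are assumptions, not generated code) showing how the
// request types above compose with the Images.Annotate call.
func annotateImageFromGCS(svc *Service, gcsURI string) (*BatchAnnotateImagesResponse, error) {
	req := &BatchAnnotateImagesRequest{
		Requests: []*AnnotateImageRequest{{
			// Reference the image by its Google Cloud Storage URI instead of
			// embedding base64 content.
			Image:    &Image{Source: &ImageSource{GcsImageUri: gcsURI}},
			Features: []*Feature{{Type: "LABEL_DETECTION", MaxResults: 5}},
		}},
	}
	return svc.Images.Annotate(req).Do()
}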
|
<|file_name|>Workspace.js<|end_file_name|><|fim▁begin|>import { bindActionCreators } from 'redux';<|fim▁hole|>import Workspace from '../components/Workspace';
function mapStateToProps(state) {
return {
workspace: state.workspace
};
}
function mapDispatchToProps(dispatch) {
return bindActionCreators({ ...actionCreators, setActiveComponent }, dispatch);
}
export default connect(mapStateToProps, mapDispatchToProps)(Workspace);<|fim▁end|>
|
import { connect } from 'react-redux';
import * as actionCreators from '../actions/workspace';
import { setActiveComponent } from '../actions/FileSystemActions';
|
<|file_name|>sequences.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8; -*-
#
# @file sequences
# @brief collgate
# @author Frédéric SCHERMA (INRA UMR1095)
# @date 2018-01-09
# @copyright Copyright (c) 2018 INRA/CIRAD<|fim▁hole|>
def fixture(fixture_manager, factory_manager):
acc_seq = "CREATE SEQUENCE IF NOT EXISTS accession_naming_seq START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1;"
bat_seq = "CREATE SEQUENCE IF NOT EXISTS batch_naming_seq START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1;"
from django.db import connection
with connection.cursor() as cursor:
cursor.execute(acc_seq)
cursor.execute(bat_seq)<|fim▁end|>
|
# @license MIT (see LICENSE file)
# @details
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding:utf8 -*-
from setuptools import setup
import picuplib<|fim▁hole|>
setup(
name = 'picuplib',
packages = ['picuplib'],
version = picuplib.__version__,
description = 'Picflash upload library',
author = 'Arvedui',
author_email = '[email protected]',
url = 'https://github.com/Arvedui/picuplib',
install_requires=['requests', 'requests-toolbelt'],
classifiers=[
'Development Status :: 4 - Beta',
'Topic :: Software Development :: Libraries',
'Intended Audience :: Developers',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
]
)<|fim▁end|>
| |
<|file_name|>update_doc_utils.py<|end_file_name|><|fim▁begin|>##############################################################################
# Name: misc/scripts/update_doc_utils.py
# Purpose: base utilities for others update_doc_*.py scripts
# Created: 2007-08-1
# RCS-ID: $Id$
# Copyright: (c) 2007 Francesco Montorsi
# Licence: wxWindows licence
##############################################################################
import sys, os, glob, distutils.file_util
DOCS_PATH="../../docs/latex/wx"
# Calls the given callback with the name of a documented class, its .tex related file,
# the content of that .tex file and the number of the line of the relative \class tag,
# for all documented class in DOCS_PATH. If the callback returns false the processing is stopped.
# Returns the number of .tex files processed.
def scanTexFiles(callback):
count = 0
for f in glob.glob(DOCS_PATH + '/*.tex'):
file = open(f, "r")
if not file:
print "could not open %s" % f
continue
print "opened file %s" % f
count = count + 1
# search \class tags<|fim▁hole|> content = file.readlines()
classdecl = 0
for i in range(len(content)):
line = content[i]
if "\class{" in line:
classdecl = classdecl + 1
# polish the class name
classname = line
classname = classname[classname.find("\class{"):]
classname = classname[classname.find("{")+1:classname.find("}")]
print " the class declared is named '%s'" % classname
# process this \class
if not callback(classname, f, content, i):
return count
print " file %s contains %d class declarations" % (f, classdecl)
return count<|fim▁end|>
| |
<|file_name|>run.py<|end_file_name|><|fim▁begin|><|fim▁hole|> p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
out = p.stdout.read().strip()
return out<|fim▁end|>
|
import subprocess
def runBash(cmd):
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>import base64
import cPickle as pickle
from django.db import models<|fim▁hole|>from django.utils.hashcompat import md5_constructor
class SessionManager(models.Manager):
def encode(self, session_dict):
"""
Returns the given session dictionary pickled and encoded as a string.
"""
pickled = pickle.dumps(session_dict)
pickled_md5 = md5_constructor(pickled + settings.SECRET_KEY).hexdigest()
return base64.encodestring(pickled + pickled_md5)
def save(self, session_key, session_dict, expire_date):
s = self.model(session_key, self.encode(session_dict), expire_date)
if session_dict:
s.save()
else:
s.delete() # Clear sessions with no data.
return s
class Session(models.Model):
"""
Django provides full support for anonymous sessions. The session
framework lets you store and retrieve arbitrary data on a
per-site-visitor basis. It stores data on the server side and
abstracts the sending and receiving of cookies. Cookies contain a
session ID -- not the data itself.
The Django sessions framework is entirely cookie-based. It does
not fall back to putting session IDs in URLs. This is an intentional
design decision. Not only does that behavior make URLs ugly, it makes
your site vulnerable to session-ID theft via the "Referer" header.
For complete documentation on using Sessions in your code, consult
the sessions documentation that is shipped with Django (also available
on the Django website).
"""
session_key = models.CharField(_('session key'), max_length=40,
primary_key=True)
session_data = models.TextField(_('session data'))
expire_date = models.DateTimeField(_('expire date'))
objects = SessionManager()
class Meta:
db_table = 'django_session'
verbose_name = _('session')
verbose_name_plural = _('sessions')
def get_decoded(self):
encoded_data = base64.decodestring(self.session_data)
pickled, tamper_check = encoded_data[:-32], encoded_data[-32:]
if md5_constructor(pickled + settings.SECRET_KEY).hexdigest() != tamper_check:
from django.core.exceptions import SuspiciousOperation
raise SuspiciousOperation("User tampered with session cookie.")
try:
return pickle.loads(pickled)
# Unpickling can cause a variety of exceptions. If something happens,
# just return an empty dictionary (an empty session).
except:
return {}<|fim▁end|>
|
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
|
<|file_name|>flag.py<|end_file_name|><|fim▁begin|>import sys
#Se le pasa la flag deseada, y devuelve lo que hay que escribir en el binario. CUIDADO CON LAS BACKSLASHES; hay que escaparlas
<|fim▁hole|> print "Syntax: python2 flag.py <FLAG>"
sys.exit(0)
flag = sys.argv[1]
i = 0
j = len(flag)-1
l = j
flag2 = ""
while (i<l+1):
if i <= l/2:
c = 7
else:
c = 10
flag2 += chr(ord(flag[j])+c)
i = i+1
j = j-1
print flag2<|fim▁end|>
|
if len(sys.argv) != 2:
|
<|file_name|>analyzer.py<|end_file_name|><|fim▁begin|>def get_related_fields(model):
pass
def get_table_size(model):<|fim▁hole|><|fim▁end|>
|
pass
def get_row_size(model):
pass
|
<|file_name|>mongodb.py<|end_file_name|><|fim▁begin|># coding=utf-8
"""
Collects all number values from the db.serverStatus() command, other
values are ignored.
#### Dependencies
* pymongo
"""
import diamond.collector
from diamond.collector import str_to_bool
import re
import zlib
try:
import pymongo
pymongo # workaround for pyflakes issue #13
except ImportError:
pymongo = None
try:
from pymongo import ReadPreference
ReadPreference # workaround for pyflakes issue #13
except ImportError:
ReadPreference = None
class MongoDBCollector(diamond.collector.Collector):
MAX_CRC32 = 4294967295
def __init__(self, *args, **kwargs):
self.__totals = {}
super(MongoDBCollector, self).__init__(*args, **kwargs)
def get_default_config_help(self):
config_help = super(MongoDBCollector, self).get_default_config_help()
config_help.update({
'hosts': 'Array of hostname(:port) elements to get metrics from'
'Set an alias by prefixing host:port with alias@',
'host': 'A single hostname(:port) to get metrics from'
' (can be used instead of hosts and overrides it)',
'user': 'Username for authenticated login (optional)',
'passwd': 'Password for authenticated login (optional)',
'databases': 'A regex of which databases to gather metrics for.'
' Defaults to all databases.',
'ignore_collections': 'A regex of which collections to ignore.'
' MapReduce temporary collections (tmp.mr.*)'
' are ignored by default.',
'collection_sample_rate': 'Only send stats for a consistent subset '
'of collections. This is applied after collections are ignored via'
                ' ignore_collections. Sampling uses crc32 so it is consistent across'
' replicas. Value between 0 and 1. Default is 1',
'network_timeout': 'Timeout for mongodb connection (in seconds).'
' There is no timeout by default.',
'simple': 'Only collect the same metrics as mongostat.',
'translate_collections': 'Translate dot (.) to underscores (_)'
' in collection names.',
'ssl': 'True to enable SSL connections to the MongoDB server.'
' Default is False'
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(MongoDBCollector, self).get_default_config()
config.update({
'path': 'mongo',
'hosts': ['localhost'],
'user': None,
'passwd': None,
'databases': '.*',
'ignore_collections': '^tmp\.mr\.',
'network_timeout': None,
'simple': 'False',
'translate_collections': 'False',
'collection_sample_rate': 1,
'ssl': False
})
return config
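    # Example collector configuration (a sketch; the host aliases, database
    # regex and sample rate below are assumptions, not defaults):
    #
    #   enabled = True
    #   hosts = primary@localhost:27017, secondary@localhost:27018
    #   databases = ^(app|analytics)$
    #   collection_sample_rate = 0.25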
def collect(self):
"""Collect number values from db.serverStatus()"""
if pymongo is None:
self.log.error('Unable to import pymongo')
return
# we need this for backwards compatibility
if 'host' in self.config:
self.config['hosts'] = [self.config['host']]
# convert network_timeout to integer
if self.config['network_timeout']:
self.config['network_timeout'] = int(
self.config['network_timeout'])
# convert collection_sample_rate to float
if self.config['collection_sample_rate']:
self.config['collection_sample_rate'] = float(
self.config['collection_sample_rate'])
# use auth if given
if 'user' in self.config:
user = self.config['user']
else:
user = None
if 'passwd' in self.config:
passwd = self.config['passwd']
else:
passwd = None
for host in self.config['hosts']:
if len(self.config['hosts']) == 1:
# one host only, no need to have a prefix
base_prefix = []
else:
matches = re.search('((.+)\@)?(.+)?', host)
alias = matches.group(2)
host = matches.group(3)
if alias is None:
base_prefix = [re.sub('[:\.]', '_', host)]
else:
base_prefix = [alias]
try:
# Ensure that the SSL option is a boolean.
if type(self.config['ssl']) is str:
self.config['ssl'] = str_to_bool(self.config['ssl'])
if ReadPreference is None:
conn = pymongo.Connection(
host,
network_timeout=self.config['network_timeout'],
ssl=self.config['ssl'],
slave_okay=True
)
else:
conn = pymongo.Connection(
host,
network_timeout=self.config['network_timeout'],
ssl=self.config['ssl'],
read_preference=ReadPreference.SECONDARY,
)
except Exception, e:
                self.log.error("Couldn't connect to mongodb: %s", e)
continue
# try auth
if user:
try:
conn.admin.authenticate(user, passwd)
except Exception, e:
                    self.log.error('User auth given, but could not authenticate'
                                   + ' with host: %s, err: %s' % (host, e))
                    return {}
data = conn.db.command('serverStatus')
self._publish_transformed(data, base_prefix)
if str_to_bool(self.config['simple']):
data = self._extract_simple_data(data)
self._publish_dict_with_prefix(data, base_prefix)
db_name_filter = re.compile(self.config['databases'])
ignored_collections = re.compile(self.config['ignore_collections'])
sample_threshold = self.MAX_CRC32 * self.config[
'collection_sample_rate']
for db_name in conn.database_names():
if not db_name_filter.search(db_name):
continue
db_stats = conn[db_name].command('dbStats')
db_prefix = base_prefix + ['databases', db_name]
self._publish_dict_with_prefix(db_stats, db_prefix)
for collection_name in conn[db_name].collection_names():
if ignored_collections.search(collection_name):
continue
if (self.config['collection_sample_rate'] < 1 and (
zlib.crc32(collection_name) & 0xffffffff
) > sample_threshold):
continue
collection_stats = conn[db_name].command('collstats',
collection_name)
if str_to_bool(self.config['translate_collections']):
collection_name = collection_name.replace('.', '_')
collection_prefix = db_prefix + [collection_name]
self._publish_dict_with_prefix(collection_stats,
collection_prefix)
def _publish_transformed(self, data, base_prefix):
""" Publish values of type: counter or percent """
self._publish_dict_with_prefix(data.get('opcounters', {}),
base_prefix + ['opcounters_per_sec'],
self.publish_counter)
self._publish_dict_with_prefix(data.get('opcountersRepl', {}),
base_prefix + ['opcountersRepl_per_sec'],
self.publish_counter)
self._publish_metrics(base_prefix + ['backgroundFlushing_per_sec'],
'flushes',
data.get('backgroundFlushing', {}),
self.publish_counter)
self._publish_dict_with_prefix(data.get('network', {}),
base_prefix + ['network_per_sec'],
self.publish_counter)
self._publish_metrics(base_prefix + ['extra_info_per_sec'],
'page_faults',
data.get('extra_info', {}),
self.publish_counter)
def get_dotted_value(data, key_name):
key_name = key_name.split('.')
for i in key_name:
data = data.get(i, {})
if not data:
return 0
return data
def compute_interval(data, total_name):
current_total = get_dotted_value(data, total_name)
total_key = '.'.join(base_prefix + [total_name])
last_total = self.__totals.get(total_key, current_total)
interval = current_total - last_total
self.__totals[total_key] = current_total
return interval
def publish_percent(value_name, total_name, data):<|fim▁hole|> value = float(get_dotted_value(data, value_name) * 100)
interval = compute_interval(data, total_name)
key = '.'.join(base_prefix + ['percent', value_name])
self.publish_counter(key, value, time_delta=bool(interval),
interval=interval)
publish_percent('globalLock.lockTime', 'globalLock.totalTime', data)
publish_percent('indexCounters.btree.misses',
'indexCounters.btree.accesses', data)
locks = data.get('locks')
if locks:
if '.' in locks:
locks['_global_'] = locks['.']
del (locks['.'])
key_prefix = '.'.join(base_prefix + ['percent'])
db_name_filter = re.compile(self.config['databases'])
interval = compute_interval(data, 'uptimeMillis')
for db_name in locks:
if not db_name_filter.search(db_name):
continue
r = get_dotted_value(
locks,
'%s.timeLockedMicros.r' % db_name)
R = get_dotted_value(
locks,
'.%s.timeLockedMicros.R' % db_name)
value = float(r + R) / 10
if value:
self.publish_counter(
key_prefix + '.locks.%s.read' % db_name,
value, time_delta=bool(interval),
interval=interval)
w = get_dotted_value(
locks,
'%s.timeLockedMicros.w' % db_name)
W = get_dotted_value(
locks,
'%s.timeLockedMicros.W' % db_name)
value = float(w + W) / 10
if value:
self.publish_counter(
key_prefix + '.locks.%s.write' % db_name,
value, time_delta=bool(interval), interval=interval)
def _publish_dict_with_prefix(self, dict, prefix, publishfn=None):
for key in dict:
self._publish_metrics(prefix, key, dict, publishfn)
def _publish_metrics(self, prev_keys, key, data, publishfn=None):
"""Recursively publish keys"""
if not key in data:
return
value = data[key]
keys = prev_keys + [key]
if not publishfn:
publishfn = self.publish
if isinstance(value, dict):
for new_key in value:
self._publish_metrics(keys, new_key, value)
elif isinstance(value, int) or isinstance(value, float):
publishfn('.'.join(keys), value)
elif isinstance(value, long):
publishfn('.'.join(keys), float(value))
def _extract_simple_data(self, data):
return {
'connections': data.get('connections'),
'globalLock': data.get('globalLock'),
'indexCounters': data.get('indexCounters')
}<|fim▁end|>
| |
<|file_name|>gerritapi.rs<|end_file_name|><|fim▁begin|>use clap::{self, SubCommand, App, Arg};
use libgerrit::error::GGRResult;
use libgerrit::gerrit::Gerrit;
use config;
use libgerrit::entities;
pub fn menu<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("gerritapi")
.about("Gerrit API interface (Only fo API tests)")
.subcommand(SubCommand::with_name("changes")
.about("Change endpoint")
.subcommand(SubCommand::with_name("create")
.about("Create a change")
.arg(Arg::with_name("project")
.required(true)
.takes_value(true)
.long("project")
.short("p")
.help("The name of the project")
)
.arg(Arg::with_name("branch")
.required(true)
.takes_value(true)
.long("branch")
.short("b")
.help("The name of the target branch. The 'refs/heads/' prefix is omitted.")<|fim▁hole|> )
.arg(Arg::with_name("subject")
.required(true)
.takes_value(true)
.long("subject")
.short("s")
.help("The subject of the change (header line of the commit message).")
)
)
.subcommand(SubCommand::with_name("query")
.about("query changes")
.arg(Arg::with_name("query")
.required(true)
.takes_value(true)
.long("query")
.short("q")
.help("Query string")
)
)
.subcommand(SubCommand::with_name("listreviewers")
.about("List reviewers for a {change-id}")
.arg(Arg::with_name("changeid")
.required(true)
.takes_value(true)
.help("receive reviewer list from this {change-id}")
.index(1)
)
)
.subcommand(SubCommand::with_name("abandonchange")
.about("Abandon a change")
.arg(Arg::with_name("changeid")
.required(true)
.takes_value(true)
.help("The change id which should abandoned")
.index(1)
)
.arg(Arg::with_name("message")
.long("message")
.short("m")
.help("Abandon message")
.takes_value(true)
)
.arg(Arg::with_name("notify")
.long("notify")
.short("n")
.help("Notification hint (only v2.13). defaullt is 'none'")
.takes_value(true)
.possible_values(&["all", "none", "owner", "owner_reviewer"])
.default_value("none")
)
)
)
.subcommand(SubCommand::with_name("config")
.about("Config endpoint")
.arg(Arg::with_name("version")
.short("V")
.help("gerrit server version")
)
)
}
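// Illustrative invocations routed through this menu (binary name assumed):
//   <bin> gerritapi config -V
//   <bin> gerritapi changes query -q "status:open"
//   <bin> gerritapi changes abandonchange <change-id> -m "obsolete"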
pub fn manage(x: &clap::ArgMatches, config: &config::Config) -> GGRResult<()> {
match x.subcommand() {
("changes", Some(y)) => { changes(y, config) },
("config", Some(y)) => { configs(y, config) },
_ => {
println!("{}", x.usage());
Ok(())
},
}
}
fn configs(y: &clap::ArgMatches, config: &config::Config) -> GGRResult<()> {
let mut gerrit = Gerrit::new(config.get_base_url());
if y.is_present("version") {
match gerrit.config().get_version() {
Ok(version) => println!("version: {:?}", version),
Err(x) => println!("Error: {:?}", x),
}
}
Ok(())
}
fn changes(y: &clap::ArgMatches, config: &config::Config) -> GGRResult<()> {
let mut gerrit = Gerrit::new(config.get_base_url());
match y.subcommand() {
("create", Some(opt)) => {
let project = opt.value_of("project").unwrap().into();
let branch = opt.value_of("branch").unwrap().into();
let subject = opt.value_of("subject").unwrap().into();
let ci = entities::ChangeInput {
                project,
                branch,
                subject,
base_change: None,
merge: None,
new_branch: None,
status: None,
topic: None,
};
match gerrit.changes().create_change(&ci) {
Ok(changeinfo) => {
println!("Change created! Returned data");
println!("{:?}", changeinfo);
},
Err(x) => {
println!("Error: {:?}", x);
}
}
},
("query", Some(opt)) => {
let query = opt.value_of("query").unwrap();
            match gerrit.changes().query_changes(Some(vec![query]), None) {
Ok(cis) => {
for i in cis {
println!("* {:?}", i);
}
},
Err(x) => {
println!("Error: {:?}", x);
}
}
},
("listreviewers", Some(opt)) => {
let changeid = opt.value_of("changeid").unwrap();
match gerrit.changes().get_reviewers(changeid) {
Ok(reviewers) => {
for reviewer in reviewers {
println!("* {:?}", reviewer);
}
},
Err(x) => {
println!("Error: {:?}", x);
},
}
},
("abandonchange", Some(opt)) => {
let changeid = opt.value_of("changeid").unwrap();
let message = opt.value_of("message");
let notify = opt.value_of("notify");
match gerrit.changes().abandon_change(changeid, message, notify) {
Ok(ci) => {
println!("* {:?}", ci);
},
Err(x) => println!("Error: {:?}", x),
};
},
e => {
println!("unknown subcommand {}", e.0);
println!("{}", y.usage());
}
}
Ok(())
}<|fim▁end|>
| |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# MIT license
#<|fim▁hole|># in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
__author__ = 'XESS Corporation'
__email__ = '[email protected]'
from .distributor import distributor_class
# Export the ORDER_COL_USERFIELDS content
from .distributors_info import ORDER_COL_USERFIELDS # noqa: F401
# Import and register here the API / local / scrape modules.
from .dist_local_template import dist_local_template # noqa: F401
from .api_octopart import api_octopart # noqa: F401
from .api_partinfo_kitspace import api_partinfo_kitspace # noqa: F401
#
# Some wrappers
#
def init_distributor_dict():
distributor_class.init_dist_dict()
def get_dist_parts_info(parts, dist_list, currency):
distributor_class.get_dist_parts_info(parts, dist_list, currency)
def get_registered_apis():
return distributor_class.registered
def get_distributors_list():
''' List of distributors registered by the API modules '''
return list(distributor_class.get_distributors_iter())
def get_distributors_iter():
''' Iterator for the distributors registered by the API modules '''
return distributor_class.get_distributors_iter()
def get_distributor_info(name):
''' Gets all the information about a supported distributor.
This information comes from the list collected from the APIs, not from the fixed template. '''
return distributor_class.get_distributor_info(name)
def get_dist_name_from_label(label):
''' Returns the internal distributor name for a provided label. '''
return distributor_class.label2name.get(label.lower())
def set_distributors_logger(logger):
''' Sets the logger used by the class '''
distributor_class.logger = logger
def set_distributors_progress(cls):
''' Configures the class used to indicate progress '''
distributor_class.progress = cls
def set_api_options(api, **kwargs):
''' Configure an API (by name) '''
distributor_class.set_api_options(api, **kwargs)
def set_api_status(api, enabled):
''' Enable/Disable a particular API '''
distributor_class.set_api_status(api, enabled)
def get_api_status(api):
''' Find if an API is enabled '''
return distributor_class.get_api_status(api)
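# Rough usage sketch (names as defined above; logger setup is assumed):
#   set_distributors_logger(logging.getLogger('kicost'))
#   init_distributor_dict()
#   get_dist_parts_info(parts, get_distributors_list(), 'USD')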
# Init distributor dict during import.
init_distributor_dict()<|fim▁end|>
|
# Copyright (C) 2018 by XESS Corporation / Hildo Guillardi Júnior
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
|
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>import { StemLeafPlot } from "../../";<|fim▁hole|><|fim▁end|>
|
export = StemLeafPlot;
|
<|file_name|>course.py<|end_file_name|><|fim▁begin|>''' -- imports from python libraries -- '''
# from datetime import datetime
import datetime
import json
''' -- imports from installed packages -- '''
from django.http import HttpResponseRedirect # , HttpResponse uncomment when to use
from django.http import HttpResponse
from django.http import Http404
from django.shortcuts import render_to_response # , render uncomment when to use
from django.template import RequestContext
from django.template import TemplateDoesNotExist
from django.template.loader import render_to_string
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
try:
from bson import ObjectId
except ImportError: # old pymongo
from pymongo.objectid import ObjectId
''' -- imports from application folders/files -- '''
from gnowsys_ndf.settings import GAPPS, MEDIA_ROOT, GSTUDIO_TASK_TYPES
from gnowsys_ndf.ndf.models import NodeJSONEncoder
from gnowsys_ndf.ndf.models import Node, AttributeType, RelationType
from gnowsys_ndf.ndf.models import node_collection, triple_collection
from gnowsys_ndf.ndf.views.file import save_file
from gnowsys_ndf.ndf.templatetags.ndf_tags import edit_drawer_widget
from gnowsys_ndf.ndf.views.methods import get_node_common_fields, parse_template_data, get_execution_time, delete_node
from gnowsys_ndf.ndf.views.notify import set_notif_val
from gnowsys_ndf.ndf.views.methods import get_property_order_with_value
from gnowsys_ndf.ndf.views.methods import create_gattribute, create_grelation, create_task
GST_COURSE = node_collection.one({'_type': "GSystemType", 'name': GAPPS[7]})
app = GST_COURSE
# @login_required
@get_execution_time
def course(request, group_id, course_id=None):
"""
* Renders a list of all 'courses' available within the database.
"""
    if ObjectId.is_valid(group_id) is False:
group_ins = node_collection.find_one({'_type': "Group", "name": group_id})
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if group_ins:
group_id = str(group_ins._id)
else:
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if auth:
group_id = str(auth._id)
else:
pass
if course_id is None:
course_ins = node_collection.find_one({'_type': "GSystemType", "name": "Course"})
if course_ins:
course_id = str(course_ins._id)
if request.method == "POST":
# Course search view
title = GST_COURSE.name
search_field = request.POST['search_field']
course_coll = node_collection.find({'member_of': {'$all': [ObjectId(GST_COURSE._id)]},
'$or': [
{'$and': [
{'name': {'$regex': search_field, '$options': 'i'}},
{'$or': [
{'access_policy': u"PUBLIC"},
{'$and': [{'access_policy': u"PRIVATE"}, {'created_by': request.user.id}]}
]
}
]
},
{'$and': [
{'tags': {'$regex': search_field, '$options': 'i'}},
{'$or': [
{'access_policy': u"PUBLIC"},
{'$and': [{'access_policy': u"PRIVATE"}, {'created_by': request.user.id}]}
]
}
]
}
],
'group_set': {'$all': [ObjectId(group_id)]}
}).sort('last_update', -1)
# course_nodes_count = course_coll.count()
return render_to_response("ndf/course.html",
{'title': title,
'appId': app._id,
'searching': True, 'query': search_field,
'course_coll': course_coll, 'groupid': group_id, 'group_id':group_id
},
context_instance=RequestContext(request)
)
else:
# Course list view
title = GST_COURSE.name
course_coll = node_collection.find({'member_of': {'$all': [ObjectId(course_id)]},
'group_set': {'$all': [ObjectId(group_id)]},
'$or': [
{'access_policy': u"PUBLIC"},
{'$and': [
{'access_policy': u"PRIVATE"},
{'created_by': request.user.id}
]
}
]
})
template = "ndf/course.html"
variable = RequestContext(request, {'title': title, 'course_nodes_count': course_coll.count(), 'course_coll': course_coll, 'groupid':group_id, 'appId':app._id, 'group_id':group_id})
return render_to_response(template, variable)
@login_required
@get_execution_time
def create_edit(request, group_id, node_id=None):
"""Creates/Modifies details about the given quiz-item.
"""
    if ObjectId.is_valid(group_id) is False:
group_ins = node_collection.find_one({'_type': "Group","name": group_id})
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if group_ins:
group_id = str(group_ins._id)
else:
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if auth:
group_id = str(auth._id)
else:
pass
context_variables = {'title': GST_COURSE.name,
'group_id': group_id,
'groupid': group_id
}
if node_id:
course_node = node_collection.one({'_type': u'GSystem', '_id': ObjectId(node_id)})
else:
course_node = node_collection.collection.GSystem()
available_nodes = node_collection.find({'_type': u'GSystem', 'member_of': ObjectId(GST_COURSE._id),'group_set': ObjectId(group_id) })
nodes_list = []
for each in available_nodes:
nodes_list.append(str((each.name).strip().lower()))
if request.method == "POST":
# get_node_common_fields(request, course_node, group_id, GST_COURSE)
course_node.save(is_changed=get_node_common_fields(request, course_node, group_id, GST_COURSE))
return HttpResponseRedirect(reverse('course', kwargs={'group_id': group_id}))
else:
if node_id:
context_variables['node'] = course_node
context_variables['groupid'] = group_id
context_variables['group_id'] = group_id
context_variables['appId'] = app._id
context_variables['nodes_list'] = json.dumps(nodes_list)
return render_to_response("ndf/course_create_edit.html",
context_variables,
context_instance=RequestContext(request)
)
@login_required
@get_execution_time
def course_detail(request, group_id, _id):
    if ObjectId.is_valid(group_id) is False:
group_ins = node_collection.find_one({'_type': "Group", "name": group_id})
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if group_ins:
group_id = str(group_ins._id)
else:
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if auth:
group_id = str(auth._id)
else:
pass
course_structure_exists = False
title = GST_COURSE.name
course_node = node_collection.one({"_id": ObjectId(_id)})
if course_node.collection_set:
course_structure_exists = True
return render_to_response("ndf/course_detail.html",
{'node': course_node,
'groupid': group_id,
'group_id': group_id,
'appId': app._id,
'title':title,
'course_structure_exists': course_structure_exists
},
context_instance=RequestContext(request)
)
@login_required
@get_execution_time
def course_create_edit(request, group_id, app_id, app_set_id=None, app_set_instance_id=None, app_name=None):
"""
Creates/Modifies document of given sub-types of Course(s).
"""
auth = None
if ObjectId.is_valid(group_id) is False:
group_ins = node_collection.one({'_type': "Group", "name": group_id})
auth = node_collection.one({
'_type': 'Author', 'name': unicode(request.user.username)
})
if group_ins:
group_id = str(group_ins._id)
else:
auth = node_collection.one({
'_type': 'Author', 'name': unicode(request.user.username)
})
if auth:
group_id = str(auth._id)
else:
pass
app = None
if app_id is None:
app = node_collection.one({'_type': "GSystemType", 'name': app_name})
if app:
app_id = str(app._id)
else:
app = node_collection.one({'_id': ObjectId(app_id)})
app_name = app.name
# app_set = ""
app_collection_set = []
title = ""
course_gst = None
course_gs = None
mis_admin = None
property_order_list = []
template = ""
template_prefix = "mis"
if request.user:
if auth is None:
auth = node_collection.one({
'_type': 'Author', 'name': unicode(request.user.username)
})
agency_type = auth.agency_type
agency_type_node = node_collection.one({
'_type': "GSystemType", 'name': agency_type
}, {
'collection_set': 1
})
if agency_type_node:
for eachset in agency_type_node.collection_set:
app_collection_set.append(
node_collection.one({
"_id": eachset
}, {
'_id': 1, 'name': 1, 'type_of': 1
})
)
if app_set_id:
course_gst = node_collection.one({
'_type': "GSystemType", '_id': ObjectId(app_set_id)
}, {
'name': 1, 'type_of': 1
})
template = "ndf/" + course_gst.name.strip().lower().replace(' ', '_') \
+ "_create_edit.html"
title = course_gst.name
if app_set_instance_id:
course_gs = node_collection.one({
'_type': "GSystem", '_id': ObjectId(app_set_instance_id)
})
else:
course_gs = node_collection.collection.GSystem()
course_gs.member_of.append(course_gst._id)
property_order_list = get_property_order_with_value(course_gs)
if request.method == "POST":
# [A] Save course-node's base-field(s)
start_time = ""
if "start_time" in request.POST:
start_time = request.POST.get("start_time", "")
start_time = datetime.datetime.strptime(start_time, "%m/%Y")
end_time = ""
if "end_time" in request.POST:
end_time = request.POST.get("end_time", "")
end_time = datetime.datetime.strptime(end_time, "%m/%Y")
nussd_course_type = ""
if "nussd_course_type" in request.POST:
nussd_course_type = request.POST.get("nussd_course_type", "")
nussd_course_type = unicode(nussd_course_type)
unset_ac_options = []
if "unset-ac-options" in request.POST:
unset_ac_options = request.POST.getlist("unset-ac-options")
else:
# Just to execute loop at least once for Course Sub-Types
# other than 'Announced Course'
unset_ac_options = ["dummy"]
if course_gst.name == u"Announced Course":
announce_to_colg_list = request.POST.get(
"announce_to_colg_list", ""
)
                    announce_to_colg_list = announce_to_colg_list.split(",")
colg_ids = []
# Parsing ObjectId -- from string format to ObjectId
for each in announce_to_colg_list:
if each and ObjectId.is_valid(each):
colg_ids.append(ObjectId(each))
# Fetching college(s)
colg_list_cur = node_collection.find({
'_id': {'$in': colg_ids}
}, {
'name': 1, 'attribute_set.enrollment_code': 1
})
if "_id" in course_gs:
# It means we are in editing mode of given Announced Course GSystem
unset_ac_options = [course_gs._id]
ac_nc_code_list = []
# Prepare a list
# 0th index (ac_node): Announced Course node,
# 1st index (nc_id): NUSSD Course node's ObjectId,
# 2nd index (nc_course_code): NUSSD Course's code
for cid in unset_ac_options:
ac_node = None
nc_id = None
nc_course_code = ""
# Here course_gst is Announced Course GSytemType's node
ac_node = node_collection.one({
'_id': ObjectId(cid), 'member_of': course_gst._id
})
# If ac_node found, means
# (1) we are dealing with creating Announced Course
# else,
# (2) we are in editing phase of Announced Course
course_node = None
if not ac_node:
# In this case, cid is of NUSSD Course GSystem
# So fetch that to extract course_code
# Set to nc_id
ac_node = None
course_node = node_collection.one({
'_id': ObjectId(cid)
})
else:
# In this case, fetch NUSSD Course from
# Announced Course GSystem's announced_for relationship
                            course_node_ids = None
                            for rel in ac_node.relation_set:
if "announced_for" in rel:
course_node_ids = rel["announced_for"]
break
# Fetch NUSSD Course GSystem
if course_node_ids:
course_node = node_collection.find_one({
"_id": {"$in": course_node_ids}
})
# If course_code doesn't exists then
# set NUSSD Course GSystem's name as course_code
if course_node:
nc_id = course_node._id
for attr in course_node.attribute_set:
if "course_code" in attr:
nc_course_code = attr["course_code"]
break
if not nc_course_code:
nc_course_code = course_node.name.replace(" ", "-")
# Append to ac_nc_code_list
ac_nc_code_list.append([ac_node, nc_id, nc_course_code])
# For each selected college
# Create Announced Course GSystem
for college_node in colg_list_cur:
# Fetch Enrollment code from "enrollment_code" (Attribute)
college_enrollment_code = ""
if college_node:
for attr in college_node.attribute_set:
if attr and "enrollment_code" in attr:
college_enrollment_code = attr["enrollment_code"]
break
ann_course_id_list = []
# For each selected course to Announce
for ac_nc_code in ac_nc_code_list:
course_gs = ac_nc_code[0]
nc_id = ac_nc_code[1]
nc_course_code = ac_nc_code[2]
if not course_gs:
# Create new Announced Course GSystem
course_gs = node_collection.collection.GSystem()
course_gs.member_of.append(course_gst._id)
# Prepare name for Announced Course GSystem
c_name = unicode(
nc_course_code + "_" + college_enrollment_code + "_"
+ start_time.strftime("%b_%Y") + "-"
+ end_time.strftime("%b_%Y")
)
request.POST["name"] = c_name
is_changed = get_node_common_fields(
request, course_gs, group_id, course_gst
)
if is_changed:
# Remove this when publish button is setup on interface
course_gs.status = u"PUBLISHED"
course_gs.save(is_changed=is_changed)
# [B] Store AT and/or RT field(s) of given course-node
for tab_details in property_order_list:
for field_set in tab_details[1]:
# Fetch only Attribute field(s) / Relation field(s)
if '_id' in field_set:
field_instance = node_collection.one({
'_id': field_set['_id']
})
field_instance_type = type(field_instance)
if (field_instance_type in
[AttributeType, RelationType]):
field_data_type = field_set['data_type']
# Fetch field's value depending upon AT/RT
# and Parse fetched-value depending upon
# that field's data-type
if field_instance_type == AttributeType:
if "File" in field_instance["validators"]:
# Special case: AttributeTypes that require file instance as it's value in which case file document's ObjectId is used
if field_instance["name"] in request.FILES:
field_value = request.FILES[field_instance["name"]]
else:
field_value = ""
# Below 0th index is used because that function returns tuple(ObjectId, bool-value)
if field_value != '' and field_value != u'':
file_name = course_gs.name + " -- " + field_instance["altnames"]
content_org = ""
tags = ""
field_value = save_file(field_value, file_name, request.user.id, group_id, content_org, tags, oid=True)[0]
else:
# Other AttributeTypes
field_value = request.POST.get(field_instance["name"], "")
if field_instance["name"] in ["start_time", "end_time"]:
# Course Duration
field_value = parse_template_data(field_data_type, field_value, date_format_string="%m/%Y")
else:
field_value = parse_template_data(field_data_type, field_value, date_format_string="%d/%m/%Y %H:%M")
course_gs_triple_instance = create_gattribute(course_gs._id, node_collection.collection.AttributeType(field_instance), field_value)
else:
# i.e if field_instance_type == RelationType
if field_instance["name"] == "announced_for":
field_value = ObjectId(nc_id)
# Pass ObjectId of selected Course
elif field_instance["name"] == "acourse_for_college":
field_value = college_node._id
# Pass ObjectId of selected College
course_gs_triple_instance = create_grelation(course_gs._id, node_collection.collection.RelationType(field_instance), field_value)
ann_course_id_list.append(course_gs._id)
else:
is_changed = get_node_common_fields(request, course_gs, group_id, course_gst)
if is_changed:
# Remove this when publish button is setup on interface
course_gs.status = u"PUBLISHED"
course_gs.save(is_changed=is_changed)
# [B] Store AT and/or RT field(s) of given course-node
for tab_details in property_order_list:
for field_set in tab_details[1]:
# Fetch only Attribute field(s) / Relation field(s)
if '_id' in field_set:
field_instance = node_collection.one({'_id': field_set['_id']})
field_instance_type = type(field_instance)
if field_instance_type in [AttributeType, RelationType]:
field_data_type = field_set['data_type']
# Fetch field's value depending upon AT/RT
# and Parse fetched-value depending upon
# that field's data-type
if field_instance_type == AttributeType:
if "File" in field_instance["validators"]:
# Special case: AttributeTypes that require file instance as it's value in which case file document's ObjectId is used
if field_instance["name"] in request.FILES:
field_value = request.FILES[field_instance["name"]]
else:
field_value = ""
# Below 0th index is used because that function returns tuple(ObjectId, bool-value)
if field_value != '' and field_value != u'':
file_name = course_gs.name + " -- " + field_instance["altnames"]
content_org = ""
tags = ""
field_value = save_file(field_value, file_name, request.user.id, group_id, content_org, tags, oid=True)[0]
else:
# Other AttributeTypes
field_value = request.POST.get(field_instance["name"], "")
# if field_instance["name"] in ["start_time","end_time"]:
# field_value = parse_template_data(field_data_type, field_value, date_format_string="%m/%Y")
# elif field_instance["name"] in ["start_enroll", "end_enroll"]: #Student Enrollment DUration
# field_value = parse_template_data(field_data_type, field_value, date_format_string="%d/%m/%Y")
if field_instance["name"] in ["mast_tr_qualifications", "voln_tr_qualifications"]:
                                                # Needs special kind of parsing
field_value = []
tr_qualifications = request.POST.get(field_instance["name"], '')
if tr_qualifications:
qualifications_dict = {}
tr_qualifications = [qual.strip() for qual in tr_qualifications.split(",")]
for i, qual in enumerate(tr_qualifications):
if (i % 2) == 0:
if qual == "true":
qualifications_dict["mandatory"] = True
elif qual == "false":
qualifications_dict["mandatory"] = False
else:
qualifications_dict["text"] = unicode(qual)
field_value.append(qualifications_dict)
qualifications_dict = {}
elif field_instance["name"] in ["max_marks", "min_marks"]:
# Needed because both these fields' values are dependent upon evaluation_type field's value
evaluation_type = request.POST.get("evaluation_type", "")
if evaluation_type == u"Continuous":
field_value = None
field_value = parse_template_data(field_data_type, field_value, date_format_string="%d/%m/%Y %H:%M")
else:
field_value = parse_template_data(field_data_type, field_value, date_format_string="%d/%m/%Y %H:%M")
course_gs_triple_instance = create_gattribute(
course_gs._id,
node_collection.collection.AttributeType(field_instance),
field_value
)
else:
#i.e if field_instance_type == RelationType
if field_instance["name"] == "announced_for":
field_value = ObjectId(cid)
#Pass ObjectId of selected Course
elif field_instance["name"] == "acourse_for_college":
field_value = college_node._id
#Pass ObjectId of selected College
course_gs_triple_instance = create_grelation(
course_gs._id,
node_collection.collection.RelationType(field_instance),
field_value
)
return HttpResponseRedirect(
reverse(
app_name.lower() + ":" + template_prefix + '_app_detail',
kwargs={
'group_id': group_id, "app_id": app_id,
"app_set_id": app_set_id
}
)
)
univ = node_collection.one({
'_type': "GSystemType", 'name': "University"
}, {
'_id': 1
})
university_cur = None
if not mis_admin:
mis_admin = node_collection.one(
{'_type': "Group", 'name': "MIS_admin"},
{'_id': 1, 'name': 1, 'group_admin': 1}
)
if univ and mis_admin:
university_cur = node_collection.find(
{'member_of': univ._id, 'group_set': mis_admin._id},
{'name': 1}
).sort('name', 1)
default_template = "ndf/course_create_edit.html"
context_variables = {
'groupid': group_id, 'group_id': group_id,
'app_id': app_id, 'app_name': app_name,
'app_collection_set': app_collection_set,
'app_set_id': app_set_id,
'title': title,
'university_cur': university_cur,
'property_order_list': property_order_list
}
if app_set_instance_id:
course_gs.get_neighbourhood(course_gs.member_of)
context_variables['node'] = course_gs
if "Announced Course" in course_gs.member_of_names_list:
for attr in course_gs.attribute_set:
if attr:
for eachk, eachv in attr.items():
context_variables[eachk] = eachv
for rel in course_gs.relation_set:
if rel:
for eachk, eachv in rel.items():
if eachv:
get_node_name = node_collection.one({'_id': eachv[0]})
context_variables[eachk] = get_node_name.name
try:
return render_to_response(
[template, default_template],
context_variables, context_instance=RequestContext(request)
)
except TemplateDoesNotExist as tde:
error_message = "\n CourseCreateEditViewError: This html template (" \
+ str(tde) + ") does not exists !!!\n"
raise Http404(error_message)
except Exception as e:
error_message = "\n CourseCreateEditViewError: " + str(e) + " !!!\n"
raise Exception(error_message)
@login_required
@get_execution_time
def mis_course_detail(request, group_id, app_id=None, app_set_id=None, app_set_instance_id=None, app_name=None):
"""
Detail view of NUSSD Course/ Announced Course
"""
# print "\n Found course_detail n gone inn this...\n\n"
auth = None
if ObjectId.is_valid(group_id) is False:
group_ins = node_collection.one({'_type': "Group", "name": group_id})
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if group_ins:
group_id = str(group_ins._id)
else:
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if auth:
group_id = str(auth._id)
else:
pass
app = None
if app_id is None:
app = node_collection.one({'_type': "GSystemType", 'name': app_name})
if app:
app_id = str(app._id)
else:
app = node_collection.one({'_id': ObjectId(app_id)})
app_name = app.name
# app_name = "mis"
app_set = ""
app_collection_set = []
title = ""
course_gst = None
course_gs = None
nodes = None
node = None
property_order_list = []
property_order_list_ac = []
    is_link_needed = True  # This is required to show the Link button on the interface that links a Student's/Voluntary Teacher's node with its corresponding Author node
template_prefix = "mis"
context_variables = {}
#Course structure collection _dict
course_collection_dict = {}
course_collection_list = []
course_structure_exists = False
if request.user:
if auth is None:
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username)})
if auth:
agency_type = auth.agency_type
agency_type_node = node_collection.one({'_type': "GSystemType", 'name': agency_type}, {'collection_set': 1})
if agency_type_node:
for eachset in agency_type_node.collection_set:
app_collection_set.append(node_collection.one({"_id": eachset}, {'_id': 1, 'name': 1, 'type_of': 1}))
if app_set_id:
course_gst = node_collection.one({'_type': "GSystemType", '_id': ObjectId(app_set_id)}, {'name': 1, 'type_of': 1})
title = course_gst.name
template = "ndf/course_list.html"
if request.method == "POST":
search = request.POST.get("search", "")
classtype = request.POST.get("class", "")
# nodes = list(node_collection.find({'name':{'$regex':search, '$options': 'i'},'member_of': {'$all': [course_gst._id]}}))
nodes = node_collection.find({'member_of': course_gst._id, 'name': {'$regex': search, '$options': 'i'}})
else:
nodes = node_collection.find({'member_of': course_gst._id, 'group_set': ObjectId(group_id)})
if app_set_instance_id:
template = "ndf/course_details.html"
node = node_collection.one({'_type': "GSystem", '_id': ObjectId(app_set_instance_id)})
property_order_list = get_property_order_with_value(node)
node.get_neighbourhood(node.member_of)
if title == u"Announced Course":
property_order_list_ac = node.attribute_set
# Course structure as list of dicts
if node.collection_set:
course_structure_exists = True
context_variables = { 'groupid': group_id, 'group_id': group_id,
'app_id': app_id, 'app_name': app_name, 'app_collection_set': app_collection_set,
'app_set_id': app_set_id,
'course_gst_name': course_gst.name,
'title': title,
'course_structure_exists': course_structure_exists,
'nodes': nodes, 'node': node,
'property_order_list': property_order_list,
'property_order_list_ac': property_order_list_ac,
'is_link_needed': is_link_needed
}
try:
# print "\n template-list: ", [template, default_template]
# template = "ndf/fgh.html"
# default_template = "ndf/dsfjhk.html"
# return render_to_response([template, default_template],
return render_to_response(template,
context_variables,
context_instance = RequestContext(request)
)
except TemplateDoesNotExist as tde:
error_message = "\n CourseDetailListViewError: This html template (" + str(tde) + ") does not exists !!!\n"
raise Http404(error_message)
except Exception as e:
error_message = "\n CourseDetailListViewError: " + str(e) + " !!!\n"
raise Exception(error_message)
@login_required
@get_execution_time
def create_course_struct(request, group_id, node_id):
"""
This view is to create the structure of the Course.
A Course holds CourseSection, which further holds CourseSubSection
in their respective collection_set.
A tree depiction to this is as follows:
Course Name:
1. CourseSection1
1.1. CourseSubSection1
1.2. CourseSubSection2
2. CourseSection2
2.1. CourseSubSection3
"""
    if ObjectId.is_valid(group_id) is False:
group_ins = node_collection.find_one({'_type': "Group","name": group_id})
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if group_ins:
group_id = str(group_ins._id)
else:
auth = node_collection.one({'_type': 'Author', 'name': unicode(request.user.username) })
if auth:
group_id = str(auth._id)
else:
pass
app_id = None
app_set_id = None
property_order_list_cs = []
property_order_list_css = []
course_structure_exists = False
title = "Course Authoring"
course_node = node_collection.one({"_id": ObjectId(node_id)})
cs_gst = node_collection.one({'_type': "GSystemType", 'name': "CourseSection"})
cs_gs = node_collection.collection.GSystem()
cs_gs.member_of.append(cs_gst._id)
property_order_list_cs = get_property_order_with_value(cs_gs)
css_gst = node_collection.one({'_type': "GSystemType", 'name': "CourseSubSection"})
css_gs = node_collection.collection.GSystem()
css_gs.member_of.append(css_gst._id)
property_order_list_css = get_property_order_with_value(css_gs)
course_collection_list = course_node.collection_set
if course_collection_list:
course_structure_exists = True
# for attr in course_node.attribute_set:
# if attr.has_key("evaluation_type"):
# eval_type = attr["evaluation_type"]
    # If evaluation_type flag is True, it is Final. If False, it is Continuous
# if(eval_type==u"Final"):
# eval_type_flag = True
# else:
# eval_type_flag = False
if request.method == "GET":
app_id = request.GET.get("app_id", "")
app_set_id = request.GET.get("app_set_id", "")
return render_to_response("ndf/create_course_structure.html",
{'cnode': course_node,
'groupid': group_id,
'group_id': group_id,
'title': title,
'app_id': app_id, 'app_set_id': app_set_id,
'property_order_list': property_order_list_cs,
'property_order_list_css': property_order_list_css
},
context_instance=RequestContext(request)
)
@login_required
def save_course_section(request, group_id):
'''
Accepts:
* NUSSD Course/Course node _id
* CourseSection name
Actions:
* Creates CourseSection GSystem with name received.
* Appends this new CourseSection node id into
NUSSD Course/Course collection_set
Returns:
* success (i.e True/False)
* ObjectId of CourseSection node
'''
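    # Illustrative AJAX payload / response (field names as used below):
    #   POST: cs_name="Week 1", course_node_id="<Course ObjectId>"
    #   -> {"success": true, "cs_new_id": "<new CourseSection ObjectId>"}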
response_dict = {"success": False}
if request.is_ajax() and request.method == "POST":
cs_node_name = request.POST.get("cs_name", '')
course_node_id = request.POST.get("course_node_id", '')
cs_gst = node_collection.one({'_type': "GSystemType", 'name': "CourseSection"})
cs_new = node_collection.collection.GSystem()
cs_new.member_of.append(cs_gst._id)
cs_new.name = cs_node_name
cs_new.modified_by = int(request.user.id)
cs_new.created_by = int(request.user.id)
cs_new.contributors.append(int(request.user.id))
course_node = node_collection.one({"_id": ObjectId(course_node_id)})
cs_new.prior_node.append(ObjectId(course_node._id))
cs_new.save()
node_collection.collection.update({'_id': course_node._id}, {'$push': {'collection_set': cs_new._id }}, upsert=False, multi=False)
response_dict["success"] = True
response_dict["cs_new_id"] = str(cs_new._id)
return HttpResponse(json.dumps(response_dict))
@login_required
def save_course_sub_section(request, group_id):
'''
Accepts:
* CourseSection node _id
* CourseSubSection name
Actions:
* Creates CourseSubSection GSystem with name received.
* Appends this new CourseSubSection node id into
CourseSection collection_set
Returns:
* success (i.e True/False)
* ObjectId of CourseSubSection node
'''
response_dict = {"success": False}
if request.is_ajax() and request.method == "POST":
css_node_name = request.POST.get("css_name", '')
cs_node_id = request.POST.get("cs_node_id", '')
css_gst = node_collection.one({'_type': "GSystemType", 'name': "CourseSubSection"})
css_new = node_collection.collection.GSystem()
css_new.member_of.append(css_gst._id)
# set name
css_new.name = css_node_name
css_new.modified_by = int(request.user.id)
css_new.created_by = int(request.user.id)
css_new.contributors.append(int(request.user.id))
cs_node = node_collection.one({"_id": ObjectId(cs_node_id)})
css_new.prior_node.append(cs_node._id)
css_new.save()
node_collection.collection.update({'_id': cs_node._id}, {'$push': {'collection_set': css_new._id }}, upsert=False, multi=False)
response_dict["success"] = True
response_dict["css_new_id"] = str(css_new._id)
return HttpResponse(json.dumps(response_dict))
@login_required
def change_node_name(request, group_id):
'''
Accepts:
* CourseSection/ CourseSubSection node _id
* New name for CourseSection node
Actions:
* Updates received node's name
'''
response_dict = {"success": False}
if request.is_ajax() and request.method == "POST":
node_id = request.POST.get("node_id", '')
new_name = request.POST.get("new_name", '')
node = node_collection.one({"_id": ObjectId(node_id)})
node.name = new_name.strip()
node.save()
response_dict["success"] = True
return HttpResponse(json.dumps(response_dict))
@login_required
def change_order(request, group_id):
'''
Accepts:
* 2 node ids.
Basically, either of CourseSection or CourseSubSection
* Parent node id
Either a NUSSD Course/Course or CourseSection
Actions:
* Swaps the 2 node ids in the collection set of received
parent node
'''
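    # e.g. (illustrative): POST node_id_up=<B>, node_id_down=<A>,
    # parent_node=<P> swaps A and B inside P's collection_set.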
response_dict = {"success": False}
collection_set_list = []
if request.is_ajax() and request.method == "POST":
node_id_up = request.POST.get("node_id_up", '')
node_id_down = request.POST.get("node_id_down", '')
parent_node_id = request.POST.get("parent_node", '')
parent_node = node_collection.one({"_id": ObjectId(parent_node_id)})
collection_set_list = parent_node.collection_set
a, b = collection_set_list.index(ObjectId(node_id_up)), collection_set_list.index(ObjectId(node_id_down))<|fim▁hole|> return HttpResponse(json.dumps(response_dict))
@login_required
def course_sub_section_prop(request, group_id):
'''
Accepts:
* CourseSubSection node _id
* Properties dict
Actions:
* Creates GAttributes with the values of received dict
for the respective CourseSubSection node
Returns:
* success (i.e True/False)
    * If request.method is GET, all existing GAttributes in a dict structure
'''
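    # Illustrative prop_dict handled below:
    #   {"course_structure_minutes": 45, "course_structure_assignment": "Essay",
    #    "min_marks": 10, "max_marks": 50}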
response_dict = {"success": False}
if request.is_ajax():
if request.method == "POST":
assessment_flag = False
css_node_id = request.POST.get("css_node_id", '')
prop_dict = request.POST.get("prop_dict", '')
assessment_chk = json.loads(request.POST.get("assessment_chk", ''))
prop_dict = json.loads(prop_dict)
css_node = node_collection.one({"_id": ObjectId(css_node_id)})
at_cs_hours = node_collection.one({'_type': 'AttributeType', 'name': 'course_structure_minutes'})
at_cs_assessment = node_collection.one({'_type': 'AttributeType', 'name': 'course_structure_assessment'})
at_cs_assignment = node_collection.one({'_type': 'AttributeType', 'name': 'course_structure_assignment'})
at_cs_min_marks = node_collection.one({'_type': 'AttributeType', 'name': 'min_marks'})
at_cs_max_marks = node_collection.one({'_type': 'AttributeType', 'name': 'max_marks'})
if assessment_chk is True:
create_gattribute(css_node._id, at_cs_assessment, True)
assessment_flag = True
for propk, propv in prop_dict.items():
# add attributes to css gs
if(propk == "course_structure_minutes"):
create_gattribute(css_node._id, at_cs_hours, int(propv))
elif(propk == "course_structure_assignment"):
create_gattribute(css_node._id, at_cs_assignment, propv)
if assessment_flag:
if(propk == "min_marks"):
create_gattribute(css_node._id, at_cs_min_marks, int(propv))
if(propk == "max_marks"):
create_gattribute(css_node._id, at_cs_max_marks, int(propv))
css_node.reload()
response_dict["success"] = True
else:
css_node_id = request.GET.get("css_node_id", '')
css_node = node_collection.one({"_id": ObjectId(css_node_id)})
if css_node.attribute_set:
for each in css_node.attribute_set:
for k, v in each.items():
response_dict[k] = v
response_dict["success"] = True
else:
response_dict["success"] = False
return HttpResponse(json.dumps(response_dict))
@login_required
def add_units(request, group_id):
'''
Accepts:
* CourseSubSection node _id
* NUSSD Course/Course node _id
Actions:
* Redirects to course_units.html
'''
variable = None
unit_node = None
css_node_id = request.GET.get('css_node_id', '')
unit_node_id = request.GET.get('unit_node_id', '')
course_node_id = request.GET.get('course_node', '')
app_id = request.GET.get('app_id', '')
app_set_id = request.GET.get('app_set_id', '')
css_node = node_collection.one({"_id": ObjectId(css_node_id)})
course_node = node_collection.one({"_id": ObjectId(course_node_id)})
title = "Course Units"
try:
unit_node = node_collection.one({"_id": ObjectId(unit_node_id)})
except:
unit_node = None
variable = RequestContext(request, {
'group_id': group_id, 'groupid': group_id,
'css_node': css_node,
'title': title,
'app_set_id': app_set_id,
'app_id': app_id,
'unit_node': unit_node,
'course_node': course_node,
})
template = "ndf/course_units.html"
return render_to_response(template, variable)
@login_required
def get_resources(request, group_id):
'''
Accepts:
* Name of GSystemType (Page, File, etc.)
* CourseSubSection node _id
* widget_for
Actions:
* Fetches all GSystems of selected GSystemType as resources
Returns:
* Returns Drawer with resources
'''
response_dict = {'success': False, 'message': ""}
try:
if request.is_ajax() and request.method == "POST":
css_node_id = request.POST.get('css_node_id', "")
unit_node_id = request.POST.get('unit_node_id', "")
widget_for = request.POST.get('widget_for', "")
resource_type = request.POST.get('resource_type', "")
resource_type = resource_type.strip()
list_resources = []
css_node = node_collection.one({"_id": ObjectId(css_node_id)})
try:
unit_node = node_collection.one({"_id": ObjectId(unit_node_id)})
except:
unit_node = None
if resource_type:
if resource_type == "Pandora":
resource_type = "Pandora_video"
resource_gst = node_collection.one({'_type': "GSystemType", 'name': resource_type})
res = node_collection.find(
{
'member_of': resource_gst._id,
'group_set': ObjectId(group_id),
'status': u"PUBLISHED"
}
)
for each in res:
list_resources.append(each)
drawer_template_context = edit_drawer_widget("CourseUnits", group_id, unit_node, None, checked="collection_set", left_drawer_content=list_resources)
drawer_template_context["widget_for"] = widget_for
drawer_widget = render_to_string(
'ndf/drawer_widget.html',
drawer_template_context,
context_instance=RequestContext(request)
)
return HttpResponse(drawer_widget)
else:
error_message = "Resource Drawer: Either not an ajax call or not a POST request!!!"
response_dict["message"] = error_message
return HttpResponse(json.dumps(response_dict))
except Exception as e:
error_message = "Resource Drawer: " + str(e) + "!!!"
response_dict["message"] = error_message
return HttpResponse(json.dumps(response_dict))
@login_required
def save_resources(request, group_id):
'''
Accepts:
* List of resources (i.e GSystem of Page, File, etc.)
* CourseSubSection node _id
Actions:
* Sets the received resources in respective node's collection_set
'''
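    # Illustrative payload: list_of_res='["<Page id>", "<File id>"]',
    # css_node="<CourseSubSection id>", unit_name="Unit 1"
    # (unit_node_id is empty when a new CourseUnit should be created).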
response_dict = {"success": False,"create_new_unit": True}
if request.is_ajax() and request.method == "POST":
list_of_res = json.loads(request.POST.get('list_of_res', ""))
css_node_id = request.POST.get('css_node', "")
unit_name = request.POST.get('unit_name', "")
unit_name = unit_name.strip()
unit_node_id = request.POST.get('unit_node_id', "")
css_node = node_collection.one({"_id": ObjectId(css_node_id)})
list_of_res_ids = [ObjectId(each_res) for each_res in list_of_res]
try:
cu_new = node_collection.one({'_id': ObjectId(unit_node_id)})
except:
cu_new = None
if not cu_new:
cu_gst = node_collection.one({'_type': "GSystemType", 'name': "CourseUnit"})
cu_new = node_collection.collection.GSystem()
cu_new.member_of.append(cu_gst._id)
# set name
cu_new.name = unit_name.strip()
cu_new.modified_by = int(request.user.id)
cu_new.created_by = int(request.user.id)
cu_new.contributors.append(int(request.user.id))
cu_new.prior_node.append(css_node._id)
cu_new.save()
response_dict["create_new_unit"] = True
node_collection.collection.update({'_id': cu_new._id}, {'$set': {'name': unit_name }}, upsert=False, multi=False)
if cu_new._id not in css_node.collection_set:
node_collection.collection.update({'_id': css_node._id}, {'$push': {'collection_set': cu_new._id }}, upsert=False, multi=False)
node_collection.collection.update({'_id': cu_new._id}, {'$set': {'collection_set':list_of_res_ids}},upsert=False,multi=False)
cu_new.reload()
response_dict["success"] = True
response_dict["cu_new_id"] = str(cu_new._id)
return HttpResponse(json.dumps(response_dict))
@login_required
def create_edit_unit(request, group_id):
'''
Accepts:
* ObjectId of unit node if exists
* ObjectId of CourseSubSection node
Actions:
* Creates/Updates Unit node
Returns:
* success (i.e True/False)
'''
response_dict = {"success": False}
if request.is_ajax() and request.method == "POST":
css_node_id = request.POST.get("css_node_id", '')
unit_node_id = request.POST.get("unit_node_id", '')
unit_name = request.POST.get("unit_name", '')
css_node = node_collection.one({"_id": ObjectId(css_node_id)})
try:
cu_node = node_collection.one({'_id': ObjectId(unit_node_id)})
except:
cu_node = None
if cu_node is None:
cu_gst = node_collection.one({'_type': "GSystemType", 'name': "CourseUnit"})
cu_node = node_collection.collection.GSystem()
cu_node.member_of.append(cu_gst._id)
# set name
cu_node.name = unit_name.strip()
cu_node.modified_by = int(request.user.id)
cu_node.created_by = int(request.user.id)
cu_node.contributors.append(int(request.user.id))
cu_node.prior_node.append(css_node._id)
cu_node.save()
response_dict["unit_node_id"] = str(cu_node._id)
node_collection.collection.update({'_id': cu_node._id}, {'$set': {'name': unit_name}}, upsert=False, multi=False)
if cu_node._id not in css_node.collection_set:
node_collection.collection.update({'_id': css_node._id}, {'$push': {'collection_set': cu_node._id}}, upsert=False, multi=False)
return HttpResponse(json.dumps(response_dict))
@login_required
def delete_from_course_structure(request, group_id):
'''
Accepts:
* ObjectId of node that is to be deleted.
It can be CourseSection/CourseSubSection/CourseUnit
Actions:
* Deletes the received node
Returns:
* success (i.e True/False)
'''
response_dict = {"success": False}
del_stat = False
if request.is_ajax() and request.method == "POST":
oid = request.POST.get("oid", '')
del_stat = delete_item(oid)
if del_stat:
response_dict["success"] = True
return HttpResponse(json.dumps(response_dict))
def delete_item(item):
node_item = node_collection.one({'_id': ObjectId(item)})
if u"CourseUnit" not in node_item.member_of_names_list and node_item.collection_set:
for each in node_item.collection_set:
d_st = delete_item(each)
del_status, del_status_msg = delete_node(
node_id=node_item._id,
deletion_type=0
)
return del_status
@login_required
def publish_course(request, group_id):
if request.is_ajax() and request.method == "POST":
try:
node_id = request.POST.get("node_id", "")
node = node_collection.one({'_id': ObjectId(node_id)})
node.status = unicode("PUBLISHED")
node.modified_by = int(request.user.id)
node.save()
except:
return HttpResponse("Fail")
return HttpResponse("Success")<|fim▁end|>
|
collection_set_list[b], collection_set_list[a] = collection_set_list[a], collection_set_list[b]
node_collection.collection.update({'_id': parent_node._id}, {'$set': {'collection_set': collection_set_list }}, upsert=False, multi=False)
parent_node.reload()
response_dict["success"] = True
|
<|file_name|>settings_editor.py<|end_file_name|><|fim▁begin|>#FIXME: UNDO, click time at end to undo
from PyQt4 import QtCore, QtGui
import sys, os
import volume_editor_layout, settings_layout, cPickle
import numpy as np
from utils import Utils
class SettingsEditWidget(QtGui.QDialog, settings_layout.Ui_Dialog):
#################################################### Init
def __init__(self, i_parent=None):
QtGui.QDialog.__init__(self, i_parent)
self.setupUi(self)
QtCore.QObject.connect( self.box_enable_learning, QtCore.SIGNAL("toggled(bool)"), self.setEnableLearning)
QtCore.QObject.connect( self.box_seconds_delay, QtCore.SIGNAL("valueChanged(double)"),self.editClickParamsEvent)
QtCore.QObject.connect( self.box_click_dev, QtCore.SIGNAL("valueChanged(double)"),self.editClickParamsEvent)
############################################### Main
def editClickParamsEvent(self, i_value):
self.emit(QtCore.SIGNAL("edit_click_params"))
def closeEvent(self, event):
QtGui.QDialog.close(self)
self.emit(QtCore.SIGNAL("close_settings"))
def clickPdfToSettingsParams(self, i_params):
"""Convert click pdf parameters to the ones stored in settings editor."""
(delay, std, fr, fp_rate) = i_params
fr *= 100.0
fp_rate *= 60.0
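        # i.e. the settings dialog shows the false-rejection rate as a
        # percentage (x100) and the false-positive rate as events per
        # minute (x60); settingsToClickPdfParams() inverts this.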
return (delay, std, fr, fp_rate)
def settingsToClickPdfParams(self, i_params):
"""Convert settings editorparameters to the ones stored by click pdf."""
(delay, std, fr, fp_rate) = i_params
fr /= 100.0
fp_rate /= 60.0
return (delay, std, fr, fp_rate)
################################################ Get
def getSettings(self):
settings = {}
#Click-time delay
delay = self.box_seconds_delay.value()
std = self.box_click_dev.value()
settings['is_train'] = self.box_enable_learning.isChecked()
settings['learning_rate'] = self.box_learning_rate.value()
settings['learn_delay'] = self.box_learn_delay.isChecked()
settings['learn_std'] = self.box_learn_std.isChecked()
#Switch noise
fp_rate = self.box_fp_rate.value()
fr = self.box_fr.value()
settings['learn_fp'] = self.box_learn_fp.isChecked()
settings['learn_fr'] = self.box_learn_fr.isChecked()
#Do the conversion
click_params = (delay, std, fr, fp_rate)
(settings['delay'], settings['std'], settings['fr'], settings['fp_rate']) = self.settingsToClickPdfParams(click_params)
#Error correction
settings['undo'] = self.box_undo.value()
settings['prog_status'] = self.box_prog_status.value()
settings['restart_word'] = self.box_restart_word.value()
settings['shut_down'] = self.box_shut_down.value()
settings['word_select_thresh'] = self.box_word_select.value()
#Speed & channels
settings['file_length'] = self.box_file_length.value()
settings['channel_index'] = int(self.box_channels.currentIndex())
settings['end_delay'] = self.box_end_delay.value()
return settings
def getCurrentChannel(self):
return self.getChannel(self.box_channels.currentIndex())
def getChannel(self, i_index):
return int(self.box_channels.itemText(i_index))
#################################################### Set
def setSettings(self, i_settings):
#Get the parameters
click_params = (i_settings['delay'], i_settings['std'], i_settings['fr'], i_settings['fp_rate'])
(delay, std, fr, fp_rate) = self.clickPdfToSettingsParams(click_params)
self.setClickParams((delay, std, fr, fp_rate))
#More click-time params
self.box_enable_learning.setChecked(i_settings['is_train'])
self.box_learning_rate.setValue(i_settings['learning_rate'])
self.box_learn_delay.setChecked( i_settings['learn_delay'])
self.box_learn_std.setChecked(i_settings['learn_std'])
#More switch noise params
self.box_learn_fp.setChecked(i_settings['learn_fp'])
self.box_learn_fr.setChecked(i_settings['learn_fr'])
#Error correction
self.box_undo.setValue(i_settings['undo'])
self.box_prog_status.setValue(i_settings['prog_status'])
self.box_restart_word.setValue(i_settings['restart_word'])
self.box_shut_down.setValue(i_settings['shut_down'])
self.box_word_select.setValue(i_settings['word_select_thresh'])
#Speed & channels
self.box_file_length.setValue(i_settings['file_length'])
self.box_channels.setCurrentIndex(i_settings['channel_index'])
self.box_end_delay.setValue(i_settings['end_delay'])
def setClickParams(self, i_params):
(delay, std, fr, fp_rate) = i_params
self.box_seconds_delay.setValue(delay)
self.box_click_dev.setValue(std)
self.box_fp_rate.setValue(fp_rate)
self.box_fr.setValue(fr)
def setEnableLearning(self, i_checked):
self.box_learn_delay.setChecked(i_checked)
self.box_learn_std.setChecked(i_checked)
self.box_learn_fp.setChecked(i_checked)
self.box_learn_fr.setChecked(i_checked)
class VolumeEditWidget(QtGui.QDialog, volume_editor_layout.Ui_Dialog):
##################################### Init
def __init__(self, i_parent=None):
QtGui.QDialog.__init__(self, i_parent)
self.setupUi(self)
self.volumes = []
for n in range(0, 5):
slider = getattr(self, "volume_settings_" + str(n))
self.volumes.append(slider.value())
func_vol = getattr(self, "setVolume" + str(n))
func_mute = getattr(self, "mute" + str(n))
box = getattr(self, "box_mute_" + str(n))
QtCore.QObject.connect( slider, QtCore.SIGNAL("sliderReleased()"), func_vol)
QtCore.QObject.connect( box, QtCore.SIGNAL("toggled(bool)"), func_mute)
QtCore.QObject.connect( self.box_mute_all, QtCore.SIGNAL("toggled(bool)"), self.muteAll)
########################################### Signal/slots
def mute0(self, i_checked):
self.mute(0, i_checked)
def mute1(self, i_checked):
self.mute(1, i_checked)
def mute2(self, i_checked):
self.mute(2, i_checked)
def mute3(self, i_checked):
self.mute(3, i_checked)
def mute4(self, i_checked):
self.mute(4, i_checked)
<|fim▁hole|> def setVolume0(self):
self.setVolume(0)
def setVolume1(self):
self.setVolume(1)
def setVolume2(self):
self.setVolume(2)
def setVolume3(self):
self.setVolume(3)
def setVolume4(self):
self.setVolume(4)
########################################## Get
def getVolume(self, i_channel):
slider_object = getattr(self, "volume_settings_" + str(i_channel))
val = float(slider_object.value()) / 1000.0
return val
########################################## Set
def setVolume(self, i_channel, i_save_volume=True):
slider_object = getattr(self, "volume_settings_" + str(i_channel))
slider_val = slider_object.value()
val = float(slider_val) / 1000.0
if i_save_volume:
self.volumes[i_channel] = slider_val
self.emit(QtCore.SIGNAL("volume(float,int)"), float(val), int(i_channel))
def setChannelConfig(self, i_channel_config):
nchannels = i_channel_config.getChannels()
channel_names = i_channel_config.getChannelNames()
for n in range(0, nchannels):
label_object = getattr(self, "volume_label_" + str(n))
label_object.setText(QtCore.QString(channel_names[n][0]))
label_object.show()
slider_object = getattr(self, "volume_settings_" + str(n))
slider_object.show()
for n in range(nchannels, 5):
object_name = "volume_label_" + str(n)
label_object = getattr(self, object_name)
label_object.hide()
slider_object = getattr(self, "volume_settings_" + str(n))
slider_object.hide()
def mute(self, i_channel, i_checked):
slider_object = getattr(self, "volume_settings_" + str(i_channel))
if i_checked:
slider_object.setValue(0)
else:
slider_object.setValue(self.volumes[i_channel])
self.setVolume(i_channel, i_save_volume=False)
def muteAll(self, i_checked):
for channel in range(0, len(self.volumes)):
box_mute = getattr(self, "box_mute_" + str(channel))
box_mute.setChecked(i_checked)
class VolumeEditGui(QtGui.QMainWindow):
def __init__(self):
from channel_config import ChannelConfig
QtGui.QWidget.__init__(self)
channel_config = ChannelConfig(i_nchannels=5, i_sound_overlap=0.5 , i_file_length=0.4, i_root_dir="./")
self.volume_editor = VolumeEditWidget(self)
self.volume_editor.setChannelConfig(channel_config)
self.volume_editor.show()
if __name__ == "__main__":
app = QtGui.QApplication(sys.argv)
gui = VolumeEditGui()
gui.show()
sys.exit( app.exec_())<|fim▁end|>
| |
<|file_name|>nl2br.py<|end_file_name|><|fim▁begin|>"""
NL2BR Extension
===============
A Python-Markdown extension to treat newlines as hard breaks; like
GitHub-flavored Markdown does.
Usage:
>>> import markdown
>>> print markdown.markdown('line 1\\nline 2', extensions=['nl2br'])
<p>line 1<br />
line 2</p>
Copyright 2011 [Brian Neal](http://deathofagremmie.com/)
Dependencies:
* [Python 2.4+](http://python.org)
* [Markdown 2.1+](http://packages.python.org/Markdown/)
"""<|fim▁hole|>import markdown
BR_RE = r'\n'
class Nl2BrExtension(markdown.Extension):
def extendMarkdown(self, md, md_globals):
br_tag = markdown.inlinepatterns.SubstituteTagPattern(BR_RE, 'br')
md.inlinePatterns.add('nl', br_tag, '_end')
def makeExtension(configs=None):
return Nl2BrExtension(configs)<|fim▁end|>
| |
<|file_name|>useless_conversion.rs<|end_file_name|><|fim▁begin|>use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_sugg};
use clippy_utils::source::{snippet, snippet_with_macro_callsite};
use clippy_utils::sugg::Sugg;
use clippy_utils::ty::{is_type_diagnostic_item, same_type_and_consts};
use clippy_utils::{get_parent_expr, is_trait_method, match_def_path, paths};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, HirId, MatchSource};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::sym;
declare_clippy_lint! {
/// ### What it does
/// Checks for `Into`, `TryInto`, `From`, `TryFrom`, or `IntoIter` calls
/// which uselessly convert to the same type.
///
/// ### Why is this bad?
/// Redundant code.
///
/// ### Example
/// ```rust
/// // Bad
/// // format!() returns a `String`
/// let s: String = format!("hello").into();
///
/// // Good
/// let s: String = format!("hello");
/// ```
pub USELESS_CONVERSION,
complexity,
"calls to `Into`, `TryInto`, `From`, `TryFrom`, or `IntoIter` which perform useless conversions to the same type"
}
#[derive(Default)]
pub struct UselessConversion {
try_desugar_arm: Vec<HirId>,
}
impl_lint_pass!(UselessConversion => [USELESS_CONVERSION]);
#[allow(clippy::too_many_lines)]
impl<'tcx> LateLintPass<'tcx> for UselessConversion {
fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
if e.span.from_expansion() {
return;
}
if Some(&e.hir_id) == self.try_desugar_arm.last() {
return;
}
match e.kind {
ExprKind::Match(_, arms, MatchSource::TryDesugar) => {
let e = match arms[0].body.kind {
ExprKind::Ret(Some(e)) | ExprKind::Break(_, Some(e)) => e,
_ => return,
};
if let ExprKind::Call(_, args) = e.kind {
self.try_desugar_arm.push(args[0].hir_id);
}
},
ExprKind::MethodCall(name, .., args, _) => {
if is_trait_method(cx, e, sym::into_trait) && &*name.ident.as_str() == "into" {
let a = cx.typeck_results().expr_ty(e);
let b = cx.typeck_results().expr_ty(&args[0]);
if same_type_and_consts(a, b) {
let sugg = snippet_with_macro_callsite(cx, args[0].span, "<expr>").to_string();
span_lint_and_sugg(
cx,
USELESS_CONVERSION,
e.span,
&format!("useless conversion to the same type: `{}`", b),
"consider removing `.into()`",
sugg,
Applicability::MachineApplicable, // snippet
);
}
}
if is_trait_method(cx, e, sym::IntoIterator) && name.ident.name == sym::into_iter {
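                    // Skip when the iterator feeds a method other than `into_iter`:
                    // removing the call there could change the receiver type.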
if let Some(parent_expr) = get_parent_expr(cx, e) {
if let ExprKind::MethodCall(parent_name, ..) = parent_expr.kind {
if parent_name.ident.name != sym::into_iter {
return;
}
}
}
let a = cx.typeck_results().expr_ty(e);
let b = cx.typeck_results().expr_ty(&args[0]);
if same_type_and_consts(a, b) {
let sugg = snippet(cx, args[0].span, "<expr>").into_owned();
span_lint_and_sugg(
cx,
USELESS_CONVERSION,
e.span,
&format!("useless conversion to the same type: `{}`", b),<|fim▁hole|> "consider removing `.into_iter()`",
sugg,
Applicability::MachineApplicable, // snippet
);
}
}
if_chain! {
if is_trait_method(cx, e, sym::try_into_trait) && name.ident.name == sym::try_into;
let a = cx.typeck_results().expr_ty(e);
let b = cx.typeck_results().expr_ty(&args[0]);
if is_type_diagnostic_item(cx, a, sym::result_type);
if let ty::Adt(_, substs) = a.kind();
if let Some(a_type) = substs.types().next();
if same_type_and_consts(a_type, b);
then {
span_lint_and_help(
cx,
USELESS_CONVERSION,
e.span,
&format!("useless conversion to the same type: `{}`", b),
None,
"consider removing `.try_into()`",
);
}
}
},
ExprKind::Call(path, args) => {
if_chain! {
if args.len() == 1;
if let ExprKind::Path(ref qpath) = path.kind;
if let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id();
then {
let a = cx.typeck_results().expr_ty(e);
let b = cx.typeck_results().expr_ty(&args[0]);
if_chain! {
if match_def_path(cx, def_id, &paths::TRY_FROM);
if is_type_diagnostic_item(cx, a, sym::result_type);
if let ty::Adt(_, substs) = a.kind();
if let Some(a_type) = substs.types().next();
if same_type_and_consts(a_type, b);
then {
let hint = format!("consider removing `{}()`", snippet(cx, path.span, "TryFrom::try_from"));
span_lint_and_help(
cx,
USELESS_CONVERSION,
e.span,
&format!("useless conversion to the same type: `{}`", b),
None,
&hint,
);
}
}
if_chain! {
if match_def_path(cx, def_id, &paths::FROM_FROM);
if same_type_and_consts(a, b);
then {
let sugg = Sugg::hir_with_macro_callsite(cx, &args[0], "<expr>").maybe_par();
let sugg_msg =
format!("consider removing `{}()`", snippet(cx, path.span, "From::from"));
span_lint_and_sugg(
cx,
USELESS_CONVERSION,
e.span,
&format!("useless conversion to the same type: `{}`", b),
&sugg_msg,
sugg.to_string(),
Applicability::MachineApplicable, // snippet
);
}
}
}
}
},
_ => {},
}
}
fn check_expr_post(&mut self, _: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
if Some(&e.hir_id) == self.try_desugar_arm.last() {
self.try_desugar_arm.pop();
}
}
}<|fim▁end|>
| |
<|file_name|>float_cmp.rs<|end_file_name|><|fim▁begin|>#![warn(clippy::float_cmp)]
#![allow(<|fim▁hole|> clippy::no_effect,
clippy::op_ref,
clippy::unnecessary_operation,
clippy::cast_lossless,
clippy::many_single_char_names
)]
use std::ops::Add;
const ZERO: f32 = 0.0;
const ONE: f32 = ZERO + 1.0;
fn twice<T>(x: T) -> T
where
T: Add<T, Output = T> + Copy,
{
x + x
}
fn eq_fl(x: f32, y: f32) -> bool {
if x.is_nan() { y.is_nan() } else { x == y } // no error, inside "eq" fn
}
fn fl_eq(x: f32, y: f32) -> bool {
if x.is_nan() { y.is_nan() } else { x == y } // no error, inside "eq" fn
}
struct X {
val: f32,
}
impl PartialEq for X {
fn eq(&self, o: &X) -> bool {
if self.val.is_nan() {
o.val.is_nan()
} else {
self.val == o.val // no error, inside "eq" fn
}
}
}
fn main() {
ZERO == 0f32; //no error, comparison with zero is ok
1.0f32 != f32::INFINITY; // also comparison with infinity
1.0f32 != f32::NEG_INFINITY; // and negative infinity
ZERO == 0.0; //no error, comparison with zero is ok
ZERO + ZERO != 1.0; //no error, comparison with zero is ok
ONE == 1f32;
ONE == 1.0 + 0.0;
ONE + ONE == ZERO + ONE + ONE;
ONE != 2.0;
ONE != 0.0; // no error, comparison with zero is ok
twice(ONE) != ONE;
ONE as f64 != 2.0;
ONE as f64 != 0.0; // no error, comparison with zero is ok
let x: f64 = 1.0;
x == 1.0;
x != 0f64; // no error, comparison with zero is ok
twice(x) != twice(ONE as f64);
x < 0.0; // no errors, lower or greater comparisons need no fuzzyness
x > 0.0;
x <= 0.0;
x >= 0.0;
let xs: [f32; 1] = [0.0];
let a: *const f32 = xs.as_ptr();
let b: *const f32 = xs.as_ptr();
assert_eq!(a, b); // no errors
const ZERO_ARRAY: [f32; 2] = [0.0, 0.0];
const NON_ZERO_ARRAY: [f32; 2] = [0.0, 0.1];
let i = 0;
let j = 1;
ZERO_ARRAY[i] == NON_ZERO_ARRAY[j]; // ok, because lhs is zero regardless of i
NON_ZERO_ARRAY[i] == NON_ZERO_ARRAY[j];
let a1: [f32; 1] = [0.0];
let a2: [f32; 1] = [1.1];
a1 == a2;
a1[0] == a2[0];
// no errors - comparing signums is ok
let x32 = 3.21f32;
1.23f32.signum() == x32.signum();
1.23f32.signum() == -(x32.signum());
1.23f32.signum() == 3.21f32.signum();
1.23f32.signum() != x32.signum();
1.23f32.signum() != -(x32.signum());
1.23f32.signum() != 3.21f32.signum();
let x64 = 3.21f64;
1.23f64.signum() == x64.signum();
1.23f64.signum() == -(x64.signum());
1.23f64.signum() == 3.21f64.signum();
1.23f64.signum() != x64.signum();
1.23f64.signum() != -(x64.signum());
1.23f64.signum() != 3.21f64.signum();
// the comparison should also look through references
&0.0 == &ZERO;
&&&&0.0 == &&&&ZERO;
}<|fim▁end|>
|
unused,
|
<|file_name|>_policy_assignments_operations.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
JSONType = Any
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_delete_request(
scope: str,
policy_assignment_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}')
path_format_arguments = {
"scope": _SERIALIZER.url("scope", scope, 'str', skip_quote=True),
"policyAssignmentName": _SERIALIZER.url("policy_assignment_name", policy_assignment_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_create_request(
scope: str,
policy_assignment_name: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}')
path_format_arguments = {
"scope": _SERIALIZER.url("scope", scope, 'str', skip_quote=True),
"policyAssignmentName": _SERIALIZER.url("policy_assignment_name", policy_assignment_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_get_request(
scope: str,
policy_assignment_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}')
path_format_arguments = {
"scope": _SERIALIZER.url("scope", scope, 'str', skip_quote=True),
"policyAssignmentName": _SERIALIZER.url("policy_assignment_name", policy_assignment_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_for_resource_group_request(
resource_group_name: str,
subscription_id: str,
*,
filter: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/policyAssignments')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if filter is not None:
query_parameters['$filter'] = _SERIALIZER.query("filter", filter, 'str', skip_quote=True)
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_for_resource_request(
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
subscription_id: str,
*,
filter: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/policyAssignments')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
"resourceProviderNamespace": _SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, 'str'),
"parentResourcePath": _SERIALIZER.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
"resourceType": _SERIALIZER.url("resource_type", resource_type, 'str', skip_quote=True),
"resourceName": _SERIALIZER.url("resource_name", resource_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if filter is not None:
query_parameters['$filter'] = _SERIALIZER.query("filter", filter, 'str')
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_request(
subscription_id: str,
*,
filter: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policyAssignments')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if filter is not None:
query_parameters['$filter'] = _SERIALIZER.query("filter", filter, 'str')
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_delete_by_id_request(
policy_assignment_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/{policyAssignmentId}')
path_format_arguments = {
"policyAssignmentId": _SERIALIZER.url("policy_assignment_id", policy_assignment_id, 'str', skip_quote=True),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_create_by_id_request(
policy_assignment_id: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/{policyAssignmentId}')
path_format_arguments = {
"policyAssignmentId": _SERIALIZER.url("policy_assignment_id", policy_assignment_id, 'str', skip_quote=True),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_get_by_id_request(
policy_assignment_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2016-12-01"
accept = "application/json, text/json"
# Construct URL
url = kwargs.pop("template_url", '/{policyAssignmentId}')
path_format_arguments = {
"policyAssignmentId": _SERIALIZER.url("policy_assignment_id", policy_assignment_id, 'str', skip_quote=True),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
class PolicyAssignmentsOperations(object):
"""PolicyAssignmentsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.resource.policy.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def delete(
self,
scope: str,
policy_assignment_name: str,
**kwargs: Any
) -> Optional["_models.PolicyAssignment"]:
"""Deletes a policy assignment.
:param scope: The scope of the policy assignment.
:type scope: str
:param policy_assignment_name: The name of the policy assignment to delete.
:type policy_assignment_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment, or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment or None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.PolicyAssignment"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request(
scope=scope,
policy_assignment_name=policy_assignment_name,
template_url=self.delete.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('PolicyAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delete.metadata = {'url': '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'} # type: ignore
@distributed_trace
def create(
self,
scope: str,
policy_assignment_name: str,
parameters: "_models.PolicyAssignment",
**kwargs: Any
) -> "_models.PolicyAssignment":
"""Creates a policy assignment.
Policy assignments are inherited by child resources. For example, when you apply a policy to a
resource group that policy is assigned to all resources in the group.
:param scope: The scope of the policy assignment.
:type scope: str
:param policy_assignment_name: The name of the policy assignment.
:type policy_assignment_name: str
:param parameters: Parameters for the policy assignment.
:type parameters: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment, or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'PolicyAssignment')
request = build_create_request(
scope=scope,
policy_assignment_name=policy_assignment_name,
content_type=content_type,
json=_json,
template_url=self.create.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PolicyAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create.metadata = {'url': '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'} # type: ignore
@distributed_trace
def get(
self,
scope: str,
policy_assignment_name: str,
**kwargs: Any
) -> "_models.PolicyAssignment":
"""Gets a policy assignment.
:param scope: The scope of the policy assignment.
:type scope: str
:param policy_assignment_name: The name of the policy assignment to get.
:type policy_assignment_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment, or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
scope=scope,
policy_assignment_name=policy_assignment_name,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PolicyAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'} # type: ignore
@distributed_trace
def list_for_resource_group(
self,
resource_group_name: str,
filter: Optional[str] = None,
**kwargs: Any
) -> Iterable["_models.PolicyAssignmentListResult"]:
"""Gets policy assignments for the resource group.
:param resource_group_name: The name of the resource group that contains policy assignments.
:type resource_group_name: str
:param filter: The filter to apply on the operation.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicyAssignmentListResult or the result of
cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignmentListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignmentListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_for_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
filter=filter,
template_url=self.list_for_resource_group.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_for_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
filter=filter,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicyAssignmentListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_for_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/policyAssignments'} # type: ignore
@distributed_trace
def list_for_resource(
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
filter: Optional[str] = None,
**kwargs: Any
) -> Iterable["_models.PolicyAssignmentListResult"]:
"""Gets policy assignments for a resource.
:param resource_group_name: The name of the resource group containing the resource. The name is
case insensitive.
:type resource_group_name: str
:param resource_provider_namespace: The namespace of the resource provider.
:type resource_provider_namespace: str
:param parent_resource_path: The parent resource path.
:type parent_resource_path: str
:param resource_type: The resource type.
:type resource_type: str
:param resource_name: The name of the resource with policy assignments.
:type resource_name: str
:param filter: The filter to apply on the operation.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicyAssignmentListResult or the result of
cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignmentListResult]<|fim▁hole|> :raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignmentListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_for_resource_request(
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
filter=filter,
template_url=self.list_for_resource.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_for_resource_request(
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
filter=filter,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicyAssignmentListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_for_resource.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/policyAssignments'} # type: ignore
@distributed_trace
def list(
self,
filter: Optional[str] = None,
**kwargs: Any
) -> Iterable["_models.PolicyAssignmentListResult"]:
"""Gets all the policy assignments for a subscription.
:param filter: The filter to apply on the operation.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicyAssignmentListResult or the result of
cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignmentListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignmentListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
subscription_id=self._config.subscription_id,
filter=filter,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
subscription_id=self._config.subscription_id,
filter=filter,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicyAssignmentListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policyAssignments'} # type: ignore
@distributed_trace
def delete_by_id(
self,
policy_assignment_id: str,
**kwargs: Any
) -> "_models.PolicyAssignment":
"""Deletes a policy assignment by ID.
When providing a scope for the assignment, use '/subscriptions/{subscription-id}/' for
subscriptions, '/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}' for
resource groups, and
'/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}/providers/{resource-provider-namespace}/{resource-type}/{resource-name}'
for resources.
:param policy_assignment_id: The ID of the policy assignment to delete. Use the format
'/{scope}/providers/Microsoft.Authorization/policyAssignments/{policy-assignment-name}'.
:type policy_assignment_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment, or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_by_id_request(
policy_assignment_id=policy_assignment_id,
template_url=self.delete_by_id.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PolicyAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delete_by_id.metadata = {'url': '/{policyAssignmentId}'} # type: ignore
@distributed_trace
def create_by_id(
self,
policy_assignment_id: str,
parameters: "_models.PolicyAssignment",
**kwargs: Any
) -> "_models.PolicyAssignment":
"""Creates a policy assignment by ID.
Policy assignments are inherited by child resources. For example, when you apply a policy to a
resource group that policy is assigned to all resources in the group. When providing a scope
for the assignment, use '/subscriptions/{subscription-id}/' for subscriptions,
'/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}' for resource groups,
and
'/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}/providers/{resource-provider-namespace}/{resource-type}/{resource-name}'
for resources.
:param policy_assignment_id: The ID of the policy assignment to create. Use the format
'/{scope}/providers/Microsoft.Authorization/policyAssignments/{policy-assignment-name}'.
:type policy_assignment_id: str
:param parameters: Parameters for policy assignment.
:type parameters: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment, or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'PolicyAssignment')
request = build_create_by_id_request(
policy_assignment_id=policy_assignment_id,
content_type=content_type,
json=_json,
template_url=self.create_by_id.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PolicyAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_by_id.metadata = {'url': '/{policyAssignmentId}'} # type: ignore
@distributed_trace
def get_by_id(
self,
policy_assignment_id: str,
**kwargs: Any
) -> "_models.PolicyAssignment":
"""Gets a policy assignment by ID.
When providing a scope for the assignment, use '/subscriptions/{subscription-id}/' for
subscriptions, '/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}' for
resource groups, and
'/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}/providers/{resource-provider-namespace}/{resource-type}/{resource-name}'
for resources.
:param policy_assignment_id: The ID of the policy assignment to get. Use the format
'/{scope}/providers/Microsoft.Authorization/policyAssignments/{policy-assignment-name}'.
:type policy_assignment_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyAssignment, or the result of cls(response)
:rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyAssignment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_by_id_request(
policy_assignment_id=policy_assignment_id,
template_url=self.get_by_id.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PolicyAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_by_id.metadata = {'url': '/{policyAssignmentId}'} # type: ignore<|fim▁end|>
| |
<|file_name|>test_climatology.py<|end_file_name|><|fim▁begin|># Copyright Iris contributors
#
# This file is part of Iris and is released under the LGPL license.
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
"""Integration tests for loading and saving netcdf files."""
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
from os.path import join as path_join, dirname, sep as os_sep
import shutil
from subprocess import check_call
import tempfile
import iris
from iris.tests import stock
class TestClimatology(iris.tests.IrisTest):
reference_cdl_path = os_sep.join(
[
dirname(tests.__file__),
(
"results/integration/climatology/TestClimatology/"
"reference_simpledata.cdl"
),
]
)
@classmethod
def _simple_cdl_string(cls):
with open(cls.reference_cdl_path, "r") as f:
cdl_content = f.read()
# Add the expected CDL first line since this is removed from the
# stored results file.
cdl_content = "netcdf {\n" + cdl_content
return cdl_content
@staticmethod
def _load_sanitised_cube(filepath):<|fim▁hole|> cube = iris.load_cube(filepath)
# Remove attributes convention, if any.
cube.attributes.pop("Conventions", None)
# Remove any var-names.
for coord in cube.coords():
coord.var_name = None
cube.var_name = None
return cube
@classmethod
def setUpClass(cls):
# Create a temp directory for temp files.
cls.temp_dir = tempfile.mkdtemp()
cls.path_ref_cdl = path_join(cls.temp_dir, "standard.cdl")
cls.path_ref_nc = path_join(cls.temp_dir, "standard.nc")
# Create reference CDL file.
with open(cls.path_ref_cdl, "w") as f_out:
f_out.write(cls._simple_cdl_string())
# Create reference netCDF file from reference CDL.
command = "ncgen -o {} {}".format(cls.path_ref_nc, cls.path_ref_cdl)
check_call(command, shell=True)
cls.path_temp_nc = path_join(cls.temp_dir, "tmp.nc")
# Create reference cube.
cls.cube_ref = stock.climatology_3d()
@classmethod
def tearDownClass(cls):
# Destroy a temp directory for temp files.
shutil.rmtree(cls.temp_dir)
###############################################################################
# Round-trip tests
def test_cube_to_cube(self):
# Save reference cube to file, load cube from same file, test against
# reference cube.
iris.save(self.cube_ref, self.path_temp_nc)
cube = self._load_sanitised_cube(self.path_temp_nc)
self.assertEqual(cube, self.cube_ref)
def test_file_to_file(self):
# Load cube from reference file, save same cube to file, test against
# reference CDL.
cube = iris.load_cube(self.path_ref_nc)
iris.save(cube, self.path_temp_nc)
self.assertCDL(
self.path_temp_nc,
reference_filename=self.reference_cdl_path,
flags="",
)
# NOTE:
# The saving half of the round-trip tests is tested in the
# appropriate dedicated test class:
# unit.fileformats.netcdf.test_Saver.Test_write.test_with_climatology .
# The loading half has no equivalent dedicated location, so is tested
# here as test_load_from_file.
def test_load_from_file(self):
# Create cube from file, test against reference cube.
cube = self._load_sanitised_cube(self.path_ref_nc)
self.assertEqual(cube, self.cube_ref)
if __name__ == "__main__":
tests.main()<|fim▁end|>
| |
<|file_name|>public_api.d.ts<|end_file_name|><|fim▁begin|>export * from './table';
export * from './cell';
export * from './row';<|fim▁hole|>export { DataSource } from '@angular/cdk/collections';
export declare class CdkTableModule {
}<|fim▁end|>
|
/** Re-export DataSource for a more intuitive experience for users of just the table. */
|
<|file_name|>threaded.rs<|end_file_name|><|fim▁begin|>// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the<|fim▁hole|>// specific language governing permissions and limitations
// under the License.
use std::net::{TcpListener, TcpStream};
use std::sync::Arc;
use threadpool::ThreadPool;
use {ApplicationError, ApplicationErrorKind};
use protocol::{TInputProtocol, TInputProtocolFactory, TOutputProtocol, TOutputProtocolFactory};
use transport::{TIoChannel, TReadTransportFactory, TTcpChannel, TWriteTransportFactory};
use super::TProcessor;
/// Fixed-size thread-pool blocking Thrift server.
///
/// A `TServer` listens on a given address and submits accepted connections
/// to an **unbounded** queue. Connections from this queue are serviced by
/// the first available worker thread from a **fixed-size** thread pool. Each
/// accepted connection is handled by that worker thread, and communication
/// over this thread occurs sequentially and synchronously (i.e. calls block).
/// Accepted connections have an input half and an output half, each of which
/// uses a `TTransport` and `TInputProtocol`/`TOutputProtocol` to translate
/// messages to and from byes. Any combination of `TInputProtocol`, `TOutputProtocol`
/// and `TTransport` may be used.
///
/// # Examples
///
/// Creating and running a `TServer` using Thrift-compiler-generated
/// service code.
///
/// ```no_run
/// use thrift;
/// use thrift::protocol::{TInputProtocolFactory, TOutputProtocolFactory};
/// use thrift::protocol::{TBinaryInputProtocolFactory, TBinaryOutputProtocolFactory};
/// use thrift::protocol::{TInputProtocol, TOutputProtocol};
/// use thrift::transport::{TBufferedReadTransportFactory, TBufferedWriteTransportFactory,
/// TReadTransportFactory, TWriteTransportFactory};
/// use thrift::server::{TProcessor, TServer};
///
/// //
/// // auto-generated
/// //
///
/// // processor for `SimpleService`
/// struct SimpleServiceSyncProcessor;
/// impl SimpleServiceSyncProcessor {
/// fn new<H: SimpleServiceSyncHandler>(processor: H) -> SimpleServiceSyncProcessor {
/// unimplemented!();
/// }
/// }
///
/// // `TProcessor` implementation for `SimpleService`
/// impl TProcessor for SimpleServiceSyncProcessor {
/// fn process(&self, i: &mut TInputProtocol, o: &mut TOutputProtocol) -> thrift::Result<()> {
/// unimplemented!();
/// }
/// }
///
/// // service functions for SimpleService
/// trait SimpleServiceSyncHandler {
/// fn service_call(&self) -> thrift::Result<()>;
/// }
///
/// //
/// // user-code follows
/// //
///
/// // define a handler that will be invoked when `service_call` is received
/// struct SimpleServiceHandlerImpl;
/// impl SimpleServiceSyncHandler for SimpleServiceHandlerImpl {
/// fn service_call(&self) -> thrift::Result<()> {
/// unimplemented!();
/// }
/// }
///
/// // instantiate the processor
/// let processor = SimpleServiceSyncProcessor::new(SimpleServiceHandlerImpl {});
///
/// // instantiate the server
/// let i_tr_fact: Box<TReadTransportFactory> = Box::new(TBufferedReadTransportFactory::new());
/// let i_pr_fact: Box<TInputProtocolFactory> = Box::new(TBinaryInputProtocolFactory::new());
/// let o_tr_fact: Box<TWriteTransportFactory> = Box::new(TBufferedWriteTransportFactory::new());
/// let o_pr_fact: Box<TOutputProtocolFactory> = Box::new(TBinaryOutputProtocolFactory::new());
///
/// let mut server = TServer::new(
/// i_tr_fact,
/// i_pr_fact,
/// o_tr_fact,
/// o_pr_fact,
/// processor,
/// 10
/// );
///
/// // start listening for incoming connections
/// match server.listen("127.0.0.1:8080") {
/// Ok(_) => println!("listen completed"),
/// Err(e) => println!("listen failed with error {:?}", e),
/// }
/// ```
#[derive(Debug)]
pub struct TServer<PRC, RTF, IPF, WTF, OPF>
where
PRC: TProcessor + Send + Sync + 'static,
RTF: TReadTransportFactory + 'static,
IPF: TInputProtocolFactory + 'static,
WTF: TWriteTransportFactory + 'static,
OPF: TOutputProtocolFactory + 'static,
{
r_trans_factory: RTF,
i_proto_factory: IPF,
w_trans_factory: WTF,
o_proto_factory: OPF,
processor: Arc<PRC>,
worker_pool: ThreadPool,
}
impl<PRC, RTF, IPF, WTF, OPF> TServer<PRC, RTF, IPF, WTF, OPF>
where PRC: TProcessor + Send + Sync + 'static,
RTF: TReadTransportFactory + 'static,
IPF: TInputProtocolFactory + 'static,
WTF: TWriteTransportFactory + 'static,
OPF: TOutputProtocolFactory + 'static {
/// Create a `TServer`.
///
/// Each accepted connection has an input and output half, each of which
/// requires a `TTransport` and `TProtocol`. `TServer` uses
/// `read_transport_factory` and `input_protocol_factory` to create
/// implementations for the input, and `write_transport_factory` and
/// `output_protocol_factory` to create implementations for the output.
pub fn new(
read_transport_factory: RTF,
input_protocol_factory: IPF,
write_transport_factory: WTF,
output_protocol_factory: OPF,
processor: PRC,
num_workers: usize,
) -> TServer<PRC, RTF, IPF, WTF, OPF> {
TServer {
r_trans_factory: read_transport_factory,
i_proto_factory: input_protocol_factory,
w_trans_factory: write_transport_factory,
o_proto_factory: output_protocol_factory,
processor: Arc::new(processor),
worker_pool: ThreadPool::with_name(
"Thrift service processor".to_owned(),
num_workers,
),
}
}
/// Listen for incoming connections on `listen_address`.
///
/// `listen_address` should be in the form `host:port`,
/// for example: `127.0.0.1:8080`.
///
/// Return `()` if successful.
///
/// Return `Err` when the server cannot bind to `listen_address` or there
/// is an unrecoverable error.
pub fn listen(&mut self, listen_address: &str) -> ::Result<()> {
let listener = TcpListener::bind(listen_address)?;
for stream in listener.incoming() {
match stream {
Ok(s) => {
let (i_prot, o_prot) = self.new_protocols_for_connection(s)?;
let processor = self.processor.clone();
self.worker_pool
.execute(move || handle_incoming_connection(processor, i_prot, o_prot),);
}
Err(e) => {
warn!("failed to accept remote connection with error {:?}", e);
}
}
}
Err(
::Error::Application(
ApplicationError {
kind: ApplicationErrorKind::Unknown,
message: "aborted listen loop".into(),
},
),
)
}
fn new_protocols_for_connection(
&mut self,
stream: TcpStream,
) -> ::Result<(Box<TInputProtocol + Send>, Box<TOutputProtocol + Send>)> {
// create the shared tcp stream
let channel = TTcpChannel::with_stream(stream);
// split it into two - one to be owned by the
// input tran/proto and the other by the output
let (r_chan, w_chan) = channel.split()?;
// input protocol and transport
let r_tran = self.r_trans_factory.create(Box::new(r_chan));
let i_prot = self.i_proto_factory.create(r_tran);
// output protocol and transport
let w_tran = self.w_trans_factory.create(Box::new(w_chan));
let o_prot = self.o_proto_factory.create(w_tran);
Ok((i_prot, o_prot))
}
}
fn handle_incoming_connection<PRC>(
processor: Arc<PRC>,
i_prot: Box<TInputProtocol>,
o_prot: Box<TOutputProtocol>,
) where
PRC: TProcessor,
{
let mut i_prot = i_prot;
let mut o_prot = o_prot;
loop {
let r = processor.process(&mut *i_prot, &mut *o_prot);
if let Err(e) = r {
warn!("processor completed with error: {:?}", e);
break;
}
}
}<|fim▁end|>
| |
<|file_name|>signals.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
"""This file is part of the django ERP project.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
__author__ = 'Emanuele Bertoldi <[email protected]>'
__copyright__ = 'Copyright (c) 2013-2015, django ERP Team'
__version__ = '0.0.1'
from django.conf import settings
from django.db.models.signals import post_save, pre_delete
from djangoerp.core.utils.models import get_model
from djangoerp.core.signals import manage_author_permissions
from .models import Menu, Link, Bookmark
from .utils import create_bookmarks, delete_bookmarks
## HANDLERS ##
def _create_bookmarks(sender, instance, *args, **kwargs):
create_bookmarks(instance)
def _delete_bookmarks(sender, instance, *args, **kwargs):
delete_bookmarks(instance)
## API ##
def manage_bookmarks(cls, enabled=True):
"""Connects handlers for bookmarks management.
<|fim▁hole|>
>> manage_bookmarks(User)
It will auto generate a bookmark list associated to each new User's instance.
To disconnect:
>> manage_bookmarks(User, False)
"""
cls = get_model(cls)
cls_name = cls.__name__.lower()
create_dispatch_uid = "create_%s_bookmarks" % cls_name
delete_dispatch_uid = "delete_%s_bookmarks" % cls_name
if enabled:
post_save.connect(_create_bookmarks, cls, dispatch_uid=create_dispatch_uid)
pre_delete.connect(_delete_bookmarks, cls, dispatch_uid=delete_dispatch_uid)
else:
post_save.disconnect(_create_bookmarks, cls, dispatch_uid=create_dispatch_uid)
pre_delete.disconnect(_delete_bookmarks, cls, dispatch_uid=delete_dispatch_uid)
## CONNECTIONS ##
manage_author_permissions(Menu)
manage_author_permissions(Link)
manage_author_permissions(Bookmark)
manage_bookmarks(settings.AUTH_USER_MODEL)<|fim▁end|>
|
This handler could be used to automatically create a related bookmark list
on given model class instance creation. i.e.:
|
<|file_name|>UserInformationTestPage.tsx<|end_file_name|><|fim▁begin|>import { RootTestPage } from "v2/DevTools/RootTestPage"
export class UserInformationTestPage extends RootTestPage {
async clickSubmitButton() {
this.find("button[type='submit']").simulate("submit")
await this.update()
}
async changeEmailInput() {
const input = this.find("QuickInput input[name='email']")
// @ts-expect-error PLEASE_FIX_ME_STRICT_NULL_CHECK_MIGRATION
input.props().onChange({
// @ts-ignore
currentTarget: { id: "email", value: "[email protected]" },
})
input.simulate("blur")
await this.update()
}
<|fim▁hole|> const input = this.find("QuickInput input[name='name']")
// @ts-expect-error PLEASE_FIX_ME_STRICT_NULL_CHECK_MIGRATION
input.props().onChange({
// @ts-ignore
currentTarget: { id: "name", value },
})
input.simulate("blur")
await this.update()
}
}<|fim▁end|>
|
async changeNameInput(value = "New name") {
|
<|file_name|>generate_public.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import qa
import re
import numpy
have_use = re.compile("^\s{1,12}use\s")
remove_warn = re.compile('''(?!.*QA_WARN .+)''', re.VERBOSE)
unwanted = re.compile("(\s|&|\n)", re.VERBOSE)
def do_magic(files, options):
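    # files[0] is the module name to look for in "use" statements, files[1]
    # the output line length, and files[2:] the Fortran sources to scan;
    # returns the unique symbols pulled in via "only:" clauses.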
name = files[0]
glob = []
temp = []
for f in files[2:]:<|fim▁hole|> for f in uses:
glob.extend(f.split("only: ")[1].strip().split(','))
return numpy.unique([unwanted.sub('', f) for f in glob])
def pretty_format(list, col):
print " public :: &"
str = " & "
for item in list:
if(len(str) + len(item) + 2 > int(col)):
print str + "&"
str = " & "
str = str + item + ", "
print str.rstrip(", ")
if __name__ == "__main__":
from optparse import OptionParser
usage = "usage: %prog module_name line_lenght FILES\n\nExample: bin/generate_public.py grid 140 $(find . -name \"*F90\")"
parser = OptionParser(usage=usage)
parser.add_option("-v", "--verbose",
action="store_true", dest="verbose", default=False,
help="make lots of noise [default]")
parser.add_option("-q", "--quiet",
action="store_false", dest="verbose",
help="be vewwy quiet (I'm hunting wabbits)")
parser.add_option("-f", "--force",
action="store_true", dest="force",
help="commit despite errors (It will be logged)")
(options, args) = parser.parse_args()
if len(args) < 1:
parser.error("incorrect number of arguments")
tab = do_magic(args, options)
pretty_format(tab, args[1])<|fim▁end|>
|
lines = open(f, 'r').readlines()
temp = qa2.remove_amp(filter(remove_warn.match, lines), True)
uses = [f for f in filter(have_use.search, temp) if (
re.match("\s{0,9}use " + name, f))]
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Terminal formatting library.
//!
//! This crate provides the `Terminal` trait, which abstracts over an [ANSI
//! Terminal][ansi] to provide color printing, among other things. There are two implementations,
//! the `TerminfoTerminal`, which uses control characters from a
//! [terminfo][ti] database, and `WinConsole`, which uses the [Win32 Console
//! API][win].
//!
//! # Examples
//!
//! ```no_run
//! extern crate term;
//!
//! use std::io::prelude::*;
//!
//! fn main() {
//! let mut t = term::stdout().unwrap();
//!
//! t.fg(term::color::GREEN).unwrap();
//! (write!(t, "hello, ")).unwrap();
//!
//! t.fg(term::color::RED).unwrap();
//! (writeln!(t, "world!")).unwrap();
//!
//! t.reset().unwrap();
//! }
//! ```
//!
//! [ansi]: https://en.wikipedia.org/wiki/ANSI_escape_code
//! [win]: http://msdn.microsoft.com/en-us/library/windows/desktop/ms682010%28v=vs.85%29.aspx
//! [ti]: https://en.wikipedia.org/wiki/Terminfo
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "term"]
#![unstable(feature = "rustc_private",
reason = "use the crates.io `term` library instead")]
#![staged_api]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/nightly/",
html_playground_url = "http://play.rust-lang.org/")]
#![deny(missing_docs)]
#![feature(box_syntax)]
#![feature(collections)]
#![feature(int_uint)]
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(std_misc)]
#![feature(str_char)]
#![feature(path_ext)]
#![cfg_attr(windows, feature(libc))]
#[macro_use] extern crate log;
pub use terminfo::TerminfoTerminal;
#[cfg(windows)]
pub use win::WinConsole;
use std::io::prelude::*;
use std::io;
pub mod terminfo;
#[cfg(windows)]
mod win;
/// A hack to work around the fact that `Box<Write + Send>` does not
/// currently implement `Write`.
pub struct WriterWrapper {
wrapped: Box<Write + Send>,
}
impl Write for WriterWrapper {
#[inline]
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
self.wrapped.write(buf)
}
#[inline]
fn flush(&mut self) -> io::Result<()> {
self.wrapped.flush()
}
}
#[cfg(not(windows))]
/// Return a Terminal wrapping stdout, or None if a terminal couldn't be
/// opened.
pub fn stdout() -> Option<Box<Terminal<WriterWrapper> + Send>> {
TerminfoTerminal::new(WriterWrapper {
wrapped: box std::io::stdout() as Box<Write + Send>,
})
}
#[cfg(windows)]
/// Return a Terminal wrapping stdout, or None if a terminal couldn't be
/// opened.
pub fn stdout() -> Option<Box<Terminal<WriterWrapper> + Send>> {
let ti = TerminfoTerminal::new(WriterWrapper {
wrapped: box std::io::stdout() as Box<Write + Send>,
});
match ti {
Some(t) => Some(t),
None => {
WinConsole::new(WriterWrapper {
wrapped: box std::io::stdout() as Box<Write + Send>,
})
}
}
}
#[cfg(not(windows))]
/// Return a Terminal wrapping stderr, or None if a terminal couldn't be
/// opened.
pub fn stderr() -> Option<Box<Terminal<WriterWrapper> + Send>> {
TerminfoTerminal::new(WriterWrapper {
wrapped: box std::io::stderr() as Box<Write + Send>,
})
}
#[cfg(windows)]
/// Return a Terminal wrapping stderr, or None if a terminal couldn't be
/// opened.
pub fn stderr() -> Option<Box<Terminal<WriterWrapper> + Send>> {
let ti = TerminfoTerminal::new(WriterWrapper {
wrapped: box std::io::stderr() as Box<Write + Send>,
});
match ti {
Some(t) => Some(t),
None => {
WinConsole::new(WriterWrapper {
wrapped: box std::io::stderr() as Box<Write + Send>,
})
}
}
}
/// Terminal color definitions
pub mod color {
/// Number for a terminal color
pub type Color = u16;
pub const BLACK: Color = 0;
pub const RED: Color = 1;
pub const GREEN: Color = 2;
pub const YELLOW: Color = 3;
pub const BLUE: Color = 4;
pub const MAGENTA: Color = 5;
pub const CYAN: Color = 6;
pub const WHITE: Color = 7;
pub const BRIGHT_BLACK: Color = 8;
pub const BRIGHT_RED: Color = 9;
pub const BRIGHT_GREEN: Color = 10;
pub const BRIGHT_YELLOW: Color = 11;
pub const BRIGHT_BLUE: Color = 12;
pub const BRIGHT_MAGENTA: Color = 13;
pub const BRIGHT_CYAN: Color = 14;
pub const BRIGHT_WHITE: Color = 15;
}
/// Terminal attributes
pub mod attr {
pub use self::Attr::*;<|fim▁hole|>
/// Terminal attributes for use with term.attr().
///
/// Most attributes can only be turned on and must be turned off with term.reset().
/// The ones that can be turned off explicitly take a boolean value.
/// Color is also represented as an attribute for convenience.
#[derive(Copy)]
pub enum Attr {
/// Bold (or possibly bright) mode
Bold,
/// Dim mode, also called faint or half-bright. Often not supported
Dim,
/// Italics mode. Often not supported
Italic(bool),
/// Underline mode
Underline(bool),
/// Blink mode
Blink,
/// Standout mode. Often implemented as Reverse, sometimes coupled with Bold
Standout(bool),
/// Reverse mode, inverts the foreground and background colors
Reverse,
/// Secure mode, also called invis mode. Hides the printed text
Secure,
/// Convenience attribute to set the foreground color
ForegroundColor(super::color::Color),
/// Convenience attribute to set the background color
BackgroundColor(super::color::Color)
}
}
/// A terminal with similar capabilities to an ANSI Terminal
/// (foreground/background colors etc).
pub trait Terminal<T: Write>: Write {
/// Sets the foreground color to the given color.
///
/// If the color is a bright color, but the terminal only supports 8 colors,
/// the corresponding normal color will be used instead.
///
/// Returns `Ok(true)` if the color was set, `Ok(false)` otherwise, and `Err(e)`
/// if there was an I/O error.
fn fg(&mut self, color: color::Color) -> io::Result<bool>;
/// Sets the background color to the given color.
///
/// If the color is a bright color, but the terminal only supports 8 colors,
/// the corresponding normal color will be used instead.
///
/// Returns `Ok(true)` if the color was set, `Ok(false)` otherwise, and `Err(e)`
/// if there was an I/O error.
fn bg(&mut self, color: color::Color) -> io::Result<bool>;
/// Sets the given terminal attribute, if supported. Returns `Ok(true)`
/// if the attribute was supported, `Ok(false)` otherwise, and `Err(e)` if
/// there was an I/O error.
fn attr(&mut self, attr: attr::Attr) -> io::Result<bool>;
/// Returns whether the given terminal attribute is supported.
fn supports_attr(&self, attr: attr::Attr) -> bool;
/// Resets all terminal attributes and color to the default.
/// Returns `Ok(())` on success.
fn reset(&mut self) -> io::Result<()>;
/// Gets an immutable reference to the stream inside
fn get_ref<'a>(&'a self) -> &'a T;
/// Gets a mutable reference to the stream inside
fn get_mut<'a>(&'a mut self) -> &'a mut T;
}
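// Illustrative sketch (not part of the original crate): how a caller might
// combine the trait methods above; assumes the `term::stdout()` handle shown
// in the module docs.
//
//     let mut t = term::stdout().unwrap();
//     if t.supports_attr(attr::Bold) { t.attr(attr::Bold).unwrap(); }
//     t.fg(color::BRIGHT_RED).unwrap(); // falls back to RED on 8-color terminals
//     writeln!(t, "alert").unwrap();
//     t.reset().unwrap();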
/// A terminal which can be unwrapped.
pub trait UnwrappableTerminal<T: Write>: Terminal<T> {
/// Returns the contained stream, destroying the `Terminal`
fn unwrap(self) -> T;
}<|fim▁end|>
| |
<|file_name|>0007_auto__add_field_project_url.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Project.url'
db.add_column(u'core_project', 'url',
self.gf('django.db.models.fields.TextField')(null=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Project.url'
db.delete_column(u'core_project', 'url')
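# Illustrative note (assumption, not part of the generated migration): with
# South installed, this schema change would typically be applied with
#     ./manage.py migrate core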
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'badges.badge': {
'Meta': {'object_name': 'Badge'},
'icon': ('django.db.models.fields.files.ImageField', [], {'default': "''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'badges.projectbadge': {
'Meta': {'object_name': 'ProjectBadge'},
'awardLevel': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['badges.Badge']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'description': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'multipleAwards': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Project']"}),
'tags': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '400', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'symmetrical': 'False', 'through': u"orm['badges.ProjectBadgeToUser']", 'to': u"orm['auth.User']"})
},
u'badges.projectbadgetouser': {
'Meta': {'object_name': 'ProjectBadgeToUser'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'projectbadge': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['badges.ProjectBadge']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'core.points': {
'Meta': {'object_name': 'Points'},
'date_awarded': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'projectbadge': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['badges.ProjectBadge']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'value': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'core.project': {
'Meta': {'ordering': "('-created_at',)", 'object_name': 'Project'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'allowed_api_hosts': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'background_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'project_closing_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'properties': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'query_token': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'supervisors': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'supervisors'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['auth.User']"}),
'teams': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['core.Team']", 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'viewing_pass_phrase': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'visual_theme': ('django.db.models.fields.CharField', [], {'default': "'none'", 'max_length': '20'})
},
u'core.team': {
'Meta': {'ordering': "['-order', '-date_created', 'id']", 'object_name': 'Team'},
'background_color': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'icon': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'})<|fim▁hole|> },
u'core.userprofile': {
'Meta': {'object_name': 'UserProfile'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True'})
}
}
complete_apps = ['core']<|fim▁end|>
| |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#[link(name = "frog"
, vers = "0.0"
, author = "Benjamin Roux"
, uuid = "D4B7F5F0-6B78-4DF0-9CB5-EB65DDB43857")];
#[crate_type = "lib"];
#[warn(non_camel_case_types)]
extern mod extra;<|fim▁hole|>pub mod simulated_annealing;<|fim▁end|>
|
extern mod nalgebra;
pub mod ga;
pub mod dtw;
|
<|file_name|>data_loader.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
extern crate hyper;
extern crate hyper_serde;<|fim▁hole|>
use hyper_serde::Serde;
use ipc_channel::ipc;
use msg::constellation_msg::{PipelineId, ReferrerPolicy};
use net_traits::LoadConsumer::Channel;
use net_traits::ProgressMsg::{Payload, Done};
use net_traits::{LoadData, LoadContext, NetworkError, LoadOrigin};
use self::hyper::header::ContentType;
use self::hyper::mime::{Mime, TopLevel, SubLevel, Attr, Value};
use url::Url;
struct DataLoadTest;
impl LoadOrigin for DataLoadTest {
fn referrer_url(&self) -> Option<Url> {
None
}
fn referrer_policy(&self) -> Option<ReferrerPolicy> {
None
}
fn pipeline_id(&self) -> Option<PipelineId> {
None
}
}
#[cfg(test)]
fn assert_parse(url: &'static str,
content_type: Option<ContentType>,
charset: Option<String>,
data: Option<Vec<u8>>) {
use net::data_loader::load;
use net::mime_classifier::MimeClassifier;
use net::resource_thread::CancellationListener;
use std::sync::Arc;
let (start_chan, start_port) = ipc::channel().unwrap();
let classifier = Arc::new(MimeClassifier::new());
load(LoadData::new(LoadContext::Browsing, Url::parse(url).unwrap(), &DataLoadTest),
Channel(start_chan),
classifier, CancellationListener::new(None));
let response = start_port.recv().unwrap();
assert_eq!(&response.metadata.content_type.map(Serde::into_inner),
&content_type);
assert_eq!(&response.metadata.charset, &charset);
let progress = response.progress_port.recv().unwrap();
match data {
None => {
assert_eq!(progress, Done(Err(NetworkError::Internal("invalid data uri".to_owned()))));
}
Some(dat) => {
assert_eq!(progress, Payload(dat));
assert_eq!(response.progress_port.recv().unwrap(), Done(Ok(())));
}
}
}
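// Note added for illustration (not part of the original test): the cases below
// exercise the `data:[<mediatype>][;base64],<data>` grammar from RFC 2397.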
#[test]
fn empty_invalid() {
assert_parse("data:", None, None, None);
}
#[test]
fn plain() {
assert_parse(
"data:,hello%20world",
Some(ContentType(Mime(TopLevel::Text, SubLevel::Plain,
vec!((Attr::Charset, Value::Ext("us-ascii".to_owned())))))),
Some("US-ASCII".to_owned()), Some(b"hello world".iter().map(|&x| x).collect()));
}
#[test]
fn plain_ct() {
assert_parse(
"data:text/plain,hello",
Some(ContentType(Mime(TopLevel::Text, SubLevel::Plain, vec!()))),
None,
Some(b"hello".iter().map(|&x| x).collect()));
}
#[test]
fn plain_charset() {
assert_parse("data:text/plain;charset=latin1,hello",
Some(ContentType(Mime(TopLevel::Text,
SubLevel::Plain,
vec!((Attr::Charset, Value::Ext("latin1".to_owned())))))),
Some("latin1".to_owned()), Some(b"hello".iter().map(|&x| x).collect()));
}
#[test]
fn plain_only_charset() {
assert_parse(
"data:;charset=utf-8,hello",
Some(ContentType(Mime(TopLevel::Text,
SubLevel::Plain,
vec!((Attr::Charset, Value::Utf8))))),
Some("utf-8".to_owned()), Some(b"hello".iter().map(|&x| x).collect()));
}
#[test]
fn base64() {
assert_parse(
"data:;base64,C62+7w==",
Some(ContentType(Mime(TopLevel::Text,
SubLevel::Plain,
vec!((Attr::Charset, Value::Ext("us-ascii".to_owned())))))),
Some("US-ASCII".to_owned()), Some(vec!(0x0B, 0xAD, 0xBE, 0xEF)));
}
#[test]
fn base64_ct() {
assert_parse("data:application/octet-stream;base64,C62+7w==",
Some(ContentType(Mime(TopLevel::Application, SubLevel::Ext("octet-stream".to_owned()), vec!()))),
None,
Some(vec!(0x0B, 0xAD, 0xBE, 0xEF)));
}
#[test]
fn base64_charset() {
assert_parse("data:text/plain;charset=koi8-r;base64,8PLl9+XkIO3l5Pfl5A==",
Some(ContentType(Mime(TopLevel::Text, SubLevel::Plain,
vec!((Attr::Charset, Value::Ext("koi8-r".to_owned())))))),
Some("koi8-r".to_owned()),
Some(vec!(0xF0, 0xF2, 0xE5, 0xF7, 0xE5, 0xE4, 0x20, 0xED, 0xE5, 0xE4, 0xF7, 0xE5, 0xE4)));
}<|fim▁end|>
| |
<|file_name|>ModalContent.js<|end_file_name|><|fim▁begin|>import PropTypes from 'prop-types';
import React from 'react';
import Icon from 'Components/Icon';
import Link from 'Components/Link/Link';
import { icons } from 'Helpers/Props';
import styles from './ModalContent.css';
function ModalContent(props) {
const {
className,
children,
showCloseButton,
onModalClose,
...otherProps
} = props;
return (
<div
className={className}<|fim▁hole|> <Link
className={styles.closeButton}
onPress={onModalClose}
>
<Icon
name={icons.CLOSE}
size={18}
/>
</Link>
}
{children}
</div>
);
}
ModalContent.propTypes = {
className: PropTypes.string,
children: PropTypes.node,
showCloseButton: PropTypes.bool.isRequired,
onModalClose: PropTypes.func.isRequired
};
ModalContent.defaultProps = {
className: styles.modalContent,
showCloseButton: true
};
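// Illustrative usage sketch (handler and child names are assumptions):
// <ModalContent onModalClose={handleClose}>{body}</ModalContent>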
export default ModalContent;<|fim▁end|>
|
{...otherProps}
>
{
showCloseButton &&
|
<|file_name|>issue-65679-inst-opaque-ty-from-val-twice.rs<|end_file_name|><|fim▁begin|>// compile-flags: -Zsave-analysis
// check-pass
#![feature(type_alias_impl_trait, rustc_attrs)]
type T = impl Sized;
// The concrete type referred to by an impl-trait type alias (`T`) is guaranteed
// to be the same everywhere it occurs, whereas each occurrence of a bare
// `impl Trait` is location-sensitive; so no distinctness assertion should be
// declared between instances of an impl-trait type alias.
// For details, see RFC 2515:
// https://github.com/rust-lang/rfcs/blob/master/text/2515-type_alias_impl_trait.md
fn take(_: fn() -> T) {}
fn main() {
take(|| {});
take(|| {});<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>// Copyright © 2015, Peter Atashian
// Licensed under the MIT License <LICENSE.md><|fim▁hole|><|fim▁end|>
|
fn main() {
println!("cargo:rustc-flags=-l wmvcore");
}
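// Note (illustrative assumption): on newer Cargo the equivalent directive is
// `println!("cargo:rustc-link-lib=wmvcore");`.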
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>__version__ = "0.0.2"<|fim▁hole|><|fim▁end|>
|
from .samplesubmod import *
|
<|file_name|>HightEqWidthRounderImage.java<|end_file_name|><|fim▁begin|>package connect.view;
import android.content.Context;
import android.util.AttributeSet;
import connect.view.roundedimageview.RoundedImageView;
/**
* A {@link RoundedImageView} whose measured height always equals its width.
*
* Created by Administrator on 2016/12/15.
*/
public class HightEqWidthRounderImage extends RoundedImageView {
public HightEqWidthRounderImage(Context context) {<|fim▁hole|> }
public HightEqWidthRounderImage(Context context, AttributeSet attrs) {
super(context, attrs);
}
public HightEqWidthRounderImage(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, widthMeasureSpec);
}
}<|fim▁end|>
|
super(context);
|
<|file_name|>constructor.py<|end_file_name|><|fim▁begin|>__all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor',
'ConstructorError']
from error import *
from nodes import *
import datetime
import binascii, re, sys, types
class ConstructorError(MarkedYAMLError):
pass
class BaseConstructor(object):
yaml_constructors = {}
yaml_multi_constructors = {}
def __init__(self):
self.constructed_objects = {}
self.recursive_objects = {}
self.state_generators = []
self.deep_construct = False
def check_data(self):
# Check whether there are more documents available.
return self.check_node()
def get_data(self):
# Construct and return the next document.
if self.check_node():
return self.construct_document(self.get_node())
def get_single_data(self):
# Ensure that the stream contains a single document and construct it.
node = self.get_single_node()
if node is not None:
return self.construct_document(node)
return None
def construct_document(self, node):
data = self.construct_object(node)
while self.state_generators:
state_generators = self.state_generators
self.state_generators = []
for generator in state_generators:
for dummy in generator:
pass
self.constructed_objects = {}
self.recursive_objects = {}
self.deep_construct = False
return data
def construct_object(self, node, deep=False):
if node in self.constructed_objects:
return self.constructed_objects[node]
if deep:
old_deep = self.deep_construct
self.deep_construct = True
if node in self.recursive_objects:
raise ConstructorError(None, None,
"found unconstructable recursive node", node.start_mark)
self.recursive_objects[node] = None
constructor = None
tag_suffix = None
if node.tag in self.yaml_constructors:
constructor = self.yaml_constructors[node.tag]
else:
for tag_prefix in self.yaml_multi_constructors:
if node.tag.startswith(tag_prefix):
tag_suffix = node.tag[len(tag_prefix):]
constructor = self.yaml_multi_constructors[tag_prefix]
break
else:
if None in self.yaml_multi_constructors:
tag_suffix = node.tag
constructor = self.yaml_multi_constructors[None]
elif None in self.yaml_constructors:
constructor = self.yaml_constructors[None]
elif isinstance(node, ScalarNode):
constructor = self.__class__.construct_scalar
elif isinstance(node, SequenceNode):
constructor = self.__class__.construct_sequence
elif isinstance(node, MappingNode):
constructor = self.__class__.construct_mapping
if tag_suffix is None:
data = constructor(self, node)
else:
data = constructor(self, tag_suffix, node)
if isinstance(data, types.GeneratorType):
generator = data
data = generator.next()
if self.deep_construct:
for dummy in generator:
pass
else:
self.state_generators.append(generator)
self.constructed_objects[node] = data
del self.recursive_objects[node]
if deep:
self.deep_construct = old_deep
return data
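# Note (illustrative): a constructor may be a generator; it first yields a
# bare object (so recursive references can already point at it) and only then
# finishes populating it. See construct_yaml_map below for the pattern.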
def construct_scalar(self, node):
if not isinstance(node, ScalarNode):
raise ConstructorError(None, None,
"expected a scalar node, but found %s" % node.id,
node.start_mark)
return node.value
def construct_sequence(self, node, deep=False):
if not isinstance(node, SequenceNode):
raise ConstructorError(None, None,
"expected a sequence node, but found %s" % node.id,
node.start_mark)
return [self.construct_object(child, deep=deep)
for child in node.value]
def construct_mapping(self, node, deep=False):
if not isinstance(node, MappingNode):
raise ConstructorError(None, None,
"expected a mapping node, but found %s" % node.id,
node.start_mark)
mapping = {}
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
try:
hash(key)
except TypeError, exc:
raise ConstructorError("while constructing a mapping", node.start_mark,
"found unacceptable key (%s)" % exc, key_node.start_mark)
value = self.construct_object(value_node, deep=deep)
mapping[key] = value
return mapping
def construct_pairs(self, node, deep=False):
if not isinstance(node, MappingNode):
raise ConstructorError(None, None,
"expected a mapping node, but found %s" % node.id,
node.start_mark)
pairs = []
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
value = self.construct_object(value_node, deep=deep)
pairs.append((key, value))
return pairs
def add_constructor(cls, tag, constructor):
if not 'yaml_constructors' in cls.__dict__:
cls.yaml_constructors = cls.yaml_constructors.copy()
cls.yaml_constructors[tag] = constructor
add_constructor = classmethod(add_constructor)
def add_multi_constructor(cls, tag_prefix, multi_constructor):
if not 'yaml_multi_constructors' in cls.__dict__:
cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy()
cls.yaml_multi_constructors[tag_prefix] = multi_constructor
add_multi_constructor = classmethod(add_multi_constructor)
class SafeConstructor(BaseConstructor):
def construct_scalar(self, node):
if isinstance(node, MappingNode):
for key_node, value_node in node.value:
if key_node.tag == u'tag:yaml.org,2002:value':
return self.construct_scalar(value_node)
return BaseConstructor.construct_scalar(self, node)
def flatten_mapping(self, node):
merge = []
index = 0
while index < len(node.value):
key_node, value_node = node.value[index]
if key_node.tag == u'tag:yaml.org,2002:merge':
del node.value[index]
if isinstance(value_node, MappingNode):
self.flatten_mapping(value_node)
merge.extend(value_node.value)
elif isinstance(value_node, SequenceNode):
submerge = []
for subnode in value_node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError("while constructing a mapping",
node.start_mark,
"expected a mapping for merging, but found %s"
% subnode.id, subnode.start_mark)
self.flatten_mapping(subnode)
submerge.append(subnode.value)
submerge.reverse()
for value in submerge:
merge.extend(value)
else:
raise ConstructorError("while constructing a mapping", node.start_mark,
"expected a mapping or list of mappings for merging, but found %s"
% value_node.id, value_node.start_mark)
elif key_node.tag == u'tag:yaml.org,2002:value':
key_node.tag = u'tag:yaml.org,2002:str'
index += 1
else:
index += 1
if merge:
node.value = merge + node.value
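# Illustrative example (not part of the original file): flatten_mapping
# implements YAML merge keys, e.g.
#     defaults: &d {a: 1, b: 2}
#     custom: {<<: *d, b: 3}
# so `custom` loads as {'a': 1, 'b': 3} (explicit keys win over merged ones).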
def construct_mapping(self, node, deep=False):
if isinstance(node, MappingNode):
self.flatten_mapping(node)
return BaseConstructor.construct_mapping(self, node, deep=deep)
def construct_yaml_null(self, node):
self.construct_scalar(node)
return None
bool_values = {
u'yes': True,
u'no': False,
u'true': True,
u'false': False,
u'on': True,
u'off': False,
}
def construct_yaml_bool(self, node):
value = self.construct_scalar(node)
return self.bool_values[value.lower()]
def construct_yaml_int(self, node):
value = str(self.construct_scalar(node))
value = value.replace('_', '')
sign = +1
if value[0] == '-':
sign = -1
if value[0] in '+-':
value = value[1:]
if value == '0':
return 0
elif value.startswith('0b'):
return sign*int(value[2:], 2)
elif value.startswith('0x'):
return sign*int(value[2:], 16)
elif value[0] == '0':
return sign*int(value, 8)
elif ':' in value:
digits = [int(part) for part in value.split(':')]
digits.reverse()
base = 1
value = 0
for digit in digits:
value += digit*base
base *= 60
return sign*value
else:
return sign*int(value)
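# e.g. (illustrative): '0b1010' -> 10, '0x1A' -> 26, '012' -> 10 (octal),
# and sexagesimal '1:30' -> 90.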
inf_value = 1e300
while inf_value != inf_value*inf_value:
inf_value *= inf_value
nan_value = -inf_value/inf_value # Trying to make a quiet NaN (like C99).
def construct_yaml_float(self, node):
value = str(self.construct_scalar(node))
value = value.replace('_', '').lower()
sign = +1
if value[0] == '-':
sign = -1
if value[0] in '+-':
value = value[1:]
if value == '.inf':
return sign*self.inf_value
elif value == '.nan':
return self.nan_value
elif ':' in value:
digits = [float(part) for part in value.split(':')]
digits.reverse()
base = 1
value = 0.0
for digit in digits:
value += digit*base
base *= 60
return sign*value
else:
return sign*float(value)
def construct_yaml_binary(self, node):
value = self.construct_scalar(node)
try:
return str(value).decode('base64')
except (binascii.Error, UnicodeEncodeError), exc:
raise ConstructorError(None, None,
"failed to decode base64 data: %s" % exc, node.start_mark)
timestamp_regexp = re.compile(
ur'''^(?P<year>[0-9][0-9][0-9][0-9])
-(?P<month>[0-9][0-9]?)
-(?P<day>[0-9][0-9]?)
(?:(?:[Tt]|[ \t]+)
(?P<hour>[0-9][0-9]?)
:(?P<minute>[0-9][0-9])
:(?P<second>[0-9][0-9])
(?:\.(?P<fraction>[0-9]*))?
(?:[ \t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)
(?::(?P<tz_minute>[0-9][0-9]))?))?)?$''', re.X)
def construct_yaml_timestamp(self, node):
value = self.construct_scalar(node)
match = self.timestamp_regexp.match(node.value)
values = match.groupdict()
year = int(values['year'])
month = int(values['month'])
day = int(values['day'])
if not values['hour']:
return datetime.date(year, month, day)
hour = int(values['hour'])
minute = int(values['minute'])
second = int(values['second'])
fraction = 0
if values['fraction']:
fraction = values['fraction'][:6]
while len(fraction) < 6:
fraction += '0'
fraction = int(fraction)
delta = None
if values['tz_sign']:
tz_hour = int(values['tz_hour'])
tz_minute = int(values['tz_minute'] or 0)
delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute)
if values['tz_sign'] == '-':
delta = -delta
data = datetime.datetime(year, month, day, hour, minute, second, fraction)
if delta:
data -= delta
return data
def construct_yaml_omap(self, node):
# Note: we do not check for duplicate keys, because it's too
# CPU-expensive.
omap = []
yield omap
if not isinstance(node, SequenceNode):
raise ConstructorError("while constructing an ordered map", node.start_mark,
"expected a sequence, but found %s" % node.id, node.start_mark)
for subnode in node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError("while constructing an ordered map", node.start_mark,
"expected a mapping of length 1, but found %s" % subnode.id,
subnode.start_mark)
if len(subnode.value) != 1:
raise ConstructorError("while constructing an ordered map", node.start_mark,
"expected a single mapping item, but found %d items" % len(subnode.value),
subnode.start_mark)
key_node, value_node = subnode.value[0]
key = self.construct_object(key_node)
value = self.construct_object(value_node)
omap.append((key, value))
def construct_yaml_pairs(self, node):
# Note: the same code as `construct_yaml_omap`.
pairs = []
yield pairs
if not isinstance(node, SequenceNode):
raise ConstructorError("while constructing pairs", node.start_mark,
"expected a sequence, but found %s" % node.id, node.start_mark)
for subnode in node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError("while constructing pairs", node.start_mark,
"expected a mapping of length 1, but found %s" % subnode.id,
subnode.start_mark)
if len(subnode.value) != 1:
raise ConstructorError("while constructing pairs", node.start_mark,
"expected a single mapping item, but found %d items" % len(subnode.value),
subnode.start_mark)
key_node, value_node = subnode.value[0]
key = self.construct_object(key_node)
value = self.construct_object(value_node)
pairs.append((key, value))
def construct_yaml_set(self, node):
data = set()
yield data
value = self.construct_mapping(node)
data.update(value)
def construct_yaml_str(self, node):
<|fim▁hole|> return value.encode('ascii')
except UnicodeEncodeError:
return value
def construct_yaml_seq(self, node):
data = []
yield data
data.extend(self.construct_sequence(node))
def construct_yaml_map(self, node):
data = {}
yield data
value = self.construct_mapping(node)
data.update(value)
def construct_yaml_object(self, node, cls):
data = cls.__new__(cls)
yield data
if hasattr(data, '__setstate__'):
state = self.construct_mapping(node, deep=True)
data.__setstate__(state)
else:
state = self.construct_mapping(node)
data.__dict__.update(state)
def construct_undefined(self, node):
raise ConstructorError(None, None,
"could not determine a constructor for the tag %r" % node.tag.encode('utf-8'),
node.start_mark)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:null',
SafeConstructor.construct_yaml_null)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:bool',
SafeConstructor.construct_yaml_bool)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:int',
SafeConstructor.construct_yaml_int)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:float',
SafeConstructor.construct_yaml_float)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:binary',
SafeConstructor.construct_yaml_binary)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:timestamp',
SafeConstructor.construct_yaml_timestamp)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:omap',
SafeConstructor.construct_yaml_omap)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:pairs',
SafeConstructor.construct_yaml_pairs)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:set',
SafeConstructor.construct_yaml_set)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:str',
SafeConstructor.construct_yaml_str)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:seq',
SafeConstructor.construct_yaml_seq)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:map',
SafeConstructor.construct_yaml_map)
SafeConstructor.add_constructor(None,
SafeConstructor.construct_undefined)
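# Illustrative usage sketch (assuming the usual PyYAML entry points):
#     import yaml
#     yaml.safe_load("when: 2001-02-03")  # routed through SafeConstructor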
class Constructor(SafeConstructor):
def construct_python_str(self, node):
return self.construct_scalar(node).encode('utf-8')
def construct_python_unicode(self, node):
return self.construct_scalar(node)
def construct_python_long(self, node):
return long(self.construct_yaml_int(node))
def construct_python_complex(self, node):
return complex(self.construct_scalar(node))
def construct_python_tuple(self, node):
return tuple(self.construct_sequence(node))
def find_python_module(self, name, mark):
if not name:
raise ConstructorError("while constructing a Python module", mark,
"expected non-empty name appended to the tag", mark)
try:
__import__(name)
except ImportError, exc:
raise ConstructorError("while constructing a Python module", mark,
"cannot find module %r (%s)" % (name.encode('utf-8'), exc), mark)
return sys.modules[name]
def find_python_name(self, name, mark):
if not name:
raise ConstructorError("while constructing a Python object", mark,
"expected non-empty name appended to the tag", mark)
if u'.' in name:
module_name, object_name = name.rsplit('.', 1)
else:
module_name = '__builtin__'
object_name = name
try:
__import__(module_name)
except ImportError, exc:
raise ConstructorError("while constructing a Python object", mark,
"cannot find module %r (%s)" % (module_name.encode('utf-8'), exc), mark)
module = sys.modules[module_name]
if not hasattr(module, object_name):
raise ConstructorError("while constructing a Python object", mark,
"cannot find %r in the module %r" % (object_name.encode('utf-8'),
module.__name__), mark)
return getattr(module, object_name)
def construct_python_name(self, suffix, node):
value = self.construct_scalar(node)
if value:
raise ConstructorError("while constructing a Python name", node.start_mark,
"expected the empty value, but found %r" % value.encode('utf-8'),
node.start_mark)
return self.find_python_name(suffix, node.start_mark)
def construct_python_module(self, suffix, node):
value = self.construct_scalar(node)
if value:
raise ConstructorError("while constructing a Python module", node.start_mark,
"expected the empty value, but found %r" % value.encode('utf-8'),
node.start_mark)
return self.find_python_module(suffix, node.start_mark)
class classobj: pass
def make_python_instance(self, suffix, node,
args=None, kwds=None, newobj=False):
if not args:
args = []
if not kwds:
kwds = {}
cls = self.find_python_name(suffix, node.start_mark)
if newobj and isinstance(cls, type(self.classobj)) \
and not args and not kwds:
instance = self.classobj()
instance.__class__ = cls
return instance
elif newobj and isinstance(cls, type):
return cls.__new__(cls, *args, **kwds)
else:
return cls(*args, **kwds)
def set_python_instance_state(self, instance, state):
if hasattr(instance, '__setstate__'):
instance.__setstate__(state)
else:
slotstate = {}
if isinstance(state, tuple) and len(state) == 2:
state, slotstate = state
if hasattr(instance, '__dict__'):
instance.__dict__.update(state)
elif state:
slotstate.update(state)
for key, value in slotstate.items():
setattr(instance, key, value)
def construct_python_object(self, suffix, node):
# Format:
# !!python/object:module.name { ... state ... }
instance = self.make_python_instance(suffix, node, newobj=True)
yield instance
deep = hasattr(instance, '__setstate__')
state = self.construct_mapping(node, deep=deep)
self.set_python_instance_state(instance, state)
def construct_python_object_apply(self, suffix, node, newobj=False):
# Format:
# !!python/object/apply # (or !!python/object/new)
# args: [ ... arguments ... ]
# kwds: { ... keywords ... }
# state: ... state ...
# listitems: [ ... listitems ... ]
# dictitems: { ... dictitems ... }
# or short format:
# !!python/object/apply [ ... arguments ... ]
# The difference between !!python/object/apply and !!python/object/new
# is how an object is created, check make_python_instance for details.
if isinstance(node, SequenceNode):
args = self.construct_sequence(node, deep=True)
kwds = {}
state = {}
listitems = []
dictitems = {}
else:
value = self.construct_mapping(node, deep=True)
args = value.get('args', [])
kwds = value.get('kwds', {})
state = value.get('state', {})
listitems = value.get('listitems', [])
dictitems = value.get('dictitems', {})
instance = self.make_python_instance(suffix, node, args, kwds, newobj)
if state:
self.set_python_instance_state(instance, state)
if listitems:
instance.extend(listitems)
if dictitems:
for key in dictitems:
instance[key] = dictitems[key]
return instance
def construct_python_object_new(self, suffix, node):
return self.construct_python_object_apply(suffix, node, newobj=True)
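# Illustrative example (assumption: loaded with the full, unsafe Loader):
#     !!python/object/apply:complex [1, 2]    # constructs complex(1, 2)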
Constructor.add_constructor(
u'tag:yaml.org,2002:python/none',
Constructor.construct_yaml_null)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/bool',
Constructor.construct_yaml_bool)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/str',
Constructor.construct_python_str)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/unicode',
Constructor.construct_python_unicode)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/int',
Constructor.construct_yaml_int)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/long',
Constructor.construct_python_long)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/float',
Constructor.construct_yaml_float)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/complex',
Constructor.construct_python_complex)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/list',
Constructor.construct_yaml_seq)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/tuple',
Constructor.construct_python_tuple)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/dict',
Constructor.construct_yaml_map)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/name:',
Constructor.construct_python_name)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/module:',
Constructor.construct_python_module)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/object:',
Constructor.construct_python_object)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/object/apply:',
Constructor.construct_python_object_apply)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/object/new:',
Constructor.construct_python_object_new)<|fim▁end|>
|
value = self.construct_scalar(node)
try:
|
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># This file is part of Indico.
# Copyright (C) 2002 - 2017 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
import json
from flask_pluginengine import current_plugin
def get_json_from_remote_server(func, default={}, **kwargs):
"""
Safely manage calls to the remote server by encapsulating JSON creation
from Piwik data.
"""
rawjson = func(**kwargs)
try:
data = json.loads(rawjson)
if isinstance(data, dict) and data.get('result') == 'error':
current_plugin.logger.error('The Piwik server responded with an error: %s', data['message'])
return {}
return data<|fim▁hole|>
def reduce_json(data):
"""Reduce a JSON object"""
return reduce(lambda x, y: int(x) + int(y), data.values())
def stringify_seconds(seconds=0):
"""
Takes time as a value of seconds and deduces the delta in human-readable
HHh MMm SSs format.
"""
seconds = int(seconds)
minutes = seconds / 60
ti = {'h': 0, 'm': 0, 's': 0}
if seconds > 0:
ti['s'] = seconds % 60
ti['m'] = minutes % 60
ti['h'] = minutes / 60
return "%dh %dm %ds" % (ti['h'], ti['m'], ti['s'])<|fim▁end|>
|
except Exception:
current_plugin.logger.exception('Unable to load JSON from source %s', rawjson)
return default
|
<|file_name|>ComparableFilter.java<|end_file_name|><|fim▁begin|>package vizardous.delegate.dataFilter;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import vizardous.util.Converter;
/**
* Filter class that provides filter functionality for data structures with comparable content
*
* @author Johannes Seiffarth <[email protected]>
*/
public class ComparableFilter {
/**
* Filters a map. All values (not keys!) that equal kick will be removed
* @param map to filter
* @param kick Value to kick out
*/
public static <T extends Comparable<T>, K> void filter(Map<K,T> map, T kick) {
Set<Map.Entry<K, T>> entrySet = map.entrySet();
for(Iterator<Map.Entry<K,T>> it = entrySet.iterator(); it.hasNext();) {
Map.Entry<K, T> entry = it.next();
if(entry.getValue().equals(kick))
it.remove();
}
}
/**
* Filters a list. All values that equal kick will be removed
* @param list to filter
* @param kick Value to kick out
* @return a reference to list (no new list!)
*/
public static <T extends Comparable<T>> List<T> filter(List<T> list, T kick) {
for(Iterator<T> it = list.iterator(); it.hasNext();) {
T val = it.next();
if(val.equals(kick))
it.remove();
}
return list;
}
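// Illustrative usage (list contents assumed): filter(names, "N/A") removes
// every "N/A" entry from `names` in place and returns the same list.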
/**
* Filters a double array. All values that equal kick will be removed
* @param data array to filter
* @param kick Value to kick out
* @return a new filtered array
*/
public static double[] filter(double[] data, double kick) {
LinkedList<Double> list = new LinkedList<Double>();
for(double value : data) {
if(value != kick)
list.add(value);
}
<|fim▁hole|>
}<|fim▁end|>
|
return Converter.listToArray(list);
}
|
<|file_name|>PatternParserTest.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.core.pattern;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.MarkerManager;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.Logger;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.NullConfiguration;
import org.apache.logging.log4j.core.impl.Log4jLogEvent;
import org.apache.logging.log4j.core.util.DummyNanoClock;
import org.apache.logging.log4j.core.util.SystemNanoClock;
import org.apache.logging.log4j.message.SimpleMessage;
import org.apache.logging.log4j.util.Strings;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
*/
public class PatternParserTest {
static String OUTPUT_FILE = "output/PatternParser";
static String WITNESS_FILE = "witness/PatternParser";
LoggerContext ctx = LoggerContext.getContext();
Logger root = ctx.getRootLogger();
private static String msgPattern = "%m%n";
private final String mdcMsgPattern1 = "%m : %X%n";
private final String mdcMsgPattern2 = "%m : %X{key1}%n";
private final String mdcMsgPattern3 = "%m : %X{key2}%n";
private final String mdcMsgPattern4 = "%m : %X{key3}%n";
private final String mdcMsgPattern5 = "%m : %X{key1},%X{key2},%X{key3}%n";
private static String badPattern = "[%d{yyyyMMdd HH:mm:ss,SSS] %-5p [%c{10}] - %m%n";
private static String customPattern = "[%d{yyyyMMdd HH:mm:ss,SSS}] %-5p [%-25.25c{1}:%-4L] - %m%n";
private static String patternTruncateFromEnd = "%d; %-5p %5.-5c %m%n";
private static String patternTruncateFromBeginning = "%d; %-5p %5.5c %m%n";
private static String nestedPatternHighlight =
"%highlight{%d{dd MMM yyyy HH:mm:ss,SSS}{GMT+0} [%t] %-5level: %msg%n%throwable}";
private static final String KEY = "Converter";
private PatternParser parser;
@Before
public void setup() {
parser = new PatternParser(KEY);
}
private void validateConverter(final List<PatternFormatter> formatter, final int index, final String name) {
final PatternConverter pc = formatter.get(index).getConverter();
assertEquals("Incorrect converter " + pc.getName() + " at index " + index + " expected " + name,
pc.getName(), name);
}
/**
* Test the default pattern
*/
@Test
public void defaultPattern() {
final List<PatternFormatter> formatters = parser.parse(msgPattern);
assertNotNull(formatters);
assertTrue(formatters.size() == 2);
validateConverter(formatters, 0, "Message");
validateConverter(formatters, 1, "Line Sep");
}
/**
* Test the custom pattern
*/
@Test
public void testCustomPattern() {
final List<PatternFormatter> formatters = parser.parse(customPattern);
assertNotNull(formatters);
final Map<String, String> mdc = new HashMap<>();
mdc.put("loginId", "Fred");
final Throwable t = new Throwable();
final StackTraceElement[] elements = t.getStackTrace();
final Log4jLogEvent event = Log4jLogEvent.newBuilder() //
.setLoggerName("org.apache.logging.log4j.PatternParserTest") //
.setMarker(MarkerManager.getMarker("TEST")) //
.setLoggerFqcn(Logger.class.getName()) //
.setLevel(Level.INFO) //
.setMessage(new SimpleMessage("Hello, world")) //
.setContextMap(mdc) //
.setThreadName("Thread1") //
.setSource(elements[0])
.setTimeMillis(System.currentTimeMillis()).build();
final StringBuilder buf = new StringBuilder();
for (final PatternFormatter formatter : formatters) {
formatter.format(event, buf);
}
final String str = buf.toString();
final String expected = "INFO [PatternParserTest :100 ] - Hello, world" + Strings.LINE_SEPARATOR;
assertTrue("Expected to end with: " + expected + ". Actual: " + str, str.endsWith(expected));
}
@Test
public void testPatternTruncateFromBeginning() {
final List<PatternFormatter> formatters = parser.parse(patternTruncateFromBeginning);
assertNotNull(formatters);
final LogEvent event = Log4jLogEvent.newBuilder() //
.setLoggerName("org.apache.logging.log4j.PatternParserTest") //
.setLoggerFqcn(Logger.class.getName()) //
.setLevel(Level.INFO) //
.setMessage(new SimpleMessage("Hello, world")) //
.setThreadName("Thread1") //
.setTimeMillis(System.currentTimeMillis()) //
.build();
final StringBuilder buf = new StringBuilder();
for (final PatternFormatter formatter : formatters) {
formatter.format(event, buf);
}
final String str = buf.toString();
final String expected = "INFO rTest Hello, world" + Strings.LINE_SEPARATOR;
assertTrue("Expected to end with: " + expected + ". Actual: " + str, str.endsWith(expected));
}
@Test
public void testPatternTruncateFromEnd() {
final List<PatternFormatter> formatters = parser.parse(patternTruncateFromEnd);
assertNotNull(formatters);
final LogEvent event = Log4jLogEvent.newBuilder() //
.setLoggerName("org.apache.logging.log4j.PatternParserTest") //
.setLoggerFqcn(Logger.class.getName()) //
.setLevel(Level.INFO) //
.setMessage(new SimpleMessage("Hello, world")) //
.setThreadName("Thread1") //
.setTimeMillis(System.currentTimeMillis()) //
.build();
final StringBuilder buf = new StringBuilder();
for (final PatternFormatter formatter : formatters) {
formatter.format(event, buf);
}
final String str = buf.toString();
final String expected = "INFO org.a Hello, world" + Strings.LINE_SEPARATOR;
assertTrue("Expected to end with: " + expected + ". Actual: " + str, str.endsWith(expected));
}
@Test
public void testBadPattern() {
final Calendar cal = Calendar.getInstance();
cal.set(2001, Calendar.FEBRUARY, 3, 4, 5, 6);
cal.set(Calendar.MILLISECOND, 789);
final long timestamp = cal.getTimeInMillis();
final List<PatternFormatter> formatters = parser.parse(badPattern);
assertNotNull(formatters);
final Throwable t = new Throwable();
final StackTraceElement[] elements = t.getStackTrace();
final LogEvent event = Log4jLogEvent.newBuilder() //
.setLoggerName("a.b.c") //
.setLoggerFqcn(Logger.class.getName()) //
.setLevel(Level.INFO) //
.setMessage(new SimpleMessage("Hello, world")) //
.setThreadName("Thread1") //
.setSource(elements[0]) //
.setTimeMillis(timestamp) //
.build();
final StringBuilder buf = new StringBuilder();
for (final PatternFormatter formatter : formatters) {
formatter.format(event, buf);
}
final String str = buf.toString();
// eats all characters until the closing '}' character
final String expected = "[2001-02-03 04:05:06,789] - Hello, world";
assertTrue("Expected to start with: " + expected + ". Actual: " + str, str.startsWith(expected));
}
@Test
public void testNestedPatternHighlight() {
testNestedPatternHighlight(Level.TRACE, "\u001B[30m");
testNestedPatternHighlight(Level.DEBUG, "\u001B[36m");
testNestedPatternHighlight(Level.INFO, "\u001B[32m");
testNestedPatternHighlight(Level.WARN, "\u001B[33m");
testNestedPatternHighlight(Level.ERROR, "\u001B[1;31m");
testNestedPatternHighlight(Level.FATAL, "\u001B[1;31m");
}
private void testNestedPatternHighlight(final Level level, final String expectedStart) {
final List<PatternFormatter> formatters = parser.parse(nestedPatternHighlight);
assertNotNull(formatters);
final Throwable t = new Throwable();
t.getStackTrace();
final LogEvent event = Log4jLogEvent.newBuilder() //
.setLoggerName("org.apache.logging.log4j.PatternParserTest") //
.setMarker(MarkerManager.getMarker("TEST")) //
.setLoggerFqcn(Logger.class.getName()) //
.setLevel(level) //
.setMessage(new SimpleMessage("Hello, world")) //
.setThreadName("Thread1") //
.setSource(/*stackTraceElement[0]*/ null) //
.setTimeMillis(System.currentTimeMillis()) //
.build();
final StringBuilder buf = new StringBuilder();
for (final PatternFormatter formatter : formatters) {
formatter.format(event, buf);
}
final String str = buf.toString();
final String expectedEnd = String.format("] %-5s: Hello, world%s\u001B[m", level, Strings.LINE_SEPARATOR);
assertTrue("Expected to start with: " + expectedStart + ". Actual: " + str, str.startsWith(expectedStart));
assertTrue("Expected to end with: \"" + expectedEnd + "\". Actual: \"" + str, str.endsWith(expectedEnd));
}
@Test
public void testNanoPatternShort() {
final List<PatternFormatter> formatters = parser.parse("%N");
assertNotNull(formatters);
assertEquals(1, formatters.size());
assertTrue(formatters.get(0).getConverter() instanceof NanoTimePatternConverter);
}
@Test
public void testNanoPatternLong() {
final List<PatternFormatter> formatters = parser.parse("%nano");
assertNotNull(formatters);
assertEquals(1, formatters.size());
assertTrue(formatters.get(0).getConverter() instanceof NanoTimePatternConverter);
}
@Test
public void testThreadNamePattern() {
testThreadNamePattern("%thread");
}
<|fim▁hole|> @Test
public void testThreadNameFullPattern() {
testThreadNamePattern("%threadName");
}
@Test
public void testThreadIdFullPattern() {
testThreadIdPattern("%threadId");
}
@Test
public void testThreadIdShortPattern1() {
testThreadIdPattern("%tid");
}
@Test
public void testThreadIdShortPattern2() {
testThreadIdPattern("%T");
}
@Test
public void testThreadPriorityShortPattern() {
testThreadPriorityPattern("%tp");
}
@Test
public void testThreadPriorityFullPattern() {
testThreadPriorityPattern("%threadPriority");
}
private void testThreadIdPattern(final String pattern) {
testFirstConverter(pattern, ThreadIdPatternConverter.class);
}
private void testThreadNamePattern(final String pattern) {
testFirstConverter(pattern, ThreadNamePatternConverter.class);
}
private void testThreadPriorityPattern(final String pattern) {
testFirstConverter(pattern, ThreadPriorityPatternConverter.class);
}
private void testFirstConverter(final String pattern, final Class<?> checkClass) {
final List<PatternFormatter> formatters = parser.parse(pattern);
assertNotNull(formatters);
final String msg = formatters.toString();
assertEquals(msg, 1, formatters.size());
assertTrue(msg, checkClass.isInstance(formatters.get(0).getConverter()));
}
@Test
public void testThreadNameShortPattern() {
testThreadNamePattern("%t");
}
@Test
public void testNanoPatternShortChangesConfigurationNanoClock() {
final Configuration config = new NullConfiguration();
assertTrue(config.getNanoClock() instanceof DummyNanoClock);
final PatternParser pp = new PatternParser(config, KEY, null);
assertTrue(config.getNanoClock() instanceof DummyNanoClock);
pp.parse("%m");
assertTrue(config.getNanoClock() instanceof DummyNanoClock);
pp.parse("%nano"); // this changes the config clock
assertTrue(config.getNanoClock() instanceof SystemNanoClock);
}
@Test
public void testNanoPatternLongChangesNanoClockFactoryMode() {
final Configuration config = new NullConfiguration();
assertTrue(config.getNanoClock() instanceof DummyNanoClock);
final PatternParser pp = new PatternParser(config, KEY, null);
assertTrue(config.getNanoClock() instanceof DummyNanoClock);
pp.parse("%m");
assertTrue(config.getNanoClock() instanceof DummyNanoClock);
pp.parse("%N");
assertTrue(config.getNanoClock() instanceof SystemNanoClock);
}
}<|fim▁end|>
| |
<|file_name|>bam.js<|end_file_name|><|fim▁begin|>!function(e){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=e();else if("function"==typeof define&&define.amd)define([],e);else{var f;"undefined"!=typeof window?f=window:"undefined"!=typeof global?f=global:"undefined"!=typeof self&&(f=self),f.Bam=e()}}(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
},{}],2:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
'use strict';
// If obj.hasOwnProperty has been overridden, then calling
// obj.hasOwnProperty(prop) will break.
// See: https://github.com/joyent/node/issues/1707
function hasOwnProperty(obj, prop) {
return Object.prototype.hasOwnProperty.call(obj, prop);
}
module.exports = function(qs, sep, eq, options) {
sep = sep || '&';
eq = eq || '=';
var obj = {};
if (typeof qs !== 'string' || qs.length === 0) {
return obj;
}
var regexp = /\+/g;
qs = qs.split(sep);
var maxKeys = 1000;
if (options && typeof options.maxKeys === 'number') {
maxKeys = options.maxKeys;
}
var len = qs.length;
// maxKeys <= 0 means that we should not limit keys count
if (maxKeys > 0 && len > maxKeys) {
len = maxKeys;
}
for (var i = 0; i < len; ++i) {
var x = qs[i].replace(regexp, '%20'),
idx = x.indexOf(eq),
kstr, vstr, k, v;
if (idx >= 0) {
kstr = x.substr(0, idx);
vstr = x.substr(idx + 1);
} else {
kstr = x;
vstr = '';
}
k = decodeURIComponent(kstr);
v = decodeURIComponent(vstr);
if (!hasOwnProperty(obj, k)) {
obj[k] = v;
} else if (isArray(obj[k])) {
obj[k].push(v);
} else {
obj[k] = [obj[k], v];
}
}
return obj;
};
var isArray = Array.isArray || function (xs) {
return Object.prototype.toString.call(xs) === '[object Array]';
};
},{}],3:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
'use strict';
var stringifyPrimitive = function(v) {
switch (typeof v) {
case 'string':
return v;
case 'boolean':
return v ? 'true' : 'false';
case 'number':
return isFinite(v) ? v : '';
default:
return '';
}
};
module.exports = function(obj, sep, eq, name) {
sep = sep || '&';
eq = eq || '=';
if (obj === null) {
obj = undefined;
}
if (typeof obj === 'object') {
return map(objectKeys(obj), function(k) {
var ks = encodeURIComponent(stringifyPrimitive(k)) + eq;
if (isArray(obj[k])) {
return map(obj[k], function(v) {
return ks + encodeURIComponent(stringifyPrimitive(v));
}).join(sep);
} else {
return ks + encodeURIComponent(stringifyPrimitive(obj[k]));
}
}).join(sep);
}
if (!name) return '';
return encodeURIComponent(stringifyPrimitive(name)) + eq +
encodeURIComponent(stringifyPrimitive(obj));
};
var isArray = Array.isArray || function (xs) {
return Object.prototype.toString.call(xs) === '[object Array]';
};
function map (xs, f) {
if (xs.map) return xs.map(f);
var res = [];
for (var i = 0; i < xs.length; i++) {
res.push(f(xs[i], i));
}
return res;
}
var objectKeys = Object.keys || function (obj) {
var res = [];
for (var key in obj) {
if (Object.prototype.hasOwnProperty.call(obj, key)) res.push(key);
}
return res;
};
},{}],4:[function(require,module,exports){
'use strict';
exports.decode = exports.parse = require('./decode');
exports.encode = exports.stringify = require('./encode');
},{"./decode":2,"./encode":3}],5:[function(require,module,exports){
module.exports = require('backbone');
},{"backbone":1}],6:[function(require,module,exports){
var Backbone, Collection,
__hasProp = {}.hasOwnProperty,
__extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; };
Backbone = require('backbone');
Collection = (function(_super) {
__extends(Collection, _super);
function Collection() {
return Collection.__super__.constructor.apply(this, arguments);
}
/*
Returns the model at the index immediately before the passed in model
instance. If the model instance is the first model in the collection, or
the model instance does not exist in the collection, this will return
null.
*/
Collection.prototype.before = function(model) {
var index;
index = this.indexOf(model);
if (index === -1 || index === 0) {
return null;
}
return this.at(index - 1);
};
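/*
Usage sketch (models a, b, c are hypothetical members of the collection):
  coll.before(b)  returns a
  coll.before(a)  returns null (a is first)
  coll.after(b)   returns c (see below)
*/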
/*
Returns the model at the index immediately after the passed in model
instance. If the model instance is the last model in the collection, or
the model instance does not exist in the collection, this will return
null.
*/
Collection.prototype.after = function(model) {
var index;
index = this.indexOf(model);
if (index === -1 || index === this.length - 1) {
return null;
}
return this.at(index + 1);
};
/*
Convenience function for getting an array of all the models in a
collection
*/
Collection.prototype.all = function() {
return this.models.slice();
};
return Collection;
})(Backbone.Collection);
module.exports = Collection;
},{"backbone":1}],7:[function(require,module,exports){
var Bam;
module.exports = Bam = {
Backbone: require('./backbone'),
Router: require('./router'),
View: require('./view'),
Model: require('./model'),
Collection: require('./collection')
};
},{"./backbone":5,"./collection":6,"./model":8,"./router":9,"./view":10}],8:[function(require,module,exports){
var Backbone, DEFAULT_CASTS, Model, any, map, _ref,
__hasProp = {}.hasOwnProperty,
__extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; };
Backbone = require('backbone');
_ref = require('underscore'), map = _ref.map, any = _ref.any;
DEFAULT_CASTS = {
string: function(v) {
return v + '';
},
int: function(v) {
return Math.floor(+v);
},
number: function(v) {
return +v;
},
date: function(v) {
return new Date(v);
},
boolean: function(v) {
return !!v;
}
};
Model = (function(_super) {
__extends(Model, _super);
function Model() {
return Model.__super__.constructor.apply(this, arguments);
}
/*
Allows derived get values. The format is:
derived:
foo:
deps: ['bar', 'baz']
value: (bar, baz) -> bar + ' ' + baz
Your deps define which properties will be passed to the value function and
in what order. They're also used to trigger change events for derived values
i.e., if a dep changes the derived value will trigger a change too.
*/
Model.prototype.derived = {};
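/*
Example sketch (names are hypothetical): with the 'foo' definition above,
model.get('foo') is computed from the current 'bar' and 'baz', and setting
either dep also triggers a 'change:foo' event with the new derived value.
*/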
/*
Allows casting specific keys. The format is:
cast:
timestamp: (v) -> moment(v)
bar: 'string'
baz: 'int'
You can either provide your own function or use a provided basic cast. These
include:
* `'string'`: `(v) -> v + ''`
* `'int'`: `(v) -> Math.floor(+v)`
* `'number'`: `(v) -> +v`
* `'date'`: `(v) -> new Date(v)`
* `'boolean'`: (v) -> !!v
Doesn't cast derived or null values.
*/
Model.prototype.cast = {};
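/*
Example sketch (keys are hypothetical): with cast = { baz: 'int' },
model.set('baz', '42') stores the number 42. Null values skip casting,
and a cast function that throws yields null for that key.
*/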
/*
Returns the model after this model in its collection. If it's not in a
collection this will return null.
*/
Model.prototype.next = function() {
var _ref1;
return (_ref1 = this.collection) != null ? _ref1.after(this) : void 0;
};
/*
Returns the model before this model in its collection. If it's not in a
collection this will return null.
*/
Model.prototype.prev = function() {
var _ref1;
return (_ref1 = this.collection) != null ? _ref1.before(this) : void 0;
};
/*
Returns a clone of the attributes object.
*/
Model.prototype.getAttributes = function() {
return Backbone.$.extend(true, {}, this.attributes);
};
/*
Override get to allow default value and derived values.
*/
Model.prototype.get = function(key, defaultValue) {
var ret;
if (this.derived[key]) {
ret = this._derive(this.derived[key]);
} else {
ret = Model.__super__.get.call(this, key);
}
if (ret === void 0) {
return defaultValue;
} else {
return ret;
}
};
/*
Derive a value from a definition
*/
Model.prototype._derive = function(definition) {
var args;
args = map(definition.deps, (function(_this) {
return function(key) {
return _this.get(key);
};
})(this));
return definition.value.apply(definition, args);
};
/*
Override the set method to allow for casting as data comes in.
*/
Model.prototype.set = function(key, val, options) {
var attrs, changed, definition, derived, ret, _ref1;
if (typeof key === 'object') {
attrs = key;
options = val;
} else {
attrs = {};
attrs[key] = val;
}
for (key in attrs) {
val = attrs[key];
if (val === null) {
continue;
}
if (this.cast[key]) {
attrs[key] = this._cast(val, this.cast[key]);
}
}
ret = Model.__super__.set.call(this, attrs, options);
_ref1 = this.derived;
for (derived in _ref1) {
definition = _ref1[derived];
changed = map(definition.deps, function(key) {
return attrs.hasOwnProperty(key);
});
if (any(changed)) {
this.trigger("change:" + derived, this._derive(definition));
}
}
return ret;
};
/*
Take a value, and a casting definition and perform the cast
*/
Model.prototype._cast = function(value, cast) {
var error;
try {
return value = this._getCastFunc(cast)(value);
} catch (_error) {
error = _error;
return value = null;
} finally {
return value;
}
};
/*
Given a casting definition, return a function that should perform the cast
*/
Model.prototype._getCastFunc = function(cast) {
var _ref1;
if (typeof cast === 'function') {
return cast;
}
return (_ref1 = DEFAULT_CASTS[cast]) != null ? _ref1 : function(v) {
return v;
};
};
return Model;
})(Backbone.Model);
module.exports = Model;
},{"backbone":1,"underscore":1}],9:[function(require,module,exports){
var Backbone, Router, difference, extend, getIndexes, getNames, isFunction, isRegExp, keys, map, object, pluck, process, querystring, sortBy, splice, zip, _,
__hasProp = {}.hasOwnProperty,
__extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
__slice = [].slice;
Backbone = require('backbone');
querystring = require('querystring');
_ = require('underscore');
extend = _.extend, object = _.object, isRegExp = _.isRegExp, isFunction = _.isFunction, zip = _.zip, pluck = _.pluck, sortBy = _.sortBy, keys = _.keys;
difference = _.difference, map = _.map;
getNames = function(string) {
var ret;
ret = [];
ret.push.apply(ret, process(string, /(\(\?)?:\w+/g));
ret.push.apply(ret, process(string, /\*\w+/g));
return ret;
};
process = function(string, regex) {
var indexes, matches, _ref;
matches = (_ref = string.match(regex)) != null ? _ref : [];
indexes = getIndexes(string, regex);
return zip(matches, indexes);
};
getIndexes = function(string, regex) {
var ret;
ret = [];
while (regex.test(string)) {
ret.push(regex.lastIndex);
}
return ret;
};
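/*
String splice helper: returns source with the [from, to) span replaced;
replacement defaults to '', i.e. plain deletion.
*/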
splice = function(source, from, to, replacement) {
if (replacement == null) {
replacement = '';
}
return source.slice(0, from) + replacement + source.slice(to);
};
Router = (function(_super) {
__extends(Router, _super);
/*
Override so our _routes object is unique to each router. I hate this side of
js.
*/
function Router() {
var args;
args = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
this._routes = {};
Router.__super__.constructor.apply(this, args);
}
/*
Override route to perform some subtle tweaks! Namely, storing raw string
routes for reverse routing and passing the name to the buildRequest function
*/
Router.prototype.route = function(route, name, callback) {
if (!isRegExp(route)) {
this._routes[name] = route;
route = this._routeToRegExp(route);
}
if (isFunction(name)) {
callback = name;
name = '';
}
if (!callback) {
callback = this[name];
}
return Backbone.history.route(route, (function(_this) {
return function(fragment) {
var req;
req = _this._buildRequest(route, fragment, name);
_this.execute(callback, req);
_this.trigger.apply(_this, ['route:' + name, req]);
_this.trigger('route', name, req);
return Backbone.history.trigger('route', _this, name, req);
};
})(this));
};
/*
Store names of parameters in a property of route
*/
Router.prototype._routeToRegExp = function(route) {
var names, ret;
ret = Router.__super__._routeToRegExp.call(this, route);
names = getNames(route);
ret.names = map(pluck(sortBy(names, '1'), '0'), function(s) {
return s.slice(1);
});
return ret;
};
/*
Create a request object. It should have the route name, named params as
keys with their values and a query object which is the query params, an<|fim▁hole|>
Router.prototype._buildRequest = function(route, fragment, name) {
var names, query, req, values, _ref, _ref1;
values = this._extractParameters(route, fragment);
query = fragment.split('?').slice(1).join('?');
if (values[values.length - 1] === query) {
values = values.slice(0, -1);
}
names = (_ref = route.names) != null ? _ref : map(values, function(v, i) {
return i;
});
req = {
route: (_ref1 = this._routes[name]) != null ? _ref1 : route,
fragment: fragment,
name: name,
values: values,
params: object(names, values),
query: querystring.parse(query)
};
return req;
};
/*
No-op to stop the routes property being used
*/
Router.prototype._bindRoutes = function() {};
/*
Rather than the default backbone behaviour of applying the args to the
callback, call the callback with the request object.
*/
Router.prototype.execute = function(callback, req) {
if (callback) {
return callback.call(this, req);
}
};
/*
Reverse a named route with a barebones request object.
*/
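/*
Example sketch (route and params are hypothetical): for a registered route
'users/:id(/:tab)' named 'user',
  router.reverse('user', { params: { id: 5 }, query: { sort: 'asc' } })
returns 'users/5?sort=asc' - the optional '(/:tab)' segment is dropped
because no 'tab' param was supplied.
*/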
Router.prototype.reverse = function(name, req) {
var diff, lastIndex, nameds, names, optional, optionals, params, query, ret, route, segment, value, _i, _j, _len, _len1, _ref, _ref1, _ref2, _ref3, _ref4;
route = this._routes[name];
if (!route) {
return null;
}
ret = route;
params = (_ref = req.params) != null ? _ref : {};
query = (_ref1 = req.query) != null ? _ref1 : {};
names = keys(params);
optionals = process(route, /\((.*?)\)/g).reverse();
for (_i = 0, _len = optionals.length; _i < _len; _i++) {
_ref2 = optionals[_i], optional = _ref2[0], lastIndex = _ref2[1];
nameds = map(pluck(getNames(optional), '0'), function(s) {
return s.slice(1);
});
diff = difference(nameds, names).length;
if (nameds.length === 0 || diff !== 0) {
route = splice(route, lastIndex - optional.length, lastIndex);
} else {
route = splice(route, lastIndex - optional.length, lastIndex, optional.slice(1, -1));
}
}
nameds = getNames(route).reverse();
for (_j = 0, _len1 = nameds.length; _j < _len1; _j++) {
_ref3 = nameds[_j], segment = _ref3[0], lastIndex = _ref3[1];
value = (_ref4 = params[segment.slice(1)]) != null ? _ref4 : null;
if (value !== null) {
route = splice(route, lastIndex - segment.length, lastIndex, params[segment.slice(1)]);
}
}
query = querystring.stringify(query);
if (query) {
route += '?' + query;
}
return route;
};
return Router;
})(Backbone.Router);
module.exports = Router;
},{"backbone":1,"querystring":4,"underscore":1}],10:[function(require,module,exports){
var Backbone, View, difference, without, _ref,
__hasProp = {}.hasOwnProperty,
__extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
__indexOf = [].indexOf || function(item) { for (var i = 0, l = this.length; i < l; i++) { if (i in this && this[i] === item) return i; } return -1; },
__slice = [].slice;
Backbone = require('backbone');
_ref = require('underscore'), without = _ref.without, difference = _ref.difference;
View = (function(_super) {
__extends(View, _super);
View.prototype.parent = null;
View.prototype.children = null;
View.prototype.namespace = '';
/*
Ensure the classname is applied, then set the parent and children if any
are passed in. Does the normal backbone constructor and then does the
first state change.
*/
function View(options) {
var _ref1;
if (options == null) {
options = {};
}
this.children = [];
if (options.className) {
this.className = options.className;
}
if (options.namespace) {
this.namespace = options.namespace;
}
if (options.el) {
this._ensureClass(options.el);
}
if (options.parent) {
this.setParent(options.parent);
}
if ((_ref1 = options.children) != null ? _ref1.length : void 0) {
this.addChildren(options.children);
}
View.__super__.constructor.call(this, options);
}
/*
Used to ensure that the className property of the view is applied to an
el passed in as an option.
*/
View.prototype._ensureClass = function(el, className) {
if (className == null) {
className = this.className;
}
return Backbone.$(el).addClass(className);
};
/*
Adds a list of views as children of this view.
*/
View.prototype.addChildren = function(views) {
var view, _i, _len, _results;
_results = [];
for (_i = 0, _len = views.length; _i < _len; _i++) {
view = views[_i];
_results.push(this.addChild(view));
}
return _results;
};
/*
Adds a view as a child of this view.
*/
View.prototype.addChild = function(view) {
if (view.parent) {
view.unsetParent();
}
this.children.push(view);
return view.parent = this;
};
/*
Sets the parent view.
*/
View.prototype.setParent = function(parent) {
if (this.parent) {
this.unsetParent();
}
this.parent = parent;
return this.parent.children.push(this);
};
/*
Unsets the parent view.
*/
View.prototype.unsetParent = function() {
if (!this.parent) {
return;
}
return this.parent.removeChild(this);
};
/*
Parent and Child accessors.
*/
View.prototype.hasParent = function() {
return !!this.parent;
};
View.prototype.getParent = function() {
return this.parent;
};
View.prototype.hasChildren = function() {
return this.children.length;
};
View.prototype.getChildren = function() {
return this.children;
};
View.prototype.hasChild = function(view) {
return __indexOf.call(this.children, view) >= 0;
};
View.prototype.hasDescendant = function(view) {
var child, _i, _len, _ref1;
if (__indexOf.call(this.children, view) >= 0) {
return true;
}
_ref1 = this.children;
for (_i = 0, _len = _ref1.length; _i < _len; _i++) {
child = _ref1[_i];
if (child.hasDescendant(view)) {
return true;
}
}
return false;
};
View.prototype.removeChild = function(child) {
this.children = without(this.children, child);
return child.parent = null;
};
View.prototype.removeChildren = function(children) {
var child, _i, _len, _ref1, _results;
_ref1 = this.children;
_results = [];
for (_i = 0, _len = _ref1.length; _i < _len; _i++) {
child = _ref1[_i];
_results.push(this.removeChild(child));
}
return _results;
};
/*
Gets the root view for a particular view. Can be itself.
*/
View.prototype.root = function() {
var root;
root = this;
while (root.hasParent()) {
root = root.getParent();
}
return root;
};
/*
Calls remove on all child views before removing itself
*/
View.prototype.remove = function() {
this.children.forEach(function(child) {
return child.remove();
});
this.children = [];
this.parent = null;
this.off();
this.undelegateEvents();
return View.__super__.remove.call(this);
};
/*
Calls trigger on the root() object with the namespace added, and also on
itself without the namespace.
*/
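/*
Example sketch (namespace is hypothetical): for a child created with
namespace: 'sidebar' whose parent has no namespace, child.trigger('select', m)
fires 'select' on the child itself and 'sidebar.select' on each ancestor.
*/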
View.prototype.trigger = function() {
var args;
args = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
Backbone.View.prototype.trigger.apply(this, args);
if (this.namespace) {
args[0] = this.namespace + '.' + args[0];
}
if (this.parent) {
return this.parent.trigger.apply(this.parent, args);
}
};
return View;
})(Backbone.View);
module.exports = View;
},{"backbone":1,"underscore":1}]},{},[7])(7)
});<|fim▁end|>
|
empty object if no query params available.
*/
|
<|file_name|>CoreXT.Globals.ts<|end_file_name|><|fim▁begin|>// ###########################################################################################################################
// These are functions for creating global scope variables/references that eliminate/minimize collisions between conflicting scripts.
// Normally, each manifest and module gets its own local-global scope; however, in some cases, 3rd-party libraries do not
// expect or support dot-delimited object paths, so unfortunately a root global callback reference may be required in such cases.
// CoreXT.Globals contains functions to help deal with this as it relates to loading modules.
// Note: There's no need to use any of these functions directly from within manifest and module scripts. Each has a local reference
// using the identifiers 'this', 'manifest', or 'module' (accordingly), which provides functions for local-global scope storage.
// ###########################################################################################################################
/**
 * An empty object whose sole purpose is to store global properties by resource namespace (usually a URL). It exists as an
* alternative to using the global JavaScript host environment, but also supports it as well. The get/set methods always use
* named index based lookups, so no string concatenation is used, which makes the process many times faster.
* Note: It's never a good idea to put anything in the global HOST scope, as various frameworks/scripts might set conflicting
* property names. To be safe, make sure to always use the 'CoreXT.Globals.register()' function. It can create isolated
* global variables, and if necessary, also create a safer unique host global scope name.
*/
namespace CoreXT.Globals { //http://jsperf.com/string-concat-vs-nested-object-lookups
namespace("CoreXT", "Globals");
/** Internal: used when initializing CoreXT. */
var _globals: IndexedObject = CoreXT.Globals;
var _namespaces: { [index: string]: string } = {};
var _nsCount: number = 1;
/**
* Registers and initializes a global property for the specified resource, and returns the dot-delimited string reference (see CoreXT.Globals).
* Subsequent calls with the same resource and identifier name ignores the 'initialValue' and 'asHostGlobal' arguments, and simply returns the
* existing property path instead.
* @param {System.IO.ResourceRequest} resource The resource type object associated with the globals to create.
* @param {T} initialValue The initial value to set.
* @param {boolean} asHostGlobal If true, a host global scope unique variable name is returned. If false (default), a dot-delimited one is returned<|fim▁hole|> */
export function register<T>(resource: System.IO.IResourceRequest, name: string, initialValue: T, asHostGlobal?: boolean): string;
/**
* Registers and initializes a global property for the specified namespace, and returns the dot-delimited string reference (see CoreXT.Globals).
* Subsequent calls with the same namespace and identifier name ignores the 'initialValue' and 'asHostGlobal' arguments, and simply returns the
* existing property path instead.
* @param {string} namespace Any string that is unique to your application/framework/resource/etc. (usually a URI of some sort), and is used to group globals
* under a single object scope to prevent naming conflicts. When resources are used, the URL is used as the namespace.
* A windows-style GUID, MD5 hash, or SHA1+ hash is perfectly fine as well (to use as a safe unique namespace for this purpose).
* @param {T} initialValue The initial value to set.
* @param {boolean} asHostGlobal If true, a host global scope unique variable name is returned. If false (default), a dot-delimited one is returned
* instead which references the global variable within the CoreXT namespace related global scope (so as not to
* pollute the host's global scope).
* Some frameworks, such as the Google Maps API, support callbacks with dot-delimited names for nested objects to help
* prevent global scope pollution.
*/
export function register<T>(namespace: string, name: string, initialValue: T, asHostGlobal?: boolean): string;
export function register<T>(namespace: any, name: string, initialValue: T, asHostGlobal: boolean = false): string {
var nsID: string, nsglobals: { [index: string]: any }, alreadyRegistered: boolean = false;
if (typeof namespace == 'object' && namespace.url)
namespace = namespace.url;
if (!(namespace in _namespaces))
_namespaces[namespace] = nsID = '_' + _nsCount++;
else
nsID = _namespaces[namespace];
nsglobals = _globals[nsID];
if (!nsglobals)
_globals[nsID] = nsglobals = {};
// 'Already registered' must be tracked per name (not per namespace), per the doc above.
alreadyRegistered = name in nsglobals;
//?if (name in nsglobals)
//? throw System.Exception.from("The global variable name '" + name + "' already exists in the global namespace '" + namespace + "'.", namespace);
if (asHostGlobal) {
// ... set and return a host global reference ...
var hostGlobalName = "_" + CoreXT.ROOT_NAMESPACE + nsID + "_" + name;
if (!alreadyRegistered) {
nsglobals[name] = { "global": global, "hostGlobalName": hostGlobalName };// (any namespace global value referencing the global [window] scope is a redirect to lookup the value name there instead)
global[hostGlobalName] = initialValue;
}
return hostGlobalName;
} else {
// ... set and return a namespace global reference (only works for routines that support dot-delimited callback references) ...
if (!alreadyRegistered) nsglobals[name] = initialValue;
if (/^[A-Z_\$]+[A-Z0-9_\$]*$/gim.test(name)) // (if 'name' contains invalid identifier characters, then it needs to be referenced by index)
return CoreXT.ROOT_NAMESPACE + ".Globals." + nsID + "." + name;
else
return CoreXT.ROOT_NAMESPACE + ".Globals." + nsID + "['" + name.replace(/'/g, "\\'") + "']";
}
};
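// Usage sketch (URL, names and loader are hypothetical): some script loaders only
// accept a plain global callback name, so a host global is required; otherwise the
// dot-delimited CoreXT-scoped path keeps the host's global scope clean:
// var cb = register("https://example.com/maps", "onReady", () => { /* ... */ }, true);
// scriptUrl += "?callback=" + cb;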
/**
* Returns true if the specified global variable name is registered.
*/
export function exists<T>(resource: System.IO.IResourceRequest, name: string): boolean;
/**
* Returns true if the specified global variable name is registered.
*/
export function exists<T>(namespace: string, name: string): boolean;
export function exists<T>(namespace: any, name: string): boolean {
var namespace = namespace.url || ('' + namespace), nsID: string, nsglobals: { [index: string]: any };
nsID = _namespaces[namespace];
if (!nsID) return false;
nsglobals = _globals[nsID];
return name in nsglobals;
};
/**
* Erases the registered global variable (by setting it to 'undefined' - which is faster than deleting it).
* Returns true if successful.
*/
export function erase<T>(resource: System.IO.IResourceRequest, name: string): boolean;
export function erase<T>(namespace: string, name: string): boolean;
export function erase<T>(namespace: any, name: string): boolean {
var namespace = namespace.url || ('' + namespace), nsID: string, nsglobals: { [index: string]: any };
nsID = _namespaces[namespace];
if (!nsID) return false;
nsglobals = _globals[nsID];
if (!(name in nsglobals))
return false;
var existingValue = nsglobals[name];
if (existingValue && existingValue["global"] == global) {
var hgname = existingValue["hostGlobalName"];
delete global[hgname];
}
nsglobals[name] = void 0; // erase by setting to 'undefined' (as documented above), rather than 'delete'
return nsglobals[name] === void 0;
};
/**
* Clears all registered globals by releasing the associated global object for the specified resource's namespace
* and creating a new object. Any host globals are deleted first.
* Return true on success, and false if the namespace doesn't exist.
*/
export function clear<T>(resource: System.IO.IResourceRequest): boolean;
/**
* Clears all registered globals by releasing the associated global object for the specified resource's namespace
* and creating a new object. Any host globals are deleted first.
* Return true on success, and false if the namespace doesn't exist.
*/
export function clear<T>(namespace: string): boolean;
export function clear<T>(namespace: any): boolean {
var namespace = namespace.url || ('' + namespace), nsID: string, nsglobals: { [index: string]: any };
nsID = _namespaces[namespace];
if (!nsID) return false;
nsglobals = _globals[nsID];
for (var name in nsglobals) { // (clear any root globals first before resetting the namespace global instance)
var existingValue = nsglobals[name];
if (existingValue && existingValue["global"] == global)
delete global[existingValue["hostGlobalName"]];
}
_globals[nsID] = {};
return true;
};
/**
* Sets and returns a global property value.
*/
export function setValue<T>(resource: System.IO.IResourceRequest, name: string, value: T): T;
/**
* Sets and returns a global property value.
*/
export function setValue<T>(namespace: string, name: string, value: T): T;
export function setValue<T>(namespace: any, name: string, value: T): T {
var namespace = namespace.url || ('' + namespace), nsID: string, nsglobals: { [index: string]: any };
nsID = _namespaces[namespace];
if (!nsID) {
//?throw System.Exception.from("The namespace '" + namespace + "' does not exist - did you remember to call 'CoreXT.Globals.register()' first?", namespace);
register(namespace, name, value); // (implicitly register the namespace as a local global)
nsID = _namespaces[namespace];
}
nsglobals = _globals[nsID];
//?if (!(name in nsglobals))
//? throw System.Exception.from("The global variable name '" + name + "' was not found in the global namespace '" + namespace + "' - did you remember to call 'CoreXT.Globals.register()' first?", namespace);
var existingValue = nsglobals[name];
if (existingValue && existingValue["global"] == global) {
return global[existingValue["hostGlobalName"]] = value;
}
else return nsglobals[name] = value;
};
/**
* Gets a global property value.
*/
export function getValue<T>(resource: System.IO.IResourceRequest, name: string): T;
/**
* Gets a global property value.
*/
export function getValue<T>(namespace: string, name: string): T;
export function getValue<T>(namespace: any, name: string): T {
var namespace = namespace.url || ('' + namespace), nsID: string, nsglobals: { [index: string]: any };
nsID = _namespaces[namespace];
if (!nsID)
throw System.Exception.from("The namespace '" + namespace + "' does not exist - did you remember to call 'CoreXT.Globals.register()' first?", namespace);
nsglobals = _globals[nsID];
if (!(name in nsglobals))
return void 0;
var existingValue = nsglobals[name];
if (existingValue && existingValue["global"] == global) {
return global[existingValue["hostGlobalName"]];
}
else return nsglobals[name];
};
}
// ###########################################################################################################################<|fim▁end|>
|
* instead which references the global variable within the CoreXT namespace related global scope (so as not to
* pollute the host's global scope).
* Some frameworks, such as the Google Maps API, support callbacks with dot-delimited names for nested objects to help
* prevent global scope pollution.
|
<|file_name|>lookingGlass.js<|end_file_name|><|fim▁begin|>// -*- mode: js; js-indent-level: 4; indent-tabs-mode: nil -*-
const Clutter = imports.gi.Clutter;
const Cogl = imports.gi.Cogl;
const GLib = imports.gi.GLib;
const Gio = imports.gi.Gio;
const Gtk = imports.gi.Gtk;
const Meta = imports.gi.Meta;
const Pango = imports.gi.Pango;
const St = imports.gi.St;
const Shell = imports.gi.Shell;
const Signals = imports.signals;
const Lang = imports.lang;
const Mainloop = imports.mainloop;
const System = imports.system;
const History = imports.misc.history;
const ExtensionSystem = imports.ui.extensionSystem;
const ExtensionUtils = imports.misc.extensionUtils;
const ShellEntry = imports.ui.shellEntry;
const Tweener = imports.ui.tweener;
const Main = imports.ui.main;
const JsParse = imports.misc.jsParse;
const CHEVRON = '>>> ';
/* Imports...feel free to add here as needed */
var commandHeader = 'const Clutter = imports.gi.Clutter; ' +
'const GLib = imports.gi.GLib; ' +
'const GObject = imports.gi.GObject; ' +
'const Gio = imports.gi.Gio; ' +
'const Gtk = imports.gi.Gtk; ' +
'const Mainloop = imports.mainloop; ' +
'const Meta = imports.gi.Meta; ' +
'const Shell = imports.gi.Shell; ' +
'const Tp = imports.gi.TelepathyGLib; ' +
'const Main = imports.ui.main; ' +
'const Lang = imports.lang; ' +
'const Tweener = imports.ui.tweener; ' +
/* Utility functions...we should probably be able to use these
* in the shell core code too. */
'const stage = global.stage; ' +
/* Special lookingGlass functions */
'const inspect = Lang.bind(Main.lookingGlass, Main.lookingGlass.inspect); ' +
'const it = Main.lookingGlass.getIt(); ' +
'const r = Lang.bind(Main.lookingGlass, Main.lookingGlass.getResult); ';
const HISTORY_KEY = 'looking-glass-history';
// Time between tabs for them to count as a double-tab event
const AUTO_COMPLETE_DOUBLE_TAB_DELAY = 500;
const AUTO_COMPLETE_SHOW_COMPLETION_ANIMATION_DURATION = 0.2;
const AUTO_COMPLETE_GLOBAL_KEYWORDS = _getAutoCompleteGlobalKeywords();
function _getAutoCompleteGlobalKeywords() {
const keywords = ['true', 'false', 'null', 'new'];
// Don't add the private properties of window (i.e., ones starting with '_')
const windowProperties = Object.getOwnPropertyNames(window).filter(function(a){ return a.charAt(0) != '_' });
const headerProperties = JsParse.getDeclaredConstants(commandHeader);
return keywords.concat(windowProperties).concat(headerProperties);
}
<|fim▁hole|>
_init: function(entry) {
this._entry = entry;
this._entry.connect('key-press-event', Lang.bind(this, this._entryKeyPressEvent));
this._lastTabTime = global.get_current_time();
},
_processCompletionRequest: function(event) {
if (event.completions.length == 0) {
return;
}
// Unique match = go ahead and complete; multiple matches + single tab = complete the common starting string;
// multiple matches + double tab = emit a suggest event with all possible options
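// e.g. completions ['forEach', 'forEachRight']: a single tab completes the
// shared prefix 'forEach' and emits 'suggest'; a quick double tab just emits
// 'suggest' with the full candidate list.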
if (event.completions.length == 1) {
this.additionalCompletionText(event.completions[0], event.attrHead);
this.emit('completion', { completion: event.completions[0], type: 'whole-word' });
} else if (event.completions.length > 1 && event.tabType === 'single') {
let commonPrefix = JsParse.getCommonPrefix(event.completions);
if (commonPrefix.length > 0) {
this.additionalCompletionText(commonPrefix, event.attrHead);
this.emit('completion', { completion: commonPrefix, type: 'prefix' });
this.emit('suggest', { completions: event.completions});
}
} else if (event.completions.length > 1 && event.tabType === 'double') {
this.emit('suggest', { completions: event.completions});
}
},
_entryKeyPressEvent: function(actor, event) {
let cursorPos = this._entry.clutter_text.get_cursor_position();
let text = this._entry.get_text();
if (cursorPos != -1) {
text = text.slice(0, cursorPos);
}
if (event.get_key_symbol() == Clutter.Tab) {
let [completions, attrHead] = JsParse.getCompletions(text, commandHeader, AUTO_COMPLETE_GLOBAL_KEYWORDS);
let currTime = global.get_current_time();
if ((currTime - this._lastTabTime) < AUTO_COMPLETE_DOUBLE_TAB_DELAY) {
this._processCompletionRequest({ tabType: 'double',
completions: completions,
attrHead: attrHead });
} else {
this._processCompletionRequest({ tabType: 'single',
completions: completions,
attrHead: attrHead });
}
this._lastTabTime = currTime;
}
return Clutter.EVENT_PROPAGATE;
},
// Insert characters of text not already included in head at cursor position. i.e., if text="abc" and head="a",
// the string "bc" will be appended to this._entry
additionalCompletionText: function(text, head) {
let additionalCompletionText = text.slice(head.length);
let cursorPos = this._entry.clutter_text.get_cursor_position();
this._entry.clutter_text.insert_text(additionalCompletionText, cursorPos);
}
});
Signals.addSignalMethods(AutoComplete.prototype);
const Notebook = new Lang.Class({
Name: 'Notebook',
_init: function() {
this.actor = new St.BoxLayout({ vertical: true });
this.tabControls = new St.BoxLayout({ style_class: 'labels' });
this._selectedIndex = -1;
this._tabs = [];
},
appendPage: function(name, child) {
let labelBox = new St.BoxLayout({ style_class: 'notebook-tab',
reactive: true,
track_hover: true });
let label = new St.Button({ label: name });
label.connect('clicked', Lang.bind(this, function () {
this.selectChild(child);
return true;
}));
labelBox.add(label, { expand: true });
this.tabControls.add(labelBox);
let scrollview = new St.ScrollView({ x_fill: true, y_fill: true });
scrollview.get_hscroll_bar().hide();
scrollview.add_actor(child);
let tabData = { child: child,
labelBox: labelBox,
label: label,
scrollView: scrollview,
_scrollToBottom: false };
this._tabs.push(tabData);
scrollview.hide();
this.actor.add(scrollview, { expand: true });
let vAdjust = scrollview.vscroll.adjustment;
vAdjust.connect('changed', Lang.bind(this, function () { this._onAdjustScopeChanged(tabData); }));
vAdjust.connect('notify::value', Lang.bind(this, function() { this._onAdjustValueChanged(tabData); }));
if (this._selectedIndex == -1)
this.selectIndex(0);
},
_unselect: function() {
if (this._selectedIndex < 0)
return;
let tabData = this._tabs[this._selectedIndex];
tabData.labelBox.remove_style_pseudo_class('selected');
tabData.scrollView.hide();
this._selectedIndex = -1;
},
selectIndex: function(index) {
if (index == this._selectedIndex)
return;
if (index < 0) {
this._unselect();
this.emit('selection', null);
return;
}
// Focus the new tab before unmapping the old one
let tabData = this._tabs[index];
if (!tabData.scrollView.navigate_focus(null, Gtk.DirectionType.TAB_FORWARD, false))
this.actor.grab_key_focus();
this._unselect();
tabData.labelBox.add_style_pseudo_class('selected');
tabData.scrollView.show();
this._selectedIndex = index;
this.emit('selection', tabData.child);
},
selectChild: function(child) {
if (child == null)
this.selectIndex(-1);
else {
for (let i = 0; i < this._tabs.length; i++) {
let tabData = this._tabs[i];
if (tabData.child == child) {
this.selectIndex(i);
return;
}
}
}
},
scrollToBottom: function(index) {
let tabData = this._tabs[index];
tabData._scrollToBottom = true;
},
_onAdjustValueChanged: function (tabData) {
let vAdjust = tabData.scrollView.vscroll.adjustment;
if (vAdjust.value < (vAdjust.upper - vAdjust.lower - 0.5))
tabData._scrollToBottom = false;
},
_onAdjustScopeChanged: function (tabData) {
if (!tabData._scrollToBottom)
return;
let vAdjust = tabData.scrollView.vscroll.adjustment;
vAdjust.value = vAdjust.upper - vAdjust.page_size;
},
nextTab: function() {
let nextIndex = this._selectedIndex;
if (nextIndex < this._tabs.length - 1) {
++nextIndex;
}
this.selectIndex(nextIndex);
},
prevTab: function() {
let prevIndex = this._selectedIndex;
if (prevIndex > 0) {
--prevIndex;
}
this.selectIndex(prevIndex);
}
});
Signals.addSignalMethods(Notebook.prototype);
function objectToString(o) {
if (typeof(o) == typeof(objectToString)) {
// special case this since the default is way, way too verbose
return '<js function>';
} else {
return '' + o;
}
}
const ObjLink = new Lang.Class({
Name: 'ObjLink',
_init: function(lookingGlass, o, title) {
let text;
if (title)
text = title;
else
text = objectToString(o);
text = GLib.markup_escape_text(text, -1);
this._obj = o;
this.actor = new St.Button({ reactive: true,
track_hover: true,
style_class: 'shell-link',
label: text });
this.actor.get_child().single_line_mode = true;
this.actor.connect('clicked', Lang.bind(this, this._onClicked));
this._lookingGlass = lookingGlass;
},
_onClicked: function (link) {
this._lookingGlass.inspectObject(this._obj, this.actor);
}
});
const Result = new Lang.Class({
Name: 'Result',
_init: function(lookingGlass, command, o, index) {
this.index = index;
this.o = o;
this.actor = new St.BoxLayout({ vertical: true });
this._lookingGlass = lookingGlass;
let cmdTxt = new St.Label({ text: command });
cmdTxt.clutter_text.ellipsize = Pango.EllipsizeMode.END;
this.actor.add(cmdTxt);
let box = new St.BoxLayout({});
this.actor.add(box);
let resultTxt = new St.Label({ text: 'r(' + index + ') = ' });
resultTxt.clutter_text.ellipsize = Pango.EllipsizeMode.END;
box.add(resultTxt);
let objLink = new ObjLink(this._lookingGlass, o);
box.add(objLink.actor);
}
});
const WindowList = new Lang.Class({
Name: 'WindowList',
_init: function(lookingGlass) {
this.actor = new St.BoxLayout({ name: 'Windows', vertical: true, style: 'spacing: 8px' });
let tracker = Shell.WindowTracker.get_default();
this._updateId = Main.initializeDeferredWork(this.actor, Lang.bind(this, this._updateWindowList));
global.display.connect('window-created', Lang.bind(this, this._updateWindowList));
tracker.connect('tracked-windows-changed', Lang.bind(this, this._updateWindowList));
this._lookingGlass = lookingGlass;
},
_updateWindowList: function() {
this.actor.destroy_all_children();
let windows = global.get_window_actors();
let tracker = Shell.WindowTracker.get_default();
for (let i = 0; i < windows.length; i++) {
let metaWindow = windows[i].metaWindow;
// Avoid multiple connections
if (!metaWindow._lookingGlassManaged) {
metaWindow.connect('unmanaged', Lang.bind(this, this._updateWindowList));
metaWindow._lookingGlassManaged = true;
}
let box = new St.BoxLayout({ vertical: true });
this.actor.add(box);
let windowLink = new ObjLink(this._lookingGlass, metaWindow, metaWindow.title);
box.add(windowLink.actor, { x_align: St.Align.START, x_fill: false });
let propsBox = new St.BoxLayout({ vertical: true, style: 'padding-left: 6px;' });
box.add(propsBox);
propsBox.add(new St.Label({ text: 'wmclass: ' + metaWindow.get_wm_class() }));
let app = tracker.get_window_app(metaWindow);
if (app != null && !app.is_window_backed()) {
let icon = app.create_icon_texture(22);
let propBox = new St.BoxLayout({ style: 'spacing: 6px; ' });
propsBox.add(propBox);
propBox.add(new St.Label({ text: 'app: ' }), { y_fill: false });
let appLink = new ObjLink(this._lookingGlass, app, app.get_id());
propBox.add(appLink.actor, { y_fill: false });
propBox.add(icon, { y_fill: false });
} else {
propsBox.add(new St.Label({ text: '<untracked>' }));
}
}
}
});
Signals.addSignalMethods(WindowList.prototype);
const ObjInspector = new Lang.Class({
Name: 'ObjInspector',
_init: function(lookingGlass) {
this._obj = null;
this._previousObj = null;
this._parentList = [];
this.actor = new St.ScrollView({ pivot_point: new Clutter.Point({ x: 0.5, y: 0.5 }),
x_fill: true, y_fill: true });
this.actor.get_hscroll_bar().hide();
this._container = new St.BoxLayout({ name: 'LookingGlassPropertyInspector',
style_class: 'lg-dialog',
vertical: true });
this.actor.add_actor(this._container);
this._lookingGlass = lookingGlass;
},
selectObject: function(obj, skipPrevious) {
if (!skipPrevious)
this._previousObj = this._obj;
else
this._previousObj = null;
this._obj = obj;
this._container.destroy_all_children();
let hbox = new St.BoxLayout({ style_class: 'lg-obj-inspector-title' });
this._container.add_actor(hbox);
let label = new St.Label({ text: 'Inspecting: %s: %s'.format(typeof(obj),
objectToString(obj)) });
label.single_line_mode = true;
hbox.add(label, { expand: true, y_fill: false });
let button = new St.Button({ label: 'Insert', style_class: 'lg-obj-inspector-button' });
button.connect('clicked', Lang.bind(this, this._onInsert));
hbox.add(button);
if (this._previousObj != null) {
button = new St.Button({ label: 'Back', style_class: 'lg-obj-inspector-button' });
button.connect('clicked', Lang.bind(this, this._onBack));
hbox.add(button);
}
button = new St.Button({ style_class: 'window-close' });
button.connect('clicked', Lang.bind(this, this.close));
hbox.add(button);
if (typeof(obj) == typeof({})) {
let properties = [];
for (let propName in obj) {
properties.push(propName);
}
properties.sort();
for (let i = 0; i < properties.length; i++) {
let propName = properties[i];
let link;
try {
let prop = obj[propName];
link = new ObjLink(this._lookingGlass, prop).actor;
} catch (e) {
link = new St.Label({ text: '<error>' });
}
let hbox = new St.BoxLayout();
hbox.add(new St.Label({ text: propName + ': ' }));
hbox.add(link);
this._container.add_actor(hbox);
}
}
},
open: function(sourceActor) {
if (this._open)
return;
this._previousObj = null;
this._open = true;
this.actor.show();
if (sourceActor) {
this.actor.set_scale(0, 0);
Tweener.addTween(this.actor, { scale_x: 1, scale_y: 1,
transition: 'easeOutQuad',
time: 0.2 });
} else {
this.actor.set_scale(1, 1);
}
},
close: function() {
if (!this._open)
return;
this._open = false;
this.actor.hide();
this._previousObj = null;
this._obj = null;
},
_onInsert: function() {
let obj = this._obj;
this.close();
this._lookingGlass.insertObject(obj);
},
_onBack: function() {
this.selectObject(this._previousObj, true);
}
});
const RedBorderEffect = new Lang.Class({
Name: 'RedBorderEffect',
Extends: Clutter.Effect,
vfunc_paint: function() {
let actor = this.get_actor();
actor.continue_paint();
let color = new Cogl.Color();
color.init_from_4ub(0xff, 0, 0, 0xc4);
Cogl.set_source_color(color);
let geom = actor.get_allocation_geometry();
let width = 2;
// Four strips drawn in clockwise order: top, right, bottom, left
Cogl.rectangle(0, 0, geom.width, width);
Cogl.rectangle(geom.width - width, width,
geom.width, geom.height);
Cogl.rectangle(0, geom.height,
geom.width - width, geom.height - width);
Cogl.rectangle(0, geom.height - width,
width, width);
},
});
const Inspector = new Lang.Class({
Name: 'Inspector',
_init: function(lookingGlass) {
let container = new Shell.GenericContainer({ width: 0,
height: 0 });
container.connect('allocate', Lang.bind(this, this._allocate));
Main.uiGroup.add_actor(container);
let eventHandler = new St.BoxLayout({ name: 'LookingGlassDialog',
vertical: false,
reactive: true });
this._eventHandler = eventHandler;
container.add_actor(eventHandler);
this._displayText = new St.Label();
eventHandler.add(this._displayText, { expand: true });
eventHandler.connect('key-press-event', Lang.bind(this, this._onKeyPressEvent));
eventHandler.connect('button-press-event', Lang.bind(this, this._onButtonPressEvent));
eventHandler.connect('scroll-event', Lang.bind(this, this._onScrollEvent));
eventHandler.connect('motion-event', Lang.bind(this, this._onMotionEvent));
Clutter.grab_pointer(eventHandler);
Clutter.grab_keyboard(eventHandler);
// this._target is the actor currently shown by the inspector.
// this._pointerTarget is the actor directly under the pointer.
// Normally these are the same, but if you use the scroll wheel
// to drill down, they'll diverge until you either scroll back
// out, or move the pointer outside of _pointerTarget.
this._target = null;
this._pointerTarget = null;
this._lookingGlass = lookingGlass;
},
_allocate: function(actor, box, flags) {
if (!this._eventHandler)
return;
let primary = Main.layoutManager.primaryMonitor;
let [minWidth, minHeight, natWidth, natHeight] =
this._eventHandler.get_preferred_size();
let childBox = new Clutter.ActorBox();
childBox.x1 = primary.x + Math.floor((primary.width - natWidth) / 2);
childBox.x2 = childBox.x1 + natWidth;
childBox.y1 = primary.y + Math.floor((primary.height - natHeight) / 2);
childBox.y2 = childBox.y1 + natHeight;
this._eventHandler.allocate(childBox, flags);
},
_close: function() {
Clutter.ungrab_pointer();
Clutter.ungrab_keyboard();
this._eventHandler.destroy();
this._eventHandler = null;
this.emit('closed');
},
_onKeyPressEvent: function (actor, event) {
if (event.get_key_symbol() == Clutter.Escape)
this._close();
return Clutter.EVENT_STOP;
},
_onButtonPressEvent: function (actor, event) {
if (this._target) {
let [stageX, stageY] = event.get_coords();
this.emit('target', this._target, stageX, stageY);
}
this._close();
return Clutter.EVENT_STOP;
},
_onScrollEvent: function (actor, event) {
switch (event.get_scroll_direction()) {
case Clutter.ScrollDirection.UP:
// select parent
let parent = this._target.get_parent();
if (parent != null) {
this._target = parent;
this._update(event);
}
break;
case Clutter.ScrollDirection.DOWN:
// select child
if (this._target != this._pointerTarget) {
let child = this._pointerTarget;
while (child) {
let parent = child.get_parent();
if (parent == this._target)
break;
child = parent;
}
if (child) {
this._target = child;
this._update(event);
}
}
break;
default:
break;
}
return Clutter.EVENT_STOP;
},
_onMotionEvent: function (actor, event) {
this._update(event);
return Clutter.EVENT_STOP;
},
_update: function(event) {
let [stageX, stageY] = event.get_coords();
let target = global.stage.get_actor_at_pos(Clutter.PickMode.ALL,
stageX,
stageY);
if (target != this._pointerTarget)
this._target = target;
this._pointerTarget = target;
let position = '[inspect x: ' + stageX + ' y: ' + stageY + ']';
this._displayText.text = '';
this._displayText.text = position + ' ' + this._target;
this._lookingGlass.setBorderPaintTarget(this._target);
}
});
Signals.addSignalMethods(Inspector.prototype);
const Extensions = new Lang.Class({
Name: 'Extensions',
_init: function(lookingGlass) {
this._lookingGlass = lookingGlass;
this.actor = new St.BoxLayout({ vertical: true,
name: 'lookingGlassExtensions' });
this._noExtensions = new St.Label({ style_class: 'lg-extensions-none',
text: _("No extensions installed") });
this._numExtensions = 0;
this._extensionsList = new St.BoxLayout({ vertical: true,
style_class: 'lg-extensions-list' });
this._extensionsList.add(this._noExtensions);
this.actor.add(this._extensionsList);
for (let uuid in ExtensionUtils.extensions)
this._loadExtension(null, uuid);
ExtensionSystem.connect('extension-loaded',
Lang.bind(this, this._loadExtension));
},
_loadExtension: function(o, uuid) {
let extension = ExtensionUtils.extensions[uuid];
// There can be cases where we create dummy extension metadata
// that's not really a proper extension. Don't bother with these.
if (!extension.metadata.name)
return;
let extensionDisplay = this._createExtensionDisplay(extension);
if (this._numExtensions == 0)
this._extensionsList.remove_actor(this._noExtensions);
this._numExtensions ++;
this._extensionsList.add(extensionDisplay);
},
_onViewSource: function (actor) {
let extension = actor._extension;
let uri = extension.dir.get_uri();
Gio.app_info_launch_default_for_uri(uri, global.create_app_launch_context(0, -1));
this._lookingGlass.close();
},
_onWebPage: function (actor) {
let extension = actor._extension;
Gio.app_info_launch_default_for_uri(extension.metadata.url, global.create_app_launch_context(0, -1));
this._lookingGlass.close();
},
_onViewErrors: function (actor) {
let extension = actor._extension;
let shouldShow = !actor._isShowing;
if (shouldShow) {
let errors = extension.errors;
let errorDisplay = new St.BoxLayout({ vertical: true });
if (errors && errors.length) {
for (let i = 0; i < errors.length; i ++)
errorDisplay.add(new St.Label({ text: errors[i] }));
} else {
/* Translators: argument is an extension UUID. */
let message = _("%s has not emitted any errors.").format(extension.uuid);
errorDisplay.add(new St.Label({ text: message }));
}
actor._errorDisplay = errorDisplay;
actor._parentBox.add(errorDisplay);
actor.label = _("Hide Errors");
} else {
actor._errorDisplay.destroy();
actor._errorDisplay = null;
actor.label = _("Show Errors");
}
actor._isShowing = shouldShow;
},
_stateToString: function(extensionState) {
switch (extensionState) {
case ExtensionSystem.ExtensionState.ENABLED:
return _("Enabled");
case ExtensionSystem.ExtensionState.DISABLED:
case ExtensionSystem.ExtensionState.INITIALIZED:
return _("Disabled");
case ExtensionSystem.ExtensionState.ERROR:
return _("Error");
case ExtensionSystem.ExtensionState.OUT_OF_DATE:
return _("Out of date");
case ExtensionSystem.ExtensionState.DOWNLOADING:
return _("Downloading");
}
return 'Unknown'; // Not translated, shouldn't appear
},
_createExtensionDisplay: function(extension) {
let box = new St.BoxLayout({ style_class: 'lg-extension', vertical: true });
let name = new St.Label({ style_class: 'lg-extension-name',
text: extension.metadata.name });
box.add(name, { expand: true });
let description = new St.Label({ style_class: 'lg-extension-description',
text: extension.metadata.description || 'No description' });
box.add(description, { expand: true });
let metaBox = new St.BoxLayout({ style_class: 'lg-extension-meta' });
box.add(metaBox);
let stateString = this._stateToString(extension.state);
let state = new St.Label({ style_class: 'lg-extension-state',
text: stateString });
metaBox.add(state);
let viewsource = new St.Button({ reactive: true,
track_hover: true,
style_class: 'shell-link',
label: _("View Source") });
viewsource._extension = extension;
viewsource.connect('clicked', Lang.bind(this, this._onViewSource));
metaBox.add(viewsource);
if (extension.metadata.url) {
let webpage = new St.Button({ reactive: true,
track_hover: true,
style_class: 'shell-link',
label: _("Web Page") });
webpage._extension = extension;
webpage.connect('clicked', Lang.bind(this, this._onWebPage));
metaBox.add(webpage);
}
let viewerrors = new St.Button({ reactive: true,
track_hover: true,
style_class: 'shell-link',
label: _("Show Errors") });
viewerrors._extension = extension;
viewerrors._parentBox = box;
viewerrors._isShowing = false;
viewerrors.connect('clicked', Lang.bind(this, this._onViewErrors));
metaBox.add(viewerrors);
return box;
}
});
const LookingGlass = new Lang.Class({
Name: 'LookingGlass',
_init : function() {
this._borderPaintTarget = null;
this._redBorderEffect = new RedBorderEffect();
this._open = false;
this._offset = 0;
this._results = [];
// Sort of magic, but...eh.
this._maxItems = 150;
this.actor = new St.BoxLayout({ name: 'LookingGlassDialog',
style_class: 'lg-dialog',
vertical: true,
visible: false,
reactive: true });
this.actor.connect('key-press-event', Lang.bind(this, this._globalKeyPressEvent));
this._interfaceSettings = new Gio.Settings({ schema_id: 'org.gnome.desktop.interface' });
this._interfaceSettings.connect('changed::monospace-font-name',
Lang.bind(this, this._updateFont));
this._updateFont();
// We want it to appear to slide out from underneath the panel
Main.uiGroup.add_actor(this.actor);
Main.uiGroup.set_child_below_sibling(this.actor,
Main.layoutManager.panelBox);
Main.layoutManager.panelBox.connect('allocation-changed',
Lang.bind(this, this._queueResize));
Main.layoutManager.keyboardBox.connect('allocation-changed',
Lang.bind(this, this._queueResize));
this._objInspector = new ObjInspector(this);
Main.uiGroup.add_actor(this._objInspector.actor);
this._objInspector.actor.hide();
let toolbar = new St.BoxLayout({ name: 'Toolbar' });
this.actor.add_actor(toolbar);
let inspectIcon = new St.Icon({ icon_name: 'gtk-color-picker',
icon_size: 24 });
toolbar.add_actor(inspectIcon);
inspectIcon.reactive = true;
inspectIcon.connect('button-press-event', Lang.bind(this, function () {
let inspector = new Inspector(this);
inspector.connect('target', Lang.bind(this, function(i, target, stageX, stageY) {
this._pushResult('inspect(' + Math.round(stageX) + ', ' + Math.round(stageY) + ')', target);
}));
inspector.connect('closed', Lang.bind(this, function() {
this.actor.show();
global.stage.set_key_focus(this._entry);
}));
this.actor.hide();
return Clutter.EVENT_STOP;
}));
let gcIcon = new St.Icon({ icon_name: 'gnome-fs-trash-full',
icon_size: 24 });
toolbar.add_actor(gcIcon);
gcIcon.reactive = true;
gcIcon.connect('button-press-event', Lang.bind(this, function () {
gcIcon.icon_name = 'gnome-fs-trash-empty';
System.gc();
this._timeoutId = Mainloop.timeout_add(500, Lang.bind(this, function () {
gcIcon.icon_name = 'gnome-fs-trash-full';
this._timeoutId = 0;
return GLib.SOURCE_REMOVE;
}));
GLib.Source.set_name_by_id(this._timeoutId, '[gnome-shell] gcIcon.icon_name = \'gnome-fs-trash-full\'');
return Clutter.EVENT_PROPAGATE;
}));
let notebook = new Notebook();
this._notebook = notebook;
this.actor.add(notebook.actor, { expand: true });
let emptyBox = new St.Bin();
toolbar.add(emptyBox, { expand: true });
toolbar.add_actor(notebook.tabControls);
this._evalBox = new St.BoxLayout({ name: 'EvalBox', vertical: true });
notebook.appendPage('Evaluator', this._evalBox);
this._resultsArea = new St.BoxLayout({ name: 'ResultsArea', vertical: true });
this._evalBox.add(this._resultsArea, { expand: true });
this._entryArea = new St.BoxLayout({ name: 'EntryArea' });
this._evalBox.add_actor(this._entryArea);
let label = new St.Label({ text: CHEVRON });
this._entryArea.add(label);
this._entry = new St.Entry({ can_focus: true });
ShellEntry.addContextMenu(this._entry);
this._entryArea.add(this._entry, { expand: true });
this._windowList = new WindowList(this);
notebook.appendPage('Windows', this._windowList.actor);
this._extensions = new Extensions(this);
notebook.appendPage('Extensions', this._extensions.actor);
this._entry.clutter_text.connect('activate', Lang.bind(this, function (o, e) {
// Hide any completions we are currently showing
this._hideCompletions();
let text = o.get_text();
// Ensure we don't get newlines in the command; the history file is
// newline-separated.
text = text.replace('\n', ' ');
// Strip leading and trailing whitespace
text = text.replace(/^\s+/g, '').replace(/\s+$/g, '');
if (text == '')
return true;
this._evaluate(text);
return true;
}));
this._history = new History.HistoryManager({ gsettingsKey: HISTORY_KEY,
entry: this._entry.clutter_text });
this._autoComplete = new AutoComplete(this._entry);
this._autoComplete.connect('suggest', Lang.bind(this, function(a,e) {
this._showCompletions(e.completions);
}));
// If a completion is completed unambiguously, the currently-displayed completion
// suggestions become irrelevant.
this._autoComplete.connect('completion', Lang.bind(this, function(a,e) {
if (e.type == 'whole-word')
this._hideCompletions();
}));
this._resize();
},
_updateFont: function() {
let fontName = this._interfaceSettings.get_string('monospace-font-name');
let fontDesc = Pango.FontDescription.from_string(fontName);
// We ignore everything but size and style; you'd be crazy to set your system-wide
// monospace font to be bold/oblique/etc. Could easily be added here.
this.actor.style =
'font-size: ' + fontDesc.get_size() / 1024. + (fontDesc.get_size_is_absolute() ? 'px' : 'pt') + ';'
+ 'font-family: "' + fontDesc.get_family() + '";';
},
setBorderPaintTarget: function(obj) {
if (this._borderPaintTarget != null)
this._borderPaintTarget.remove_effect(this._redBorderEffect);
this._borderPaintTarget = obj;
if (this._borderPaintTarget != null)
this._borderPaintTarget.add_effect(this._redBorderEffect);
},
_pushResult: function(command, obj) {
let index = this._results.length + this._offset;
let result = new Result(this, CHEVRON + command, obj, index);
this._results.push(result);
this._resultsArea.add(result.actor);
if (obj instanceof Clutter.Actor)
this.setBorderPaintTarget(obj);
let children = this._resultsArea.get_children();
if (children.length > this._maxItems) {
this._results.shift();
children[0].destroy();
this._offset++;
}
this._it = obj;
// Scroll to bottom
this._notebook.scrollToBottom(0);
},
_showCompletions: function(completions) {
if (!this._completionActor) {
this._completionActor = new St.Label({ name: 'LookingGlassAutoCompletionText', style_class: 'lg-completions-text' });
this._completionActor.clutter_text.ellipsize = Pango.EllipsizeMode.NONE;
this._completionActor.clutter_text.line_wrap = true;
this._evalBox.insert_child_below(this._completionActor, this._entryArea);
}
this._completionActor.set_text(completions.join(', '));
// Setting the height to -1 allows us to get its actual preferred height rather than
// whatever was last given in set_height by Tweener.
this._completionActor.set_height(-1);
let [minHeight, naturalHeight] = this._completionActor.get_preferred_height(this._resultsArea.get_width());
// Don't reanimate if we are already visible
if (this._completionActor.visible) {
this._completionActor.height = naturalHeight;
} else {
this._completionActor.show();
Tweener.removeTweens(this._completionActor);
Tweener.addTween(this._completionActor, { time: AUTO_COMPLETE_SHOW_COMPLETION_ANIMATION_DURATION / St.get_slow_down_factor(),
transition: 'easeOutQuad',
height: naturalHeight,
opacity: 255
});
}
},
_hideCompletions: function() {
if (this._completionActor) {
Tweener.removeTweens(this._completionActor);
Tweener.addTween(this._completionActor, { time: AUTO_COMPLETE_SHOW_COMPLETION_ANIMATION_DURATION / St.get_slow_down_factor(),
transition: 'easeOutQuad',
height: 0,
opacity: 0,
onComplete: Lang.bind(this, function () {
this._completionActor.hide();
})
});
}
},
_evaluate : function(command) {
this._history.addItem(command);
let fullCmd = commandHeader + command;
let resultObj;
try {
resultObj = eval(fullCmd);
} catch (e) {
resultObj = '<exception ' + e + '>';
}
this._pushResult(command, resultObj);
this._entry.text = '';
},
inspect: function(x, y) {
return global.stage.get_actor_at_pos(Clutter.PickMode.REACTIVE, x, y);
},
getIt: function () {
return this._it;
},
getResult: function(idx) {
return this._results[idx - this._offset].o;
},
toggle: function() {
if (this._open)
this.close();
else
this.open();
},
_queueResize: function() {
Meta.later_add(Meta.LaterType.BEFORE_REDRAW,
Lang.bind(this, function () { this._resize(); }));
},
_resize: function() {
let primary = Main.layoutManager.primaryMonitor;
let myWidth = primary.width * 0.7;
let availableHeight = primary.height - Main.layoutManager.keyboardBox.height;
let myHeight = Math.min(primary.height * 0.7, availableHeight * 0.9);
this.actor.x = primary.x + (primary.width - myWidth) / 2;
this._hiddenY = primary.y + Main.layoutManager.panelBox.height - myHeight - 4; // -4 to hide the top corners
this._targetY = this._hiddenY + myHeight;
this.actor.y = this._hiddenY;
this.actor.width = myWidth;
this.actor.height = myHeight;
this._objInspector.actor.set_size(Math.floor(myWidth * 0.8), Math.floor(myHeight * 0.8));
this._objInspector.actor.set_position(this.actor.x + Math.floor(myWidth * 0.1),
this._targetY + Math.floor(myHeight * 0.1));
},
insertObject: function(obj) {
this._pushResult('<insert>', obj);
},
inspectObject: function(obj, sourceActor) {
this._objInspector.open(sourceActor);
this._objInspector.selectObject(obj);
},
// Handle key events which are relevant for all tabs of the LookingGlass
_globalKeyPressEvent : function(actor, event) {
let symbol = event.get_key_symbol();
let modifierState = event.get_state();
if (symbol == Clutter.Escape) {
if (this._objInspector.actor.visible) {
this._objInspector.close();
} else {
this.close();
}
return Clutter.EVENT_STOP;
}
// Ctrl+PgUp and Ctrl+PgDown switches tabs in the notebook view
if (modifierState & Clutter.ModifierType.CONTROL_MASK) {
if (symbol == Clutter.KEY_Page_Up) {
this._notebook.prevTab();
} else if (symbol == Clutter.KEY_Page_Down) {
this._notebook.nextTab();
}
}
return Clutter.EVENT_PROPAGATE;
},
open : function() {
if (this._open)
return;
if (!Main.pushModal(this._entry, { keybindingMode: Shell.KeyBindingMode.LOOKING_GLASS }))
return;
this._notebook.selectIndex(0);
this.actor.show();
this._open = true;
this._history.lastItem();
Tweener.removeTweens(this.actor);
        // We inversely compensate for the slow-down factor so you can change
        // it through LookingGlass without long waits.
Tweener.addTween(this.actor, { time: 0.5 / St.get_slow_down_factor(),
transition: 'easeOutQuad',
y: this._targetY
});
},
close : function() {
if (!this._open)
return;
this._objInspector.actor.hide();
this._open = false;
Tweener.removeTweens(this.actor);
this.setBorderPaintTarget(null);
Main.popModal(this._entry);
Tweener.addTween(this.actor, { time: Math.min(0.5 / St.get_slow_down_factor(), 0.5),
transition: 'easeOutQuad',
y: this._hiddenY,
onComplete: Lang.bind(this, function () {
this.actor.hide();
})
});
}
});
Signals.addSignalMethods(LookingGlass.prototype);<|fim▁end|>
|
const AutoComplete = new Lang.Class({
Name: 'AutoComplete',
|
<|file_name|>sample_index.rs<|end_file_name|><|fim▁begin|>use std::{
cmp::Ordering,
error::Error,
fmt,
ops::{Add, AddAssign, Sub},
};
use metadata::Duration;
use super::{SampleIndexRange, Timestamp};
#[derive(Debug)]
pub struct DecrementError;
impl fmt::Display for DecrementError {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
f.write_str("overflow while decrementing")
}
}
impl Error for DecrementError {}
#[derive(Clone, Copy, Default, Debug, Eq, Ord, PartialEq, PartialOrd)]
pub struct SampleIndex(usize);
impl SampleIndex {
pub fn new(value: usize) -> Self {
SampleIndex(value)
}
#[track_caller]
pub fn from_ts(ts: Timestamp, sample_duration: Duration) -> Self {
SampleIndex((ts.as_u64() / sample_duration.as_u64()) as usize)
}
#[track_caller]
pub fn snap_to(self, sample_step: SampleIndexRange) -> SampleIndex {
SampleIndex(self.0 / sample_step.as_usize() * sample_step.as_usize())
}
#[track_caller]
pub fn as_ts(self, sample_duration: Duration) -> Timestamp {
Timestamp::new(self.0 as u64 * sample_duration.as_u64())
}
pub fn as_usize(self) -> usize {
self.0
}
pub fn as_u64(self) -> u64 {
self.0 as u64
}
pub fn try_dec(&mut self) -> Result<(), DecrementError> {
if self.0 > 0 {
*self = SampleIndex(self.0 - 1);
Ok(())
} else {
Err(DecrementError)
}
}
pub fn inc(&mut self) {
*self = SampleIndex(self.0 + 1);
}
#[must_use = "this returns the result of the operation, without modifying the original"]
pub fn checked_sub(self, rhs: Self) -> Option<SampleIndexRange> {
self.0.checked_sub(rhs.0).map(SampleIndexRange::new)
}
#[must_use = "this returns the result of the operation, without modifying the original"]
pub fn saturating_sub(self, rhs: Self) -> SampleIndexRange {
SampleIndexRange::new(self.0.saturating_sub(rhs.0))
}
#[must_use = "this returns the result of the operation, without modifying the original"]
pub fn saturating_sub_range(self, rhs: SampleIndexRange) -> SampleIndex {
SampleIndex::new(self.0.saturating_sub(rhs.as_usize()))
}
}
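// Usage sketch (hypothetical values; the exact way a `metadata::Duration` is
// constructed is not shown in this file, so it is elided here):
//   let idx = SampleIndex::from_ts(Timestamp::new(5_000), sample_duration);
//   // with sample_duration.as_u64() == 1_000 this yields idx 5
//   assert_eq!(idx.snap_to(SampleIndexRange::new(4)), SampleIndex::new(4));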
impl From<usize> for SampleIndex {
fn from(value: usize) -> Self {
Self(value)
}
}
impl From<u64> for SampleIndex {
fn from(value: u64) -> Self {
Self(value as usize)
}
}
impl From<SampleIndexRange> for SampleIndex {
fn from(range: SampleIndexRange) -> Self {<|fim▁hole|> Self(range.as_usize())
}
}
impl Sub for SampleIndex {
type Output = SampleIndexRange;
#[track_caller]
fn sub(self, rhs: SampleIndex) -> SampleIndexRange {
SampleIndexRange::new(self.0 - rhs.0)
}
}
impl Add<SampleIndexRange> for SampleIndex {
type Output = SampleIndex;
#[track_caller]
fn add(self, rhs: SampleIndexRange) -> SampleIndex {
SampleIndex(self.0 + rhs.as_usize())
}
}
impl AddAssign<SampleIndexRange> for SampleIndex {
#[track_caller]
fn add_assign(&mut self, rhs: SampleIndexRange) {
*self = SampleIndex(self.0 + rhs.as_usize());
}
}
impl Sub<SampleIndexRange> for SampleIndex {
type Output = SampleIndex;
#[track_caller]
fn sub(self, rhs: SampleIndexRange) -> SampleIndex {
SampleIndex(self.0 - rhs.as_usize())
}
}
impl PartialOrd<SampleIndexRange> for SampleIndex {
fn partial_cmp(&self, rhs: &SampleIndexRange) -> Option<Ordering> {
Some(self.0.cmp(&rhs.as_usize()))
}
}
impl PartialEq<SampleIndexRange> for SampleIndex {
fn eq(&self, rhs: &SampleIndexRange) -> bool {
self.0 == rhs.as_usize()
}
}
impl fmt::Display for SampleIndex {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "idx {}", self.0)
}
}<|fim▁end|>
| |
<|file_name|>filter.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2012 Dmitri Melikyan
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to permit
* persons to whom the Software is furnished to do so, subject to the
* following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
* NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
var nt;
var filterKeys;
var sampleNum;
exports.init = function() {
nt = global.nodetime;
filterKeys = {};
sampleNum = 0;
nt.on('sample', function(sample) {
if(!nt.headless && nt.sessionId) {
collectKeys(undefined, sample, 0);
sampleNum++;
if(sampleNum == 1 || sampleNum == 10) {
sendKeys();
}
}
});
setInterval(function() {
try {
sendKeys();
}
catch(e) {
nt.error(e);
}
}, 60000);
};
var collectKeys = function(key, obj, depth) {
if(depth > 20) return 0;
var isArray = Array.isArray(obj);
for(var prop in obj) {
if(prop.match(/^\_/)) continue;
if(typeof obj[prop] === 'object') {
collectKeys(prop, obj[prop], depth + 1);
}
else {
if(!isArray) {
filterKeys[prop] = true;
}
else {
filterKeys[key] = true;<|fim▁hole|> }
}
}
};
var sendKeys = function() {
var keys = [];
for(var prop in filterKeys) {
keys.push(prop);
}
keys = keys.sort(function(a, b) {
a = a.toLowerCase();
b = b.toLowerCase();
if(a > b) return 1;
if(a < b) return -1;
return 0;
});
if(keys.length > 0) {
nt.agent.send({cmd: 'updateFilterKeys', args: keys});
}
};
var PredicateFilter = function() {
}
exports.PredicateFilter = PredicateFilter;
PredicateFilter.prototype.preparePredicates = function(preds) {
preds.forEach(function(pred) {
try{
pred.valNum = parseFloat(pred.val)
}
catch(err) {
}
try{
if(pred.op === 'match') pred.valRe = new RegExp(pred.val);
if(typeof pred.val === 'string') pred.valLc = pred.val.toLowerCase();
}
catch(err) {
return nt.error(err);
}
});
this.preds = preds;
return true;
}
PredicateFilter.prototype.filter = function(sample) {
var matched = 0;
this.preds.forEach(function(pred) {
matched += walk(pred, sample, 0);
});
return (matched > 0);
};
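// Usage sketch (hypothetical predicate and sample shapes, matching the fields
// handled by walk()/test() below):
//   var pf = new PredicateFilter();
//   pf.preparePredicates([{key: 'URL', op: 'match', val: 'users'}]);
//   pf.filter({URL: '/users/42', ms: 120}); // => true if any predicate matches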
function walk(pred, obj, depth) {
if(depth > 20) return 0;
var matched = 0;
for(var prop in obj) {
var val = obj[prop];
if(val === undefined || val === null) {
continue;
}
else if(typeof val === 'object') {
matched += walk(pred, val, depth + 1);
}
else if((pred.key === '*' || pred.key === prop) && test(pred, val)) {
matched++;
}
if(matched) break;
}
return matched;
}
function test(pred, val) {
var ret = false;
if(typeof val === 'number') {
    if(!isNaN(pred.valNum)) {
if (pred.op === '==') {
ret = (val == pred.valNum);
}
else if (pred.op === '!=') {
ret = (val != pred.valNum);
}
else if (pred.op === '<') {
ret = (val < pred.valNum);
}
else if (pred.op === '>') {
ret = (val > pred.valNum);
}
}
}
else if(typeof val === 'string') {
if(pred.op === 'match' && pred.valRe) {
ret = pred.valRe.exec(val);
}
else if (pred.op === '==') {
ret = (val.toLowerCase() == pred.valLc);
}
else if (pred.op === '!=') {
ret = (val.toLowerCase() != pred.valLc);
}
}
return ret;
}<|fim▁end|>
| |
<|file_name|>delete_user_by_id_parameters.go<|end_file_name|><|fim▁begin|>// Code generated by go-swagger; DO NOT EDIT.
package users
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"net/http"
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime/middleware"
strfmt "github.com/go-openapi/strfmt"
)
// NewDeleteUserByIDParams creates a new DeleteUserByIDParams object
// with the default values initialized.
func NewDeleteUserByIDParams() DeleteUserByIDParams {
var ()
return DeleteUserByIDParams{}
}
// DeleteUserByIDParams contains all the bound params for the delete user by ID operation
// typically these are obtained from a http.Request
//
// swagger:parameters deleteUserByID
type DeleteUserByIDParams struct {
// HTTP Request Object
HTTPRequest *http.Request `json:"-"`
/*ID of user to delete
Required: true
In: path
*/
UserID string
}
// BindRequest both binds and validates a request, it assumes that complex things implement a Validatable(strfmt.Registry) error interface
// for simple values it will use straight method calls
func (o *DeleteUserByIDParams) BindRequest(r *http.Request, route *middleware.MatchedRoute) error {
var res []error
o.HTTPRequest = r
rUserID, rhkUserID, _ := route.Params.GetOK("userID")
if err := o.bindUserID(rUserID, rhkUserID, route.Formats); err != nil {
res = append(res, err)
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}<|fim▁hole|>
func (o *DeleteUserByIDParams) bindUserID(rawData []string, hasKey bool, formats strfmt.Registry) error {
var raw string
if len(rawData) > 0 {
raw = rawData[len(rawData)-1]
}
o.UserID = raw
return nil
}<|fim▁end|>
| |
<|file_name|>frog_queue_videos.py<|end_file_name|><|fim▁begin|>##################################################################################################
# Copyright (c) 2012 Brett Dixon
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the
# Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all<|fim▁hole|># FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
##################################################################################################
from optparse import make_option
from django.core.management.base import BaseCommand
import path
from frog.models import Gallery, Image, Video, Piece, VideoQueue
class Command(BaseCommand):
    help = 'Queue videos for processing'
def add_arguments(self, parser):
parser.add_argument(
'guids',
nargs='*',
default=[],
)
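    # Usage sketch (command name assumed from this file's name):
    #   python manage.py frog_queue_videos <guid> [<guid> ...]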
def handle(self, *args, **options):
for guid in options['guids']:
video = Video.objects.get(guid=guid)
item = VideoQueue.objects.get_or_create(video=video)[0]
item.video = video
item.status = VideoQueue.QUEUED
item.save()
self.stdout.write('Added: {}'.format(video))<|fim▁end|>
|
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
<|file_name|>model_b.py<|end_file_name|><|fim▁begin|>"""
model_b.py
by Ted Morin
contains a function to predict 2-year Incident Hypertension risks using Weibull beta coefficients from:
10.7326/0003-4819-148-2-200801150-00005
2008 A Risk Score for Predicting Near-Term Incidence of Hypertension
Framingham Heart Study
translated and adapted from FHS online risk calculator's javascript
Uses Weibull model set to 2 years
function expects parameters of:
    Male Sex                     bool
    Age (years)                  int/float
    Systolic BP (mm Hg)          int/float
    Diastolic BP (mm Hg)         int/float
    BMI (kg/m^2)                 int/float
    Smoking Status               bool
    Parental Hypert. History     int
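example call (hypothetical patient values):
    risk = model(True, 45, 130, 85, 27.5, False, 1)  # 2-year risk as a float in (0, 1)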
"""
def model(ismale,age,sbp,dbp,bmi,smoker,parentalht):
# imports
import numpy as np
# betas and Weibull scale factor
betas = np.array([
22.949536, #intercept
-0.202933, #female gender
-0.156412, #age
-0.033881, #bmi
-0.05933, #sbp
-0.128468, #dbp
-0.190731, #smoker
-0.166121, #parentalht
0.001624 #ageXdbp
])
s = 0.876925
# Fill in derived values
ageXdbp = (age * dbp)
# values
values = np.array([1, int(not ismale), age, bmi, sbp, dbp,smoker, parentalht, ageXdbp])
# do computation
betaSum = np.dot(betas, values)
risk = 1.0 - np.exp( -np.exp(( np.log(2) - betaSum) / s))
# ^only change between models a, b, and c<|fim▁hole|> return risk<|fim▁end|>
| |
<|file_name|>_thicknessmode.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators
class ThicknessmodeValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(
self,
plotly_name="thicknessmode",
parent_name="densitymapbox.colorbar",
**kwargs
):<|fim▁hole|> super(ThicknessmodeValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
role=kwargs.pop("role", "style"),
values=kwargs.pop("values", ["fraction", "pixels"]),
**kwargs
)<|fim▁end|>
| |
<|file_name|>register.js<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2011 Bruno Jouhier <[email protected]>
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
"use strict";
var fs = require("fs");
var path = require("path");
var compile = require("./compile");
var registered = false;
var _options = {};
/// !doc
///
/// # streamline/lib/compiler/register
///
/// Streamline `require` handler registration
///
/// * `register.register(options)`
/// Registers `require` handlers for streamline.
/// `options` is a set of default options passed to the `transform` function.
exports.register = function(setoptions) {
if (registered) return;<|fim▁hole|> _options = setoptions || {};
registered = true;
var pModule = require('module').prototype;
var orig = pModule._compile;
pModule._compile = function(content, filename) {
content = compile.transformModule(content, filename, _options);
return orig.call(this, content, filename);
}
};
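// Usage sketch (the options shape depends on the streamline build; an empty
// object is assumed to be acceptable here):
//   require('streamline/lib/compiler/register').register({});
//   var mod = require('./someModule._js'); // transformed on load from now on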
var dirMode = parseInt('777', 8);
exports.trackModule = function(m, options) {
if (registered) throw new Error("invalid call to require('streamline/module')");
m.filename = m.filename.replace(/\\/g, '/');
var tmp = m.filename.substring(0, m.filename.lastIndexOf('/'));
tmp += '/tmp--' + Math.round(Math.random() * 1e9) + path.extname(m.filename);
//console.error("WARNING: streamline not registered, re-loading module " + m.filename + " as " + tmp);
exports.register({});
fs.writeFileSync(tmp, fs.readFileSync(m.filename, "utf8"), "utf8");
process.on('exit', function() {
try { fs.unlinkSync(tmp); }
catch (ex) {}
})
m.exports = require(tmp);
return false;
}
Object.defineProperty(exports, "options", {
enumerable: true,
get: function() {
return _options;
}
});<|fim▁end|>
| |
<|file_name|>aes_gcm_hkdf.rs<|end_file_name|><|fim▁begin|>// Copyright 2020 The Tink-Rust Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
//! AES-GCM-HKDF based implementation of the [`tink_core::StreamingAead`] trait.
use super::{noncebased, AesVariant};
use aes_gcm::aead::{generic_array::GenericArray, Aead, NewAead};
use std::convert::TryInto;
use tink_core::{subtle::random::get_random_bytes, utils::wrap_err, TinkError};
use tink_proto::HashType;
/// The size of the nonces used for GCM.
pub const AES_GCM_HKDF_NONCE_SIZE_IN_BYTES: usize = 12;
/// The size of the randomly generated nonce prefix.
pub const AES_GCM_HKDF_NONCE_PREFIX_SIZE_IN_BYTES: usize = 7;
/// The size of the tags of each ciphertext segment.
pub const AES_GCM_HKDF_TAG_SIZE_IN_BYTES: usize = 16;
/// `AesGcmHkdf` implements streaming AEAD encryption using AES-GCM.
///
/// Each ciphertext uses a new AES-GCM key. These keys are derived using HKDF
/// and are derived from the key derivation key, a randomly chosen salt of the
/// same size as the key and a nonce prefix.
#[derive(Clone)]
pub struct AesGcmHkdf {
pub main_key: Vec<u8>,
hkdf_alg: HashType,
aes_variant: AesVariant,
ciphertext_segment_size: usize,
first_ciphertext_segment_offset: usize,
plaintext_segment_size: usize,
}
#[derive(Clone)]
enum AesGcmKeyVariant {
Aes128(Box<aes_gcm::Aes128Gcm>),
Aes256(Box<aes_gcm::Aes256Gcm>),
}
/// Calculate the header length for a given key size. The header includes
/// space for:
/// - a single byte indicating header length
/// - a salt that is the same size as the sub key
/// - a nonce prefix.
fn header_length_for(key_size_in_bytes: usize) -> usize {
1 + key_size_in_bytes + AES_GCM_HKDF_NONCE_PREFIX_SIZE_IN_BYTES
}
impl AesGcmHkdf {
/// Initialize a streaming primitive with a key derivation key
/// and encryption parameters.
///
/// `main_key` is input keying material used to derive sub keys. This must be
/// longer than the size of the sub keys (`key_size_in_bytes`).
/// `hkdf_alg` is a MAC algorithm hash type, used for the HKDF key derivation.
/// `key_size_in_bytes` argument is a key size of the sub keys.
/// `ciphertext_segment_size` argument is the size of ciphertext segments.
/// `first_segment_offset` argument is the offset of the first ciphertext segment.
pub fn new(
main_key: &[u8],
hkdf_alg: HashType,
key_size_in_bytes: usize,
ciphertext_segment_size: usize,
first_segment_offset: usize,
) -> Result<AesGcmHkdf, TinkError> {
if main_key.len() < 16 || main_key.len() < key_size_in_bytes {
return Err("main_key too short".into());
}
let aes_variant = super::validate_aes_key_size(key_size_in_bytes)?;
let header_len = header_length_for(key_size_in_bytes);
if ciphertext_segment_size
<= first_segment_offset + header_len + AES_GCM_HKDF_TAG_SIZE_IN_BYTES
{
return Err("ciphertext_segment_size too small".into());
}
Ok(AesGcmHkdf {
main_key: main_key.to_vec(),
hkdf_alg,
aes_variant,
ciphertext_segment_size,
first_ciphertext_segment_offset: first_segment_offset + header_len,
plaintext_segment_size: ciphertext_segment_size - AES_GCM_HKDF_TAG_SIZE_IN_BYTES,
})
}
/// Return the length of the encryption header.
pub fn header_length(&self) -> usize {
header_length_for(self.aes_variant.key_size())
}
/// Return a key derived from the given main key using `salt` and `aad` parameters.
fn derive_key(&self, salt: &[u8], aad: &[u8]) -> Result<Vec<u8>, TinkError> {
tink_core::subtle::compute_hkdf(
self.hkdf_alg,
&self.main_key,
salt,
aad,
self.aes_variant.key_size(),
)
}
}
impl tink_core::StreamingAead for AesGcmHkdf {
/// Return a wrapper around an underlying [`std::io::Write`], such that
/// any write-operation via the wrapper results in AEAD-encryption of the
/// written data, using aad as associated authenticated data. The associated
/// data is not included in the ciphertext and has to be passed in as parameter
/// for decryption.
fn new_encrypting_writer(
&self,
mut w: Box<dyn std::io::Write>,
aad: &[u8],
) -> Result<Box<dyn tink_core::EncryptingWrite>, TinkError> {
let salt = get_random_bytes(self.aes_variant.key_size());
let nonce_prefix = get_random_bytes(AES_GCM_HKDF_NONCE_PREFIX_SIZE_IN_BYTES);
let dkey = self.derive_key(&salt, aad)?;
let cipher_key = new_cipher_key(self.aes_variant, &dkey)?;
let mut header = Vec::with_capacity(self.header_length());
header.push(
self.header_length()
.try_into()
.map_err(|e| wrap_err("header length too long", e))?,
);
header.extend_from_slice(&salt);
header.extend_from_slice(&nonce_prefix);
        w.write_all(&header).map_err(|e| wrap_err("write failed", e))?;
<|fim▁hole|> nonce_prefix,
plaintext_segment_size: self.plaintext_segment_size,
first_ciphertext_segment_offset: self.first_ciphertext_segment_offset,
})?;
Ok(Box::new(nw))
}
/// Return a wrapper around an underlying [`std::io::Read`], such that
/// any read-operation via the wrapper results in AEAD-decryption of the
/// underlying ciphertext, using aad as associated authenticated data.
fn new_decrypting_reader(
&self,
mut r: Box<dyn std::io::Read>,
aad: &[u8],
) -> Result<Box<dyn std::io::Read>, TinkError> {
let mut hlen = vec![0; 1];
r.read_exact(&mut hlen)
.map_err(|e| wrap_err("failed to reader header len", e))?;
if hlen[0] as usize != self.header_length() {
return Err("invalid header length".into());
}
let mut salt = vec![0; self.aes_variant.key_size()];
r.read_exact(&mut salt)
.map_err(|e| wrap_err("cannot read salt", e))?;
let mut nonce_prefix = vec![0; AES_GCM_HKDF_NONCE_PREFIX_SIZE_IN_BYTES];
r.read_exact(&mut nonce_prefix)
.map_err(|e| wrap_err("cannot read nonce_prefix", e))?;
let dkey = self.derive_key(&salt, aad)?;
let cipher_key = new_cipher_key(self.aes_variant, &dkey)?;
let nr = noncebased::Reader::new(noncebased::ReaderParams {
r,
segment_decrypter: Box::new(AesGcmHkdfSegmentDecrypter { cipher_key }),
nonce_size: AES_GCM_HKDF_NONCE_SIZE_IN_BYTES,
nonce_prefix,
ciphertext_segment_size: self.ciphertext_segment_size,
first_ciphertext_segment_offset: self.first_ciphertext_segment_offset,
})?;
Ok(Box::new(nr))
}
}
/// Create a new AES-GCM cipher key using the given key and the crypto library.
fn new_cipher_key(aes_variant: AesVariant, key: &[u8]) -> Result<AesGcmKeyVariant, TinkError> {
match aes_variant {
AesVariant::Aes128 => Ok(AesGcmKeyVariant::Aes128(Box::new(aes_gcm::Aes128Gcm::new(
GenericArray::from_slice(key),
)))),
AesVariant::Aes256 => Ok(AesGcmKeyVariant::Aes256(Box::new(aes_gcm::Aes256Gcm::new(
GenericArray::from_slice(key),
)))),
}
}
/// A [`noncebased::SegmentEncrypter`] based on AES-GCM-HKDF.
struct AesGcmHkdfSegmentEncrypter {
cipher_key: AesGcmKeyVariant,
}
impl noncebased::SegmentEncrypter for AesGcmHkdfSegmentEncrypter {
fn encrypt_segment(&self, segment: &[u8], nonce: &[u8]) -> Result<Vec<u8>, TinkError> {
let iv = GenericArray::from_slice(nonce);
match &self.cipher_key {
AesGcmKeyVariant::Aes128(key) => key.encrypt(iv, segment),
AesGcmKeyVariant::Aes256(key) => key.encrypt(iv, segment),
}
.map_err(|e| wrap_err("AesGcmHkdf: encryption failed", e))
}
}
/// A [`noncebased::SegmentDecrypter`] based on AES-GCM-HKDF.
struct AesGcmHkdfSegmentDecrypter {
cipher_key: AesGcmKeyVariant,
}
impl noncebased::SegmentDecrypter for AesGcmHkdfSegmentDecrypter {
fn decrypt_segment(&self, segment: &[u8], nonce: &[u8]) -> Result<Vec<u8>, TinkError> {
let iv = GenericArray::from_slice(nonce);
match &self.cipher_key {
AesGcmKeyVariant::Aes128(key) => key.decrypt(iv, segment),
AesGcmKeyVariant::Aes256(key) => key.decrypt(iv, segment),
}
.map_err(|e| wrap_err("AesGcmHkdf: decryption failed", e))
}
}<|fim▁end|>
|
let nw = noncebased::Writer::new(noncebased::WriterParams {
w,
segment_encrypter: Box::new(AesGcmHkdfSegmentEncrypter { cipher_key }),
nonce_size: AES_GCM_HKDF_NONCE_SIZE_IN_BYTES,
|
<|file_name|>hakeandositeprecodescontowhiletemposimounao.py<|end_file_name|><|fim▁begin|>import urllib.request
import time
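# Scrapes the first price from the page by locating the '>$' marker and
# reading the next four characters (assumes a fixed-width x.xx price).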
def pega_preço():
pagina = urllib.request.urlopen('http://beans.itcarlow.ie/prices-loyalty.html')<|fim▁hole|> texto = pagina.read().decode('utf8')
onde = texto.find('>$')
    inicio = onde + 2
fim = inicio + 4
return float(texto[inicio:fim])
opção = input("do you want to buy now? (S/N) ")
if opção == 'S':
preço = pega_preço()
    print('You bought for %5.2f R$' % preço)
else:
preço = 99.99
while preço >= 4.74:
preço = pega_preço()
if preço >= 4.74:
time.sleep(5)
    print('Buy! Price: %5.2f' % preço)<|fim▁end|>
| |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Actor'
db.create_table('actors_actor', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('registered_on', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('last_activity', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('owned', self.gf('django.db.models.fields.BooleanField')(default=False)),
('calendar', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['agenda.Calendar'], unique=True, null=True, blank=True)),
))
db.send_create_signal('actors', ['Actor'])
def backwards(self, orm):
# Deleting model 'Actor'
db.delete_table('actors_actor')
models = {
'actors.actor': {
'Meta': {'object_name': 'Actor'},
'calendar': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['agenda.Calendar']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_activity': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'owned': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'registered_on': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'agenda.calendar': {
'Meta': {'object_name': 'Calendar'},
'events': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'calendars'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['agenda.Event']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
},
'agenda.event': {
'Meta': {'ordering': "['-event_date', '-start_time', '-title']", 'unique_together': "(('event_date', 'slug'),)", 'object_name': 'Event'},
'add_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'allow_comments': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'end_time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
'event_date': ('django.db.models.fields.DateField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mod_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'publish': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2011, 4, 5, 12, 34, 7, 487258)'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sites.Site']", 'symmetrical': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
'start_time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'default': "'V'", 'max_length': '1'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sites.site': {<|fim▁hole|> }
}
complete_apps = ['actors']<|fim▁end|>
|
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
|
<|file_name|>DSS.py<|end_file_name|><|fim▁begin|>#
# Signature/DSS.py : DSS.py
#
# ===================================================================
#
# Copyright (c) 2014, Legrandin <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ===================================================================
"""
Digital Signature Standard (DSS), as specified in `FIPS PUB 186-3`__.
A sender signs a message in the following way:
>>> from Cryptodome.Hash import SHA256
>>> from Cryptodome.PublicKey import ECC
>>> from Cryptodome.Signature import DSS
>>>
>>> message = b'I give my permission to order #4355'
>>> key = ECC.import_key(open('privkey.der').read())
>>> h = SHA256.new(message)
>>> signer = DSS.new(key, 'fips-186-3')
>>> signature = signer.sign(h)
The receiver can verify authenticity of the message:
>>> key = ECC.import_key(open('pubkey.der').read())
>>> h = SHA256.new(received_message)
>>> verifier = DSS.new(key, 'fips-186-3')
>>> try:
>>>     verifier.verify(h, signature)
>>> print "The message is authentic."
>>> except ValueError:
>>> print "The message is not authentic."
.. __: http://csrc.nist.gov/publications/fips/fips186-3/fips_186-3.pdf
"""
__all__ = ['new', 'DssSigScheme']
from Cryptodome.Util.py3compat import bchr, b
from Cryptodome.Util.asn1 import DerSequence
from Cryptodome.Util.number import long_to_bytes
from Cryptodome.Math.Numbers import Integer
from Cryptodome.Hash import HMAC
from Cryptodome.PublicKey.ECC import _curve, EccKey
class DssSigScheme(object):
"""This signature scheme can perform DSS signature or verification.
:undocumented: __init__
"""
def __init__(self, key, encoding, order):
"""Create a new Digital Signature Standard (DSS) object.
Do not instantiate this object directly,
use `Cryptodome.Signature.DSS.new` instead.
"""
self._key = key
self._encoding = encoding
self._order = order
self._order_bits = self._order.size_in_bits()
self._order_bytes = (self._order_bits - 1) // 8 + 1
def can_sign(self):
"""Return True if this signature object can be used
for signing messages."""
return self._key.has_private()
def _compute_nonce(self, msg_hash):
raise NotImplementedError("To be provided by subclasses")
def _valid_hash(self, msg_hash):
raise NotImplementedError("To be provided by subclasses")
def sign(self, msg_hash):
"""Produce the DSS signature of a message.
:Parameters:
msg_hash : hash object
The hash that was carried out over the message.
The object belongs to the `Cryptodome.Hash` package.
Under mode *'fips-186-3'*, the hash must be a FIPS
approved secure hash (SHA-1 or a member of the SHA-2 family),
of cryptographic strength appropriate for the DSA key.
For instance, a 3072/256 DSA key can only be used
in combination with SHA-512.
:Return: The signature encoded as a byte string.
:Raise ValueError:
If the hash algorithm is incompatible to the DSA key.
:Raise TypeError:
If the DSA key has no private half.
"""
if not self._valid_hash(msg_hash):
raise ValueError("Hash is not sufficiently strong")
# Generate the nonce k (critical!)
nonce = self._compute_nonce(msg_hash)
# Perform signature using the raw API
z = Integer.from_bytes(msg_hash.digest()[:self._order_bytes])
sig_pair = self._key._sign(z, nonce)
# Encode the signature into a single byte string
if self._encoding == 'binary':
output = b("").join([long_to_bytes(x, self._order_bytes)
for x in sig_pair])
else:
# Dss-sig ::= SEQUENCE {
# r OCTET STRING,
# s OCTET STRING
# }
output = DerSequence(sig_pair).encode()
return output
def verify(self, msg_hash, signature):
"""Verify that a certain DSS signature is authentic.
This function checks if the party holding the private half of the key
really signed the message.
:Parameters:
msg_hash : hash object
The hash that was carried out over the message.
This is an object belonging to the `Cryptodome.Hash` module.
Under mode *'fips-186-3'*, the hash must be a FIPS
approved secure hash (SHA-1 or a member of the SHA-2 family),
of cryptographic strength appropriate for the DSA key.
For instance, a 3072/256 DSA key can only be used in
combination with SHA-512.
signature : byte string
The signature that needs to be validated.
:Raise ValueError:
If the signature is not authentic.
"""
if not self._valid_hash(msg_hash):
raise ValueError("Hash does not belong to SHS")
if self._encoding == 'binary':
if len(signature) != (2 * self._order_bytes):
raise ValueError("The signature is not authentic (length)")
r_prime, s_prime = [Integer.from_bytes(x)
for x in (signature[:self._order_bytes],
signature[self._order_bytes:])]
else:
try:
der_seq = DerSequence().decode(signature)
except (ValueError, IndexError):
raise ValueError("The signature is not authentic (DER)")
if len(der_seq) != 2 or not der_seq.hasOnlyInts():
raise ValueError("The signature is not authentic (DER content)")
r_prime, s_prime = der_seq[0], der_seq[1]
if not (0 < r_prime < self._order) or not (0 < s_prime < self._order):
raise ValueError("The signature is not authentic (d)")
<|fim▁hole|> raise ValueError("The signature is not authentic")
        # Return False so that legacy PyCrypto-style code, which expected a
        # boolean from verify(), fails instead of silently succeeding
        return False
class DeterministicDsaSigScheme(DssSigScheme):
# Also applicable to ECDSA
def __init__(self, key, encoding, order, private_key):
super(DeterministicDsaSigScheme, self).__init__(key, encoding, order)
self._private_key = private_key
def _bits2int(self, bstr):
"""See 2.3.2 in RFC6979"""
result = Integer.from_bytes(bstr)
q_len = self._order.size_in_bits()
b_len = len(bstr) * 8
if b_len > q_len:
result >>= (b_len - q_len)
return result
def _int2octets(self, int_mod_q):
"""See 2.3.3 in RFC6979"""
assert 0 < int_mod_q < self._order
return long_to_bytes(int_mod_q, self._order_bytes)
def _bits2octets(self, bstr):
"""See 2.3.4 in RFC6979"""
z1 = self._bits2int(bstr)
if z1 < self._order:
z2 = z1
else:
z2 = z1 - self._order
return self._int2octets(z2)
def _compute_nonce(self, mhash):
"""Generate k in a deterministic way"""
# See section 3.2 in RFC6979.txt
# Step a
h1 = mhash.digest()
# Step b
mask_v = bchr(1) * mhash.digest_size
# Step c
nonce_k = bchr(0) * mhash.digest_size
for int_oct in 0, 1:
# Step d/f
nonce_k = HMAC.new(nonce_k,
mask_v + bchr(int_oct) +
self._int2octets(self._private_key) +
self._bits2octets(h1), mhash).digest()
# Step e/g
mask_v = HMAC.new(nonce_k, mask_v, mhash).digest()
nonce = -1
while not (0 < nonce < self._order):
# Step h.C (second part)
if nonce != -1:
nonce_k = HMAC.new(nonce_k, mask_v + bchr(0),
mhash).digest()
mask_v = HMAC.new(nonce_k, mask_v, mhash).digest()
# Step h.A
mask_t = b("")
# Step h.B
while len(mask_t) < self._order_bytes:
mask_v = HMAC.new(nonce_k, mask_v, mhash).digest()
mask_t += mask_v
# Step h.C (first part)
nonce = self._bits2int(mask_t)
return nonce
def _valid_hash(self, msg_hash):
return True
class FipsDsaSigScheme(DssSigScheme):
#: List of L (bit length of p) and N (bit length of q) combinations
#: that are allowed by FIPS 186-3. The security level is provided in
#: Table 2 of FIPS 800-57 (rev3).
_fips_186_3_L_N = (
(1024, 160), # 80 bits (SHA-1 or stronger)
(2048, 224), # 112 bits (SHA-224 or stronger)
(2048, 256), # 128 bits (SHA-256 or stronger)
(3072, 256) # 256 bits (SHA-512)
)
def __init__(self, key, encoding, order, randfunc):
super(FipsDsaSigScheme, self).__init__(key, encoding, order)
self._randfunc = randfunc
L = Integer(key.p).size_in_bits()
if (L, self._order_bits) not in self._fips_186_3_L_N:
error = ("L/N (%d, %d) is not compliant to FIPS 186-3"
% (L, self._order_bits))
raise ValueError(error)
def _compute_nonce(self, msg_hash):
# hash is not used
return Integer.random_range(min_inclusive=1,
max_exclusive=self._order,
randfunc=self._randfunc)
def _valid_hash(self, msg_hash):
"""Verify that SHA-1, SHA-2 or SHA-3 are used"""
return (msg_hash.oid == "1.3.14.3.2.26" or
msg_hash.oid.startswith("2.16.840.1.101.3.4.2."))
class FipsEcDsaSigScheme(DssSigScheme):
def __init__(self, key, encoding, order, randfunc):
super(FipsEcDsaSigScheme, self).__init__(key, encoding, order)
self._randfunc = randfunc
def _compute_nonce(self, msg_hash):
return Integer.random_range(min_inclusive=1,
max_exclusive=_curve.order,
randfunc=self._randfunc)
def _valid_hash(self, msg_hash):
"""Verify that SHA-[23] (256|384|512) bits are used to
match the 128-bit security of P-256"""
approved = ("2.16.840.1.101.3.4.2.1",
"2.16.840.1.101.3.4.2.2",
"2.16.840.1.101.3.4.2.3",
"2.16.840.1.101.3.4.2.8",
"2.16.840.1.101.3.4.2.9",
"2.16.840.1.101.3.4.2.10")
return msg_hash.oid in approved
def new(key, mode, encoding='binary', randfunc=None):
"""Return a signature scheme object `DSS_SigScheme` that
can be used to perform DSS signature or verification.
:Parameters:
key : a `Cryptodome.PublicKey.DSA` or `Cryptodome.PublicKey.ECC` key object
If the key has got its private half, both signature and
verification are possible.
If it only has the public half, verification is possible
but not signature generation.
For DSA keys, let *L* and *N* be the bit lengths of the modules *p*
and *q*: the combination *(L,N)* must appear in the following list,
in compliance to section 4.2 of `FIPS-186`__:
- (1024, 160)
- (2048, 224)
- (2048, 256)
- (3072, 256)
mode : string
The parameter can take these values:
- *'fips-186-3'*. The signature generation is carried out
according to `FIPS-186`__: the nonce *k* is taken from the RNG.
- *'deterministic-rfc6979'*. The signature generation
process does not rely on a random generator.
See RFC6979_.
encoding : string
How the signature is encoded. This value determines the output of
``sign`` and the input of ``verify``.
The following values are accepted:
- *'binary'* (default), the signature is the raw concatenation
of *r* and *s*. The size in bytes of the signature is always
two times the size of *q*.
- *'der'*, the signature is a DER encoded SEQUENCE with two
INTEGERs, *r* and *s*. The size of the signature is variable.
randfunc : callable
The source of randomness. If ``None``, the internal RNG is used.
Only used for the *'fips-186-3'* mode.
.. __: http://csrc.nist.gov/publications/fips/fips186-3/fips_186-3.pdf
.. __: http://csrc.nist.gov/publications/fips/fips186-3/fips_186-3.pdf
.. _RFC6979: http://tools.ietf.org/html/rfc6979
"""
# The goal of the 'mode' parameter is to avoid to
# have the current version of the standard as default.
#
# Over time, such version will be superseded by (for instance)
# FIPS 186-4 and it will be odd to have -3 as default.
if encoding not in ('binary', 'der'):
raise ValueError("Unknown encoding '%s'" % encoding)
if isinstance(key, EccKey):
order = _curve.order
private_key_attr = 'd'
else:
order = Integer(key.q)
private_key_attr = 'x'
if key.has_private():
private_key = getattr(key, private_key_attr)
else:
private_key = None
if mode == 'deterministic-rfc6979':
return DeterministicDsaSigScheme(key, encoding, order, private_key)
elif mode == 'fips-186-3':
if isinstance(key, EccKey):
return FipsEcDsaSigScheme(key, encoding, order, randfunc)
else:
return FipsDsaSigScheme(key, encoding, order, randfunc)
else:
raise ValueError("Unknown DSS mode '%s'" % mode)<|fim▁end|>
|
z = Integer.from_bytes(msg_hash.digest()[:self._order_bytes])
result = self._key._verify(z, (r_prime, s_prime))
if not result:
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
class Channel(models.Model):
channel_id = models.CharField(max_length=50, unique=True)
channel_name = models.CharField(max_length=50, null=True, blank=True)
rtmp_url = models.CharField(max_length=100, null=True, blank=True)
active = models.IntegerField(null=True, blank=True)
start = models.IntegerField(null=True, blank=True)
PID = models.IntegerField(null=True, blank=True)
PGID = models.IntegerField(null=True, blank=True)
client_ip = models.CharField(max_length=50, null=True, blank=True)<|fim▁hole|>
class Meta:
managed = False
db_table = 'channel'
        verbose_name = 'Channel'
        verbose_name_plural = 'Channel management'
def __str__(self):
return self.channel_name + '(' + self.channel_id + ')'
class Program(models.Model):
channel = models.ForeignKey(Channel, to_field='channel_id', null=True)
start_time = models.DateTimeField(auto_now_add=False, null=True, blank=True)
end_time = models.DateTimeField(auto_now_add=False, null=True, blank=True)
url = models.CharField(max_length=50, null=True, blank=True)
title = models.CharField(max_length=50, null=True, blank=True)
finished = models.IntegerField(null=True, blank=True, default=0)
event_id = models.IntegerField(null=True, blank=True)
class Meta:
managed = False
db_table = 'program'
        verbose_name = 'Program'
        verbose_name_plural = 'Program management'
def __str__(self):
return str(self.channel) + ':' + self.title<|fim▁end|>
|
sort = models.IntegerField(null=False, blank=True, default=0)
|
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>import json
from django_api_tools.APIModel import APIModel, UserAuthCode
from django_api_tools.APIView import APIUrl, ReservedURL, StatusCode
from django_api_tools.tests.models import Foo, Bar, Baz, Qux, TestProfile
from django_api_tools.tests.views import TestAPIView
from django.test import TestCase
from django.test.client import RequestFactory, Client
from django.contrib.auth.models import AnonymousUser, User
from django.core.paginator import EmptyPage, PageNotAnInteger
from django.core.exceptions import ObjectDoesNotExist
__author__ = 'szpytfire'
class APIToolsTestCase(TestCase):
def assertDictKeysEqual(self, dict, keys):
# For related fields, APIModel cuts off the special related syntax when dictifying
# We should therefore do the same when testing for the correct keys
for index, val in enumerate(keys):
prefix = filter(lambda prefix: val.startswith(prefix), APIModel._reserved_prefixes)
if prefix:
keys[index] = keys[index][len(prefix[0]) + 1:]
self.assertSetEqual(set(dict.keys()), set(keys))
class APIModelTestCase(APIToolsTestCase):
fixtures = ['user_testprofile_foo.json', 'bar_baz_qux.json']
def remove_foreign_key_fields(self, fields):
return [field for field in fields if not filter(lambda prefix: field.startswith(prefix), APIModel._reserved_prefixes)]
def test_dictify(self):
foo = Foo.objects.get(id=1)
foo._curr_user = AnonymousUser()
# Test no fields to include returns empty dict
self.assertDictEqual(foo.dictify([], False), {})
# Test random fields to include returns empty dict
self.assertDictEqual(foo.dictify(['bar1', 'bar2'], False), {})
# Test defaults to public user
self.assertDictKeysEqual(foo.dictify(Foo.long_description_fields, False), list(Foo.public_fields))
# Test correct registered user fields returned
foo._user_auth = UserAuthCode.REGISTERED_USER
self.assertDictKeysEqual(foo.dictify(Foo.long_description_fields, False), list(Foo.public_fields + Foo.registered_user_fields))
# Test correct owner fields returned
foo._user_auth = UserAuthCode.OWNER
self.assertDictKeysEqual(foo.dictify(Foo.long_description_fields, False), list(Foo.public_fields + Foo.registered_user_fields + Foo.owner_only_fields))
def test_dictify_helper(self):
user = User.objects.get(id=1)
foo = Foo.objects.get(id=1)
foo.set_user_auth(user)
# Test no dictified fields returned for empty fields to return
self.assertDictEqual(foo.dictify_helper(Foo.public_fields, [], False), {})
# Test no dictified fields returned for fields which aren't in the auth level
self.assertDictEqual(foo.dictify_helper(Foo.public_fields, ['bar1', 'bar2'], False), {})
# Test regular field is set in the dictionary
dictified_foo = foo.dictify_helper(Foo.public_fields, Foo.public_fields, False)
self.assertEqual(dictified_foo['id'], foo.id)
# Test invalid regular fields is set as None
non_existent_field = ('test', )
dictified_foo = foo.dictify_helper(non_existent_field, non_existent_field, False)
self.assertIsNone(dictified_foo[non_existent_field[0]])
# Test invalid related field is set as None
non_existent_rel_field = ('fk_short_test', )
dictified_foo = foo.dictify_helper(non_existent_rel_field, non_existent_rel_field, False)
self.assertIsNone(dictified_foo['test'])
# Test fk_short only returns the foreign model's ID
fk_short_field = ('fk_short_baz', )
bar = Bar.objects.get(id=1)
bar.set_user_auth(user)
dictified_bar = bar.dictify_helper(fk_short_field, fk_short_field, False)
self.assertEqual(len(dictified_bar), 1)
self.assertDictKeysEqual(dictified_bar['baz'], self.remove_foreign_key_fields(bar.baz.short_description_fields))
# Test fk_long returns the foreign model's dictify_long()
fk_long_field = ('fk_long_baz', )
dictified_bar = bar.dictify_helper(fk_long_field, fk_long_field, False)
self.assertEqual(len(dictified_bar), 1)
self.assertDictKeysEqual(dictified_bar['baz'], self.remove_foreign_key_fields(bar.baz.short_description_fields + bar.baz.long_description_fields))
# Test onetoone_short only returns the foreign model's ID
onetoone_short_field = ('onetoone_short_owner', )
dictified_foo = foo.dictify_helper(onetoone_short_field, onetoone_short_field, False)
self.assertEqual(len(dictified_foo), 1)
self.assertDictKeysEqual(dictified_foo['owner'], self.remove_foreign_key_fields(foo.owner.short_description_fields))
# Test onetoone_long returns the foreign model's dictify_long()
fk_long_field = ('onetoone_long_owner', )
qux = Qux.objects.get(id=1)
qux.set_user_auth(user)
dictified_qux = qux.dictify_helper(fk_long_field, fk_long_field, False)
self.assertEqual(len(dictified_qux), 1)
self.assertDictKeysEqual(dictified_qux['owner'], self.remove_foreign_key_fields(qux.owner.short_description_fields + qux.owner.long_description_fields))
# Test rel_short only returns the related models' ID's
rel_short_field = ('rel_short_bars', )
baz = Baz.objects.get(id=1)
baz.set_user_auth(user)
dictified_baz = baz.dictify_helper(rel_short_field, rel_short_field, False)
self.assertEqual(len(dictified_baz), 1)
self.assertEqual(len(dictified_baz['bars']), baz.bars.all().count())
self.assertDictKeysEqual(dictified_baz['bars'][0], self.remove_foreign_key_fields(baz.bars.all()[0].short_description_fields))
# Test rel_long returns the related models' dictify_long()
rel_long_field = ('rel_long_bars', )
dictified_baz = baz.dictify_helper(rel_long_field, rel_long_field, False)
self.assertEqual(len(dictified_baz), 1)
self.assertEqual(len(dictified_baz['bars']), baz.bars.all().count())
self.assertDictKeysEqual(dictified_baz['bars'][0], self.remove_foreign_key_fields(baz.bars.all()[0].short_description_fields + baz.bars.all()[0].long_description_fields))
# Test m2m_short only returns the related models' ID's
m2m_short_field = ('m2m_short_foos', )
qux = Qux.objects.get(id=1)
qux.set_user_auth(user)
qux.foos.add(foo)
dictified_qux = qux.dictify_helper(m2m_short_field, m2m_short_field, False)
self.assertEqual(len(dictified_qux), 1)
self.assertEqual(len(dictified_qux['foos']), qux.foos.all().count())
self.assertDictKeysEqual(dictified_qux['foos'][0], self.remove_foreign_key_fields(qux.foos.all()[0].short_description_fields))
# Test m2m_long returns the related models' dictify_long()
m2m_long_field = ('m2m_long_foos', )
dictified_qux = qux.dictify_helper(m2m_long_field, m2m_long_field, False)
self.assertEqual(len(dictified_qux), 1)
self.assertEqual(len(dictified_qux['foos']), qux.foos.all().count())
self.assertDictKeysEqual(dictified_qux['foos'][0], self.remove_foreign_key_fields(qux.foos.all()[0].short_description_fields + qux.foos.all()[0].long_description_fields))
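# Recap of the field-prefix convention exercised above (inferred from these
# tests, not an authoritative spec): 'fk_short_'/'onetoone_short_' serialize a
# relation as its short dict, 'fk_long_'/'onetoone_long_' as its short + long
# fields, while the 'rel_'/'m2m_' variants apply the same short/long split to
# every member of a one-to-many or many-to-many set, keyed by the bare
# relation name (e.g. 'm2m_long_foos' -> a list under key 'foos').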
def test_dictify_short(self):
# Test that the method only returns the short description fields
foo = Foo.objects.get(id=1)
self.assertDictKeysEqual(foo.dictify_short(False), Foo.short_description_fields)
def test_dictify_long(self):
# Test that the method returns the long and short description fields
foo = Foo.objects.get(id=1)
owner = TestProfile.objects.get(id=1).user
foo.set_user_auth(owner)
self.assertDictKeysEqual(foo.dictify_long(False), list(Foo.short_description_fields + Foo.long_description_fields))
def test_dictify_with_auth(self):
active_foo = Foo.objects.get(id=1)
deactivated_foo = Foo.objects.filter(active=0)[0]
owner = User.objects.get(id=1)
not_owner = User.objects.get(id=2)
public_user = AnonymousUser()
# Test whether a deactivated instance returns None
self.assertIsNone(deactivated_foo.dictify_with_auth(owner, False))
# Test whether a public user only sees the public fields
self.assertDictKeysEqual(active_foo.dictify_with_auth(public_user, False), list(Foo.public_fields))
# Test whether an owner can view all the fields
self.assertDictKeysEqual(active_foo.dictify_with_auth(owner, False), list(Foo.public_fields + Foo.registered_user_fields + Foo.owner_only_fields))
# Test whether a registered user sees registered user + public fields
self.assertDictKeysEqual(active_foo.dictify_with_auth(not_owner, False), list(Foo.public_fields + Foo.registered_user_fields))
def test_is_owner(self):
# Test ownership of Foo
foo = Foo.objects.get(id=1)
# Test Foo with its rightful owner
owner = User.objects.get(id=1)
self.assertTrue(foo.is_owner(owner))
# Test Foo with an incorrect owner
not_owner = User.objects.get(id=2)
self.assertFalse(foo.is_owner(not_owner))
# Test Bar with an arbitrary user - Bar's don't have an owner.
bar = Bar.objects.get(id=1)
self.assertTrue(bar.is_owner(owner))
def test_get_all(self):
user = User.objects.get(id=1)
# Test number of Foo's equal to 10
self.assertEqual(len(Foo.get_all(1, user)), Foo.pagination)
# Test number of Bar's equal to number of Bar's (< 10)
self.assertEqual(len(Bar.get_all(1, user)), Bar.objects.all().count())
# Test invalid page number raises expected exception
with self.assertRaises(EmptyPage):
Bar.get_all(2, user)
# Test invalid page value raises expected exception
with self.assertRaises(PageNotAnInteger):
Bar.get_all("foo", user)
def test_get_model_instance(self):
# Test getting a Foo object with a valid ID
valid_foo_id = 1
# Make sure the method returns the right object
foo = Foo.objects.get(id=valid_foo_id)
self.assertEqual(Foo.get_model_instance(valid_foo_id), foo)
# Test invalid lookup raises expected exception
with self.assertRaises(ValueError):
Foo.objects.get(id="foo")
with self.assertRaises(ObjectDoesNotExist):
Foo.objects.get(id=20)
class APIViewTestCase(APIToolsTestCase):
fixtures = ['user_testprofile_foo.json', 'bar_baz_qux.json']
urls = 'django_api_tools.tests.urls'
def setUp(self):
self.factory = RequestFactory()
def test_get(self):
t = TestAPIView()
# Test invalid request gives back 404
request = self.factory.get('/test_api/')
response = t.get(request)
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
# Test reserved URL gives back 200
request = self.factory.get('/test_api/{}'.format(ReservedURL.CSRFTOKEN))
response = t.get(request)
self.assertEqual(response.status_code, StatusCode.OK)
user = User.objects.get(id=1)
# Test model request returns 200
request = self.factory.get('/test_api/foo/')
request.user = user
response = t.get(request)
self.assertEqual(response.status_code, StatusCode.OK)
# Test get instance gives back 200
request = self.factory.get('/test_api/foo/1/')
request.user = user
response = t.get(request)
self.assertEqual(response.status_code, StatusCode.OK)
# Test custom request on model with custom_request implemented gives back 200
request = self.factory.get('/test_api/qux/1/custom/')
request.user = user
response = t.get(request)
self.assertEqual(response.status_code, StatusCode.OK)
# Test custom request on model without implementation gives back 404
request = self.factory.get('/test_api/foo/1/custom/')
request.user = user
response = t.get(request)
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
def test_post(self):
t = TestAPIView()
# Test invalid request gives back 404
request = self.factory.post('/test_api/')
response = t.post(request)
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
# Test reserved URL gives back 200
request = self.factory.post('/test_api/{}/'.format(ReservedURL.CSRFTOKEN))
response = t.post(request)
self.assertEqual(response.status_code, StatusCode.OK)
user = User.objects.get(id=1)
# Test post model request (create) returns 200
APIUrl.ADDITIONAL_FIELDS = list()
request = self.factory.post('/test_api/foo/', data={"f2": "foo"})
request.user = user
response = t.post(request)
self.assertEqual(response.status_code, StatusCode.OK)
# Test post instance (update) gives back 200
APIUrl.ADDITIONAL_FIELDS = list()<|fim▁hole|> request = self.factory.post('/test_api/foo/{}/'.format(foo.id), data={"f1": True})
request.user = user
response = t.post(request)
self.assertEqual(response.status_code, StatusCode.OK)
def test_get_all(self):
user = User.objects.get(id=1)
t = TestAPIView()
# Test get first page of Foo's gives back 10 results
request = self.factory.get('/test_api/foo/')
request.user = user
t._endpoint_model = Foo
response = t._get_all(request)
self.assertEqual(len(json.loads(response.content)), 10)
# Test second page of Foo's gives back 1 result
request = self.factory.get('/test_api/foo/', data={"page": 2})
request.user = user
t._endpoint_model = Foo
response = t._get_all(request)
self.assertEqual(len(json.loads(response.content)), 1)
# Test third page of Foo's gives back 404
request = self.factory.get('/test_api/foo/', data={"page": 3})
request.user = user
t._endpoint_model = Foo
response = t._get_all(request)
self.assertIsNone(json.loads(response.content))
def test_get_instance(self):
user = User.objects.get(id=1)
t = TestAPIView()
# Test Foo ID = 1 gives back 200/ correct Foo
foo = Foo.objects.get(id=1)
foo_dict = foo.dictify_with_auth(user, short_dict=False)
request = self.factory.get('/test_api/foo/{}/'.format(foo.id))
request.user = user
t._endpoint_model = Foo
t._url_validator = APIUrl(request)
response = t._get_instance(request)
self.assertDictEqual(json.loads(response.content), foo_dict)
self.assertEqual(response.status_code, StatusCode.OK)
# Test Foo ID = 22 gives back 404/ none
request = self.factory.get('/test_api/foo/22/')
request.user = user
t._endpoint_model = Foo
t._url_validator = APIUrl(request)
response = t._get_instance(request)
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
self.assertIsNone(json.loads(response.content))
# Test Foo ID = "foo" gives back 404
request = self.factory.get('/test_api/foo/foo/')
request.user = user
t._endpoint_model = Foo
t._url_validator = APIUrl(request)
response = t._get_instance(request)
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
self.assertIsNone(json.loads(response.content))
# Test Qux /custom/ gives back 200/ correct value
request = self.factory.get('/test_api/qux/custom/')
request.user = user
t._endpoint_model = Qux
t._url_validator = APIUrl(request)
response = t._get_instance(request)
self.assertEqual(response.status_code, StatusCode.OK)
self.assertEqual(json.loads(response.content), Qux.api_custom_request(request))
def test_post_handler(self):
t = TestAPIView()
# Test non-authenticated user and private endpoint gives back 404
request = self.factory.post('/test_api/qux/')
request.user = AnonymousUser()
public_endpoints = (Foo, )
t._endpoint_model = Qux
response = t._post_handler(request, public_endpoints)
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
# Test create:
f2_val = "hello"
user = User.objects.get(id=1)
request = self.factory.post('/test_api/foo/', data={"f2": f2_val})
request.user = user
public_endpoints = (Qux, )
t._endpoint_model = Foo
response = t._post_handler(request, public_endpoints)
foo_dict = json.loads(response.content)
self.assertEqual(response.status_code, StatusCode.OK)
self.assertEqual(foo_dict['f2'], f2_val)
self.assertEqual(foo_dict, Foo.objects.get(id=foo_dict['id']).dictify_with_auth(user, short_dict=False))
# Test create Foo with bad/missing fields returns 404
f1_val = "hello"
request = self.factory.post('/test_api/foo/', data={"f1": f1_val})
request.user = user
response = t._post_handler(request, public_endpoints)
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
# Test update with owner returns 200 + updated foo object
foo = Foo.objects.get(id=1)
f1_before = foo.f1
foo1_url = '/test_api/foo/{}/'.format(foo.id)
request = self.factory.post(foo1_url, data={"f1": True})
request.user = user
t._url_validator = APIUrl(request)
response = t._post_handler(request, public_endpoints, create=False)
self.assertEqual(response.status_code, StatusCode.OK)
response_content = json.loads(response.content)
self.assertEqual(response_content['f1'], f1_before + 1)
new_foo = Foo.objects.get(id=1)
self.assertDictEqual(new_foo.dictify_with_auth(user, False), response_content)
# Test update with non owner returns 404
request = self.factory.post(foo1_url, data={"f1": True})
request.user = AnonymousUser()
response = t._post_handler(request, public_endpoints, create=False)
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
# Test deactivate gives back 404 + Test that the deactivate date is set
request = self.factory.post(foo1_url, data={"deactivate": True})
request.user = user
response = t._post_handler(request, public_endpoints, create=False)
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
def test_get_json_response_for_instance(self):
foo = Foo.objects.get(id=1)
t = TestAPIView()
# Test Anonymous user gives back public fields
user = AnonymousUser()
response_content = t.get_json_response_for_instance(foo, user).content
self.assertDictKeysEqual(json.loads(response_content), Foo.public_fields)
# Test registered user gives back all fields
user = User.objects.get(id=2)
response_content = t.get_json_response_for_instance(foo, user).content
self.assertDictKeysEqual(json.loads(response_content), list(Foo.public_fields + Foo.registered_user_fields))
# Test owner gives back all fields
user = User.objects.get(id=1)
response_content = t.get_json_response_for_instance(foo, user).content
self.assertDictKeysEqual(json.loads(response_content), list(Foo.public_fields + Foo.registered_user_fields + Foo.owner_only_fields))
def test_validate_request(self):
t = TestAPIView()
# Test invalid request returns False
request = self.factory.get('/test_api/fob/')
self.assertFalse(t._validate_request(request))
request = self.factory.get('/test_api/123/123/123/')
self.assertFalse(t._validate_request(request))
# Test valid request returns True
request = self.factory.get('/test_api/foo/')
self.assertTrue(t._validate_request(request))
# Test reserved URL returns True
request = self.factory.get('/test_api/{}/'.format(ReservedURL.LOGIN))
self.assertTrue(t._validate_request(request))
def test_handle_login_logout_request(self):
# We need to use Django's Client to test the login
# as RequestFactory doesn't offer any middleware by default
c = Client()
login_url = "/test_api/{}/".format(ReservedURL.LOGIN)
# Test valid user login returns the user's profile + sets cookies
valid_user = User.objects.get(id=1)
new_password = "newpassword1"
valid_user.set_password(new_password)
valid_user.save()
response = c.post(login_url, data={"username": valid_user.username, "password": new_password})
self.assertEqual(response.status_code, StatusCode.OK)
self.assertDictEqual(json.loads(response.content), valid_user.test_profile.dictify_with_auth(valid_user, short_dict=False))
# Test that logout deletes the authenticated session
session_val_before = response.cookies['sessionid'].value
response = c.post("/test_api/{}/".format(ReservedURL.LOGOUT))
session_val_after = response.cookies['sessionid'].value
self.assertNotEqual(session_val_before, session_val_after)
# Test an invalid login returns 404
response = c.post(login_url, data={"username": valid_user.username, "password": "badpassword"})
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
# Test inactive user login returns 404
valid_user.is_active = False
valid_user.save()
response = c.post(login_url, data={"username": valid_user.username, "password": new_password})
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
def test_handle_csrf_request(self):
# Test csrf request sets a token
c = Client()
response = c.get("/test_api/{}".format(ReservedURL.CSRFTOKEN))
self.assertIsNotNone(response.cookies['csrftoken'].value)
def test_handle_custom_request(self):
t = TestAPIView()
# Test model which handles custom request returns 200
request = self.factory.get('/test_api/qux/custom/')
t._endpoint_model = Qux
response = t.handle_custom_request(request)
self.assertEqual(response.status_code, StatusCode.OK)
# Test model which doesn't handle custom request returns 404
request = self.factory.get('/test_api/foo/custom/')
t._endpoint_model = Foo
response = t.handle_custom_request(request)
self.assertEqual(response.status_code, StatusCode.NOT_FOUND)
class APIUrlTestCase(APIToolsTestCase):
def setUp(self):
self.factory = RequestFactory()
def test_split_url_components(self):
# Test an invalid request
request = self.factory.get("/api/")
splitter = APIUrl(request)
self.assertFalse(splitter.is_valid_request())
# Test a model request
MODEL_NAME = "foo"
request = self.factory.get("/api/{}/".format(MODEL_NAME))
splitter = APIUrl(request)
self.assertTrue(splitter.is_valid_request())
self.assertTrue(splitter.is_model_request())
self.assertEqual(MODEL_NAME, splitter.REQUESTED_MODEL)
# Test a model instance request
MODEL_INSTANCE = "1"
request = self.factory.get("/api/{}/{}/".format(MODEL_NAME, MODEL_INSTANCE))
splitter = APIUrl(request)
self.assertTrue(splitter.is_valid_request())
self.assertTrue(splitter.is_model_instance_request())
self.assertEqual(MODEL_NAME, splitter.REQUESTED_MODEL)
self.assertEqual(MODEL_INSTANCE, splitter.REQUESTED_MODEL_INSTANCE)
# Test a reserved URL request
reserved_url = ReservedURL.LOGOUT
request = self.factory.get("/api/{}/".format(reserved_url))
splitter = APIUrl(request)
self.assertTrue(splitter.is_valid_request())
self.assertTrue(splitter.is_reserved_url())
self.assertEqual(reserved_url, splitter.RESERVED_URL)
# Test a custom request
reserved_url = ReservedURL.LOGOUT
request = self.factory.get("/api/{}/".format(reserved_url))
splitter = APIUrl(request)
self.assertTrue(splitter.is_valid_request())
self.assertTrue(splitter.is_reserved_url())
self.assertEqual(reserved_url, splitter.RESERVED_URL)<|fim▁end|>
|
foo = Foo.objects.get(id=1)
|
<|file_name|>KernelMulticoreTest.java<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2013-2014 Kametic <[email protected]>
*
* Licensed under the GNU LESSER GENERAL PUBLIC LICENSE, Version 3, 29 June 2007;
* or any later version
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.gnu.org/licenses/lgpl-3.0.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.nuun.kernel.core.internal;
import static io.nuun.kernel.core.NuunCore.createKernel;
import static io.nuun.kernel.core.NuunCore.newKernelConfiguration;
import static org.fest.assertions.Assertions.assertThat;
import io.nuun.kernel.api.Kernel;
import io.nuun.kernel.core.pluginsit.dummy1.DummyPlugin;
import io.nuun.kernel.core.pluginsit.dummy23.DummyPlugin2;
import io.nuun.kernel.core.pluginsit.dummy23.DummyPlugin3;
import io.nuun.kernel.core.pluginsit.dummy4.DummyPlugin4;
<|fim▁hole|>
import org.junit.Test;
public class KernelMulticoreTest
{
@Test
public void dependee_plugins_that_misses_should_be_source_of_error() throws InterruptedException
{
CountDownLatch startLatch = new CountDownLatch(1);
for (int threadNo = 0; threadNo < 2; threadNo++) {
Thread t = new KernelHolder(startLatch);
t.start();
}
// give the threads a chance to start up; we could perform
// initialisation code here as well.
Thread.sleep(200);
startLatch.countDown();
}
static class KernelHolder extends Thread
{
public KernelHolder(CountDownLatch startLatch)
{
}
@SuppressWarnings("unchecked")
@Override
public void run()
{
// try
{
System.out.println("Before");
// startLatch.await();
KernelCore underTest;
DummyPlugin4 plugin4 = new DummyPlugin4();
underTest = (KernelCore) createKernel(
//
newKernelConfiguration() //
.params (
DummyPlugin.ALIAS_DUMMY_PLUGIN1 , "WAZAAAA",
DummyPlugin.NUUNROOTALIAS , "internal,"+KernelCoreTest.class.getPackage().getName()
)
);
assertThat(underTest.name()).startsWith(Kernel.KERNEL_PREFIX_NAME);
System.out.println(">" + underTest.name());
underTest.addPlugins( DummyPlugin2.class);
underTest.addPlugins( DummyPlugin3.class);
underTest.addPlugins( plugin4);
underTest.addPlugins( DummyPlugin5.class);
underTest.init();
assertThat(underTest.isInitialized()).isTrue();
System.out.println(">" + underTest.name() + " initialized = " + underTest.isInitialized());
underTest.start();
assertThat(underTest.isStarted()).isTrue();
System.out.println(">" + underTest.name() + " started = " + underTest.isStarted());
underTest.stop();
}
// catch (InterruptedException e)
// {
// e.printStackTrace();
// }
}
}
}<|fim▁end|>
|
import io.nuun.kernel.core.pluginsit.dummy5.DummyPlugin5;
import java.util.concurrent.CountDownLatch;
|
<|file_name|>main.py<|end_file_name|><|fim▁begin|>from couchpotato.api import addApiView
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
from couchpotato.core.helpers.encoding import ss
from couchpotato.core.helpers.request import jsonified
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.environment import Env
from datetime import datetime
from dateutil.parser import parse
from git.repository import LocalRepository
import json
import os
import shutil
import tarfile
import time
import traceback
import version
log = CPLog(__name__)
class Updater(Plugin):
available_notified = False
def __init__(self):
if Env.get('desktop'):
self.updater = DesktopUpdater()
elif os.path.isdir(os.path.join(Env.get('app_dir'), '.git')):
self.updater = GitUpdater(self.conf('git_command', default = 'git'))
else:
self.updater = SourceUpdater()
fireEvent('schedule.interval', 'updater.check', self.autoUpdate, hours = 6)
addEvent('app.load', self.autoUpdate)
addEvent('updater.info', self.info)
addApiView('updater.info', self.getInfo, docs = {
'desc': 'Get updater information',
'return': {
'type': 'object',
'example': """{
'last_check': "last checked for update",
'update_version': "available update version or empty",
'version': current_cp_version
}"""}
})
addApiView('updater.update', self.doUpdateView)
addApiView('updater.check', self.checkView, docs = {
'desc': 'Check for available update',
'return': {'type': 'see updater.info'}
})
def autoUpdate(self):
if self.check() and self.conf('automatic') and not self.updater.update_failed:
if self.updater.doUpdate():
# Notify before restarting
try:
if self.conf('notification'):
info = self.updater.info()
version_date = datetime.fromtimestamp(info['update_version']['date'])
fireEvent('updater.updated', 'Updated to a new version with hash "%s", this version is from %s' % (info['update_version']['hash'], version_date), data = info)
except:
log.error('Failed notifying for update: %s', traceback.format_exc())
fireEventAsync('app.restart')
return True
return False
def check(self):
if self.isDisabled():
return
if self.updater.check():
if not self.available_notified and self.conf('notification') and not self.conf('automatic'):
fireEvent('updater.available', message = 'A new update is available', data = self.updater.info())
self.available_notified = True
return True
return False
def info(self):
return self.updater.info()
def getInfo(self):
return jsonified(self.updater.info())
def checkView(self):
return jsonified({
'update_available': self.check(),
'info': self.updater.info()
})
def doUpdateView(self):
self.check()
if not self.updater.update_version:
log.error('Trying to update when no update is available.')
success = False
else:
success = self.updater.doUpdate()
if success:
fireEventAsync('app.restart')
# Assume the updater handles things
if not success:
success = True
return jsonified({
'success': success
})
class BaseUpdater(Plugin):
repo_user = 'jayme-github'
repo_name = 'CouchPotatoServer'
branch = version.BRANCH
version = None
update_failed = False
update_version = None
last_check = 0
def doUpdate(self):
pass
def getInfo(self):
return jsonified(self.info())
def info(self):
return {
'last_check': self.last_check,
'update_version': self.update_version,
'version': self.getVersion(),
'repo_name': '%s/%s' % (self.repo_user, self.repo_name),
'branch': self.branch,
}
def check(self):
pass
def deletePyc(self, only_excess = True):
for root, dirs, files in os.walk(ss(Env.get('app_dir'))):
pyc_files = filter(lambda filename: filename.endswith('.pyc'), files)
py_files = set(filter(lambda filename: filename.endswith('.py'), files))
excess_pyc_files = filter(lambda pyc_filename: pyc_filename[:-1] not in py_files, pyc_files) if only_excess else pyc_files
for excess_pyc_file in excess_pyc_files:
full_path = os.path.join(root, excess_pyc_file)
log.debug('Removing old PYC file: %s', full_path)
try:
os.remove(full_path)
except:
log.error('Couldn\'t remove %s: %s', (full_path, traceback.format_exc()))
for dir_name in dirs:
full_path = os.path.join(root, dir_name)
if len(os.listdir(full_path)) == 0:
try:
os.rmdir(full_path)
except:
log.error('Couldn\'t remove empty directory %s: %s', (full_path, traceback.format_exc()))
class GitUpdater(BaseUpdater):
def __init__(self, git_command):
self.repo = LocalRepository(Env.get('app_dir'), command = git_command)
def doUpdate(self):
try:
log.debug('Stashing local changes')
self.repo.saveStash()
log.info('Updating to latest version')
self.repo.pull()
# Delete leftover .pyc files
self.deletePyc()
return True
except:
log.error('Failed updating via GIT: %s', traceback.format_exc())
self.update_failed = True
return False
def getVersion(self):
if not self.version:
try:
output = self.repo.getHead() # Yes, please
log.debug('Git version output: %s', output.hash)
self.version = {
'hash': output.hash[:8],
'date': output.getDate(),
'type': 'git',
}
except Exception, e:
log.error('Failed using GIT updater, running from source, you need to have GIT installed. %s', e)
return 'No GIT'
return self.version
def check(self):
if self.update_version:
return True
log.info('Checking for new version on github for %s', self.repo_name)
if not Env.get('dev'):
self.repo.fetch()
current_branch = self.repo.getCurrentBranch().name
for branch in self.repo.getRemoteByName('origin').getBranches():
if current_branch == branch.name:
local = self.repo.getHead()
remote = branch.getHead()
log.info('Versions, local:%s, remote:%s', (local.hash[:8], remote.hash[:8]))
if local.getDate() < remote.getDate():
self.update_version = {
'hash': remote.hash[:8],
'date': remote.getDate(),
}
return True
self.last_check = time.time()
return False
class SourceUpdater(BaseUpdater):
def __init__(self):
# Create version file in cache
self.version_file = os.path.join(Env.get('cache_dir'), 'version')
if not os.path.isfile(self.version_file):
self.createFile(self.version_file, json.dumps(self.latestCommit()))
def doUpdate(self):
try:
url = 'https://github.com/%s/%s/tarball/%s' % (self.repo_user, self.repo_name, self.branch)
destination = os.path.join(Env.get('cache_dir'), self.update_version.get('hash') + '.tar.gz')
extracted_path = os.path.join(Env.get('cache_dir'), 'temp_updater')
destination = fireEvent('file.download', url = url, dest = destination, single = True)
# Cleanup leftover from last time
if os.path.isdir(extracted_path):
self.removeDir(extracted_path)
self.makeDir(extracted_path)
# Extract
tar = tarfile.open(destination)
tar.extractall(path = extracted_path)
tar.close()
os.remove(destination)
if self.replaceWith(os.path.join(extracted_path, os.listdir(extracted_path)[0])):
self.removeDir(extracted_path)
# Write update version to file
self.createFile(self.version_file, json.dumps(self.update_version))
return True
except:
log.error('Failed updating: %s', traceback.format_exc())
self.update_failed = True
return False
def replaceWith(self, path):
app_dir = ss(Env.get('app_dir'))
# Get list of files we want to overwrite
self.deletePyc()
existing_files = []
for root, subfiles, filenames in os.walk(app_dir):
for filename in filenames:
existing_files.append(os.path.join(root, filename))
for root, subfiles, filenames in os.walk(path):
for filename in filenames:
fromfile = os.path.join(root, filename)
tofile = os.path.join(app_dir, fromfile.replace(path + os.path.sep, ''))
if not Env.get('dev'):
try:
if os.path.isfile(tofile):
os.remove(tofile)
dirname = os.path.dirname(tofile)
if not os.path.isdir(dirname):
self.makeDir(dirname)
shutil.move(fromfile, tofile)
try:
existing_files.remove(tofile)
except ValueError:
pass
except:
log.error('Failed overwriting file "%s": %s', (tofile, traceback.format_exc()))
return False
if Env.get('app_dir') not in Env.get('data_dir'):
for still_exists in existing_files:
try:
os.remove(still_exists)
except:
log.error('Failed removing non-used file: %s', traceback.format_exc())
return True
def removeDir(self, path):
try:
if os.path.isdir(path):
shutil.rmtree(path)
except OSError, inst:
os.chmod(inst.filename, 0777)
self.removeDir(path)
def getVersion(self):
if not self.version:
try:
f = open(self.version_file, 'r')
output = json.loads(f.read())
f.close()
<|fim▁hole|> log.debug('Source version output: %s', output)
self.version = output
self.version['type'] = 'source'
except Exception, e:
log.error('Failed using source updater. %s', e)
return {}
return self.version
def check(self):
current_version = self.getVersion()
try:
latest = self.latestCommit()
if latest.get('hash') != current_version.get('hash') and latest.get('date') >= current_version.get('date'):
self.update_version = latest
self.last_check = time.time()
except:
log.error('Failed updating via source: %s', traceback.format_exc())
return self.update_version is not None
def latestCommit(self):
try:
url = 'https://api.github.com/repos/%s/%s/commits?per_page=1&sha=%s' % (self.repo_user, self.repo_name, self.branch)
data = self.getCache('github.commit', url = url)
commit = json.loads(data)[0]
return {
'hash': commit['sha'],
'date': int(time.mktime(parse(commit['commit']['committer']['date']).timetuple())),
}
except:
log.error('Failed getting latest request from github: %s', traceback.format_exc())
return {}
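# For reference, an abridged sketch of the GitHub commits payload parsed
# above -- only the keys this method actually reads are shown:
#
# [{"sha": "d6cd1e2bd19e03a81132a23b2025920577f84e37",
# "commit": {"committer": {"date": "2013-02-27T19:35:32Z"}}}]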
class DesktopUpdater(BaseUpdater):
def __init__(self):
self.desktop = Env.get('desktop')
def doUpdate(self):
try:
def do_restart(e):
if e['status'] == 'done':
fireEventAsync('app.restart')
elif e['status'] == 'error':
log.error('Failed updating desktop: %s', e['exception'])
self.update_failed = True
self.desktop._esky.auto_update(callback = do_restart)
return
except:
self.update_failed = True
return False
def info(self):
return {
'last_check': self.last_check,
'update_version': self.update_version,
'version': self.getVersion(),
'branch': self.branch,
}
def check(self):
current_version = self.getVersion()
try:
latest = self.desktop._esky.find_update()
if latest and latest != current_version.get('hash'):
self.update_version = {
'hash': latest,
'date': None,
'changelog': self.desktop._changelogURL,
}
self.last_check = time.time()
except:
log.error('Failed updating desktop: %s', traceback.format_exc())
return self.update_version is not None
def getVersion(self):
return {
'hash': self.desktop._esky.active_version,
'date': None,
'type': 'desktop',
}<|fim▁end|>
| |
<|file_name|>socks5.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015 The Phtevencoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Dummy Socks5 server for testing.
'''
from __future__ import print_function, division, unicode_literals
import socket, threading, Queue
import traceback, sys
### Protocol constants
class Command:
CONNECT = 0x01
class AddressType:
IPV4 = 0x01
DOMAINNAME = 0x03
IPV6 = 0x04
### Utility functions
def recvall(s, n):
'''Receive n bytes from a socket, or fail'''
rv = bytearray()
while n > 0:
d = s.recv(n)
if not d:
raise IOError('Unexpected end of stream')
rv.extend(d)
n -= len(d)
return rv
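# Illustrative use of recvall on a hypothetical socket `s`: SOCKS5 fields are
# fixed-width, so exact-length reads avoid the short-read bugs a bare
# s.recv(n) can hide, e.g.:
# ver, method = recvall(s, 2) # method-selection reply is always 2 bytes
# ver, rep, rsv, atyp = recvall(s, 4) # first 4 bytes of a server reply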
### Implementation classes
class Socks5Configuration(object):
'''Proxy configuration'''
def __init__(self):
self.addr = None # Bind address (must be set)
self.af = socket.AF_INET # Bind address family<|fim▁hole|> self.unauth = False # Support unauthenticated
self.auth = False # Support authentication
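# Minimal wiring sketch for a test (names are this module's own; the fixed
# port is an assumption -- any free local port works):
# conf = Socks5Configuration()
# conf.addr = ('127.0.0.1', 1080)
# conf.unauth = True # accept unauthenticated clients
# serv = Socks5Server(conf)
# serv.start()
# ... exercise the client under test against conf.addr ...
# serv.stop()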
class Socks5Command(object):
'''Information about an incoming socks5 command'''
def __init__(self, cmd, atyp, addr, port, username, password):
self.cmd = cmd # Command (one of Command.*)
self.atyp = atyp # Address type (one of AddressType.*)
self.addr = addr # Address
self.port = port # Port to connect to
self.username = username
self.password = password
def __repr__(self):
return 'Socks5Command(%s,%s,%s,%s,%s,%s)' % (self.cmd, self.atyp, self.addr, self.port, self.username, self.password)
class Socks5Connection(object):
def __init__(self, serv, conn, peer):
self.serv = serv
self.conn = conn
self.peer = peer
def handle(self):
'''
Handle socks5 request according to RFC1928
'''
try:
# Verify socks version
ver = recvall(self.conn, 1)[0]
if ver != 0x05:
raise IOError('Invalid socks version %i' % ver)
# Choose authentication method
nmethods = recvall(self.conn, 1)[0]
methods = bytearray(recvall(self.conn, nmethods))
method = None
if 0x02 in methods and self.serv.conf.auth:
method = 0x02 # username/password
elif 0x00 in methods and self.serv.conf.unauth:
method = 0x00 # unauthenticated
if method is None:
raise IOError('No supported authentication method was offered')
# Send response
self.conn.sendall(bytearray([0x05, method]))
# Read authentication (optional)
username = None
password = None
if method == 0x02:
ver = recvall(self.conn, 1)[0]
if ver != 0x01:
raise IOError('Invalid auth packet version %i' % ver)
ulen = recvall(self.conn, 1)[0]
username = str(recvall(self.conn, ulen))
plen = recvall(self.conn, 1)[0]
password = str(recvall(self.conn, plen))
# Send authentication response
self.conn.sendall(bytearray([0x01, 0x00]))
# Read connect request
(ver,cmd,rsv,atyp) = recvall(self.conn, 4)
if ver != 0x05:
raise IOError('Invalid socks version %i in connect request' % ver)
if cmd != Command.CONNECT:
raise IOError('Unhandled command %i in connect request' % cmd)
if atyp == AddressType.IPV4:
addr = recvall(self.conn, 4)
elif atyp == AddressType.DOMAINNAME:
n = recvall(self.conn, 1)[0]
addr = str(recvall(self.conn, n))
elif atyp == AddressType.IPV6:
addr = recvall(self.conn, 16)
else:
raise IOError('Unknown address type %i' % atyp)
port_hi,port_lo = recvall(self.conn, 2)
port = (port_hi << 8) | port_lo
# Send dummy response
self.conn.sendall(bytearray([0x05, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]))
cmdin = Socks5Command(cmd, atyp, addr, port, username, password)
self.serv.queue.put(cmdin)
print('Proxy: ', cmdin)
# Fall through to disconnect
except Exception,e:
traceback.print_exc(file=sys.stderr)
self.serv.queue.put(e)
finally:
self.conn.close()
class Socks5Server(object):
def __init__(self, conf):
self.conf = conf
self.s = socket.socket(conf.af)
self.s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.s.bind(conf.addr)
self.s.listen(5)
self.running = False
self.thread = None
self.queue = Queue.Queue() # report connections and exceptions to client
def run(self):
while self.running:
(sockconn, peer) = self.s.accept()
if self.running:
conn = Socks5Connection(self, sockconn, peer)
thread = threading.Thread(None, conn.handle)
thread.daemon = True
thread.start()
def start(self):
assert(not self.running)
self.running = True
self.thread = threading.Thread(None, self.run)
self.thread.daemon = True
self.thread.start()
def stop(self):
self.running = False
# connect to self to end run loop
s = socket.socket(self.conf.af)
s.connect(self.conf.addr)
s.close()
self.thread.join()<|fim▁end|>
| |
<|file_name|>test-generator.py<|end_file_name|><|fim▁begin|># ==================================================================================================
# Copyright 2011 Twitter, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|># ==================================================================================================
__author__ = 'John Sirios'
from twitter.pants.base.generator import TemplateData
import unittest
class TemplateDataTest(unittest.TestCase):
def setUp(self):
self.data = TemplateData(foo = 'bar', baz = 42)
def test_member_access(self):
try:
self.data.bip
self.fail("Access to undefined template data slots should raise")
except AttributeError:
# expected
pass
def test_member_mutation(self):
try:
self.data.baz = 1 / 137
self.fail("Mutation of a template data's slots should not be allowed")
except AttributeError:
# expected
pass
def test_extend(self):
self.assertEqual(self.data.extend(jake = 0.3), TemplateData(baz = 42, foo = 'bar', jake = 0.3))
def test_equals(self):
self.assertEqual(self.data, TemplateData(baz = 42).extend(foo = 'bar'))<|fim▁end|>
|
# See the License for the specific language governing permissions and
# limitations under the License.
|
<|file_name|>v1_server_address_by_client_cidr.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.23
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V1ServerAddressByClientCIDR(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'client_cidr': 'str',
'server_address': 'str'
}
attribute_map = {
'client_cidr': 'clientCIDR',
'server_address': 'serverAddress'
}
def __init__(self, client_cidr=None, server_address=None, local_vars_configuration=None): # noqa: E501
"""V1ServerAddressByClientCIDR - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._client_cidr = None
self._server_address = None
self.discriminator = None
self.client_cidr = client_cidr
self.server_address = server_address
@property
def client_cidr(self):
"""Gets the client_cidr of this V1ServerAddressByClientCIDR. # noqa: E501
The CIDR with which clients can match their IP to figure out the server address that they should use. # noqa: E501
:return: The client_cidr of this V1ServerAddressByClientCIDR. # noqa: E501
:rtype: str
"""<|fim▁hole|>
@client_cidr.setter
def client_cidr(self, client_cidr):
"""Sets the client_cidr of this V1ServerAddressByClientCIDR.
The CIDR with which clients can match their IP to figure out the server address that they should use. # noqa: E501
:param client_cidr: The client_cidr of this V1ServerAddressByClientCIDR. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and client_cidr is None: # noqa: E501
raise ValueError("Invalid value for `client_cidr`, must not be `None`") # noqa: E501
self._client_cidr = client_cidr
@property
def server_address(self):
"""Gets the server_address of this V1ServerAddressByClientCIDR. # noqa: E501
Address of this server, suitable for a client that matches the above CIDR. This can be a hostname, hostname:port, IP or IP:port. # noqa: E501
:return: The server_address of this V1ServerAddressByClientCIDR. # noqa: E501
:rtype: str
"""
return self._server_address
@server_address.setter
def server_address(self, server_address):
"""Sets the server_address of this V1ServerAddressByClientCIDR.
Address of this server, suitable for a client that matches the above CIDR. This can be a hostname, hostname:port, IP or IP:port. # noqa: E501
:param server_address: The server_address of this V1ServerAddressByClientCIDR. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and server_address is None: # noqa: E501
raise ValueError("Invalid value for `server_address`, must not be `None`") # noqa: E501
self._server_address = server_address
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1ServerAddressByClientCIDR):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1ServerAddressByClientCIDR):
return True
return self.to_dict() != other.to_dict()<|fim▁end|>
|
return self._client_cidr
|
<|file_name|>animatedText.ts<|end_file_name|><|fim▁begin|>/*
* Power BI Visualizations
*
* Copyright (c) Microsoft Corporation
* All rights reserved.
* MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the ""Software""), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/// <reference path="../_references.ts"/>
module powerbi.visuals {
export interface AnimatedTextConfigurationSettings {
align?: string;
maxFontSize?: number;
}
/**
* Base class for values that are animated when resized.
*/
export class AnimatedText {
/** Note: Public for testability */
public static formatStringProp: DataViewObjectPropertyIdentifier = {
objectName: 'general',
propertyName: 'formatString',
};
protected animator: IGenericAnimator;
private name: string;
/** Note: Public for testability */
public svg: D3.Selection;
public currentViewport: IViewport;
public value: any;
public hostServices: IVisualHostServices;
public style: IVisualStyle;
public visualConfiguration: AnimatedTextConfigurationSettings;
public metaDataColumn: DataViewMetadataColumn;
private mainText: ClassAndSelector = {
class: 'mainText',
selector: '.mainText'
};
public constructor(name: string) {
this.name = name;
this.visualConfiguration = { maxFontSize: 60 };
}
public getMetaDataColumn(dataView: DataView) {
if (dataView && dataView.metadata && dataView.metadata.columns) {
for (let i = 0, ilen = dataView.metadata.columns.length; i < ilen; i++) {
let column = dataView.metadata.columns[i];
if (column.isMeasure) {
this.metaDataColumn = column;
break;
}
}
}
}
public getAdjustedFontHeight(
availableWidth: number,
textToMeasure: string,
seedFontHeight: number): number {
// set up the node so we don't keep appending/removing it during the computation
let nodeSelection = this.svg.append('text').text(textToMeasure);
let fontHeight = this.getAdjustedFontHeightCore(
nodeSelection,
availableWidth,
seedFontHeight,
0);
nodeSelection.remove();
return fontHeight;
}
private getAdjustedFontHeightCore(
nodeToMeasure: D3.Selection,
availableWidth: number,
seedFontHeight: number,
iteration: number): number {
// Too many attempts - just return what we have so we don't sacrifice perf
if (iteration > 10)
return seedFontHeight;
nodeToMeasure.attr('font-size', seedFontHeight);
let candidateLength = TextMeasurementService.measureSvgTextElementWidth(nodeToMeasure[0][0]);
if (candidateLength < availableWidth)
return seedFontHeight;
return this.getAdjustedFontHeightCore(nodeToMeasure, availableWidth, seedFontHeight * 0.9, iteration + 1);
}
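// Note on the shrink schedule above: each retry multiplies the candidate
// font size by 0.9, so after the 10-iteration cap the size is at most
// seedFontHeight * 0.9^10, roughly 0.35 * seedFontHeight; if the text still
// overflows at that point, the last candidate is accepted rather than
// shrinking forever.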
public clear() {
this.svg.select(this.mainText.selector).text('');
}
public doValueTransition(
startValue: any,
endValue: any,
displayUnitSystemType: DisplayUnitSystemType,
animationOptions: AnimationOptions,
duration: number,
forceUpdate: boolean,
formatter?: IValueFormatter): void {
if (!forceUpdate && startValue === endValue && endValue != null)
return;<|fim▁hole|> startValue = 0;
let svg = this.svg,
viewport = this.currentViewport,
height = viewport.height,
width = viewport.width,
endValueArr = [endValue],
seedFontHeight = this.getSeedFontHeight(width, height),
translateX = this.getTranslateX(width),
translateY = this.getTranslateY(seedFontHeight),
metaDataColumn = this.metaDataColumn;
// Respect the formatter default value
if (!formatter) {
formatter = valueFormatter.create({
format: this.getFormatString(metaDataColumn),
value: endValue,
displayUnitSystemType: displayUnitSystemType,
formatSingleValues: true,
allowFormatBeautification: true,
columnType: metaDataColumn ? metaDataColumn.type : undefined
});
}
let startText = formatter.format(startValue),
endText = formatter.format(endValue);
svg.attr('class', this.name);
let textElement = svg
.selectAll('text')
.data(endValueArr);
textElement
.enter()
.append('text')
.attr('class', this.mainText.class);
let fontHeight = this.getAdjustedFontHeight(width, endText, seedFontHeight);
translateY = this.getTranslateY(fontHeight + (height - fontHeight) / 2);
let textElementUpdate = textElement
.text(startText)
.attr({
'text-anchor': this.getTextAnchor(),
'font-size': fontHeight,
'transform': SVGUtil.translate(translateX, translateY)
});
if (endValue == null) {
textElementUpdate.text(endText);
}
else if (metaDataColumn && AxisHelper.isDateTime(metaDataColumn.type)) {
textElementUpdate.text(endText);
}
else {
let interpolatedValue = startValue;
textElementUpdate
.transition()
.duration(duration)
.tween('text', function (d) {
let i = d3.interpolate(interpolatedValue, d);
return function (t) {
let num = i(t);
this.textContent = formatter.format(num);
};
});
}
SVGUtil.flushAllD3TransitionsIfNeeded(animationOptions);
}
public getSeedFontHeight(boundingWidth: number, boundingHeight: number) {
// Simply an estimate - it should eventually be modified based on the actual text length
let estimatedSize = Math.floor(Math.min(boundingWidth, boundingHeight) * 0.75);
let maxFontSize = this.visualConfiguration.maxFontSize;
if (maxFontSize)
return Math.min(maxFontSize, estimatedSize);
return estimatedSize;
}
public getTranslateX(width: number): number {
if (this.visualConfiguration) {
switch (this.visualConfiguration.align) {
case 'left':
return 0;
case 'right':
return width;
}
}
return width / 2;
}
public getTranslateY(height: number): number {
return height;
}
public getTextAnchor(): string {
if (this.visualConfiguration) {
switch (this.visualConfiguration.align) {
case 'left':
return 'start';
case 'right':
return 'end';
}
}
return 'middle';
}
protected getFormatString(column: DataViewMetadataColumn): string {
debug.assertAnyValue(column, 'column');
return valueFormatter.getFormatString(column, AnimatedText.formatStringProp);
}
}
}<|fim▁end|>
|
if (!startValue)
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
__version__ = '0.3.0'
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from django.contrib.sites.models import Site
# Create your models here.
class Link(models.Model):
url = models.URLField(max_length=512)
site = models.ForeignKey(Site, on_delete=models.SET_NULL, null=True)
request_times = models.PositiveIntegerField(default=0)
updated = models.DateTimeField(auto_now=True)
created = models.DateTimeField(auto_now_add=True)
def __str__(self):
return '{}-{}'.format(self.pk, self.url)
class RateLimit(models.Model):
ip = models.GenericIPAddressField(unique=True)
start_time = models.DateTimeField()
count = models.PositiveIntegerField(default=0)<|fim▁hole|>
def __str__(self):
return self.ip<|fim▁end|>
| |
<|file_name|>doc.rs<|end_file_name|><|fim▁begin|>// rustfmt-normalize_comments: true<|fim▁hole|>//sdffsdfasdf<|fim▁end|>
|
// Part of multiple.rs
// sadfsdfa
|
<|file_name|>helloworld.py<|end_file_name|><|fim▁begin|># coding=utf-8
__author__ = 'litao'
# -*- coding:utf-8 -*-
import urllib
import urllib2
import re
import thread
import time
class QSBK:
def __init__(self):
self.pageIndex = 1
self.user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
self.headers = { 'User-Agent' : self.user_agent }
self.stories = []
self.enable = False
def getPage(self,pageIndex):
try:
url = 'http://www.qiushibaike.com/hot/page/' + str(pageIndex)
request = urllib2.Request(url,headers = self.headers)
response = urllib2.urlopen(request)
pageCode = response.read().decode('utf-8')
return pageCode
except urllib2.URLError, e:
if hasattr(e,"reason"):
print u"连接糗事百科失败,错误原因",e.reason
return None
#Take the HTML source of one page and return that page's jokes that contain no images
def getPageItems(self,pageIndex):
pageCode = self.getPage(pageIndex)
if not pageCode:
print "页面加载失败...."
return None
pattern = re.compile('<div.*?author">.*?<a.*?<img.*?>(.*?)</a>.*?<div.*?'+
'content">(.*?)<!--(.*?)-->.*?</div>(.*?)<div class="stats.*?class="number">(.*?)</i>',re.S)
items = re.findall(pattern,pageCode)
#Used to store the jokes of each page
pageStories = []
#Iterate over the items matched by the regex
for item in items:
#Check whether the item contains an image
haveImg = re.search("img",item[3])
#If it contains no image, add it to the list
if not haveImg:
replaceBR = re.compile('<br/>')
text = re.sub(replaceBR,"\n",item[1])
#item[0] is the joke's author, item[1] the content, item[2] the publish time, item[4] the like count
pageStories.append([item[0].strip(),text.strip(),item[2].strip(),item[4].strip()])
return pageStories
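# Illustrative shape of one parsed entry (hypothetical values):
# [u'some_author', u'joke text...', u'2015-06-01 12:00:00', u'1024']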
#Load and extract the page content, adding it to the list
def loadPage(self):
#If fewer than 2 unread pages remain, load a new one
if self.enable == True:
if len(self.stories) < 2:
#Fetch a new page
pageStories = self.getPageItems(self.pageIndex)
#Store that page's jokes in the global list
if pageStories:
self.stories.append(pageStories)
#After fetching, increment the page index so the next read loads the following page
self.pageIndex += 1
#Call this method; each press of Enter prints one joke
def getOneStory(self,pageStories,page):
#Iterate over the jokes of one page
for story in pageStories:
#Wait for user input
input = raw_input()
#Each time Enter is pressed, check whether a new page should be loaded
self.loadPage()
#If the input is Q, the program ends
if input == "Q":
self.enable = False
return
print u"第%d页\t发布人:%s\t发布时间:%s\t赞:%s\n%s" %(page,story[0],story[2],story[3],story[1])
<|fim▁hole|> def start(self):
print u"正在读取糗事百科,按回车查看新段子,Q退出"
#使变量为True,程序可以正常运行
self.enable = True
#Load one page of content first
self.loadPage()
#Local variable tracking which page is currently being read
nowPage = 0
while self.enable:
if len(self.stories)>0:
#Take one page of jokes from the global list
pageStories = self.stories[0]
#Increment the number of pages read so far
nowPage += 1
#Delete the first element of the global list, since it has been consumed
del self.stories[0]
#Print the jokes of this page
self.getOneStory(pageStories,nowPage)
spider = QSBK()
spider.start()<|fim▁end|>
|
#Start method
|
<|file_name|>test_platforms.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
import errno
import os
import sys
import signal
from celery import _find_option_with_arg
from celery import platforms
from celery.five import open_fqdn
from celery.platforms import (
get_fdmax,
ignore_errno,
set_process_title,
signals,
maybe_drop_privileges,
setuid,
setgid,
initgroups,
parse_uid,
parse_gid,
detached,
DaemonContext,
create_pidlock,
Pidfile,
LockFailed,
setgroups,
_setgroups_hack,
close_open_fds,
)
try:
import resource
except ImportError: # pragma: no cover
resource = None # noqa
from celery.tests.case import (
Case, WhateverIO, Mock, SkipTest,
call, override_stdouts, mock_open, patch,
)
class test_find_option_with_arg(Case):
def test_long_opt(self):
self.assertEqual(
_find_option_with_arg(['--foo=bar'], long_opts=['--foo']),
'bar'
)
def test_short_opt(self):
self.assertEqual(
_find_option_with_arg(['-f', 'bar'], short_opts=['-f']),
'bar'
)
class test_close_open_fds(Case):
def test_closes(self):
with patch('os.close') as _close:
with patch('os.closerange', create=True) as closerange:
with patch('celery.platforms.get_fdmax') as fdmax:
fdmax.return_value = 3
close_open_fds()
if not closerange.called:
_close.assert_has_calls([call(2), call(1), call(0)])
_close.side_effect = OSError()
_close.side_effect.errno = errno.EBADF
close_open_fds()
class test_ignore_errno(Case):
def test_raises_EBADF(self):
with ignore_errno('EBADF'):
exc = OSError()
exc.errno = errno.EBADF
raise exc
def test_otherwise(self):
with self.assertRaises(OSError):
with ignore_errno('EBADF'):
exc = OSError()
exc.errno = errno.ENOENT
raise exc
class test_set_process_title(Case):
def when_no_setps(self):
prev, platforms._setproctitle = platforms._setproctitle, None
try:
set_process_title('foo')
finally:
platforms._setproctitle = prev
class test_Signals(Case):
@patch('signal.getsignal')
def test_getitem(self, getsignal):
signals['SIGINT']
getsignal.assert_called_with(signal.SIGINT)
def test_supported(self):
self.assertTrue(signals.supported('INT'))
self.assertFalse(signals.supported('SIGIMAGINARY'))
def test_reset_alarm(self):
if sys.platform == 'win32':
raise SkipTest('signal.alarm not available on Windows')
with patch('signal.alarm') as _alarm:
signals.reset_alarm()
_alarm.assert_called_with(0)
def test_arm_alarm(self):
if hasattr(signal, 'setitimer'):
with patch('signal.setitimer', create=True) as seti:
signals.arm_alarm(30)
self.assertTrue(seti.called)
def test_signum(self):
self.assertEqual(signals.signum(13), 13)
self.assertEqual(signals.signum('INT'), signal.SIGINT)
self.assertEqual(signals.signum('SIGINT'), signal.SIGINT)
with self.assertRaises(TypeError):
signals.signum('int')
signals.signum(object())
@patch('signal.signal')
def test_ignore(self, set):
signals.ignore('SIGINT')
set.assert_called_with(signals.signum('INT'), signals.ignored)
signals.ignore('SIGTERM')
set.assert_called_with(signals.signum('TERM'), signals.ignored)
@patch('signal.signal')
def test_setitem(self, set):
handle = lambda *a: a
signals['INT'] = handle
set.assert_called_with(signal.SIGINT, handle)
@patch('signal.signal')
def test_setitem_raises(self, set):
set.side_effect = ValueError()
signals['INT'] = lambda *a: a
if not platforms.IS_WINDOWS:
class test_get_fdmax(Case):
@patch('resource.getrlimit')
def test_when_infinity(self, getrlimit):
with patch('os.sysconf') as sysconfig:
sysconfig.side_effect = KeyError()
getrlimit.return_value = [None, resource.RLIM_INFINITY]
default = object()
self.assertIs(get_fdmax(default), default)
@patch('resource.getrlimit')
def test_when_actual(self, getrlimit):
with patch('os.sysconf') as sysconfig:
sysconfig.side_effect = KeyError()
getrlimit.return_value = [None, 13]
self.assertEqual(get_fdmax(None), 13)
class test_maybe_drop_privileges(Case):
@patch('celery.platforms.parse_uid')
@patch('pwd.getpwuid')
@patch('celery.platforms.setgid')
@patch('celery.platforms.setuid')
@patch('celery.platforms.initgroups')
def test_with_uid(self, initgroups, setuid, setgid,
getpwuid, parse_uid):<|fim▁hole|> class pw_struct(object):
pw_gid = 50001
def raise_on_second_call(*args, **kwargs):
setuid.side_effect = OSError()
setuid.side_effect.errno = errno.EPERM
setuid.side_effect = raise_on_second_call
getpwuid.return_value = pw_struct()
parse_uid.return_value = 5001
maybe_drop_privileges(uid='user')
parse_uid.assert_called_with('user')
getpwuid.assert_called_with(5001)
setgid.assert_called_with(50001)
initgroups.assert_called_with(5001, 50001)
setuid.assert_has_calls([call(5001), call(0)])
@patch('celery.platforms.parse_uid')
@patch('celery.platforms.parse_gid')
@patch('celery.platforms.setgid')
@patch('celery.platforms.setuid')
@patch('celery.platforms.initgroups')
def test_with_guid(self, initgroups, setuid, setgid,
parse_gid, parse_uid):
def raise_on_second_call(*args, **kwargs):
setuid.side_effect = OSError()
setuid.side_effect.errno = errno.EPERM
setuid.side_effect = raise_on_second_call
parse_uid.return_value = 5001
parse_gid.return_value = 50001
maybe_drop_privileges(uid='user', gid='group')
parse_uid.assert_called_with('user')
parse_gid.assert_called_with('group')
setgid.assert_called_with(50001)
initgroups.assert_called_with(5001, 50001)
setuid.assert_has_calls([call(5001), call(0)])
setuid.side_effect = None
with self.assertRaises(RuntimeError):
maybe_drop_privileges(uid='user', gid='group')
setuid.side_effect = OSError()
setuid.side_effect.errno = errno.EINVAL
with self.assertRaises(OSError):
maybe_drop_privileges(uid='user', gid='group')
@patch('celery.platforms.setuid')
@patch('celery.platforms.setgid')
@patch('celery.platforms.parse_gid')
def test_only_gid(self, parse_gid, setgid, setuid):
parse_gid.return_value = 50001
maybe_drop_privileges(gid='group')
parse_gid.assert_called_with('group')
setgid.assert_called_with(50001)
self.assertFalse(setuid.called)
class test_setget_uid_gid(Case):
@patch('celery.platforms.parse_uid')
@patch('os.setuid')
def test_setuid(self, _setuid, parse_uid):
parse_uid.return_value = 5001
setuid('user')
parse_uid.assert_called_with('user')
_setuid.assert_called_with(5001)
@patch('celery.platforms.parse_gid')
@patch('os.setgid')
def test_setgid(self, _setgid, parse_gid):
parse_gid.return_value = 50001
setgid('group')
parse_gid.assert_called_with('group')
_setgid.assert_called_with(50001)
def test_parse_uid_when_int(self):
self.assertEqual(parse_uid(5001), 5001)
@patch('pwd.getpwnam')
def test_parse_uid_when_existing_name(self, getpwnam):
class pwent(object):
pw_uid = 5001
getpwnam.return_value = pwent()
self.assertEqual(parse_uid('user'), 5001)
@patch('pwd.getpwnam')
def test_parse_uid_when_nonexisting_name(self, getpwnam):
getpwnam.side_effect = KeyError('user')
with self.assertRaises(KeyError):
parse_uid('user')
def test_parse_gid_when_int(self):
self.assertEqual(parse_gid(50001), 50001)
@patch('grp.getgrnam')
def test_parse_gid_when_existing_name(self, getgrnam):
class grent(object):
gr_gid = 50001
getgrnam.return_value = grent()
self.assertEqual(parse_gid('group'), 50001)
@patch('grp.getgrnam')
def test_parse_gid_when_nonexisting_name(self, getgrnam):
getgrnam.side_effect = KeyError('group')
with self.assertRaises(KeyError):
parse_gid('group')
class test_initgroups(Case):
@patch('pwd.getpwuid')
@patch('os.initgroups', create=True)
def test_with_initgroups(self, initgroups_, getpwuid):
getpwuid.return_value = ['user']
initgroups(5001, 50001)
initgroups_.assert_called_with('user', 50001)
@patch('celery.platforms.setgroups')
@patch('grp.getgrall')
@patch('pwd.getpwuid')
def test_without_initgroups(self, getpwuid, getgrall, setgroups):
prev = getattr(os, 'initgroups', None)
try:
delattr(os, 'initgroups')
except AttributeError:
pass
try:
getpwuid.return_value = ['user']
class grent(object):
gr_mem = ['user']
def __init__(self, gid):
self.gr_gid = gid
getgrall.return_value = [grent(1), grent(2), grent(3)]
initgroups(5001, 50001)
setgroups.assert_called_with([1, 2, 3])
finally:
if prev:
os.initgroups = prev
class test_detached(Case):
def test_without_resource(self):
prev, platforms.resource = platforms.resource, None
try:
with self.assertRaises(RuntimeError):
detached()
finally:
platforms.resource = prev
@patch('celery.platforms._create_pidlock')
@patch('celery.platforms.signals')
@patch('celery.platforms.maybe_drop_privileges')
@patch('os.geteuid')
@patch(open_fqdn)
def test_default(self, open, geteuid, maybe_drop,
signals, pidlock):
geteuid.return_value = 0
context = detached(uid='user', gid='group')
self.assertIsInstance(context, DaemonContext)
signals.reset.assert_called_with('SIGCLD')
maybe_drop.assert_called_with(uid='user', gid='group')
open.return_value = Mock()
geteuid.return_value = 5001
context = detached(uid='user', gid='group', logfile='/foo/bar')
self.assertIsInstance(context, DaemonContext)
self.assertTrue(context.after_chdir)
context.after_chdir()
open.assert_called_with('/foo/bar', 'a')
open.return_value.close.assert_called_with()
context = detached(pidfile='/foo/bar/pid')
self.assertIsInstance(context, DaemonContext)
self.assertTrue(context.after_chdir)
context.after_chdir()
pidlock.assert_called_with('/foo/bar/pid')
class test_DaemonContext(Case):
@patch('os.fork')
@patch('os.setsid')
@patch('os._exit')
@patch('os.chdir')
@patch('os.umask')
@patch('os.close')
@patch('os.closerange')
@patch('os.open')
@patch('os.dup2')
def test_open(self, dup2, open, close, closer, umask, chdir,
_exit, setsid, fork):
x = DaemonContext(workdir='/opt/workdir', umask=0o22)
x.stdfds = [0, 1, 2]
fork.return_value = 0
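        # fork() returning 0 simulates always being the child, so the double
        # fork completes in-process: two forks, a setsid(), and no os._exit()
        # (asserted below). Returning 1 later simulates the parent, which
        # must _exit(0) immediately.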
with x:
self.assertTrue(x._is_open)
with x:
pass
self.assertEqual(fork.call_count, 2)
setsid.assert_called_with()
self.assertFalse(_exit.called)
chdir.assert_called_with(x.workdir)
umask.assert_called_with(0o22)
self.assertTrue(dup2.called)
fork.reset_mock()
fork.return_value = 1
x = DaemonContext(workdir='/opt/workdir')
x.stdfds = [0, 1, 2]
with x:
pass
self.assertEqual(fork.call_count, 1)
_exit.assert_called_with(0)
x = DaemonContext(workdir='/opt/workdir', fake=True)
x.stdfds = [0, 1, 2]
x._detach = Mock()
with x:
pass
self.assertFalse(x._detach.called)
x.after_chdir = Mock()
with x:
pass
x.after_chdir.assert_called_with()
class test_Pidfile(Case):
@patch('celery.platforms.Pidfile')
def test_create_pidlock(self, Pidfile):
p = Pidfile.return_value = Mock()
p.is_locked.return_value = True
p.remove_if_stale.return_value = False
with override_stdouts() as (_, err):
with self.assertRaises(SystemExit):
create_pidlock('/var/pid')
self.assertIn('already exists', err.getvalue())
p.remove_if_stale.return_value = True
ret = create_pidlock('/var/pid')
self.assertIs(ret, p)
def test_context(self):
p = Pidfile('/var/pid')
p.write_pid = Mock()
p.remove = Mock()
with p as _p:
self.assertIs(_p, p)
p.write_pid.assert_called_with()
p.remove.assert_called_with()
def test_acquire_raises_LockFailed(self):
p = Pidfile('/var/pid')
p.write_pid = Mock()
p.write_pid.side_effect = OSError()
with self.assertRaises(LockFailed):
with p:
pass
@patch('os.path.exists')
def test_is_locked(self, exists):
p = Pidfile('/var/pid')
exists.return_value = True
self.assertTrue(p.is_locked())
exists.return_value = False
self.assertFalse(p.is_locked())
def test_read_pid(self):
with mock_open() as s:
s.write('1816\n')
s.seek(0)
p = Pidfile('/var/pid')
self.assertEqual(p.read_pid(), 1816)
def test_read_pid_partially_written(self):
with mock_open() as s:
s.write('1816')
s.seek(0)
p = Pidfile('/var/pid')
with self.assertRaises(ValueError):
p.read_pid()
def test_read_pid_raises_ENOENT(self):
exc = IOError()
exc.errno = errno.ENOENT
with mock_open(side_effect=exc):
p = Pidfile('/var/pid')
self.assertIsNone(p.read_pid())
def test_read_pid_raises_IOError(self):
exc = IOError()
exc.errno = errno.EAGAIN
with mock_open(side_effect=exc):
p = Pidfile('/var/pid')
with self.assertRaises(IOError):
p.read_pid()
def test_read_pid_bogus_pidfile(self):
with mock_open() as s:
s.write('eighteensixteen\n')
s.seek(0)
p = Pidfile('/var/pid')
with self.assertRaises(ValueError):
p.read_pid()
@patch('os.unlink')
def test_remove(self, unlink):
unlink.return_value = True
p = Pidfile('/var/pid')
p.remove()
unlink.assert_called_with(p.path)
@patch('os.unlink')
def test_remove_ENOENT(self, unlink):
exc = OSError()
exc.errno = errno.ENOENT
unlink.side_effect = exc
p = Pidfile('/var/pid')
p.remove()
unlink.assert_called_with(p.path)
@patch('os.unlink')
def test_remove_EACCES(self, unlink):
exc = OSError()
exc.errno = errno.EACCES
unlink.side_effect = exc
p = Pidfile('/var/pid')
p.remove()
unlink.assert_called_with(p.path)
@patch('os.unlink')
def test_remove_OSError(self, unlink):
exc = OSError()
exc.errno = errno.EAGAIN
unlink.side_effect = exc
p = Pidfile('/var/pid')
with self.assertRaises(OSError):
p.remove()
unlink.assert_called_with(p.path)
@patch('os.kill')
def test_remove_if_stale_process_alive(self, kill):
p = Pidfile('/var/pid')
p.read_pid = Mock()
p.read_pid.return_value = 1816
kill.return_value = 0
self.assertFalse(p.remove_if_stale())
kill.assert_called_with(1816, 0)
p.read_pid.assert_called_with()
kill.side_effect = OSError()
kill.side_effect.errno = errno.ENOENT
self.assertFalse(p.remove_if_stale())
@patch('os.kill')
def test_remove_if_stale_process_dead(self, kill):
with override_stdouts():
p = Pidfile('/var/pid')
p.read_pid = Mock()
p.read_pid.return_value = 1816
p.remove = Mock()
exc = OSError()
exc.errno = errno.ESRCH
kill.side_effect = exc
self.assertTrue(p.remove_if_stale())
kill.assert_called_with(1816, 0)
p.remove.assert_called_with()
def test_remove_if_stale_broken_pid(self):
with override_stdouts():
p = Pidfile('/var/pid')
p.read_pid = Mock()
p.read_pid.side_effect = ValueError()
p.remove = Mock()
self.assertTrue(p.remove_if_stale())
p.remove.assert_called_with()
def test_remove_if_stale_no_pidfile(self):
p = Pidfile('/var/pid')
p.read_pid = Mock()
p.read_pid.return_value = None
p.remove = Mock()
self.assertTrue(p.remove_if_stale())
p.remove.assert_called_with()
@patch('os.fsync')
@patch('os.getpid')
@patch('os.open')
@patch('os.fdopen')
@patch(open_fqdn)
def test_write_pid(self, open_, fdopen, osopen, getpid, fsync):
getpid.return_value = 1816
osopen.return_value = 13
w = fdopen.return_value = WhateverIO()
w.close = Mock()
r = open_.return_value = WhateverIO()
r.write('1816\n')
r.seek(0)
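        # write_pid() re-reads the pidfile after writing to verify its
        # content; the mocked reread above yields the matching '1816\n', so
        # no LockFailed is raised (contrast with test_write_reread_fails).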
p = Pidfile('/var/pid')
p.write_pid()
w.seek(0)
self.assertEqual(w.readline(), '1816\n')
self.assertTrue(w.close.called)
getpid.assert_called_with()
osopen.assert_called_with(p.path, platforms.PIDFILE_FLAGS,
platforms.PIDFILE_MODE)
fdopen.assert_called_with(13, 'w')
fsync.assert_called_with(13)
open_.assert_called_with(p.path)
@patch('os.fsync')
@patch('os.getpid')
@patch('os.open')
@patch('os.fdopen')
@patch(open_fqdn)
def test_write_reread_fails(self, open_, fdopen,
osopen, getpid, fsync):
getpid.return_value = 1816
osopen.return_value = 13
w = fdopen.return_value = WhateverIO()
w.close = Mock()
r = open_.return_value = WhateverIO()
r.write('11816\n')
r.seek(0)
p = Pidfile('/var/pid')
with self.assertRaises(LockFailed):
p.write_pid()
class test_setgroups(Case):
@patch('os.setgroups', create=True)
def test_setgroups_hack_ValueError(self, setgroups):
def on_setgroups(groups):
if len(groups) <= 200:
setgroups.return_value = True
return
raise ValueError()
setgroups.side_effect = on_setgroups
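        # _setgroups_hack retries with a progressively shorter group list on
        # ValueError (or EINVAL OSError); on_setgroups only succeeds once the
        # list has been trimmed to 200 entries or fewer.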
_setgroups_hack(list(range(400)))
setgroups.side_effect = ValueError()
with self.assertRaises(ValueError):
_setgroups_hack(list(range(400)))
@patch('os.setgroups', create=True)
def test_setgroups_hack_OSError(self, setgroups):
exc = OSError()
exc.errno = errno.EINVAL
def on_setgroups(groups):
if len(groups) <= 200:
setgroups.return_value = True
return
raise exc
setgroups.side_effect = on_setgroups
_setgroups_hack(list(range(400)))
setgroups.side_effect = exc
with self.assertRaises(OSError):
_setgroups_hack(list(range(400)))
exc2 = OSError()
        exc2.errno = errno.ESRCH
setgroups.side_effect = exc2
with self.assertRaises(OSError):
_setgroups_hack(list(range(400)))
@patch('os.sysconf')
@patch('celery.platforms._setgroups_hack')
def test_setgroups(self, hack, sysconf):
sysconf.return_value = 100
setgroups(list(range(400)))
hack.assert_called_with(list(range(100)))
@patch('os.sysconf')
@patch('celery.platforms._setgroups_hack')
def test_setgroups_sysconf_raises(self, hack, sysconf):
sysconf.side_effect = ValueError()
setgroups(list(range(400)))
hack.assert_called_with(list(range(400)))
@patch('os.getgroups')
@patch('os.sysconf')
@patch('celery.platforms._setgroups_hack')
def test_setgroups_raises_ESRCH(self, hack, sysconf, getgroups):
sysconf.side_effect = ValueError()
esrch = OSError()
esrch.errno = errno.ESRCH
hack.side_effect = esrch
with self.assertRaises(OSError):
setgroups(list(range(400)))
@patch('os.getgroups')
@patch('os.sysconf')
@patch('celery.platforms._setgroups_hack')
def test_setgroups_raises_EPERM(self, hack, sysconf, getgroups):
sysconf.side_effect = ValueError()
eperm = OSError()
eperm.errno = errno.EPERM
hack.side_effect = eperm
getgroups.return_value = list(range(400))
setgroups(list(range(400)))
getgroups.assert_called_with()
getgroups.return_value = [1000]
with self.assertRaises(OSError):
setgroups(list(range(400)))
getgroups.assert_called_with()<|fim▁end|>
| |
<|file_name|>cpqScsiPhyDrv.py<|end_file_name|><|fim▁begin|>################################################################################
#
# This program is part of the HPMon Zenpack for Zenoss.
# Copyright (C) 2008, 2009, 2010, 2011 Egor Puzanov.
#
# This program can be used under the GNU General Public License version 2
# You can find full information here: http://www.zenoss.com/oss
#
################################################################################
__doc__="""cpqScsiPhyDrv
cpqScsiPhyDrv is an abstraction of a HP SCSI Hard Disk.
$Id: cpqScsiPhyDrv.py,v 1.2 2011/01/04 23:27:26 egor Exp $"""
__version__ = "$Revision: 1.2 $"[11:-2]
from HPHardDisk import HPHardDisk
from HPComponent import *
class cpqScsiPhyDrv(HPHardDisk):
"""cpqScsiPhyDrv object
"""
    statusmap = {1: (DOT_GREY, SEV_WARNING, 'other'),
2: (DOT_GREEN, SEV_CLEAN, 'Ok'),
3: (DOT_RED, SEV_CRITICAL, 'Failed'),
4: (DOT_YELLOW, SEV_WARNING, 'Not Configured'),
5: (DOT_ORANGE, SEV_ERROR, 'Bad Cable'),
6: (DOT_RED, SEV_CRITICAL, 'Missing was Ok'),
7: (DOT_RED, SEV_CRITICAL, 'Missing was Failed'),
8: (DOT_ORANGE, SEV_ERROR, 'Predictive Failure'),
9: (DOT_RED, SEV_CRITICAL, 'Missing was Predictive Failure'),
10:(DOT_RED, SEV_CRITICAL, 'Offline'),
11:(DOT_RED, SEV_CRITICAL, 'Missing was Offline'),
12:(DOT_RED, SEV_CRITICAL, 'Hard Error'),
}
<|fim▁hole|>InitializeClass(cpqScsiPhyDrv)<|fim▁end|>
| |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import os
import sys
import warnings<|fim▁hole|>from setuptools import setup
version_contents = {}
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, "shippo", "version.py"), encoding="utf-8") as f:
exec(f.read(), version_contents)
setup(
name='shippo',
version=version_contents['VERSION'],
description='Shipping API Python library (USPS, FedEx, UPS and more)',
author='Shippo',
author_email='[email protected]',
url='https://goshippo.com/',
packages=['shippo', 'shippo.test', 'shippo.test.integration'],
package_data={'shippo': ['../VERSION']},
install_requires=[
'requests >= 2.21.0, <= 2.27.1',
'simplejson >= 3.16.0, <= 3.17.2',
],
test_suite='shippo.test.all',
tests_require=['unittest2', 'mock', 'vcrpy'],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Software Development :: Libraries :: Python Modules",
]
)<|fim▁end|>
| |
<|file_name|>continent_manager.cpp<|end_file_name|><|fim▁begin|>// Ryzom - MMORPG Framework <http://dev.ryzom.com/projects/ryzom/>
// Copyright (C) 2010 Winch Gate Property Limited
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
#include "stdpch.h"
/////////////
// INCLUDE //
/////////////
// misc
#include "nel/misc/path.h"
#include "nel/misc/vectord.h"
#include "nel/misc/i18n.h"
#include "nel/misc/progress_callback.h"
// 3D Interface.
#include "nel/3d/u_landscape.h"
// Georges
#include "nel/georges/u_form.h"
#include "nel/georges/u_form_elm.h"
#include "nel/georges/u_form_loader.h"
// Client
#include "continent_manager.h"
#include "client_cfg.h"
#include "sheet_manager.h"
#include "sound_manager.h"
#include "entities.h" // \todo Hld : a enlever lorsque unselect aura son bool bien pris en compte
#include "init_main_loop.h"
#include "weather.h"
#include "weather_manager_client.h"
#include "interface_v3/interface_manager.h"
#include "interface_v3/group_map.h"
//
#include "input.h"
#include "continent_manager_build.h"
///////////
// USING //
///////////
using namespace NLPACS;
using namespace NLMISC;
using namespace NL3D;
using namespace std;
using namespace NLGEORGES;
////////////
// EXTERN //
////////////
extern ULandscape *Landscape;
extern UMoveContainer *PACS;
extern UGlobalRetriever *GR;
extern URetrieverBank *RB;
extern class CIGCallback *IGCallbacks;
extern NLLIGO::CLigoConfig LigoConfig;
UMoveContainer *PACSHibernated = NULL;
UGlobalRetriever *GRHibernated = NULL;
URetrieverBank *RBHibernated = NULL;
CIGCallback *IGCallbacksHibernated = NULL;
////////////
// GLOBAL //
////////////
// Hierarchical timer
H_AUTO_DECL ( RZ_Client_Continent_Mngr_Update_Streamable )
/////////////
// METHODS //
/////////////
//-----------------------------------------------
// CContinentManager :
// Constructor.
//-----------------------------------------------
CContinentManager::CContinentManager()
{
_Current = 0;
_Hibernated = NULL;
}// CContinentManager //
void CContinentManager::reset()
{
// stop the background sound
if (SoundMngr)
SoundMngr->stopBackgroundSound();
// Unselect continent
if (_Current)
_Current->unselect();
// Shared data must be NULL now
_Current = NULL;
nlassert (GR == NULL);
nlassert (RB == NULL);
nlassert (PACS == NULL);
nlassert (IGCallbacks == NULL);
// Swap the hibernated data
std::swap(GR, GRHibernated);
std::swap(RB, RBHibernated);
std::swap(PACS, PACSHibernated);
std::swap(_Current, _Hibernated);
std::swap(IGCallbacks, IGCallbacksHibernated);
// Unselect continent
if (_Current)
_Current->unselect();
// remove villages
removeVillages();
    // NB: landscape zones are deleted in UScene::deleteLandscape()
// clear continents DB
for(TContinents::iterator it = _Continents.begin(); it != _Continents.end(); ++it)
{
delete it->second;
}
_Continents.clear();
_Current = NULL;
_Hibernated = NULL;
}
//-----------------------------------------------
// load :
// Load all continent.
//-----------------------------------------------
/*
// Oldies now all these data are stored in the ryzom.world
const uint32 NBCONTINENT = 8;
SContInit vAllContinents[NBCONTINENT] = {
{ "fyros", "fyros", 15767, 20385, -27098, -23769 },
{ "tryker", "tryker", 15285, 18638, -34485, -30641 },
{ "matis", "lesfalaises", 235, 6316, -7920, -256 },
{ "zorai", "lepaysmalade", 6805, 12225, -5680, -1235 },
{ "bagne", "lebagne", 434, 1632, -11230, -9715 },
{ "route", "laroutedesombres", 5415, 7400, -17000, -9575 },
{ "sources", "sources", 2520, 3875, -11400, -9720 },
{ "terres", "lesterres", 100, 3075, -15900, -13000 }
};
*/
// Read the ryzom.world which gives the names and bounding boxes
//-----------------------------------------------
void CContinentManager::preloadSheets()
{
reset();
CEntitySheet *sheet = SheetMngr.get(CSheetId("ryzom.world"));
if (!sheet || sheet->type() != CEntitySheet::WORLD)
{
nlerror("World sheet not found or bad type");
}
uint32 i;
CWorldSheet *ws = (CWorldSheet *) sheet;
// Copy datas from the sheet
for (i = 0; i < ws->ContLocs.size(); ++i)
{
const SContLoc &clTmp = ws->ContLocs[i];
std::string continentSheetName = NLMISC::strlwr(clTmp.ContinentName);
if (continentSheetName.find(".continent") == std::string::npos)
{
continentSheetName += ".continent";
}
// Get the continent form
CSheetId continentId(continentSheetName);
sheet = SheetMngr.get(continentId);
if (sheet)
{
if (sheet->type() == CEntitySheet::CONTINENT)
{
CContinent *pCont = new CContinent;
pCont->SheetName = continentSheetName;
_Continents.insert(make_pair(clTmp.SelectionName, pCont));
}
else
{
nlwarning("Bad type for continent form %s.", continentSheetName.c_str());
}
}
else
{
nlwarning("cant find continent sheet : %s.", continentSheetName.c_str());
}
}
}
//-----------------------------------------------
void CContinentManager::load ()
{
// Continents are preloaded so setup them
TContinents::iterator it = _Continents.begin();
while (it != _Continents.end())
{
it->second->setup();
it++;
}
loadContinentLandMarks();
// \todo GUIGUI : Do it better when there will be "ecosystem"/Wind/etc.
// Initialize the Landscape Vegetable.
if(ClientCfg.MicroVeget)
{
if (Landscape)
{
// if configured, enable the vegetable and load the texture.
Landscape->enableVegetable(true);
// Default setup. TODO later by gameDev.
Landscape->setVegetableWind(CVector(0.5, 0.5, 0).normed(), 0.5, 1, 0);
// Default setup. should work well for night/day transition in 30 minutes.
// Because all vegetables will be updated every 20 seconds => 90 steps.
Landscape->setVegetableUpdateLightingFrequency(1/20.f);
// Density (percentage to ratio)
Landscape->setVegetableDensity(ClientCfg.MicroVegetDensity/100.f);
}
}
}// load //
//-----------------------------------------------
// select :
// Select continent from a name.
// \param const string &name : name of the continent to select.
//-----------------------------------------------
void CContinentManager::select(const string &name, const CVectorD &pos, NLMISC::IProgressCallback &progress)
{
CNiceInputAuto niceInputs;
// Find the continent.
TContinents::iterator itCont = _Continents.find(name);
if(itCont == _Continents.end())
{
nlwarning("CContinentManager::select: Continent '%s' is Unknown. Cannot Select it.", name.c_str());
return;
}
// Dirt weather
{
H_AUTO(InitRZWorldSetLoadingContinent)
// Set the loading continent
setLoadingContinent (itCont->second);
}
// ** Update the weather manager for loading information
// Update the weather manager
{
H_AUTO(InitRZWorldUpdateWeatherManager )
updateWeatherManager (itCont->second);
}
// startup season can be changed now the player is safe
StartupSeason = RT.getRyzomSeason();
    // Modify this season according to the continent reached. eg: the newbieland continent forces the winter to be autumn
    if(StartupSeason < EGSPD::CSeason::Invalid)
        StartupSeason = (*itCont).second->ForceDisplayedSeason[StartupSeason];
    // Compute the current season according to StartupSeason, the server-driven season, the R2 Editor season or a manual debug season
CurrSeason = computeCurrSeason();
// Is it the same continent than the old one.
{
H_AUTO(InitRZWorldSelectCont)
if(((*itCont).second != _Current) || ((*itCont).second->Season != CurrSeason))
{
// New continent is not an indoor ?
if (!(*itCont).second->Indoor && _Current)
{
// Unselect the current continent
_Current->unselect();
// Shared data must be NULL now
_Current = NULL;
nlassert (GR == NULL);
nlassert (RB == NULL);
nlassert (PACS == NULL);
nlassert (IGCallbacks == NULL);
}
else
{
// Remove the primitive for all entitites (new PACS coming soon and need new primitives).
EntitiesMngr.removeCollision();
}
// Swap the hibernated data
std::swap(GR, GRHibernated);
std::swap(RB, RBHibernated);
std::swap(PACS, PACSHibernated);
std::swap(_Current, _Hibernated);
std::swap(IGCallbacks, IGCallbacksHibernated);
// Is it the same continent than the old one.
if(((*itCont).second != _Current) || ((*itCont).second->Season != CurrSeason))
{
// Unselect the old continent.
if(_Current)
_Current->unselect();
_Current = (*itCont).second;
// Teleport in a new continent, complete load
_Current->select(pos, progress, true, false, CurrSeason);
// New continent is not an indoor ?
if (!_Current->Indoor)
{
// Stop the background sound
if (SoundMngr)
SoundMngr->stopBackgroundSound();
}
}
else
{
// Teleport in the hibernated continent
_Current->select(pos, progress, false, true, CurrSeason);
}
}
else
{
// Teleport in the same continent
_Current->select(pos, progress, false, false, CurrSeason);
}
}
{
H_AUTO(InitRZWorldSound)
// New continent is not an indoor ?
if (!_Current->Indoor)
{
if(SoundMngr)
SoundMngr->loadContinent(name, pos);
}
}
// Map handling
{
H_AUTO(InitRZWorldMapHandling)
CWorldSheet *pWS = dynamic_cast<CWorldSheet*>(SheetMngr.get(CSheetId("ryzom.world")));
for (uint32 i = 0; i < pWS->Maps.size(); ++i)
if (pWS->Maps[i].ContinentName == name)
{
CInterfaceManager *pIM = CInterfaceManager::getInstance();
CGroupMap *pMap = dynamic_cast<CGroupMap*>(CWidgetManager::getInstance()->getElementFromId("ui:interface:map:content:map_content:actual_map"));
if (pMap != NULL)
pMap->setMap(pWS->Maps[i].Name);
pMap = dynamic_cast<CGroupMap*>(CWidgetManager::getInstance()->getElementFromId("ui:interface:respawn_map:content:map_content:actual_map"));
if (pMap != NULL)
pMap->setMap(pWS->Maps[i].Name);
break;
}
}
}// select //
//-----------------------------------------------
// select :
// Select closest continent from a vector.
//-----------------------------------------------
void CContinentManager::select(const CVectorD &pos, NLMISC::IProgressCallback &progress)
{
CVector2f fPos;
fPos.x = (float)pos.x;
fPos.y = (float)pos.y;
TContinents::iterator it = _Continents.begin();
while (it != _Continents.end())
{
CContinent *pCont = it->second;
nlinfo("Looking into %s", pCont->SheetName.c_str());
        if (pCont->Zone.VPoints.size() > 0) // Patch because some continents have not been done yet
{
if (pCont->Zone.contains(fPos))
{
// load the continent selected.
select (it->first, pos, progress);
return;
}
else
{
/*
nlwarning("**********************************************");
nlwarning("Start position (%s) not found in continent %s", NLMISC::toString(pos.asVector()).c_str(), it->first.c_str());
for(uint k = 0; k < pCont->Zone.VPoints.size(); ++k)
{
nlwarning("zone point %d = %s", (int)k, NLMISC::toString(pCont->Zone.VPoints[k]).c_str());
}
*/
}
}
it++;
}
nlwarning("cannot select any continent at pos (%f, %f)", fPos.x, fPos.y);
/* *****************
PLEASE DO *****NOT***** PUT AN ASSERT HERE
While this is a bug, it is a data bug. Crashing is not a good solution in this case.
If you put an assert, this scenario can happen for example:
- A level designer puts a bad Teleporter which teleports to an invalid position
- The player teleports, but their position is invalid => crash
- the next time they log in, it crashes at start, AND HENCE THEY CANNOT ASK A GM TO TELEPORT THEM AWAY
Other scenarios can happen, like a data change or continent change => the player is no longer at a valid position etc...
HENCE MUST NOT CRASH, but display a "Lost In Space" screen leaving the player the possibility to ask for HELP.
*****************
*/
//nlassertex(0, ("can't select any continent"));
}// select //
bool CContinentManager::isLoadingforced(const NLMISC::CVector &playerPos) const
{
if(_Current == 0)
return false;
return _Current->isLoadingforced(playerPos);
}
void CContinentManager::updateStreamable(const NLMISC::CVector &playerPos)
{
H_AUTO_USE ( RZ_Client_Continent_Mngr_Update_Streamable )
if(_Current)
_Current->updateStreamable(playerPos);
}
void CContinentManager::forceUpdateStreamable(const NLMISC::CVector &playerPos, NLMISC::IProgressCallback &progress)
{
H_AUTO_USE ( RZ_Client_Continent_Mngr_Update_Streamable )
if (ClientCfg.VillagesEnabled)
{
if(_Current)
_Current->forceUpdateStreamable(playerPos, progress);
}
}
void CContinentManager::removeVillages()
{
for(TContinents::iterator it = _Continents.begin(); it != _Continents.end(); ++it)
{
if (it->second)
it->second->removeVillages();
}
}
void CContinentManager::getFogState(TFogType fogType, float dayNight, float duskRatio, CLightCycleManager::TLightState lightState, const NLMISC::CVectorD &pos, CFogState &result)
{
if(_Current)
_Current->getFogState(fogType, dayNight, duskRatio, lightState, pos, result);
}
CContinent *CContinentManager::get(const std::string &contName)
{
TContinents::iterator it = _Continents.find(contName);
if (it != _Continents.end())
return it->second;
return NULL;
}
void CContinentManager::serialUserLandMarks(NLMISC::IStream &f)<|fim▁hole|>{
f.serialVersion(1);
if (!f.isReading())
{
uint32 numCont = (uint32)_Continents.size();
f.serial(numCont);
for(TContinents::iterator it = _Continents.begin(); it != _Continents.end(); ++it)
{
std::string name = it->first;
f.serial(name);
if (it->second)
{
f.serialCont(it->second->UserLandMarks);
}
else
{
std::vector<CUserLandMark> dummy;
f.serialCont(dummy);
}
}
}
else
{
uint32 numCont;
f.serial(numCont);
for(uint k = 0; k < numCont; ++k)
{
std::string contName;
f.serial(contName);
TContinents::iterator it = _Continents.find(contName);
if (it != _Continents.end() && it->second)
{
f.serialCont(it->second->UserLandMarks);
}
else
{
std::vector<CUserLandMark> dummy;
f.serialCont(dummy);
}
}
// The number of stored landmarks is not checked at this time, but if we receive a
// lower value in the server database, we will cut down using checkNumberOfUserLandmarks()
}
}
//-----------------------------------------------
// checkNumberOfLandmarks
//-----------------------------------------------
void CContinentManager::checkNumberOfUserLandmarks( uint maxNumber )
{
for ( TContinents::iterator it=_Continents.begin(); it!=_Continents.end(); ++it )
{
CContinent *cont = (*it).second;
if ( cont->UserLandMarks.size() > maxNumber )
{
// Just cut down the last landmarks (in case of hacked file)
if ( cont == _Current )
{
CGroupMap *pMap = dynamic_cast<CGroupMap*>(CWidgetManager::getInstance()->getElementFromId("ui:interface:map:content:map_content:actual_map"));
if ( pMap )
pMap->removeExceedingUserLandMarks( maxNumber );
}
else
{
cont->UserLandMarks.resize( maxNumber );
}
}
}
}
//-----------------------------------------------
// serialFOWMaps
//-----------------------------------------------
void CContinentManager::serialFOWMaps()
{
for(TContinents::iterator it = _Continents.begin(); it != _Continents.end(); ++it)
{
CContinent *pCont = it->second;
//nlinfo("Saving fow continent %s of name %s", it->first.c_str(), pCont->Name.c_str());
it->second->FoW.save(pCont->Name);
}
}
const std::string &CContinentManager::getCurrentContinentSelectName()
{
TContinents::iterator it;
for (it = _Continents.begin(); it != _Continents.end(); ++it)
{
if (it->second == _Current)
return it->first;
}
static const string emptyString;
return emptyString;
}
void CContinentManager::reloadWeather()
{
WeatherManager.release();
// reload the sheet
std::vector<std::string> extensions;
extensions.push_back("weather_setup");
extensions.push_back("weather_function_params");
extensions.push_back("continent");
extensions.push_back("light_cycle");
NLMISC::IProgressCallback pc;
SheetMngr.loadAllSheet(pc, true, false, true, true, &extensions);
WeatherManager.init();
// Load description of light cycles for each season.
loadWorldLightCycle();
// Load global weather function parameters
loadWeatherFunctionParams();
for(TContinents::iterator it = _Continents.begin(); it != _Continents.end(); ++it)
{
NLMISC::CSheetId contSI(it->second->SheetName);
CContinentSheet *cs = dynamic_cast<CContinentSheet *>(SheetMngr.get(contSI));
if (cs)
{
// update continent weather part
for(uint l = 0; l < EGSPD::CSeason::Invalid; ++l)
{
it->second->WeatherFunction[l].buildFromSheet(cs->WeatherFunction[l], WeatherManager);
}
// update misc params
it->second->FogStart = cs->Continent.FogStart;
it->second->FogEnd = cs->Continent.FogEnd;
it->second->RootFogStart = cs->Continent.RootFogStart;
it->second->RootFogEnd = cs->Continent.RootFogEnd;
it->second->LandscapeLightDay = cs->Continent.LandscapeLightDay;
it->second->LandscapeLightDusk = cs->Continent.LandscapeLightDusk;
it->second->LandscapeLightNight = cs->Continent.LandscapeLightNight;
it->second->EntityLightDay = cs->Continent.EntityLightDay;
it->second->EntityLightDusk = cs->Continent.EntityLightDusk;
it->second->EntityLightNight = cs->Continent.EntityLightNight;
it->second->RootLightDay = cs->Continent.RootLightDay;
it->second->RootLightDusk = cs->Continent.RootLightDusk;
it->second->RootLightNight = cs->Continent.RootLightNight;
}
}
}
void CContinentManager::reloadSky()
{
// reload new style sky
std::vector<std::string> exts;
CSheetManager sheetManager;
exts.push_back("sky");
exts.push_back("continent");
NLMISC::IProgressCallback progress;
sheetManager.loadAllSheet(progress, true, false, false, true, &exts);
//
const CSheetManager::TEntitySheetMap &sm = SheetMngr.getSheets();
for(CSheetManager::TEntitySheetMap::const_iterator it = sm.begin(); it != sm.end(); ++it)
{
if (it->second.EntitySheet)
{
CEntitySheet::TType type = it->second.EntitySheet->Type;
if (type == CEntitySheet::CONTINENT)
{
// find matching sheet in new sheetManager
const CEntitySheet *other = sheetManager.get(it->first);
if (other)
{
const CContinentParameters &cp = static_cast<const CContinentSheet *>(other)->Continent;
// find matching continent in manager
for(TContinents::iterator it = _Continents.begin(); it != _Continents.end(); ++it)
{
if (it->second && nlstricmp(it->second->Name, cp.Name) == 0)
{
std::copy(cp.SkySheet, cp.SkySheet + EGSPD::CSeason::Invalid, it->second->SkySheet);
break;
}
}
}
}
else if(type == CEntitySheet::SKY)
{
// find matching sheet in new sheetManager
const CEntitySheet *other = sheetManager.get(it->first);
if (other)
{
// replace data in place
((CSkySheet &) *it->second.EntitySheet) = ((const CSkySheet &) *other);
}
}
}
}
if (_Current)
{
_Current->releaseSky();
_Current->initSky();
}
}
// ***************************************************************************
void CContinentManager::loadContinentLandMarks()
{
std::string dataPath = "../../client/data";
if (ClientCfg.UpdatePackedSheet == false)
{
readLMConts(dataPath);
}
else
{
buildLMConts("ryzom.world", "../../common/data_leveldesign/primitives", dataPath);
readLMConts(dataPath);
}
}
// ***************************************************************************
void CContinentManager::readLMConts(const std::string &dataPath)
{
CIFile f;
string sPackedFileName = CPath::lookup(LM_PACKED_FILE, false);
if (sPackedFileName.empty())
sPackedFileName = CPath::standardizePath(dataPath) + LM_PACKED_FILE;
if (f.open(sPackedFileName))
{
uint32 nNbCont = 0;
        sint ver = f.serialVersion(1);
f.serial(nNbCont);
for (uint32 i = 0; i < nNbCont; ++i)
{
string sContName;
f.serial(sContName);
TContinents::iterator itCont = _Continents.find(sContName);
if(itCont != _Continents.end())
{
CContinent *pCont = itCont->second;
f.serial(pCont->Zone);
f.serial(pCont->ZoneCenter);
f.serialCont(pCont->ContLandMarks);
}
else
{
CContinent dummy;
f.serial(dummy.Zone);
f.serial(dummy.ZoneCenter);
f.serialCont(dummy.ContLandMarks);
nlwarning("continent not found : %s", sContName.c_str());
}
}
f.serialCont(WorldMap);
if (ver >= 1)
f.serialCont(aliasToRegionMap);
}
else
{
nlwarning("cannot load " LM_PACKED_FILE);
}
}
// ***************************************************************************
string CContinentManager::getRegionNameByAlias(uint32 i)
{
return aliasToRegionMap[i];
}<|fim▁end|>
| |
<|file_name|>label_algo_test.cpp<|end_file_name|><|fim▁begin|>#include <boost/version.hpp>
#include <boost/detail/lightweight_test.hpp>
#include <iostream>
#include <mapnik/geometry.hpp>
#include <mapnik/geom_util.hpp>
int main( int, char*[] )
{
// reused these for simplicity
double x,y;
// single point
mapnik::geometry_type pt(mapnik::Point);
pt.move_to(10,10);
BOOST_TEST( mapnik::label::centroid(pt, x, y) );
BOOST_TEST( x == 10 );
BOOST_TEST( y == 10 );
// two points
pt.move_to(20,20);
BOOST_TEST( mapnik::label::centroid(pt, x, y) );
BOOST_TEST_EQ( x, 15 );
BOOST_TEST_EQ( y, 15 );
// line with two verticies
mapnik::geometry_type line(mapnik::LineString);
line.move_to(0,0);
line.move_to(50,50);
BOOST_TEST( mapnik::label::centroid(line, x, y) );
BOOST_TEST( x == 25 );
BOOST_TEST( y == 25 );
// TODO - centroid and interior should be equal but they appear not to be (check largest)
// MULTIPOLYGON(((-52 40,-60 32,-68 40,-60 48,-52 40)),((-60 50,-80 30,-100 49.9999999999999,-80.0000000000001 70,-60 50)),((-52 60,-60 52,-68 60,-60 68,-52 60)))
if (!::boost::detail::test_errors()) {
std::clog << "C++ label algorithms: \x1b[1;32m✓ \x1b[0m\n";<|fim▁hole|>#if BOOST_VERSION >= 104600
::boost::detail::report_errors_remind().called_report_errors_function = true;
#endif
} else {
return ::boost::report_errors();
}
}<|fim▁end|>
| |
<|file_name|>tutorial-10.rs<|end_file_name|><|fim▁begin|>#[macro_use]
extern crate glium;
#[path = "../book/tuto-07-teapot.rs"]
mod teapot;
fn main() {
#[allow(unused_imports)]
use glium::{glutin, Surface};
let event_loop = glutin::event_loop::EventLoop::new();
let wb = glutin::window::WindowBuilder::new();
let cb = glutin::ContextBuilder::new().with_depth_buffer(24);
let display = glium::Display::new(wb, cb, &event_loop).unwrap();
let positions = glium::VertexBuffer::new(&display, &teapot::VERTICES).unwrap();
let normals = glium::VertexBuffer::new(&display, &teapot::NORMALS).unwrap();
let indices = glium::IndexBuffer::new(&display, glium::index::PrimitiveType::TrianglesList,
&teapot::INDICES).unwrap();
let vertex_shader_src = r#"
#version 150
in vec3 position;
in vec3 normal;
out vec3 v_normal;
uniform mat4 perspective;
uniform mat4 matrix;
void main() {
v_normal = transpose(inverse(mat3(matrix))) * normal;
gl_Position = perspective * matrix * vec4(position, 1.0);
}
"#;
let fragment_shader_src = r#"
#version 150
in vec3 v_normal;
out vec4 color;
uniform vec3 u_light;
void main() {
float brightness = dot(normalize(v_normal), normalize(u_light));
vec3 dark_color = vec3(0.6, 0.0, 0.0);
vec3 regular_color = vec3(1.0, 0.0, 0.0);
color = vec4(mix(dark_color, regular_color, brightness), 1.0);
}
"#;
let program = glium::Program::from_source(&display, vertex_shader_src, fragment_shader_src,
None).unwrap();
event_loop.run(move |event, _, control_flow| {
let next_frame_time = std::time::Instant::now() +
std::time::Duration::from_nanos(16_666_667);
*control_flow = glutin::event_loop::ControlFlow::WaitUntil(next_frame_time);
match event {
glutin::event::Event::WindowEvent { event, .. } => match event {
glutin::event::WindowEvent::CloseRequested => {<|fim▁hole|> *control_flow = glutin::event_loop::ControlFlow::Exit;
return;
},
_ => return,
},
glutin::event::Event::NewEvents(cause) => match cause {
glutin::event::StartCause::ResumeTimeReached { .. } => (),
glutin::event::StartCause::Init => (),
_ => return,
},
_ => return,
}
let mut target = display.draw();
target.clear_color_and_depth((0.0, 0.0, 1.0, 1.0), 1.0);
let matrix = [
[0.01, 0.0, 0.0, 0.0],
[0.0, 0.01, 0.0, 0.0],
[0.0, 0.0, 0.01, 0.0],
[0.0, 0.0, 2.0, 1.0f32]
];
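        // The perspective matrix below is the standard OpenGL-style
        // projection, laid out column-major as GLSL expects:
        //   f = 1 / tan(fov / 2), roughly 1.73 for fov = pi/3,
        // scales x and y, while the z entries map [znear, zfar] into the
        // depth range and copy eye-space z into w.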
let perspective = {
let (width, height) = target.get_dimensions();
let aspect_ratio = height as f32 / width as f32;
let fov: f32 = 3.141592 / 3.0;
let zfar = 1024.0;
let znear = 0.1;
let f = 1.0 / (fov / 2.0).tan();
[
[f * aspect_ratio , 0.0, 0.0 , 0.0],
[ 0.0 , f , 0.0 , 0.0],
[ 0.0 , 0.0, (zfar+znear)/(zfar-znear) , 1.0],
[ 0.0 , 0.0, -(2.0*zfar*znear)/(zfar-znear), 0.0],
]
};
let light = [-1.0, 0.4, 0.9f32];
let params = glium::DrawParameters {
depth: glium::Depth {
test: glium::draw_parameters::DepthTest::IfLess,
write: true,
.. Default::default()
},
.. Default::default()
};
target.draw((&positions, &normals), &indices, &program,
&uniform! { matrix: matrix, perspective: perspective, u_light: light },
¶ms).unwrap();
target.finish().unwrap();
});
}<|fim▁end|>
| |
<|file_name|>app.spec.js<|end_file_name|><|fim▁begin|>describe( 'AppCtrl', function() {
describe( 'isCurrentUrl', function() {
var AppCtrl, $location, $scope;
beforeEach( module( 'app' ) );<|fim▁hole|>
beforeEach( inject( function( $controller, _$location_, $rootScope ) {
$location = _$location_;
$scope = $rootScope.$new();
AppCtrl = $controller( 'AppCtrl', { $location: $location, $scope: $scope });
}));
it( 'should pass a dummy test', inject( function() {
expect( AppCtrl ).toBeTruthy();
}));
});
});<|fim▁end|>
| |
<|file_name|>hba.rs<|end_file_name|><|fim▁begin|>use arch::memory;
use core::mem::size_of;
use core::u32;
use disk::Disk;
use drivers::io::{Io, Mmio};
use system::error::{Error, Result, EIO};
use super::fis::{FIS_TYPE_REG_H2D, FisRegH2D};
const ATA_CMD_READ_DMA_EXT: u8 = 0x25;
const ATA_CMD_WRITE_DMA_EXT: u8 = 0x35;
const ATA_DEV_BUSY: u8 = 0x80;
const ATA_DEV_DRQ: u8 = 0x08;
const HBA_PORT_CMD_CR: u32 = 1 << 15;
const HBA_PORT_CMD_FR: u32 = 1 << 14;
const HBA_PORT_CMD_FRE: u32 = 1 << 4;
const HBA_PORT_CMD_ST: u32 = 1;
const HBA_PORT_IS_TFES: u32 = 1 << 30;
const HBA_SSTS_PRESENT: u32 = 0x3;
const HBA_SIG_ATA: u32 = 0x00000101;
const HBA_SIG_ATAPI: u32 = 0xEB140101;
const HBA_SIG_PM: u32 = 0x96690101;
const HBA_SIG_SEMB: u32 = 0xC33C0101;
#[derive(Debug)]
pub enum HbaPortType {
None,
Unknown(u32),
SATA,
SATAPI,
PM,
SEMB,
}
#[repr(packed)]
pub struct HbaPort {
pub clb: Mmio<u64>, // 0x00, command list base address, 1K-byte aligned
pub fb: Mmio<u64>, // 0x08, FIS base address, 256-byte aligned
pub is: Mmio<u32>, // 0x10, interrupt status
pub ie: Mmio<u32>, // 0x14, interrupt enable
pub cmd: Mmio<u32>, // 0x18, command and status
pub rsv0: Mmio<u32>, // 0x1C, Reserved
pub tfd: Mmio<u32>, // 0x20, task file data
pub sig: Mmio<u32>, // 0x24, signature
pub ssts: Mmio<u32>, // 0x28, SATA status (SCR0:SStatus)
pub sctl: Mmio<u32>, // 0x2C, SATA control (SCR2:SControl)
pub serr: Mmio<u32>, // 0x30, SATA error (SCR1:SError)
pub sact: Mmio<u32>, // 0x34, SATA active (SCR3:SActive)
pub ci: Mmio<u32>, // 0x38, command issue
pub sntf: Mmio<u32>, // 0x3C, SATA notification (SCR4:SNotification)
pub fbs: Mmio<u32>, // 0x40, FIS-based switch control
pub rsv1: [Mmio<u32>; 11], // 0x44 ~ 0x6F, Reserved
pub vendor: [Mmio<u32>; 4], // 0x70 ~ 0x7F, vendor specific
}
impl HbaPort {
pub fn probe(&self) -> HbaPortType {
if self.ssts.readf(HBA_SSTS_PRESENT) {
let sig = self.sig.read();
match sig {
HBA_SIG_ATA => HbaPortType::SATA,
HBA_SIG_ATAPI => HbaPortType::SATAPI,
HBA_SIG_PM => HbaPortType::PM,
HBA_SIG_SEMB => HbaPortType::SEMB,
_ => HbaPortType::Unknown(sig),
}
} else {
HbaPortType::None
}
}
pub fn init(&mut self) {
self.stop();
// debugln!("Port Command List");
let clb = unsafe { memory::alloc_aligned(size_of::<HbaCmdHeader>(), 1024) };
self.clb.write(clb as u64);
// debugln!("Port FIS");
let fb = unsafe { memory::alloc_aligned(256, 256) };
self.fb.write(fb as u64);
for i in 0..32 {
// debugln!("Port Command Table {}", i);
let cmdheader = unsafe { &mut *(clb as *mut HbaCmdHeader).offset(i) };
let ctba = unsafe { memory::alloc_aligned(size_of::<HbaCmdTable>(), 256) };
cmdheader.ctba.write(ctba as u64);
cmdheader.prdtl.write(0);
}
self.start();
}
pub fn start(&mut self) {
// debugln!("Starting port");
while self.cmd.readf(HBA_PORT_CMD_CR) {}
self.cmd.writef(HBA_PORT_CMD_FRE, true);
self.cmd.writef(HBA_PORT_CMD_ST, true);
}
pub fn stop(&mut self) {
// debugln!("Stopping port");
self.cmd.writef(HBA_PORT_CMD_ST, false);
while self.cmd.readf(HBA_PORT_CMD_FR | HBA_PORT_CMD_CR) {}
self.cmd.writef(HBA_PORT_CMD_FRE, false);
}
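    // A command slot is free when its bit is clear in both SACT (SATA
    // active) and CI (command issue), hence the scan over their bitwise OR.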
pub fn slot(&self) -> Option<u32> {
let slots = self.sact.read() | self.ci.read();
for i in 0..32 {
if slots & 1 << i == 0 {
return Some(i);
}
}
None
}
pub fn ata_dma_small(&mut self, block: u64, sectors: usize, mut buf: usize, write: bool) -> Result<usize> {
if buf >= 0x80000000 {
buf -= 0x80000000;
}
// TODO: PRDTL for files larger than 4MB
let entries = 1;
if buf > 0 && sectors > 0 {
self.is.write(u32::MAX);
if let Some(slot) = self.slot() {
// debugln!("Slot {}", slot);
let clb = self.clb.read() as usize;
let cmdheader = unsafe { &mut *(clb as *mut HbaCmdHeader).offset(slot as isize) };
                cmdheader.cfl.write((size_of::<FisRegH2D>() / size_of::<u32>()) as u8);
cmdheader.cfl.writef(1 << 6, write);
cmdheader.prdtl.write(entries);
let ctba = cmdheader.ctba.read() as usize;
unsafe { ::memset(ctba as *mut u8, 0, size_of::<HbaCmdTable>()) };
let cmdtbl = unsafe { &mut *(ctba as *mut HbaCmdTable) };
let prdt_entry = &mut cmdtbl.prdt_entry[0];
prdt_entry.dba.write(buf as u64);
prdt_entry.dbc.write(((sectors * 512) as u32) | 1);
let cmdfis = unsafe { &mut *(cmdtbl.cfis.as_ptr() as *mut FisRegH2D) };
cmdfis.fis_type.write(FIS_TYPE_REG_H2D);
cmdfis.pm.write(1 << 7);
if write {
cmdfis.command.write(ATA_CMD_WRITE_DMA_EXT);
} else {
cmdfis.command.write(ATA_CMD_READ_DMA_EXT);
}
cmdfis.lba0.write(block as u8);
cmdfis.lba1.write((block >> 8) as u8);
cmdfis.lba2.write((block >> 16) as u8);
cmdfis.device.write(1 << 6);
cmdfis.lba3.write((block >> 24) as u8);
cmdfis.lba4.write((block >> 32) as u8);
cmdfis.lba5.write((block >> 40) as u8);
cmdfis.countl.write(sectors as u8);
cmdfis.counth.write((sectors >> 8) as u8);
// debugln!("Busy Wait");
while self.tfd.readf((ATA_DEV_BUSY | ATA_DEV_DRQ) as u32) {}
self.ci.writef(1 << slot, true);
// debugln!("Completion Wait");
while self.ci.readf(1 << slot) {
if self.is.readf(HBA_PORT_IS_TFES) {
return Err(Error::new(EIO));
}
}<|fim▁hole|> return Err(Error::new(EIO));
}
Ok(sectors * 512)
} else {
debugln!("No Command Slots");
Err(Error::new(EIO))
}
} else {
debugln!("Invalid request");
Err(Error::new(EIO))
}
}
pub fn ata_dma(&mut self, block: u64, sectors: usize, buf: usize, write: bool) -> Result<usize> {
// debugln!("AHCI {:X} DMA BLOCK: {:X} SECTORS: {} BUF: {:X} WRITE: {}", (self as *mut HbaPort) as usize, block, sectors, buf, write);
if sectors > 0 {
let contexts = ::env().contexts.lock();
let current = try!(contexts.current());
let physical_address = try!(current.translate(buf, sectors * 512));
let mut sector: usize = 0;
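            // Issue the transfer in chunks of at most 255 sectors; e.g. a
            // 600-sector request becomes chunks of 255 + 255 + 90 sectors at
            // successive offsets into the same physical buffer.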
while sectors - sector >= 255 {
if let Err(err) = self.ata_dma_small(block + sector as u64, 255, physical_address + sector * 512, write) {
return Err(err);
}
sector += 255;
}
if sector < sectors {
if let Err(err) = self.ata_dma_small(block + sector as u64, sectors - sector, physical_address + sector * 512, write) {
return Err(err);
}
}
Ok(sectors * 512)
} else {
debugln!("Invalid request");
Err(Error::new(EIO))
}
}
}
#[repr(packed)]
pub struct HbaMem {
pub cap: Mmio<u32>, // 0x00, Host capability
pub ghc: Mmio<u32>, // 0x04, Global host control
pub is: Mmio<u32>, // 0x08, Interrupt status
pub pi: Mmio<u32>, // 0x0C, Port implemented
pub vs: Mmio<u32>, // 0x10, Version
pub ccc_ctl: Mmio<u32>, // 0x14, Command completion coalescing control
pub ccc_pts: Mmio<u32>, // 0x18, Command completion coalescing ports
pub em_loc: Mmio<u32>, // 0x1C, Enclosure management location
pub em_ctl: Mmio<u32>, // 0x20, Enclosure management control
pub cap2: Mmio<u32>, // 0x24, Host capabilities extended
pub bohc: Mmio<u32>, // 0x28, BIOS/OS handoff control and status
pub rsv: [Mmio<u8>; 116], // 0x2C - 0x9F, Reserved
pub vendor: [Mmio<u8>; 96], // 0xA0 - 0xFF, Vendor specific registers
pub ports: [HbaPort; 32], // 0x100 - 0x10FF, Port control registers
}
#[repr(packed)]
struct HbaPrdtEntry {
dba: Mmio<u64>, // Data base address
rsv0: Mmio<u32>, // Reserved
dbc: Mmio<u32>, // Byte count, 4M max, interrupt = 1
}
#[repr(packed)]
struct HbaCmdTable {
// 0x00
cfis: [Mmio<u8>; 64], // Command FIS
// 0x40
acmd: [Mmio<u8>; 16], // ATAPI command, 12 or 16 bytes
// 0x50
rsv: [Mmio<u8>; 48], // Reserved
// 0x80
prdt_entry: [HbaPrdtEntry; 65536], // Physical region descriptor table entries, 0 ~ 65535
}
#[repr(packed)]
struct HbaCmdHeader {
// DW0
cfl: Mmio<u8>, /* Command FIS length in DWORDS, 2 ~ 16, atapi: 4, write - host to device: 2, prefetchable: 1 */
pm: Mmio<u8>, // Reset - 0x80, bist: 0x40, clear busy on ok: 0x20, port multiplier
prdtl: Mmio<u16>, // Physical region descriptor table length in entries
// DW1
prdbc: Mmio<u32>, // Physical region descriptor byte count transferred
// DW2, 3
ctba: Mmio<u64>, // Command table descriptor base address
// DW4 - 7
rsv1: [Mmio<u32>; 4], // Reserved
}<|fim▁end|>
|
if self.is.readf(HBA_PORT_IS_TFES) {
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
<|fim▁hole|>@author: jmjj (Jari Juopperi, [email protected])
"""
from .main import *<|fim▁end|>
|
"""
Created on Wed Mar 2 10:56:34 2016
|
<|file_name|>p081.py<|end_file_name|><|fim▁begin|># https://projecteuler.net/problem=81
from projecteuler.FileReader import file_to_2D_array_of_ints
# this problem uses a similar solution to problem 18, "Maximum Path Sum 1."
# this problem uses a diamond instead of a pyramid
matrix = file_to_2D_array_of_ints("p081.txt", ",")
y_max = len(matrix) - 1
x_max = len(matrix[0]) - 1
for y in range(y_max, -1, -1):
for x in range(x_max, -1, -1):
if y == y_max and x == x_max:
continue<|fim▁hole|> elif y == y_max:
matrix[y][x] += matrix[y][x + 1]
elif x == x_max:
matrix[y][x] += matrix[y + 1][x]
else:
matrix[y][x] += min(matrix[y][x + 1], matrix[y + 1][x])
print(matrix[0][0])<|fim▁end|>
| |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
<|fim▁hole|>Resources:
https://pythonhosted.org/an_example_pypi_project/sphinx.html
https://github.com/djungelorm/sphinx-csharp
https://sublime-and-sphinx-guide.readthedocs.io/en/latest/code_blocks.html
https://docutils.sourceforge.net/docs/user/rst/quickref.html
"""
import sys
import os
extensions = [
'sphinx_csharp.csharp',
'sphinx_rtd_theme'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Cottle Documentation'
copyright = u'2019, Rémi Caput'
# The short X.Y version.
version = '2.0'
# The full version, including alpha/beta/rc tags.
release = '2.0.0'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'default'
# See: https://sphinx-rtd-theme.readthedocs.io/en/stable/configuring.html
html_theme = 'sphinx_rtd_theme'
html_theme_path = ['_themes']
html_theme_options = {
'style_external_links': True
}
html_logo = '../res/icon.png'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Output file base name for HTML help builder.
htmlhelp_basename = 'CottleDocumentation'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'Cottle.tex', u'Cottle Documentation',
u'Cottle', 'manual'),
]
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'cottle', u'Cottle Documentation',
[u'Cottle'], 1)
]
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Cottle', u'Cottle Documentation',
u'Cottle', 'Cottle', 'Cottle Documentation.',
'Miscellaneous'),
]<|fim▁end|>
|
"""
|
<|file_name|>issue-2445-b.rs<|end_file_name|><|fim▁begin|>// run-pass
#![allow(dead_code)]
#![allow(non_camel_case_types)]
// pretty-expanded FIXME #23616
<|fim▁hole|>}
impl<T> c1<T> {
pub fn f1(&self, _x: isize) {
}
}
fn c1<T>(x: T) -> c1<T> {
c1 {
x: x
}
}
impl<T> c1<T> {
pub fn f2(&self, _x: isize) {
}
}
pub fn main() {
c1::<isize>(3).f1(4);
c1::<isize>(3).f2(4);
}<|fim▁end|>
|
struct c1<T> {
x: T,
|
<|file_name|>display.py<|end_file_name|><|fim▁begin|># (c) 2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ctypes.util
import errno
import fcntl
import getpass
import locale
import logging
import os
import random
import subprocess
import sys
import textwrap
import time
from struct import unpack, pack
from termios import TIOCGWINSZ
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleAssertionError
from ansible.module_utils._text import to_bytes, to_text
from ansible.module_utils.six import text_type
from ansible.utils.color import stringc
from ansible.utils.singleton import Singleton
from ansible.utils.unsafe_proxy import wrap_var
_LIBC = ctypes.cdll.LoadLibrary(ctypes.util.find_library('c'))
# Set argtypes, to avoid segfault if the wrong type is provided,
# restype is assumed to be c_int
_LIBC.wcwidth.argtypes = (ctypes.c_wchar,)
_LIBC.wcswidth.argtypes = (ctypes.c_wchar_p, ctypes.c_int)
# Max for c_int
_MAX_INT = 2 ** (ctypes.sizeof(ctypes.c_int) * 8 - 1) - 1
_LOCALE_INITIALIZED = False
_LOCALE_INITIALIZATION_ERR = None
def initialize_locale():
"""Set the locale to the users default setting
and set ``_LOCALE_INITIALIZED`` to indicate whether
``get_text_width`` may run into trouble
"""
global _LOCALE_INITIALIZED, _LOCALE_INITIALIZATION_ERR
if _LOCALE_INITIALIZED is False:
try:
locale.setlocale(locale.LC_ALL, '')
except locale.Error as e:
_LOCALE_INITIALIZATION_ERR = e
else:
_LOCALE_INITIALIZED = True
def get_text_width(text):
"""Function that utilizes ``wcswidth`` or ``wcwidth`` to determine the
number of columns used to display a text string.
    We try ``wcswidth`` first, and fall back to iterating each
    character and using ``wcwidth`` individually, defaulting to a value of 0
    for non-printable wide characters.
    On Py2, this depends on ``locale.setlocale(locale.LC_ALL, '')``,
    which in the case of Ansible is done in ``bin/ansible``
"""
if not isinstance(text, text_type):
raise TypeError('get_text_width requires text, not %s' % type(text))
if _LOCALE_INITIALIZATION_ERR:
Display().warning(
'An error occurred while calling ansible.utils.display.initialize_locale '
'(%s). This may result in incorrectly calculated text widths that can '
'cause Display to print incorrect line lengths' % _LOCALE_INITIALIZATION_ERR
)
elif not _LOCALE_INITIALIZED:
Display().warning(
'ansible.utils.display.initialize_locale has not been called, '
'this may result in incorrectly calculated text widths that can '
'cause Display to print incorrect line lengths'
)
try:
width = _LIBC.wcswidth(text, _MAX_INT)
except ctypes.ArgumentError:
width = -1
if width != -1:
return width
width = 0
counter = 0
for c in text:
counter += 1
if c in (u'\x08', u'\x7f', u'\x94', u'\x1b'):
# A few characters result in a subtraction of length:
# BS, DEL, CCH, ESC
            # ESC is slightly different: while ESC itself is non-printable,
            # it is part of an escape sequence, and the whole sequence
            # results in a single non-printable length
width -= 1
counter -= 1
continue
try:
w = _LIBC.wcwidth(c)
except ctypes.ArgumentError:
w = -1
if w == -1:
# -1 signifies a non-printable character
# use 0 here as a best effort
w = 0
width += w
if width == 0 and counter and not _LOCALE_INITIALIZED:
raise EnvironmentError(
'ansible.utils.display.initialize_locale has not been called, '
'and get_text_width could not calculate text width of %r' % text
)
# It doesn't make sense to have a negative printable width
return width if width >= 0 else 0
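# Illustrative example, assuming a UTF-8 capable locale: wide CJK glyphs
# occupy two terminal columns each, so get_text_width(u'コンニチハ') returns
# 10 even though len(u'コンニチハ') is 5.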
class FilterBlackList(logging.Filter):
def __init__(self, blacklist):
self.blacklist = [logging.Filter(name) for name in blacklist]
def filter(self, record):
return not any(f.filter(record) for f in self.blacklist)
class FilterUserInjector(logging.Filter):
"""
This is a filter which injects the current user as the 'user' attribute on each record. We need to add this filter
to all logger handlers so that 3rd party libraries won't print an exception due to user not being defined.
"""
try:
username = getpass.getuser()
except KeyError:
# people like to make containers w/o actual valid passwd/shadow and use host uids
username = 'uid=%s' % os.getuid()
def filter(self, record):
record.user = FilterUserInjector.username
return True
<|fim▁hole|>logger = None
# TODO: make this a callback event instead
if getattr(C, 'DEFAULT_LOG_PATH'):
path = C.DEFAULT_LOG_PATH
if path and (os.path.exists(path) and os.access(path, os.W_OK)) or os.access(os.path.dirname(path), os.W_OK):
# NOTE: level is kept at INFO to avoid security disclosures caused by certain libraries when using DEBUG
logging.basicConfig(filename=path, level=logging.INFO, # DO NOT set to logging.DEBUG
format='%(asctime)s p=%(process)d u=%(user)s n=%(name)s | %(message)s')
logger = logging.getLogger('ansible')
for handler in logging.root.handlers:
handler.addFilter(FilterBlackList(getattr(C, 'DEFAULT_LOG_FILTER', [])))
handler.addFilter(FilterUserInjector())
else:
print("[WARNING]: log file at %s is not writeable and we cannot create it, aborting\n" % path, file=sys.stderr)
# map color to log levels
color_to_log_level = {C.COLOR_ERROR: logging.ERROR,
C.COLOR_WARN: logging.WARNING,
C.COLOR_OK: logging.INFO,
C.COLOR_SKIP: logging.WARNING,
C.COLOR_UNREACHABLE: logging.ERROR,
C.COLOR_DEBUG: logging.DEBUG,
C.COLOR_CHANGED: logging.INFO,
C.COLOR_DEPRECATE: logging.WARNING,
C.COLOR_VERBOSE: logging.INFO}
b_COW_PATHS = (
b"/usr/bin/cowsay",
b"/usr/games/cowsay",
b"/usr/local/bin/cowsay", # BSD path for cowsay
b"/opt/local/bin/cowsay", # MacPorts path for cowsay
)
class Display(metaclass=Singleton):
def __init__(self, verbosity=0):
self.columns = None
self.verbosity = verbosity
# list of all deprecation messages to prevent duplicate display
self._deprecations = {}
self._warns = {}
self._errors = {}
self.b_cowsay = None
self.noncow = C.ANSIBLE_COW_SELECTION
self.set_cowsay_info()
if self.b_cowsay:
try:
cmd = subprocess.Popen([self.b_cowsay, "-l"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = cmd.communicate()
if cmd.returncode:
raise Exception
self.cows_available = {to_text(c) for c in out.split()} # set comprehension
if C.ANSIBLE_COW_ACCEPTLIST and any(C.ANSIBLE_COW_ACCEPTLIST):
self.cows_available = set(C.ANSIBLE_COW_ACCEPTLIST).intersection(self.cows_available)
except Exception:
# could not execute cowsay for some reason
self.b_cowsay = False
self._set_column_width()
def set_cowsay_info(self):
if C.ANSIBLE_NOCOWS:
return
if C.ANSIBLE_COW_PATH:
self.b_cowsay = C.ANSIBLE_COW_PATH
else:
for b_cow_path in b_COW_PATHS:
if os.path.exists(b_cow_path):
self.b_cowsay = b_cow_path
def display(self, msg, color=None, stderr=False, screen_only=False, log_only=False, newline=True):
""" Display a message to the user
Note: msg *must* be a unicode string to prevent UnicodeError tracebacks.
"""
nocolor = msg
if not log_only:
has_newline = msg.endswith(u'\n')
if has_newline:
msg2 = msg[:-1]
else:
msg2 = msg
if color:
msg2 = stringc(msg2, color)
if has_newline or newline:
msg2 = msg2 + u'\n'
msg2 = to_bytes(msg2, encoding=self._output_encoding(stderr=stderr))
# Convert back to text string
# We first convert to a byte string so that we get rid of
# characters that are invalid in the user's locale
msg2 = to_text(msg2, self._output_encoding(stderr=stderr), errors='replace')
# Note: After Display() class is refactored need to update the log capture
# code in 'bin/ansible-connection' (and other relevant places).
if not stderr:
fileobj = sys.stdout
else:
fileobj = sys.stderr
fileobj.write(msg2)
try:
fileobj.flush()
except IOError as e:
                # Ignore EPIPE in case fileobj has been prematurely closed, e.g.
                # when piping to "head -n1"
if e.errno != errno.EPIPE:
raise
if logger and not screen_only:
# We first convert to a byte string so that we get rid of
# color and characters that are invalid in the user's locale
msg2 = to_bytes(nocolor.lstrip(u'\n'))
# Convert back to text string
msg2 = to_text(msg2, self._output_encoding(stderr=stderr))
lvl = logging.INFO
if color:
# set logger level based on color (not great)
try:
lvl = color_to_log_level[color]
except KeyError:
                    # this should not happen, but just in case
raise AnsibleAssertionError('Invalid color supplied to display: %s' % color)
# actually log
logger.log(lvl, msg2)
def v(self, msg, host=None):
return self.verbose(msg, host=host, caplevel=0)
def vv(self, msg, host=None):
return self.verbose(msg, host=host, caplevel=1)
def vvv(self, msg, host=None):
return self.verbose(msg, host=host, caplevel=2)
def vvvv(self, msg, host=None):
return self.verbose(msg, host=host, caplevel=3)
def vvvvv(self, msg, host=None):
return self.verbose(msg, host=host, caplevel=4)
def vvvvvv(self, msg, host=None):
return self.verbose(msg, host=host, caplevel=5)
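    # Illustration (hypothetical values): with Display(verbosity=3), v(), vv()
    # and vvv() all print, while vvvv() and above stay silent, because
    # verbose() only displays when self.verbosity > caplevel.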
def debug(self, msg, host=None):
if C.DEFAULT_DEBUG:
if host is None:
self.display("%6d %0.5f: %s" % (os.getpid(), time.time(), msg), color=C.COLOR_DEBUG)
else:
self.display("%6d %0.5f [%s]: %s" % (os.getpid(), time.time(), host, msg), color=C.COLOR_DEBUG)
def verbose(self, msg, host=None, caplevel=2):
to_stderr = C.VERBOSE_TO_STDERR
if self.verbosity > caplevel:
if host is None:
self.display(msg, color=C.COLOR_VERBOSE, stderr=to_stderr)
else:
self.display("<%s> %s" % (host, msg), color=C.COLOR_VERBOSE, stderr=to_stderr)
def get_deprecation_message(self, msg, version=None, removed=False, date=None, collection_name=None):
        '''Builds the standard deprecation message text; the caller decides how to display it.'''
msg = msg.strip()
if msg and msg[-1] not in ['!', '?', '.']:
msg += '.'
if collection_name == 'ansible.builtin':
collection_name = 'ansible-core'
if removed:
header = '[DEPRECATED]: {0}'.format(msg)
removal_fragment = 'This feature was removed'
help_text = 'Please update your playbooks.'
else:
header = '[DEPRECATION WARNING]: {0}'.format(msg)
removal_fragment = 'This feature will be removed'
# FUTURE: make this a standalone warning so it only shows up once?
help_text = 'Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg.'
if collection_name:
from_fragment = 'from {0}'.format(collection_name)
else:
from_fragment = ''
if date:
when = 'in a release after {0}.'.format(date)
elif version:
when = 'in version {0}.'.format(version)
else:
when = 'in a future release.'
message_text = ' '.join(f for f in [header, removal_fragment, from_fragment, when, help_text] if f)
return message_text
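    # For example (illustrative, with made-up arguments):
    #   get_deprecation_message("Use the new module", version="2.19",
    #                           collection_name="community.general")
    # returns:
    #   [DEPRECATION WARNING]: Use the new module. This feature will be removed
    #   from community.general in version 2.19. Deprecation warnings can be
    #   disabled by setting deprecation_warnings=False in ansible.cfg.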
def deprecated(self, msg, version=None, removed=False, date=None, collection_name=None):
if not removed and not C.DEPRECATION_WARNINGS:
return
message_text = self.get_deprecation_message(msg, version=version, removed=removed, date=date, collection_name=collection_name)
if removed:
raise AnsibleError(message_text)
wrapped = textwrap.wrap(message_text, self.columns, drop_whitespace=False)
message_text = "\n".join(wrapped) + "\n"
if message_text not in self._deprecations:
self.display(message_text.strip(), color=C.COLOR_DEPRECATE, stderr=True)
self._deprecations[message_text] = 1
def warning(self, msg, formatted=False):
if not formatted:
new_msg = "[WARNING]: %s" % msg
wrapped = textwrap.wrap(new_msg, self.columns)
new_msg = "\n".join(wrapped) + "\n"
else:
new_msg = "\n[WARNING]: \n%s" % msg
if new_msg not in self._warns:
self.display(new_msg, color=C.COLOR_WARN, stderr=True)
self._warns[new_msg] = 1
def system_warning(self, msg):
if C.SYSTEM_WARNINGS:
self.warning(msg)
def banner(self, msg, color=None, cows=True):
'''
        Prints a header-looking line, using cowsay when available, otherwise padding the message
        with stars out to the terminal width (3 stars minimum)
'''
msg = to_text(msg)
if self.b_cowsay and cows:
try:
self.banner_cowsay(msg)
return
except OSError:
self.warning("somebody cleverly deleted cowsay or something during the PB run. heh.")
msg = msg.strip()
try:
star_len = self.columns - get_text_width(msg)
except EnvironmentError:
star_len = self.columns - len(msg)
if star_len <= 3:
star_len = 3
stars = u"*" * star_len
self.display(u"\n%s %s" % (msg, stars), color=color)
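    # Sketch of the star fallback: on an 80-column terminal, banner(u"PLAY [all]")
    # emits "PLAY [all]" followed by enough stars to pad out to self.columns.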
def banner_cowsay(self, msg, color=None):
if u": [" in msg:
msg = msg.replace(u"[", u"")
if msg.endswith(u"]"):
msg = msg[:-1]
runcmd = [self.b_cowsay, b"-W", b"60"]
if self.noncow:
thecow = self.noncow
if thecow == 'random':
thecow = random.choice(list(self.cows_available))
runcmd.append(b'-f')
runcmd.append(to_bytes(thecow))
runcmd.append(to_bytes(msg))
cmd = subprocess.Popen(runcmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = cmd.communicate()
self.display(u"%s\n" % to_text(out), color=color)
def error(self, msg, wrap_text=True):
if wrap_text:
new_msg = u"\n[ERROR]: %s" % msg
wrapped = textwrap.wrap(new_msg, self.columns)
new_msg = u"\n".join(wrapped) + u"\n"
else:
new_msg = u"ERROR! %s" % msg
if new_msg not in self._errors:
self.display(new_msg, color=C.COLOR_ERROR, stderr=True)
self._errors[new_msg] = 1
@staticmethod
def prompt(msg, private=False):
prompt_string = to_bytes(msg, encoding=Display._output_encoding())
# Convert back into text. We do this double conversion
# to get rid of characters that are illegal in the user's locale
prompt_string = to_text(prompt_string)
if private:
return getpass.getpass(prompt_string)
else:
return input(prompt_string)
def do_var_prompt(self, varname, private=True, prompt=None, encrypt=None, confirm=False, salt_size=None, salt=None, default=None, unsafe=None):
result = None
if sys.__stdin__.isatty():
do_prompt = self.prompt
if prompt and default is not None:
msg = "%s [%s]: " % (prompt, default)
elif prompt:
msg = "%s: " % prompt
else:
msg = 'input for %s: ' % varname
if confirm:
while True:
result = do_prompt(msg, private)
second = do_prompt("confirm " + msg, private)
if result == second:
break
                    self.display("**** VALUES ENTERED DO NOT MATCH ****")
else:
result = do_prompt(msg, private)
else:
result = None
self.warning("Not prompting as we are not in interactive mode")
# if result is false and default is not None
if not result and default is not None:
result = default
if encrypt:
# Circular import because encrypt needs a display class
from ansible.utils.encrypt import do_encrypt
result = do_encrypt(result, encrypt, salt_size, salt)
# handle utf-8 chars
result = to_text(result, errors='surrogate_or_strict')
if unsafe:
result = wrap_var(result)
return result
@staticmethod
def _output_encoding(stderr=False):
encoding = locale.getpreferredencoding()
# https://bugs.python.org/issue6202
# Python2 hardcodes an obsolete value on Mac. Use MacOSX defaults
# instead.
if encoding in ('mac-roman',):
encoding = 'utf-8'
return encoding
def _set_column_width(self):
if os.isatty(1):
tty_size = unpack('HHHH', fcntl.ioctl(1, TIOCGWINSZ, pack('HHHH', 0, 0, 0, 0)))[1]
else:
tty_size = 0
self.columns = max(79, tty_size - 1)<|fim▁end|>
| |
<|file_name|>FileLister.java<|end_file_name|><|fim▁begin|>/**
*
*/
package topology.graphParsers.common;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
/**
 * Returns the names of files with a given extension in a given directory.
 * @author Omer Zohar
*/
public class FileLister {
private String m_sfilepath=null;
private FilenameFilter m_filter= null;
/**
<|fim▁hole|> *
*/
public FileLister(String path, FilenameFilter filter) {
m_sfilepath=path;
m_filter=filter;
}
public String[] getfilesfromdir(){
File dir = null;
try {
dir = new File (m_sfilepath).getCanonicalFile();
        } catch (IOException e) {
            System.out.println("Error getting canonical file");
            e.printStackTrace();
            return new String[0]; // dir would remain null below, so bail out early instead of risking an NPE
        }
String[] s=new String[0];
if (dir.isDirectory()){
s=dir.list(m_filter);
for (int i=0;i<s.length;i++)
s[i]=m_sfilepath+s[i];
}
else {
            System.out.println(m_sfilepath + " is not a directory.");
}
return s;
}
/**
* @param args
*/
public static void main(String[] args) {
FilenameFilter extFilter = new FilenameExtentionFilter("fvl");
FileLister f=new FileLister("D:\\Java\\Projects\\betweness\\res\\plankton\\www.ircache.net\\Plankton\\Data\\199810",extFilter);
String[] s=f.getfilesfromdir();
for (int i=0;i<s.length;i++)
System.out.println(s[i]);
}
}<|fim▁end|>
| |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(globs)]
#![crate_name = "libc"]
#![experimental]
#![no_std] // we don't need std, and we can't have std, since it doesn't exist
// yet. std depends on us.
#![crate_type = "rlib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/nightly/",
html_playground_url = "http://play.rust-lang.org/")]
//! Bindings for the C standard library and other platform libraries
//!
//! **NOTE:** These are *architecture and libc* specific. On Linux, these
//! bindings are only correct for glibc.
//!
//! This module contains bindings to the C standard library, organized into
//! modules by their defining standard. Additionally, it contains some assorted
//! platform-specific definitions. For convenience, most functions and types
//! are reexported, so `use libc::*` will import the available C bindings as
//! appropriate for the target platform. The exact set of functions available
//! are platform specific.
//!
//! *Note:* Because these definitions are platform-specific, some may not appear
//! in the generated documentation.
//!
//! We consider the following specs reasonably normative with respect to
//! interoperating with the C standard library (libc/msvcrt):
//!
//! * ISO 9899:1990 ('C95', 'ANSI C', 'Standard C'), NA1, 1995.
//! * ISO 9899:1999 ('C99' or 'C9x').
//! * ISO 9945:1988 / IEEE 1003.1-1988 ('POSIX.1').
//! * ISO 9945:2001 / IEEE 1003.1-2001 ('POSIX:2001', 'SUSv3').
//! * ISO 9945:2008 / IEEE 1003.1-2008 ('POSIX:2008', 'SUSv4').
//!
//! Note that any reference to the 1996 revision of POSIX, or any revs between
//! 1990 (when '88 was approved at ISO) and 2001 (when the next actual
//! revision-revision happened), are merely additions of other chapters (1b and
//! 1c) outside the core interfaces.
//!
//! Despite having several names each, these are *reasonably* coherent
//! point-in-time, list-of-definition sorts of specs. You can get each under a
//! variety of names but will wind up with the same definition in each case.
//!
//! See standards(7) in linux-manpages for more details.
//!
//! Our interface to these libraries is complicated by the non-universality of
//! conformance to any of them. About the only thing universally supported is
//! the first (C95), beyond that definitions quickly become absent on various
//! platforms.
//!
//! We therefore wind up dividing our module-space up (mostly for the sake of
//! sanity while editing, filling-in-details and eliminating duplication) into
//! definitions common-to-all (held in modules named c95, c99, posix88, posix01
//! and posix08) and definitions that appear only on *some* platforms (named
//! 'extra'). This would be things like significant OSX foundation kit, or Windows
//! library kernel32.dll, or various fancy glibc, Linux or BSD extensions.
//!
//! In addition to the per-platform 'extra' modules, we define a module of
//! 'common BSD' libc routines that never quite made it into POSIX but show up
//! in multiple derived systems. This is the 4.4BSD r2 / 1995 release, the final
//! one from Berkeley after the lawsuits died down and the CSRG dissolved.
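//!
//! As a quick illustrative sketch (marked `ignore`, since which items exist
//! depends on the target platform), calling a reexported C function looks like:
//!
//! ```ignore
//! extern crate libc;
//!
//! fn main() {
//!     let s = "hello\0";
//!     // strlen is reexported at the crate root from funcs::c95::string
//!     let n = unsafe { libc::strlen(s.as_bytes().as_ptr() as *const libc::c_char) };
//!     assert!(n == 5);
//! }
//! ```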
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(non_upper_case_globals)]
#![allow(missing_docs)]
#![allow(raw_pointer_deriving)]
extern crate core;
#[cfg(test)] extern crate std;
#[cfg(test)] extern crate test;
pub use self::Nullable::*;
// Explicit export lists for the intersection (provided here) mean that
// you can write more-platform-agnostic code if you stick to just these
// symbols.
pub use types::common::c95::{FILE, c_void, fpos_t};
pub use types::common::c99::{int8_t, int16_t, int32_t, int64_t};
pub use types::common::c99::{uint8_t, uint16_t, uint32_t, uint64_t};
pub use types::common::posix88::{DIR, dirent_t};
pub use types::os::common::posix01::{timeval};
pub use types::os::common::bsd44::{addrinfo, in_addr, in6_addr, sockaddr_storage};
pub use types::os::common::bsd44::{ip_mreq, ip6_mreq, sockaddr, sockaddr_un};
pub use types::os::common::bsd44::{sa_family_t, sockaddr_in, sockaddr_in6, socklen_t};
pub use types::os::arch::c95::{c_char, c_double, c_float, c_int, c_uint};
pub use types::os::arch::c95::{c_long, c_short, c_uchar, c_ulong, wchar_t};
pub use types::os::arch::c95::{c_ushort, clock_t, ptrdiff_t, c_schar};
pub use types::os::arch::c95::{size_t, time_t, suseconds_t};
pub use types::os::arch::c99::{c_longlong, c_ulonglong};
pub use types::os::arch::c99::{intptr_t, uintptr_t};
pub use types::os::arch::c99::{intmax_t, uintmax_t};
pub use types::os::arch::posix88::{dev_t, ino_t, mode_t};
pub use types::os::arch::posix88::{off_t, pid_t, ssize_t};
pub use consts::os::c95::{_IOFBF, _IOLBF, _IONBF, BUFSIZ, EOF};
pub use consts::os::c95::{EXIT_FAILURE, EXIT_SUCCESS};
pub use consts::os::c95::{FILENAME_MAX, FOPEN_MAX, L_tmpnam};
pub use consts::os::c95::{RAND_MAX, SEEK_CUR, SEEK_END};
pub use consts::os::c95::{SEEK_SET, TMP_MAX};
pub use consts::os::posix88::{F_OK, O_APPEND, O_CREAT, O_EXCL};
pub use consts::os::posix88::{O_RDONLY, O_RDWR, O_TRUNC, O_WRONLY};
pub use consts::os::posix88::{R_OK, S_IEXEC, S_IFBLK, S_IFCHR};
pub use consts::os::posix88::{S_IFDIR, S_IFIFO, S_IFMT, S_IFREG, S_IFLNK};
pub use consts::os::posix88::{S_IREAD, S_IRUSR, S_IRWXU, S_IWUSR};
pub use consts::os::posix88::{STDERR_FILENO, STDIN_FILENO, S_IXUSR};
pub use consts::os::posix88::{STDOUT_FILENO, W_OK, X_OK};
pub use consts::os::bsd44::{AF_INET, AF_INET6, SOCK_STREAM, SOCK_DGRAM, SOCK_RAW};
pub use consts::os::bsd44::{IPPROTO_IP, IPPROTO_IPV6, IPPROTO_TCP, TCP_NODELAY};
pub use consts::os::bsd44::{SOL_SOCKET, SO_KEEPALIVE, SO_ERROR};
pub use consts::os::bsd44::{SO_REUSEADDR, SO_BROADCAST, SHUT_WR, IP_MULTICAST_LOOP};
pub use consts::os::bsd44::{IP_ADD_MEMBERSHIP, IP_DROP_MEMBERSHIP};
pub use consts::os::bsd44::{IPV6_ADD_MEMBERSHIP, IPV6_DROP_MEMBERSHIP};
pub use consts::os::bsd44::{IP_MULTICAST_TTL, IP_TTL, IP_HDRINCL, SHUT_RD};
pub use consts::os::extra::{IPPROTO_RAW};
pub use funcs::c95::ctype::{isalnum, isalpha, iscntrl, isdigit};
pub use funcs::c95::ctype::{islower, isprint, ispunct, isspace};
pub use funcs::c95::ctype::{isupper, isxdigit, tolower, toupper};
pub use funcs::c95::stdio::{fclose, feof, ferror, fflush, fgetc};
pub use funcs::c95::stdio::{fgetpos, fgets, fopen, fputc, fputs};
pub use funcs::c95::stdio::{fread, freopen, fseek, fsetpos, ftell};
pub use funcs::c95::stdio::{fwrite, perror, puts, remove, rename, rewind};
pub use funcs::c95::stdio::{setbuf, setvbuf, tmpfile, ungetc};
pub use funcs::c95::stdlib::{abs, atof, atoi, calloc, exit, _exit};
pub use funcs::c95::stdlib::{free, getenv, labs, malloc, rand};
pub use funcs::c95::stdlib::{realloc, srand, strtod, strtol};
pub use funcs::c95::stdlib::{strtoul, system};
pub use funcs::c95::string::{memchr, memcmp};
pub use funcs::c95::string::{strcat, strchr, strcmp};
pub use funcs::c95::string::{strcoll, strcpy, strcspn, strerror};
pub use funcs::c95::string::{strlen, strncat, strncmp, strncpy};
pub use funcs::c95::string::{strpbrk, strrchr, strspn, strstr};
pub use funcs::c95::string::{strtok, strxfrm};
pub use funcs::posix88::fcntl::{open, creat};
pub use funcs::posix88::stat_::{chmod, fstat, mkdir, stat};
pub use funcs::posix88::stdio::{fdopen, fileno, pclose, popen};
pub use funcs::posix88::unistd::{access, chdir, close, dup, dup2};
pub use funcs::posix88::unistd::{execv, execve, execvp, getcwd};
pub use funcs::posix88::unistd::{getpid, isatty, lseek, pipe, read};
pub use funcs::posix88::unistd::{rmdir, unlink, write};
pub use funcs::bsd43::{socket, setsockopt, bind, send, recv, recvfrom};
pub use funcs::bsd43::{listen, sendto, accept, connect, getpeername, getsockname};
pub use funcs::bsd43::{shutdown};
// But we also reexport most everything
// if you're interested in writing platform-specific code.
// FIXME: This is a mess, but the design of this entire module needs to be
// reconsidered, so I'm not inclined to do better right now. As part of
// #11870 I removed all the pub globs here, leaving explicit reexports
// of everything that is actually used in-tree.
//
// So the following exports don't follow any particular plan.
#[cfg(unix)] pub use consts::os::sysconf::{_SC_PAGESIZE};
#[cfg(unix)] pub use consts::os::posix88::{PROT_READ, PROT_WRITE, PROT_EXEC};
#[cfg(unix)] pub use consts::os::posix88::{MAP_FIXED, MAP_FILE, MAP_ANON, MAP_PRIVATE, MAP_FAILED};
#[cfg(unix)] pub use consts::os::posix88::{EACCES, EBADF, EINVAL, ENODEV, ENOMEM};
#[cfg(unix)] pub use consts::os::posix88::{ECONNREFUSED, ECONNRESET, EPERM, EPIPE};
#[cfg(unix)] pub use consts::os::posix88::{ENOTCONN, ECONNABORTED, EADDRNOTAVAIL, EINTR};
#[cfg(unix)] pub use consts::os::posix88::{EADDRINUSE, ENOENT, EISDIR, EAGAIN, EWOULDBLOCK};
#[cfg(unix)] pub use consts::os::posix88::{ECANCELED, SIGINT, EINPROGRESS};
#[cfg(unix)] pub use consts::os::posix88::{ENOSYS, ENOTTY, ETIMEDOUT, EMFILE};
#[cfg(unix)] pub use consts::os::posix88::{SIGTERM, SIGKILL, SIGPIPE, PROT_NONE};
#[cfg(unix)] pub use consts::os::posix01::{SIG_IGN, F_GETFL, F_SETFL};
#[cfg(unix)] pub use consts::os::bsd44::{AF_UNIX};
#[cfg(unix)] pub use consts::os::extra::{O_NONBLOCK};
#[cfg(unix)] pub use types::os::common::posix01::{pthread_t, timespec, timezone};
#[cfg(unix)] pub use types::os::arch::posix88::{uid_t, gid_t};
#[cfg(unix)] pub use types::os::arch::posix01::{pthread_attr_t};
#[cfg(unix)] pub use types::os::arch::posix01::{stat, utimbuf};
#[cfg(unix)] pub use types::os::common::bsd44::{ifaddrs};
#[cfg(unix)] pub use funcs::posix88::unistd::{sysconf, setgid, setsid, setuid, pread, pwrite};
#[cfg(unix)] pub use funcs::posix88::unistd::{getgid, getuid, getsid};
#[cfg(unix)] pub use funcs::posix88::unistd::{_PC_NAME_MAX, utime, nanosleep, pathconf, link};
#[cfg(unix)] pub use funcs::posix88::unistd::{chown};
#[cfg(unix)] pub use funcs::posix88::mman::{mmap, munmap, mprotect};
#[cfg(unix)] pub use funcs::posix88::dirent::{opendir, readdir_r, closedir};
#[cfg(unix)] pub use funcs::posix88::fcntl::{fcntl};
#[cfg(unix)] pub use funcs::posix88::net::{if_nametoindex};
#[cfg(unix)] pub use funcs::posix01::stat_::{lstat};
#[cfg(unix)] pub use funcs::posix01::unistd::{fsync, ftruncate};
#[cfg(unix)] pub use funcs::posix01::unistd::{readlink, symlink};
#[cfg(unix)] pub use funcs::bsd43::{getifaddrs, freeifaddrs};
#[cfg(windows)] pub use consts::os::c95::{WSAECONNREFUSED, WSAECONNRESET, WSAEACCES};
#[cfg(windows)] pub use consts::os::c95::{WSAEWOULDBLOCK, WSAENOTCONN, WSAECONNABORTED};
#[cfg(windows)] pub use consts::os::c95::{WSAEADDRNOTAVAIL, WSAEADDRINUSE, WSAEINTR};
#[cfg(windows)] pub use consts::os::c95::{WSAEINPROGRESS, WSAEINVAL, WSAEMFILE};
#[cfg(windows)] pub use consts::os::extra::{ERROR_INSUFFICIENT_BUFFER};
#[cfg(windows)] pub use consts::os::extra::{O_BINARY, O_NOINHERIT, PAGE_NOACCESS};
#[cfg(windows)] pub use consts::os::extra::{PAGE_READONLY, PAGE_READWRITE, PAGE_EXECUTE};
#[cfg(windows)] pub use consts::os::extra::{PAGE_EXECUTE_READ, PAGE_EXECUTE_READWRITE};
#[cfg(windows)] pub use consts::os::extra::{MEM_COMMIT, MEM_RESERVE, MEM_RELEASE};
#[cfg(windows)] pub use consts::os::extra::{FILE_MAP_READ, FILE_MAP_WRITE, FILE_MAP_EXECUTE};
#[cfg(windows)] pub use consts::os::extra::{ERROR_ALREADY_EXISTS, ERROR_NO_DATA};
#[cfg(windows)] pub use consts::os::extra::{ERROR_FILE_NOT_FOUND, ERROR_INVALID_NAME};
#[cfg(windows)] pub use consts::os::extra::{ERROR_BROKEN_PIPE, ERROR_INVALID_FUNCTION};
#[cfg(windows)] pub use consts::os::extra::{ERROR_CALL_NOT_IMPLEMENTED};
#[cfg(windows)] pub use consts::os::extra::{ERROR_NOTHING_TO_TERMINATE};
#[cfg(windows)] pub use consts::os::extra::{ERROR_INVALID_HANDLE};
#[cfg(windows)] pub use consts::os::extra::{TRUE, FALSE, INFINITE};
#[cfg(windows)] pub use consts::os::extra::{PROCESS_TERMINATE, PROCESS_QUERY_INFORMATION};
#[cfg(windows)] pub use consts::os::extra::{STILL_ACTIVE, DETACHED_PROCESS};
#[cfg(windows)] pub use consts::os::extra::{CREATE_NEW_PROCESS_GROUP, CREATE_UNICODE_ENVIRONMENT};
#[cfg(windows)] pub use consts::os::extra::{FILE_BEGIN, FILE_END, FILE_CURRENT};
#[cfg(windows)] pub use consts::os::extra::{FILE_GENERIC_READ, FILE_GENERIC_WRITE};
#[cfg(windows)] pub use consts::os::extra::{FILE_SHARE_READ, FILE_SHARE_WRITE, FILE_SHARE_DELETE};
#[cfg(windows)] pub use consts::os::extra::{TRUNCATE_EXISTING, CREATE_ALWAYS, OPEN_EXISTING};
#[cfg(windows)] pub use consts::os::extra::{CREATE_NEW, FILE_APPEND_DATA, FILE_WRITE_DATA};
#[cfg(windows)] pub use consts::os::extra::{OPEN_ALWAYS, FILE_ATTRIBUTE_NORMAL};
#[cfg(windows)] pub use consts::os::extra::{FILE_FLAG_BACKUP_SEMANTICS, INVALID_HANDLE_VALUE};
#[cfg(windows)] pub use consts::os::extra::{MOVEFILE_REPLACE_EXISTING};
#[cfg(windows)] pub use consts::os::extra::{GENERIC_READ, GENERIC_WRITE};
#[cfg(windows)] pub use consts::os::extra::{VOLUME_NAME_DOS};
#[cfg(windows)] pub use consts::os::extra::{PIPE_ACCESS_DUPLEX, FILE_FLAG_FIRST_PIPE_INSTANCE};
#[cfg(windows)] pub use consts::os::extra::{FILE_FLAG_OVERLAPPED, PIPE_TYPE_BYTE};
#[cfg(windows)] pub use consts::os::extra::{PIPE_READMODE_BYTE, PIPE_WAIT};
#[cfg(windows)] pub use consts::os::extra::{PIPE_UNLIMITED_INSTANCES, ERROR_ACCESS_DENIED};
#[cfg(windows)] pub use consts::os::extra::{FILE_WRITE_ATTRIBUTES, FILE_READ_ATTRIBUTES};
#[cfg(windows)] pub use consts::os::extra::{ERROR_PIPE_BUSY, ERROR_IO_PENDING};
#[cfg(windows)] pub use consts::os::extra::{ERROR_PIPE_CONNECTED, WAIT_OBJECT_0};
#[cfg(windows)] pub use consts::os::extra::{ERROR_NOT_FOUND};
#[cfg(windows)] pub use consts::os::extra::{ERROR_OPERATION_ABORTED};
#[cfg(windows)] pub use consts::os::extra::{FIONBIO};
#[cfg(windows)] pub use types::os::common::bsd44::{SOCKET};
#[cfg(windows)] pub use types::os::common::posix01::{stat, utimbuf};
#[cfg(windows)] pub use types::os::arch::extra::{HANDLE, BOOL, LPSECURITY_ATTRIBUTES};
#[cfg(windows)] pub use types::os::arch::extra::{LPCSTR, WORD, DWORD, BYTE, FILETIME};
#[cfg(windows)] pub use types::os::arch::extra::{LARGE_INTEGER, LPVOID, LONG};
#[cfg(windows)] pub use types::os::arch::extra::{time64_t, OVERLAPPED, LPCWSTR};
#[cfg(windows)] pub use types::os::arch::extra::{LPOVERLAPPED, SIZE_T, LPDWORD};
#[cfg(windows)] pub use types::os::arch::extra::{SECURITY_ATTRIBUTES, WIN32_FIND_DATAW};
#[cfg(windows)] pub use funcs::c95::string::{wcslen};
#[cfg(windows)] pub use funcs::posix88::stat_::{wstat, wutime, wchmod, wrmdir};
#[cfg(windows)] pub use funcs::bsd43::{closesocket};
#[cfg(windows)] pub use funcs::extra::kernel32::{GetCurrentDirectoryW, GetLastError};
#[cfg(windows)] pub use funcs::extra::kernel32::{GetEnvironmentVariableW, SetEnvironmentVariableW};
#[cfg(windows)] pub use funcs::extra::kernel32::{GetModuleFileNameW, SetCurrentDirectoryW};
#[cfg(windows)] pub use funcs::extra::kernel32::{GetSystemInfo, VirtualAlloc, VirtualFree};
#[cfg(windows)] pub use funcs::extra::kernel32::{CreateFileMappingW, MapViewOfFile};
#[cfg(windows)] pub use funcs::extra::kernel32::{UnmapViewOfFile, CloseHandle};
#[cfg(windows)] pub use funcs::extra::kernel32::{WaitForSingleObject, GetSystemTimeAsFileTime};
#[cfg(windows)] pub use funcs::extra::kernel32::{QueryPerformanceCounter};
#[cfg(windows)] pub use funcs::extra::kernel32::{QueryPerformanceFrequency};
#[cfg(windows)] pub use funcs::extra::kernel32::{GetExitCodeProcess, TerminateProcess};
#[cfg(windows)] pub use funcs::extra::kernel32::{ReadFile, WriteFile, SetFilePointerEx};
#[cfg(windows)] pub use funcs::extra::kernel32::{SetEndOfFile, CreateFileW};
#[cfg(windows)] pub use funcs::extra::kernel32::{CreateDirectoryW, FindFirstFileW};
#[cfg(windows)] pub use funcs::extra::kernel32::{FindNextFileW, FindClose, DeleteFileW};
#[cfg(windows)] pub use funcs::extra::kernel32::{CreateHardLinkW, CreateEventW};
#[cfg(windows)] pub use funcs::extra::kernel32::{FlushFileBuffers, CreateNamedPipeW};
#[cfg(windows)] pub use funcs::extra::kernel32::{SetNamedPipeHandleState, WaitNamedPipeW};
#[cfg(windows)] pub use funcs::extra::kernel32::{GetOverlappedResult, ConnectNamedPipe};
#[cfg(windows)] pub use funcs::extra::kernel32::{DisconnectNamedPipe, OpenProcess};
#[cfg(windows)] pub use funcs::extra::kernel32::{MoveFileExW, VirtualProtect};
#[cfg(windows)] pub use funcs::extra::msvcrt::{get_osfhandle, open_osfhandle};
#[cfg(windows)] pub use funcs::extra::winsock::{ioctlsocket};
#[cfg(any(target_os = "linux",
target_os = "android",
target_os = "freebsd",
target_os = "dragonfly"))]
pub use consts::os::posix01::{CLOCK_REALTIME, CLOCK_MONOTONIC};
#[cfg(any(target_os = "linux", target_os = "android"))]
pub use funcs::posix01::unistd::{fdatasync};
#[cfg(any(target_os = "linux", target_os = "android"))]
pub use types::os::arch::extra::{sockaddr_ll};
#[cfg(any(target_os = "linux", target_os = "android"))]
pub use consts::os::extra::{AF_PACKET};
#[cfg(all(unix, not(target_os = "freebsd")))]
pub use consts::os::extra::{MAP_STACK};
#[cfg(any(target_os = "freebsd", target_os = "dragonfly"))]
pub use consts::os::bsd44::{TCP_KEEPIDLE};
#[cfg(any(target_os = "macos", target_os = "ios"))]
pub use consts::os::bsd44::{TCP_KEEPALIVE};
#[cfg(any(target_os = "macos", target_os = "ios"))]
pub use consts::os::extra::{F_FULLFSYNC};
#[cfg(any(target_os = "macos", target_os = "ios"))]
pub use types::os::arch::extra::{mach_timebase_info};
#[cfg(not(windows))]
#[link(name = "c")]
#[link(name = "m")]
extern {}
/// A wrapper for a nullable pointer. Don't use this except for interacting
/// with libc. Basically Option, but without the dependence on libstd.
// If/when libprim happens, this can be removed in favor of that
pub enum Nullable<T> {
Null,
NotNull(T)
}
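// Illustrative sketch (hypothetical helper, not part of this crate): code
// handed an optional C callback could match on it like so:
//
//     fn invoke_if_set(cb: Nullable<extern "C" fn()>) {
//         match cb {
//             NotNull(f) => f(),
//             Null => {}
//         }
//     }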
pub mod types {
// Types tend to vary *per architecture* so we pull their definitions out
// into this module.
// Standard types that are opaque or common, so are not per-target.
pub mod common {
pub mod c95 {
/// Type used to construct void pointers for use with C.
///
/// This type is only useful as a pointer target. Do not use it as a
/// return type for FFI functions which have the `void` return type in
/// C. Use the unit type `()` or omit the return type instead.
///
/// For LLVM to recognize the void pointer type and by extension
/// functions like malloc(), we need to have it represented as i8* in
/// LLVM bitcode. The enum used here ensures this and prevents misuse
            /// of the "raw" type by only having private variants. We need two
/// variants, because the compiler complains about the repr attribute
/// otherwise.
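            ///
            /// A typical FFI declaration using it looks like this sketch
            /// (`ignore`d, as the extern block is purely illustrative):
            ///
            /// ```ignore
            /// extern {
            ///     fn malloc(size: size_t) -> *mut c_void;
            ///     fn free(p: *mut c_void);
            /// }
            /// ```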
#[repr(u8)]
#[allow(missing_copy_implementations)]
pub enum c_void {
__variant1,
__variant2,
}
#[allow(missing_copy_implementations)]
pub enum FILE {}
#[allow(missing_copy_implementations)]
pub enum fpos_t {}
}
pub mod c99 {
pub type int8_t = i8;
pub type int16_t = i16;
pub type int32_t = i32;
pub type int64_t = i64;
pub type uint8_t = u8;
pub type uint16_t = u16;
pub type uint32_t = u32;
pub type uint64_t = u64;
}
pub mod posix88 {
#[allow(missing_copy_implementations)]
pub enum DIR {}
#[allow(missing_copy_implementations)]
pub enum dirent_t {}
}
pub mod posix01 {}
pub mod posix08 {}
pub mod bsd44 {}
}
// Standard types that are scalar but vary by OS and arch.
#[cfg(any(target_os = "linux", target_os = "android"))]
pub mod os {
pub mod common {
pub mod posix01 {
use types::common::c95::{c_void};
use types::os::arch::c95::{c_char, c_ulong, size_t,
time_t, suseconds_t, c_long};
pub type pthread_t = c_ulong;
#[repr(C)]
#[deriving(Copy)] pub struct glob_t {
pub gl_pathc: size_t,
pub gl_pathv: *mut *mut c_char,
pub gl_offs: size_t,
pub __unused1: *mut c_void,
pub __unused2: *mut c_void,
pub __unused3: *mut c_void,
pub __unused4: *mut c_void,
pub __unused5: *mut c_void,
}
#[repr(C)]
#[deriving(Copy)] pub struct timeval {
pub tv_sec: time_t,
pub tv_usec: suseconds_t,
}
#[repr(C)]
#[deriving(Copy)] pub struct timespec {
pub tv_sec: time_t,
pub tv_nsec: c_long,
}
#[deriving(Copy)] pub enum timezone {}
pub type sighandler_t = size_t;
}
pub mod bsd44 {
use types::common::c95::{c_void};
use types::os::arch::c95::{c_char, c_int, c_uint};
pub type socklen_t = u32;
pub type sa_family_t = u16;
pub type in_port_t = u16;
pub type in_addr_t = u32;
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr {
pub sa_family: sa_family_t,
pub sa_data: [u8, ..14],
}
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr_storage {
pub ss_family: sa_family_t,
pub __ss_align: i64,
pub __ss_pad2: [u8, ..112],
}
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr_in {
pub sin_family: sa_family_t,
pub sin_port: in_port_t,
pub sin_addr: in_addr,
pub sin_zero: [u8, ..8],
}
#[repr(C)]
#[deriving(Copy)] pub struct in_addr {
pub s_addr: in_addr_t,
}
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr_in6 {
pub sin6_family: sa_family_t,
pub sin6_port: in_port_t,
pub sin6_flowinfo: u32,
pub sin6_addr: in6_addr,
pub sin6_scope_id: u32,
}
#[repr(C)]
#[deriving(Copy)] pub struct in6_addr {
pub s6_addr: [u16, ..8]
}
#[repr(C)]
#[deriving(Copy)] pub struct ip_mreq {
pub imr_multiaddr: in_addr,
pub imr_interface: in_addr,
}
#[repr(C)]
#[deriving(Copy)] pub struct ip6_mreq {
pub ipv6mr_multiaddr: in6_addr,
pub ipv6mr_interface: c_uint,
}
#[repr(C)]
#[deriving(Copy)] pub struct addrinfo {
pub ai_flags: c_int,
pub ai_family: c_int,
pub ai_socktype: c_int,
pub ai_protocol: c_int,
pub ai_addrlen: socklen_t,
#[cfg(target_os = "linux")]
pub ai_addr: *mut sockaddr,
#[cfg(target_os = "linux")]
pub ai_canonname: *mut c_char,
#[cfg(target_os = "android")]
pub ai_canonname: *mut c_char,
#[cfg(target_os = "android")]
pub ai_addr: *mut sockaddr,
pub ai_next: *mut addrinfo,
}
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr_un {
pub sun_family: sa_family_t,
pub sun_path: [c_char, ..108]
}
#[repr(C)]
#[deriving(Copy)] pub struct ifaddrs {
pub ifa_next: *mut ifaddrs,
pub ifa_name: *mut c_char,
pub ifa_flags: c_uint,
pub ifa_addr: *mut sockaddr,
pub ifa_netmask: *mut sockaddr,
pub ifa_ifu: *mut sockaddr, // FIXME This should be a union
pub ifa_data: *mut c_void
}
}
}
#[cfg(any(target_arch = "x86",
target_arch = "arm",
target_arch = "mips",
target_arch = "mipsel"))]
pub mod arch {
pub mod c95 {
pub type c_char = i8;
pub type c_schar = i8;
pub type c_uchar = u8;
pub type c_short = i16;
pub type c_ushort = u16;
pub type c_int = i32;
pub type c_uint = u32;
pub type c_long = i32;
pub type c_ulong = u32;
pub type c_float = f32;
pub type c_double = f64;
pub type size_t = u32;
pub type ptrdiff_t = i32;
pub type clock_t = i32;
pub type time_t = i32;
pub type suseconds_t = i32;
pub type wchar_t = i32;
}
pub mod c99 {
pub type c_longlong = i64;
pub type c_ulonglong = u64;
pub type intptr_t = i32;
pub type uintptr_t = u32;
pub type intmax_t = i64;
pub type uintmax_t = u64;
}
#[cfg(any(target_arch = "x86",
target_arch = "mips",
target_arch = "mipsel"))]
pub mod posix88 {
pub type off_t = i32;
pub type dev_t = u64;
pub type ino_t = u32;
pub type pid_t = i32;
pub type uid_t = u32;
pub type gid_t = u32;
pub type useconds_t = u32;
pub type mode_t = u32;
pub type ssize_t = i32;
}
#[cfg(target_arch = "arm")]
pub mod posix88 {
pub type off_t = i32;
pub type dev_t = u32;
pub type ino_t = u32;
pub type pid_t = i32;
pub type uid_t = u32;
pub type gid_t = u32;
pub type useconds_t = u32;
pub type mode_t = u16;
pub type ssize_t = i32;
}
#[cfg(target_arch = "x86")]
pub mod posix01 {
use types::os::arch::c95::{c_short, c_long, time_t};
use types::os::arch::posix88::{dev_t, gid_t, ino_t};
use types::os::arch::posix88::{mode_t, off_t};
use types::os::arch::posix88::{uid_t};
pub type nlink_t = u32;
pub type blksize_t = i32;
pub type blkcnt_t = i32;
#[repr(C)]
#[deriving(Copy)] pub struct stat {
pub st_dev: dev_t,
pub __pad1: c_short,
pub st_ino: ino_t,
pub st_mode: mode_t,
pub st_nlink: nlink_t,
pub st_uid: uid_t,
pub st_gid: gid_t,
pub st_rdev: dev_t,
pub __pad2: c_short,
pub st_size: off_t,
pub st_blksize: blksize_t,
pub st_blocks: blkcnt_t,
pub st_atime: time_t,
pub st_atime_nsec: c_long,
pub st_mtime: time_t,
pub st_mtime_nsec: c_long,
pub st_ctime: time_t,
pub st_ctime_nsec: c_long,
pub __unused4: c_long,
pub __unused5: c_long,
}
#[repr(C)]
#[deriving(Copy)] pub struct utimbuf {
pub actime: time_t,
pub modtime: time_t,
}
#[repr(C)]
#[deriving(Copy)] pub struct pthread_attr_t {
pub __size: [u32, ..9]
}
}
#[cfg(target_arch = "arm")]
pub mod posix01 {
use types::os::arch::c95::{c_uchar, c_uint, c_ulong, time_t};
use types::os::arch::c99::{c_longlong, c_ulonglong};
use types::os::arch::posix88::{uid_t, gid_t, ino_t};
pub type nlink_t = u16;
pub type blksize_t = u32;
pub type blkcnt_t = u32;
#[repr(C)]
#[deriving(Copy)] pub struct stat {
pub st_dev: c_ulonglong,
pub __pad0: [c_uchar, ..4],
pub __st_ino: ino_t,
pub st_mode: c_uint,
pub st_nlink: c_uint,
pub st_uid: uid_t,
pub st_gid: gid_t,
pub st_rdev: c_ulonglong,
pub __pad3: [c_uchar, ..4],
pub st_size: c_longlong,
pub st_blksize: blksize_t,
pub st_blocks: c_ulonglong,
pub st_atime: time_t,
pub st_atime_nsec: c_ulong,
pub st_mtime: time_t,
pub st_mtime_nsec: c_ulong,
pub st_ctime: time_t,
pub st_ctime_nsec: c_ulong,
pub st_ino: c_ulonglong,
}
#[repr(C)]
#[deriving(Copy)] pub struct utimbuf {
pub actime: time_t,
pub modtime: time_t,
}
#[repr(C)]
#[deriving(Copy)] pub struct pthread_attr_t {
pub __size: [u32, ..9]
}
}
#[cfg(any(target_arch = "mips", target_arch = "mipsel"))]
pub mod posix01 {
use types::os::arch::c95::{c_long, c_ulong, time_t};
use types::os::arch::posix88::{gid_t, ino_t};
use types::os::arch::posix88::{mode_t, off_t};
use types::os::arch::posix88::{uid_t};
pub type nlink_t = u32;
pub type blksize_t = i32;
pub type blkcnt_t = i32;
#[repr(C)]
#[deriving(Copy)] pub struct stat {
pub st_dev: c_ulong,
pub st_pad1: [c_long, ..3],
pub st_ino: ino_t,
pub st_mode: mode_t,
pub st_nlink: nlink_t,
pub st_uid: uid_t,
pub st_gid: gid_t,
pub st_rdev: c_ulong,
pub st_pad2: [c_long, ..2],
pub st_size: off_t,
pub st_pad3: c_long,
pub st_atime: time_t,
pub st_atime_nsec: c_long,
pub st_mtime: time_t,
pub st_mtime_nsec: c_long,
pub st_ctime: time_t,
pub st_ctime_nsec: c_long,
pub st_blksize: blksize_t,
pub st_blocks: blkcnt_t,
pub st_pad5: [c_long, ..14],
}
#[repr(C)]
#[deriving(Copy)] pub struct utimbuf {
pub actime: time_t,
pub modtime: time_t,
}
#[repr(C)]
#[deriving(Copy)] pub struct pthread_attr_t {
pub __size: [u32, ..9]
}
}
pub mod posix08 {}
pub mod bsd44 {}
pub mod extra {
use types::os::arch::c95::{c_ushort, c_int, c_uchar};
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr_ll {
pub sll_family: c_ushort,
pub sll_protocol: c_ushort,
pub sll_ifindex: c_int,
pub sll_hatype: c_ushort,
pub sll_pkttype: c_uchar,
pub sll_halen: c_uchar,
pub sll_addr: [c_uchar, ..8]
}
}
}
#[cfg(target_arch = "x86_64")]
pub mod arch {
pub mod c95 {
pub type c_char = i8;
pub type c_schar = i8;
pub type c_uchar = u8;
pub type c_short = i16;
pub type c_ushort = u16;
pub type c_int = i32;
pub type c_uint = u32;
pub type c_long = i64;
pub type c_ulong = u64;
pub type c_float = f32;
pub type c_double = f64;
pub type size_t = u64;
pub type ptrdiff_t = i64;
pub type clock_t = i64;
pub type time_t = i64;
pub type suseconds_t = i64;
pub type wchar_t = i32;
}
pub mod c99 {
pub type c_longlong = i64;
pub type c_ulonglong = u64;
pub type intptr_t = i64;
pub type uintptr_t = u64;
pub type intmax_t = i64;
pub type uintmax_t = u64;
}
pub mod posix88 {
pub type off_t = i64;
pub type dev_t = u64;
pub type ino_t = u64;
pub type pid_t = i32;
pub type uid_t = u32;
pub type gid_t = u32;
pub type useconds_t = u32;
pub type mode_t = u32;
pub type ssize_t = i64;
}
pub mod posix01 {
use types::os::arch::c95::{c_int, c_long, time_t};
use types::os::arch::posix88::{dev_t, gid_t, ino_t};
use types::os::arch::posix88::{mode_t, off_t};
use types::os::arch::posix88::{uid_t};
pub type nlink_t = u64;
pub type blksize_t = i64;
pub type blkcnt_t = i64;
#[repr(C)]
#[deriving(Copy)] pub struct stat {
pub st_dev: dev_t,
pub st_ino: ino_t,
pub st_nlink: nlink_t,
pub st_mode: mode_t,
pub st_uid: uid_t,
pub st_gid: gid_t,
pub __pad0: c_int,
pub st_rdev: dev_t,
pub st_size: off_t,
pub st_blksize: blksize_t,
pub st_blocks: blkcnt_t,
pub st_atime: time_t,
pub st_atime_nsec: c_long,
pub st_mtime: time_t,
pub st_mtime_nsec: c_long,
pub st_ctime: time_t,
pub st_ctime_nsec: c_long,
pub __unused: [c_long, ..3],
}
#[repr(C)]
#[deriving(Copy)] pub struct utimbuf {
pub actime: time_t,
pub modtime: time_t,
}
#[repr(C)]
#[deriving(Copy)] pub struct pthread_attr_t {
pub __size: [u64, ..7]
}
}
pub mod posix08 {
}
pub mod bsd44 {
}
pub mod extra {
use types::os::arch::c95::{c_ushort, c_int, c_uchar};
                #[repr(C)]
                #[deriving(Copy)] pub struct sockaddr_ll {
pub sll_family: c_ushort,
pub sll_protocol: c_ushort,
pub sll_ifindex: c_int,
pub sll_hatype: c_ushort,
pub sll_pkttype: c_uchar,
pub sll_halen: c_uchar,
pub sll_addr: [c_uchar, ..8]
}
}
}
}
#[cfg(target_os = "freebsd")]
pub mod os {
pub mod common {
pub mod posix01 {
use types::common::c95::{c_void};
use types::os::arch::c95::{c_char, c_int, size_t,
time_t, suseconds_t, c_long};
use types::os::arch::c99::{uintptr_t};
pub type pthread_t = uintptr_t;
#[repr(C)]
#[deriving(Copy)] pub struct glob_t {
pub gl_pathc: size_t,
pub __unused1: size_t,
pub gl_offs: size_t,
pub __unused2: c_int,
pub gl_pathv: *mut *mut c_char,
pub __unused3: *mut c_void,
pub __unused4: *mut c_void,
pub __unused5: *mut c_void,
pub __unused6: *mut c_void,
pub __unused7: *mut c_void,
pub __unused8: *mut c_void,
}
#[repr(C)]
#[deriving(Copy)] pub struct timeval {
pub tv_sec: time_t,
pub tv_usec: suseconds_t,
}
#[repr(C)]
#[deriving(Copy)] pub struct timespec {
pub tv_sec: time_t,
pub tv_nsec: c_long,
}
#[deriving(Copy)] pub enum timezone {}
pub type sighandler_t = size_t;
}
pub mod bsd44 {
use types::common::c95::{c_void};
use types::os::arch::c95::{c_char, c_int, c_uint};
pub type socklen_t = u32;
pub type sa_family_t = u8;
pub type in_port_t = u16;
pub type in_addr_t = u32;
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr {
pub sa_len: u8,
pub sa_family: sa_family_t,
pub sa_data: [u8, ..14],
}
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr_storage {
pub ss_len: u8,
pub ss_family: sa_family_t,
pub __ss_pad1: [u8, ..6],
pub __ss_align: i64,
pub __ss_pad2: [u8, ..112],
}
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr_in {
pub sin_len: u8,
pub sin_family: sa_family_t,
pub sin_port: in_port_t,
pub sin_addr: in_addr,
pub sin_zero: [u8, ..8],
}
#[repr(C)]
#[deriving(Copy)] pub struct in_addr {
pub s_addr: in_addr_t,
}
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr_in6 {
pub sin6_len: u8,
pub sin6_family: sa_family_t,
pub sin6_port: in_port_t,
pub sin6_flowinfo: u32,
pub sin6_addr: in6_addr,
pub sin6_scope_id: u32,
}
#[repr(C)]
#[deriving(Copy)] pub struct in6_addr {
pub s6_addr: [u16, ..8]
}
#[repr(C)]
#[deriving(Copy)] pub struct ip_mreq {
pub imr_multiaddr: in_addr,
pub imr_interface: in_addr,
}
#[repr(C)]
#[deriving(Copy)] pub struct ip6_mreq {
pub ipv6mr_multiaddr: in6_addr,
pub ipv6mr_interface: c_uint,
}
#[repr(C)]
#[deriving(Copy)] pub struct addrinfo {
pub ai_flags: c_int,
pub ai_family: c_int,
pub ai_socktype: c_int,
pub ai_protocol: c_int,
pub ai_addrlen: socklen_t,
pub ai_canonname: *mut c_char,
pub ai_addr: *mut sockaddr,
pub ai_next: *mut addrinfo,
}
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr_un {
pub sun_len: u8,
pub sun_family: sa_family_t,
pub sun_path: [c_char, ..104]
}
#[repr(C)]
#[deriving(Copy)] pub struct ifaddrs {
pub ifa_next: *mut ifaddrs,
pub ifa_name: *mut c_char,
pub ifa_flags: c_uint,
pub ifa_addr: *mut sockaddr,
pub ifa_netmask: *mut sockaddr,
pub ifa_dstaddr: *mut sockaddr,
pub ifa_data: *mut c_void
}
}
}
#[cfg(target_arch = "x86_64")]
pub mod arch {
pub mod c95 {
pub type c_char = i8;
pub type c_schar = i8;
pub type c_uchar = u8;
pub type c_short = i16;
pub type c_ushort = u16;
pub type c_int = i32;
pub type c_uint = u32;
pub type c_long = i64;
pub type c_ulong = u64;
pub type c_float = f32;
pub type c_double = f64;
pub type size_t = u64;
pub type ptrdiff_t = i64;
pub type clock_t = i32;
pub type time_t = i64;
pub type suseconds_t = i64;
pub type wchar_t = i32;
}
pub mod c99 {
pub type c_longlong = i64;
pub type c_ulonglong = u64;
pub type intptr_t = i64;
pub type uintptr_t = u64;
pub type intmax_t = i64;
pub type uintmax_t = u64;
}
pub mod posix88 {
pub type off_t = i64;
pub type dev_t = u32;
pub type ino_t = u32;
pub type pid_t = i32;
pub type uid_t = u32;
pub type gid_t = u32;
pub type useconds_t = u32;
pub type mode_t = u16;
pub type ssize_t = i64;
}
pub mod posix01 {
use types::common::c95::{c_void};
use types::common::c99::{uint8_t, uint32_t, int32_t};
use types::os::arch::c95::{c_long, time_t};
use types::os::arch::posix88::{dev_t, gid_t, ino_t};
use types::os::arch::posix88::{mode_t, off_t};
use types::os::arch::posix88::{uid_t};
pub type nlink_t = u16;
pub type blksize_t = i64;
pub type blkcnt_t = i64;
pub type fflags_t = u32;
#[repr(C)]
#[deriving(Copy)] pub struct stat {
pub st_dev: dev_t,
pub st_ino: ino_t,
pub st_mode: mode_t,
pub st_nlink: nlink_t,
pub st_uid: uid_t,
pub st_gid: gid_t,
pub st_rdev: dev_t,
pub st_atime: time_t,
pub st_atime_nsec: c_long,
pub st_mtime: time_t,
pub st_mtime_nsec: c_long,
pub st_ctime: time_t,
pub st_ctime_nsec: c_long,
pub st_size: off_t,
pub st_blocks: blkcnt_t,
pub st_blksize: blksize_t,
pub st_flags: fflags_t,
pub st_gen: uint32_t,
pub st_lspare: int32_t,
pub st_birthtime: time_t,
pub st_birthtime_nsec: c_long,
pub __unused: [uint8_t, ..2],
}
#[repr(C)]
#[deriving(Copy)] pub struct utimbuf {
pub actime: time_t,
pub modtime: time_t,
}
pub type pthread_attr_t = *mut c_void;
}
pub mod posix08 {
}
pub mod bsd44 {
}
pub mod extra {
}
}
}
#[cfg(target_os = "dragonfly")]
pub mod os {
pub mod common {
pub mod posix01 {
use types::common::c95::{c_void};
use types::os::arch::c95::{c_char, c_int, size_t,
time_t, suseconds_t, c_long};
use types::os::arch::c99::{uintptr_t};
pub type pthread_t = uintptr_t;
#[repr(C)]
#[deriving(Copy)] pub struct glob_t {
pub gl_pathc: size_t,
pub __unused1: size_t,
pub gl_offs: size_t,
pub __unused2: c_int,
pub gl_pathv: *mut *mut c_char,
pub __unused3: *mut c_void,
pub __unused4: *mut c_void,
pub __unused5: *mut c_void,
pub __unused6: *mut c_void,
pub __unused7: *mut c_void,
pub __unused8: *mut c_void,
}
#[repr(C)]
#[deriving(Copy)] pub struct timeval {
pub tv_sec: time_t,
pub tv_usec: suseconds_t,
}
#[repr(C)]
#[deriving(Copy)] pub struct timespec {
pub tv_sec: time_t,
pub tv_nsec: c_long,
}
#[deriving(Copy)] pub enum timezone {}
pub type sighandler_t = size_t;
}
pub mod bsd44 {
use types::os::arch::c95::{c_char, c_int, c_uint};
pub type socklen_t = u32;
pub type sa_family_t = u8;
pub type in_port_t = u16;
pub type in_addr_t = u32;
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr {
pub sa_len: u8,
pub sa_family: sa_family_t,
pub sa_data: [u8, ..14],
}
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr_storage {
pub ss_len: u8,
pub ss_family: sa_family_t,
pub __ss_pad1: [u8, ..6],
pub __ss_align: i64,
pub __ss_pad2: [u8, ..112],
}
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr_in {
pub sin_len: u8,
pub sin_family: sa_family_t,
pub sin_port: in_port_t,
pub sin_addr: in_addr,
pub sin_zero: [u8, ..8],
}
#[repr(C)]
#[deriving(Copy)] pub struct in_addr {
pub s_addr: in_addr_t,
}
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr_in6 {
pub sin6_len: u8,
pub sin6_family: sa_family_t,
pub sin6_port: in_port_t,
pub sin6_flowinfo: u32,
pub sin6_addr: in6_addr,
pub sin6_scope_id: u32,
}
#[repr(C)]
#[deriving(Copy)] pub struct in6_addr {
pub s6_addr: [u16, ..8]
}
#[repr(C)]
#[deriving(Copy)] pub struct ip_mreq {
pub imr_multiaddr: in_addr,
pub imr_interface: in_addr,
}
#[repr(C)]
#[deriving(Copy)] pub struct ip6_mreq {
pub ipv6mr_multiaddr: in6_addr,
pub ipv6mr_interface: c_uint,
}
#[repr(C)]
#[deriving(Copy)] pub struct addrinfo {
pub ai_flags: c_int,
pub ai_family: c_int,
pub ai_socktype: c_int,
pub ai_protocol: c_int,
pub ai_addrlen: socklen_t,
pub ai_canonname: *mut c_char,
pub ai_addr: *mut sockaddr,
pub ai_next: *mut addrinfo,
}
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr_un {
pub sun_len: u8,
pub sun_family: sa_family_t,
pub sun_path: [c_char, ..104]
}
}
}
#[cfg(target_arch = "x86_64")]
pub mod arch {
pub mod c95 {
pub type c_char = i8;
pub type c_schar = i8;
pub type c_uchar = u8;
pub type c_short = i16;
pub type c_ushort = u16;
pub type c_int = i32;
pub type c_uint = u32;
pub type c_long = i64;
pub type c_ulong = u64;
pub type c_float = f32;
pub type c_double = f64;
pub type size_t = u64;
pub type ptrdiff_t = i64;
pub type clock_t = i32;
pub type time_t = i64;
pub type suseconds_t = i64;
pub type wchar_t = i32;
}
pub mod c99 {
pub type c_longlong = i64;
pub type c_ulonglong = u64;
pub type intptr_t = i64;
pub type uintptr_t = u64;
pub type intmax_t = i64;
pub type uintmax_t = u64;
}
pub mod posix88 {
pub type off_t = i64;
pub type dev_t = u32;
pub type ino_t = u32;
pub type pid_t = i32;
pub type uid_t = u32;
pub type gid_t = u32;
pub type useconds_t = u32;
pub type mode_t = u16;
pub type ssize_t = i64;
}
pub mod posix01 {
use types::common::c95::{c_void};
use types::common::c99::{uint16_t, uint32_t, int32_t, uint64_t, int64_t};
use types::os::arch::c95::{c_long, time_t};
use types::os::arch::posix88::{dev_t, gid_t};
use types::os::arch::posix88::{mode_t, off_t};
use types::os::arch::posix88::{uid_t};
pub type nlink_t = u16;
pub type blksize_t = uint32_t;
pub type ino_t = uint64_t;
pub type blkcnt_t = i64;
pub type fflags_t = u32;
#[repr(C)]
#[deriving(Copy)] pub struct stat {
pub st_ino: ino_t,
pub st_nlink: nlink_t,
pub st_dev: dev_t,
pub st_mode: mode_t,
pub st_padding1: uint16_t,
pub st_uid: uid_t,
pub st_gid: gid_t,
pub st_rdev: dev_t,
pub st_atime: time_t,
pub st_atime_nsec: c_long,
pub st_mtime: time_t,
pub st_mtime_nsec: c_long,
pub st_ctime: time_t,
pub st_ctime_nsec: c_long,
pub st_size: off_t,
pub st_blocks: blkcnt_t,
pub st_blksize: blksize_t,
pub st_flags: fflags_t,
pub st_gen: uint32_t,
pub st_lspare: int32_t,
pub st_qspare1: int64_t,
pub st_qspare2: int64_t,
}
#[repr(C)]
#[deriving(Copy)] pub struct utimbuf {
pub actime: time_t,
pub modtime: time_t,
}
pub type pthread_attr_t = *mut c_void;
}
pub mod posix08 {
}
pub mod bsd44 {
}
pub mod extra {
}
}
}
#[cfg(target_os = "windows")]
pub mod os {
pub mod common {
pub mod posix01 {
use types::os::arch::c95::{c_short, time_t, c_long};
use types::os::arch::extra::{int64, time64_t};
use types::os::arch::posix88::{dev_t, ino_t};
            // Note: this is the struct called stat64 in Windows. Not stat,
            // nor stati64.
#[repr(C)]
#[deriving(Copy)] pub struct stat {
pub st_dev: dev_t,
pub st_ino: ino_t,
pub st_mode: u16,
pub st_nlink: c_short,
pub st_uid: c_short,
pub st_gid: c_short,
pub st_rdev: dev_t,
pub st_size: int64,
pub st_atime: time64_t,
pub st_mtime: time64_t,
pub st_ctime: time64_t,
}
// note that this is called utimbuf64 in Windows
#[repr(C)]
#[deriving(Copy)] pub struct utimbuf {
pub actime: time64_t,
pub modtime: time64_t,
}
#[repr(C)]
#[deriving(Copy)] pub struct timeval {
pub tv_sec: c_long,
pub tv_usec: c_long,
}
#[repr(C)]
#[deriving(Copy)] pub struct timespec {
pub tv_sec: time_t,
pub tv_nsec: c_long,
}
#[deriving(Copy)] pub enum timezone {}
}
pub mod bsd44 {
use types::os::arch::c95::{c_char, c_int, c_uint, size_t};
use types::os::arch::c99::uintptr_t;
pub type SOCKET = uintptr_t;
pub type socklen_t = c_int;
pub type sa_family_t = u16;
pub type in_port_t = u16;
pub type in_addr_t = u32;
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr {
pub sa_family: sa_family_t,
pub sa_data: [u8, ..14],
}
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr_storage {
pub ss_family: sa_family_t,
pub __ss_pad1: [u8, ..6],
pub __ss_align: i64,
pub __ss_pad2: [u8, ..112],
}
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr_in {
pub sin_family: sa_family_t,
pub sin_port: in_port_t,
pub sin_addr: in_addr,
pub sin_zero: [u8, ..8],
}
#[repr(C)]
#[deriving(Copy)] pub struct in_addr {
pub s_addr: in_addr_t,
}
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr_in6 {
pub sin6_family: sa_family_t,
pub sin6_port: in_port_t,
pub sin6_flowinfo: u32,
pub sin6_addr: in6_addr,
pub sin6_scope_id: u32,
}
#[repr(C)]
#[deriving(Copy)] pub struct in6_addr {
pub s6_addr: [u16, ..8]
}
#[repr(C)]
#[deriving(Copy)] pub struct ip_mreq {
pub imr_multiaddr: in_addr,
pub imr_interface: in_addr,
}
#[repr(C)]
#[deriving(Copy)] pub struct ip6_mreq {
pub ipv6mr_multiaddr: in6_addr,
pub ipv6mr_interface: c_uint,
}
#[repr(C)]
#[deriving(Copy)] pub struct addrinfo {
pub ai_flags: c_int,
pub ai_family: c_int,
pub ai_socktype: c_int,
pub ai_protocol: c_int,
pub ai_addrlen: size_t,
pub ai_canonname: *mut c_char,
pub ai_addr: *mut sockaddr,
pub ai_next: *mut addrinfo,
}
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr_un {
pub sun_family: sa_family_t,
pub sun_path: [c_char, ..108]
}
}
}
pub mod arch {
pub mod c95 {
pub type c_char = i8;
pub type c_schar = i8;
pub type c_uchar = u8;
pub type c_short = i16;
pub type c_ushort = u16;
pub type c_int = i32;
pub type c_uint = u32;
pub type c_long = i32;
pub type c_ulong = u32;
pub type c_float = f32;
pub type c_double = f64;
#[cfg(target_arch = "x86")]
pub type size_t = u32;
#[cfg(target_arch = "x86_64")]
pub type size_t = u64;
#[cfg(target_arch = "x86")]
pub type ptrdiff_t = i32;
#[cfg(target_arch = "x86_64")]
pub type ptrdiff_t = i64;
pub type clock_t = i32;
#[cfg(target_arch = "x86")]
pub type time_t = i32;
#[cfg(target_arch = "x86_64")]
pub type time_t = i64;
#[cfg(target_arch = "x86")]
pub type suseconds_t = i32;
#[cfg(target_arch = "x86_64")]
pub type suseconds_t = i64;
pub type wchar_t = u16;
}
pub mod c99 {
pub type c_longlong = i64;
pub type c_ulonglong = u64;
#[cfg(target_arch = "x86")]
pub type intptr_t = i32;
#[cfg(target_arch = "x86_64")]
pub type intptr_t = i64;
#[cfg(target_arch = "x86")]
pub type uintptr_t = u32;
#[cfg(target_arch = "x86_64")]
pub type uintptr_t = u64;
pub type intmax_t = i64;
pub type uintmax_t = u64;
}
pub mod posix88 {
pub type off_t = i32;
pub type dev_t = u32;
pub type ino_t = u16;
pub type pid_t = u32;
pub type useconds_t = u32;
pub type mode_t = u16;
#[cfg(target_arch = "x86")]
pub type ssize_t = i32;
#[cfg(target_arch = "x86_64")]
pub type ssize_t = i64;
}
pub mod posix01 {
}
pub mod posix08 {
}
pub mod bsd44 {
}
pub mod extra {
use consts::os::extra::{MAX_PROTOCOL_CHAIN,
WSAPROTOCOL_LEN};
use types::common::c95::c_void;
use types::os::arch::c95::{c_char, c_int, c_uint, size_t};
use types::os::arch::c95::{c_long, c_ulong};
use types::os::arch::c95::{wchar_t};
use types::os::arch::c99::{c_ulonglong, c_longlong, uintptr_t};
pub type BOOL = c_int;
pub type BYTE = u8;
pub type BOOLEAN = BYTE;
pub type CCHAR = c_char;
pub type CHAR = c_char;
pub type DWORD = c_ulong;
pub type DWORDLONG = c_ulonglong;
pub type HANDLE = LPVOID;
pub type HMODULE = c_uint;
pub type LONG = c_long;
pub type PLONG = *mut c_long;
#[cfg(target_arch = "x86")]
pub type LONG_PTR = c_long;
#[cfg(target_arch = "x86_64")]
pub type LONG_PTR = i64;
pub type LARGE_INTEGER = c_longlong;
pub type PLARGE_INTEGER = *mut c_longlong;
pub type LPCWSTR = *const WCHAR;
pub type LPCSTR = *const CHAR;
pub type LPWSTR = *mut WCHAR;
pub type LPSTR = *mut CHAR;
pub type LPWCH = *mut WCHAR;
pub type LPCH = *mut CHAR;
#[repr(C)]
#[deriving(Copy)] pub struct SECURITY_ATTRIBUTES {
pub nLength: DWORD,
pub lpSecurityDescriptor: LPVOID,
pub bInheritHandle: BOOL,
}
pub type LPSECURITY_ATTRIBUTES = *mut SECURITY_ATTRIBUTES;
pub type LPVOID = *mut c_void;
pub type LPCVOID = *const c_void;
pub type LPBYTE = *mut BYTE;
pub type LPWORD = *mut WORD;
pub type LPDWORD = *mut DWORD;
pub type LPHANDLE = *mut HANDLE;
pub type LRESULT = LONG_PTR;
pub type PBOOL = *mut BOOL;
pub type WCHAR = wchar_t;
pub type WORD = u16;
pub type SIZE_T = size_t;
pub type time64_t = i64;
pub type int64 = i64;
#[repr(C)]
#[deriving(Copy)] pub struct STARTUPINFO {
pub cb: DWORD,
pub lpReserved: LPWSTR,
pub lpDesktop: LPWSTR,
pub lpTitle: LPWSTR,
pub dwX: DWORD,
pub dwY: DWORD,
pub dwXSize: DWORD,
pub dwYSize: DWORD,
pub dwXCountChars: DWORD,
                    pub dwYCountChars: DWORD,
pub dwFillAttribute: DWORD,
pub dwFlags: DWORD,
pub wShowWindow: WORD,
pub cbReserved2: WORD,
pub lpReserved2: LPBYTE,
pub hStdInput: HANDLE,
pub hStdOutput: HANDLE,
pub hStdError: HANDLE,
}
pub type LPSTARTUPINFO = *mut STARTUPINFO;
#[repr(C)]
#[deriving(Copy)] pub struct PROCESS_INFORMATION {
pub hProcess: HANDLE,
pub hThread: HANDLE,
pub dwProcessId: DWORD,
pub dwThreadId: DWORD,
}
pub type LPPROCESS_INFORMATION = *mut PROCESS_INFORMATION;
#[repr(C)]
#[deriving(Copy)] pub struct SYSTEM_INFO {
pub wProcessorArchitecture: WORD,
pub wReserved: WORD,
pub dwPageSize: DWORD,
pub lpMinimumApplicationAddress: LPVOID,
pub lpMaximumApplicationAddress: LPVOID,
pub dwActiveProcessorMask: uintptr_t,
pub dwNumberOfProcessors: DWORD,
pub dwProcessorType: DWORD,
pub dwAllocationGranularity: DWORD,
pub wProcessorLevel: WORD,
pub wProcessorRevision: WORD,
}
pub type LPSYSTEM_INFO = *mut SYSTEM_INFO;
#[repr(C)]
#[deriving(Copy)] pub struct MEMORY_BASIC_INFORMATION {
pub BaseAddress: LPVOID,
pub AllocationBase: LPVOID,
pub AllocationProtect: DWORD,
pub RegionSize: SIZE_T,
pub State: DWORD,
pub Protect: DWORD,
pub Type: DWORD,
}
pub type LPMEMORY_BASIC_INFORMATION = *mut MEMORY_BASIC_INFORMATION;
#[repr(C)]
#[deriving(Copy)] pub struct OVERLAPPED {
pub Internal: *mut c_ulong,
pub InternalHigh: *mut c_ulong,
pub Offset: DWORD,
pub OffsetHigh: DWORD,
pub hEvent: HANDLE,
}
pub type LPOVERLAPPED = *mut OVERLAPPED;
#[repr(C)]
#[deriving(Copy)] pub struct FILETIME {
pub dwLowDateTime: DWORD,
pub dwHighDateTime: DWORD,
}
pub type LPFILETIME = *mut FILETIME;
#[repr(C)]
#[deriving(Copy)] pub struct GUID {
pub Data1: DWORD,
pub Data2: WORD,
pub Data3: WORD,
pub Data4: [BYTE, ..8],
}
#[repr(C)]
#[deriving(Copy)] pub struct WSAPROTOCOLCHAIN {
pub ChainLen: c_int,
pub ChainEntries: [DWORD, ..MAX_PROTOCOL_CHAIN as uint],
}
pub type LPWSAPROTOCOLCHAIN = *mut WSAPROTOCOLCHAIN;
#[repr(C)]
#[deriving(Copy)] pub struct WSAPROTOCOL_INFO {
pub dwServiceFlags1: DWORD,
pub dwServiceFlags2: DWORD,
pub dwServiceFlags3: DWORD,
pub dwServiceFlags4: DWORD,
pub dwProviderFlags: DWORD,
pub ProviderId: GUID,
pub dwCatalogEntryId: DWORD,
pub ProtocolChain: WSAPROTOCOLCHAIN,
pub iVersion: c_int,
pub iAddressFamily: c_int,
pub iMaxSockAddr: c_int,
pub iMinSockAddr: c_int,
pub iSocketType: c_int,
pub iProtocol: c_int,
pub iProtocolMaxOffset: c_int,
pub iNetworkByteOrder: c_int,
pub iSecurityScheme: c_int,
pub dwMessageSize: DWORD,
pub dwProviderReserved: DWORD,
pub szProtocol: [u8, ..(WSAPROTOCOL_LEN as uint) + 1u],
}
pub type LPWSAPROTOCOL_INFO = *mut WSAPROTOCOL_INFO;
pub type GROUP = c_uint;
#[repr(C)]
#[deriving(Copy)] pub struct WIN32_FIND_DATAW {
pub dwFileAttributes: DWORD,
pub ftCreationTime: FILETIME,
pub ftLastAccessTime: FILETIME,
pub ftLastWriteTime: FILETIME,
pub nFileSizeHigh: DWORD,
pub nFileSizeLow: DWORD,
pub dwReserved0: DWORD,
pub dwReserved1: DWORD,
pub cFileName: [wchar_t, ..260], // #define MAX_PATH 260
pub cAlternateFileName: [wchar_t, ..14],
}
pub type LPWIN32_FIND_DATAW = *mut WIN32_FIND_DATAW;
}
}
}
#[cfg(any(target_os = "macos", target_os = "ios"))]
pub mod os {
pub mod common {
pub mod posix01 {
use types::common::c95::c_void;
use types::os::arch::c95::{c_char, c_int, size_t, time_t};
use types::os::arch::c95::{suseconds_t, c_long};
use types::os::arch::c99::{uintptr_t};
pub type pthread_t = uintptr_t;
#[repr(C)]
#[deriving(Copy)] pub struct glob_t {
pub gl_pathc: size_t,
pub __unused1: c_int,
pub gl_offs: size_t,
pub __unused2: c_int,
pub gl_pathv: *mut *mut c_char,
pub __unused3: *mut c_void,
pub __unused4: *mut c_void,
pub __unused5: *mut c_void,
pub __unused6: *mut c_void,
pub __unused7: *mut c_void,
pub __unused8: *mut c_void,
}
#[repr(C)]
#[deriving(Copy)] pub struct timeval {
pub tv_sec: time_t,
pub tv_usec: suseconds_t,
}
#[repr(C)]
#[deriving(Copy)] pub struct timespec {
pub tv_sec: time_t,
pub tv_nsec: c_long,
}
#[deriving(Copy)] pub enum timezone {}
pub type sighandler_t = size_t;
}
pub mod bsd44 {
use types::common::c95::{c_void};
use types::os::arch::c95::{c_char, c_int, c_uint};
pub type socklen_t = c_int;
pub type sa_family_t = u8;
pub type in_port_t = u16;
pub type in_addr_t = u32;
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr {
pub sa_len: u8,
pub sa_family: sa_family_t,
pub sa_data: [u8, ..14],
}
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr_storage {
pub ss_len: u8,
pub ss_family: sa_family_t,
pub __ss_pad1: [u8, ..6],
pub __ss_align: i64,
pub __ss_pad2: [u8, ..112],
}
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr_in {
pub sin_len: u8,
pub sin_family: sa_family_t,
pub sin_port: in_port_t,
pub sin_addr: in_addr,
pub sin_zero: [u8, ..8],
}
#[repr(C)]
#[deriving(Copy)] pub struct in_addr {
pub s_addr: in_addr_t,
}
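                // Illustrative sketch (not part of the bindings): filling in a
                // sockaddr_in for 127.0.0.1:8080. Port and address must be in
                // network byte order; `to_be()` and the literal 2 for AF_INET
                // (defined in the consts modules below) are assumptions here.
                //
                //     let addr = sockaddr_in {
                //         sin_len: 16u8,          // size_of::<sockaddr_in>()
                //         sin_family: 2,          // AF_INET
                //         sin_port: 8080u16.to_be(),
                //         sin_addr: in_addr { s_addr: 0x7f000001u32.to_be() },
                //         sin_zero: [0u8, ..8],
                //     };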
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr_in6 {
pub sin6_len: u8,
pub sin6_family: sa_family_t,
pub sin6_port: in_port_t,
pub sin6_flowinfo: u32,
pub sin6_addr: in6_addr,
pub sin6_scope_id: u32,
}
#[repr(C)]
#[deriving(Copy)] pub struct in6_addr {
pub s6_addr: [u16, ..8]
}
#[repr(C)]
#[deriving(Copy)] pub struct ip_mreq {
pub imr_multiaddr: in_addr,
pub imr_interface: in_addr,
}
#[repr(C)]
#[deriving(Copy)] pub struct ip6_mreq {
pub ipv6mr_multiaddr: in6_addr,
pub ipv6mr_interface: c_uint,
}
#[repr(C)]
#[deriving(Copy)] pub struct addrinfo {
pub ai_flags: c_int,
pub ai_family: c_int,
pub ai_socktype: c_int,
pub ai_protocol: c_int,
pub ai_addrlen: socklen_t,
pub ai_canonname: *mut c_char,
pub ai_addr: *mut sockaddr,
pub ai_next: *mut addrinfo,
}
#[repr(C)]
#[deriving(Copy)] pub struct sockaddr_un {
pub sun_len: u8,
pub sun_family: sa_family_t,
pub sun_path: [c_char, ..104]
}
#[repr(C)]
#[deriving(Copy)] pub struct ifaddrs {
pub ifa_next: *mut ifaddrs,
pub ifa_name: *mut c_char,
pub ifa_flags: c_uint,
pub ifa_addr: *mut sockaddr,
pub ifa_netmask: *mut sockaddr,
pub ifa_dstaddr: *mut sockaddr,
pub ifa_data: *mut c_void
}
}
}
#[cfg(any(target_arch = "arm", target_arch = "x86"))]
pub mod arch {
pub mod c95 {
pub type c_char = i8;
pub type c_schar = i8;
pub type c_uchar = u8;
pub type c_short = i16;
pub type c_ushort = u16;
pub type c_int = i32;
pub type c_uint = u32;
pub type c_long = i32;
pub type c_ulong = u32;
pub type c_float = f32;
pub type c_double = f64;
pub type size_t = u32;
pub type ptrdiff_t = i32;
pub type clock_t = u32;
pub type time_t = i32;
pub type suseconds_t = i32;
pub type wchar_t = i32;
}
pub mod c99 {
pub type c_longlong = i64;
pub type c_ulonglong = u64;
pub type intptr_t = i32;
pub type uintptr_t = u32;
pub type intmax_t = i64;
pub type uintmax_t = u64;
}
pub mod posix88 {
pub type off_t = i64;
pub type dev_t = i32;
pub type ino_t = u64;
pub type pid_t = i32;
pub type uid_t = u32;
pub type gid_t = u32;
pub type useconds_t = u32;
pub type mode_t = u16;
pub type ssize_t = i32;
}
pub mod posix01 {
use types::common::c99::{int32_t, int64_t, uint32_t};
use types::os::arch::c95::{c_char, c_long, time_t};
use types::os::arch::posix88::{dev_t, gid_t, ino_t,
mode_t, off_t, uid_t};
pub type nlink_t = u16;
                // Darwin: blksize_t is 32-bit, blkcnt_t is 64-bit (sys/_types.h),
                // which is what the stat layout below relies on.
                pub type blksize_t = i32;
                pub type blkcnt_t = i64;
#[repr(C)]
#[deriving(Copy)] pub struct stat {
pub st_dev: dev_t,
pub st_mode: mode_t,
pub st_nlink: nlink_t,
pub st_ino: ino_t,
pub st_uid: uid_t,
pub st_gid: gid_t,
pub st_rdev: dev_t,
pub st_atime: time_t,
pub st_atime_nsec: c_long,
pub st_mtime: time_t,
pub st_mtime_nsec: c_long,
pub st_ctime: time_t,
pub st_ctime_nsec: c_long,
pub st_birthtime: time_t,
pub st_birthtime_nsec: c_long,
pub st_size: off_t,
pub st_blocks: blkcnt_t,
pub st_blksize: blksize_t,
pub st_flags: uint32_t,
pub st_gen: uint32_t,
pub st_lspare: int32_t,
pub st_qspare: [int64_t, ..2],
}
#[repr(C)]
#[deriving(Copy)] pub struct utimbuf {
pub actime: time_t,
pub modtime: time_t,
}
#[repr(C)]
#[deriving(Copy)] pub struct pthread_attr_t {
pub __sig: c_long,
pub __opaque: [c_char, ..36]
}
}
pub mod posix08 {
}
pub mod bsd44 {
}
pub mod extra {
#[repr(C)]
#[deriving(Copy)] pub struct mach_timebase_info {
pub numer: u32,
pub denom: u32,
}
pub type mach_timebase_info_data_t = mach_timebase_info;
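                // Illustrative sketch (not part of the bindings): mach absolute
                // time ticks convert to nanoseconds through this ratio, assuming
                // `info` was filled in by a mach_timebase_info() call:
                //
                //     let nanos = ticks * (info.numer as u64) / (info.denom as u64);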
}
}
#[cfg(target_arch = "x86_64")]
pub mod arch {
pub mod c95 {
pub type c_char = i8;
pub type c_schar = i8;
pub type c_uchar = u8;
pub type c_short = i16;
pub type c_ushort = u16;
pub type c_int = i32;
pub type c_uint = u32;
pub type c_long = i64;
pub type c_ulong = u64;
pub type c_float = f32;
pub type c_double = f64;
pub type size_t = u64;
pub type ptrdiff_t = i64;
pub type clock_t = u64;
pub type time_t = i64;
pub type suseconds_t = i32;
pub type wchar_t = i32;
}
pub mod c99 {
pub type c_longlong = i64;
pub type c_ulonglong = u64;
pub type intptr_t = i64;
pub type uintptr_t = u64;
pub type intmax_t = i64;
pub type uintmax_t = u64;
}
pub mod posix88 {
pub type off_t = i64;
pub type dev_t = i32;
pub type ino_t = u64;
pub type pid_t = i32;
pub type uid_t = u32;
pub type gid_t = u32;
pub type useconds_t = u32;
pub type mode_t = u16;
pub type ssize_t = i64;
}
pub mod posix01 {
use types::common::c99::{int32_t, int64_t};
use types::common::c99::{uint32_t};
use types::os::arch::c95::{c_char, c_long, time_t};
use types::os::arch::posix88::{dev_t, gid_t, ino_t};
use types::os::arch::posix88::{mode_t, off_t, uid_t};
pub type nlink_t = u16;
                // Darwin: blksize_t is 32-bit, blkcnt_t is 64-bit (sys/_types.h),
                // which is what the stat layout below relies on.
                pub type blksize_t = i32;
                pub type blkcnt_t = i64;
#[repr(C)]
#[deriving(Copy)] pub struct stat {
pub st_dev: dev_t,
pub st_mode: mode_t,
pub st_nlink: nlink_t,
pub st_ino: ino_t,
pub st_uid: uid_t,
pub st_gid: gid_t,
pub st_rdev: dev_t,
pub st_atime: time_t,
pub st_atime_nsec: c_long,
pub st_mtime: time_t,
pub st_mtime_nsec: c_long,
pub st_ctime: time_t,
pub st_ctime_nsec: c_long,
pub st_birthtime: time_t,
pub st_birthtime_nsec: c_long,
pub st_size: off_t,
pub st_blocks: blkcnt_t,
pub st_blksize: blksize_t,
pub st_flags: uint32_t,
pub st_gen: uint32_t,
pub st_lspare: int32_t,
pub st_qspare: [int64_t, ..2],
}
#[repr(C)]
#[deriving(Copy)] pub struct utimbuf {
pub actime: time_t,
pub modtime: time_t,
}
#[repr(C)]
#[deriving(Copy)] pub struct pthread_attr_t {
pub __sig: c_long,
pub __opaque: [c_char, ..56]
}
}
pub mod posix08 {
}
pub mod bsd44 {
}
pub mod extra {
#[repr(C)]
#[deriving(Copy)] pub struct mach_timebase_info {
pub numer: u32,
pub denom: u32,
}
pub type mach_timebase_info_data_t = mach_timebase_info;
}
}
}
}
pub mod consts {
// Consts tend to vary per OS so we pull their definitions out
// into this module.
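    // The nested layout keeps call sites platform-neutral: an import such as
    // `use consts::os::c95::EXIT_SUCCESS;` resolves against whichever `os`
    // module the active #[cfg(...)] selected, so only the definitions below
    // vary, never the importing code.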
#[cfg(target_os = "windows")]
pub mod os {
pub mod c95 {
use types::os::arch::c95::{c_int, c_uint};
pub const EXIT_FAILURE : c_int = 1;
pub const EXIT_SUCCESS : c_int = 0;
pub const RAND_MAX : c_int = 32767;
pub const EOF : c_int = -1;
pub const SEEK_SET : c_int = 0;
pub const SEEK_CUR : c_int = 1;
pub const SEEK_END : c_int = 2;
pub const _IOFBF : c_int = 0;
pub const _IONBF : c_int = 4;
pub const _IOLBF : c_int = 64;
pub const BUFSIZ : c_uint = 512_u32;
pub const FOPEN_MAX : c_uint = 20_u32;
pub const FILENAME_MAX : c_uint = 260_u32;
pub const L_tmpnam : c_uint = 16_u32;
pub const TMP_MAX : c_uint = 32767_u32;
pub const WSAEINTR: c_int = 10004;
pub const WSAEBADF: c_int = 10009;
pub const WSAEACCES: c_int = 10013;
pub const WSAEFAULT: c_int = 10014;
pub const WSAEINVAL: c_int = 10022;
pub const WSAEMFILE: c_int = 10024;
pub const WSAEWOULDBLOCK: c_int = 10035;
pub const WSAEINPROGRESS: c_int = 10036;
pub const WSAEALREADY: c_int = 10037;
pub const WSAENOTSOCK: c_int = 10038;
pub const WSAEDESTADDRREQ: c_int = 10039;
pub const WSAEMSGSIZE: c_int = 10040;
pub const WSAEPROTOTYPE: c_int = 10041;
pub const WSAENOPROTOOPT: c_int = 10042;
pub const WSAEPROTONOSUPPORT: c_int = 10043;
pub const WSAESOCKTNOSUPPORT: c_int = 10044;
pub const WSAEOPNOTSUPP: c_int = 10045;
pub const WSAEPFNOSUPPORT: c_int = 10046;
pub const WSAEAFNOSUPPORT: c_int = 10047;
pub const WSAEADDRINUSE: c_int = 10048;
pub const WSAEADDRNOTAVAIL: c_int = 10049;
pub const WSAENETDOWN: c_int = 10050;
pub const WSAENETUNREACH: c_int = 10051;
pub const WSAENETRESET: c_int = 10052;
pub const WSAECONNABORTED: c_int = 10053;
pub const WSAECONNRESET: c_int = 10054;
pub const WSAENOBUFS: c_int = 10055;
pub const WSAEISCONN: c_int = 10056;
pub const WSAENOTCONN: c_int = 10057;
pub const WSAESHUTDOWN: c_int = 10058;
pub const WSAETOOMANYREFS: c_int = 10059;
pub const WSAETIMEDOUT: c_int = 10060;
pub const WSAECONNREFUSED: c_int = 10061;
pub const WSAELOOP: c_int = 10062;
pub const WSAENAMETOOLONG: c_int = 10063;
pub const WSAEHOSTDOWN: c_int = 10064;
pub const WSAEHOSTUNREACH: c_int = 10065;
pub const WSAENOTEMPTY: c_int = 10066;
pub const WSAEPROCLIM: c_int = 10067;
pub const WSAEUSERS: c_int = 10068;
pub const WSAEDQUOT: c_int = 10069;
pub const WSAESTALE: c_int = 10070;
pub const WSAEREMOTE: c_int = 10071;
pub const WSASYSNOTREADY: c_int = 10091;
pub const WSAVERNOTSUPPORTED: c_int = 10092;
pub const WSANOTINITIALISED: c_int = 10093;
pub const WSAEDISCON: c_int = 10101;
pub const WSAENOMORE: c_int = 10102;
pub const WSAECANCELLED: c_int = 10103;
pub const WSAEINVALIDPROCTABLE: c_int = 10104;
pub const WSAEINVALIDPROVIDER: c_int = 10105;
pub const WSAEPROVIDERFAILEDINIT: c_int = 10106;
}
pub mod c99 {
}
pub mod posix88 {
use types::os::arch::c95::c_int;
pub const O_RDONLY : c_int = 0;
pub const O_WRONLY : c_int = 1;
pub const O_RDWR : c_int = 2;
pub const O_APPEND : c_int = 8;
pub const O_CREAT : c_int = 256;
pub const O_EXCL : c_int = 1024;
pub const O_TRUNC : c_int = 512;
pub const S_IFIFO : c_int = 4096;
pub const S_IFCHR : c_int = 8192;
pub const S_IFBLK : c_int = 12288;
pub const S_IFDIR : c_int = 16384;
pub const S_IFREG : c_int = 32768;
pub const S_IFLNK : c_int = 40960;
pub const S_IFMT : c_int = 61440;
pub const S_IEXEC : c_int = 64;
pub const S_IWRITE : c_int = 128;
pub const S_IREAD : c_int = 256;
pub const S_IRWXU : c_int = 448;
pub const S_IXUSR : c_int = 64;
pub const S_IWUSR : c_int = 128;
pub const S_IRUSR : c_int = 256;
pub const F_OK : c_int = 0;
pub const R_OK : c_int = 4;
pub const W_OK : c_int = 2;
pub const X_OK : c_int = 1;
pub const STDIN_FILENO : c_int = 0;
pub const STDOUT_FILENO : c_int = 1;
pub const STDERR_FILENO : c_int = 2;
}
pub mod posix01 {
}
pub mod posix08 {
}
pub mod bsd44 {
use types::os::arch::c95::c_int;
pub const AF_INET: c_int = 2;
pub const AF_INET6: c_int = 23;
pub const SOCK_STREAM: c_int = 1;
pub const SOCK_DGRAM: c_int = 2;
pub const SOCK_RAW: c_int = 3;
pub const IPPROTO_TCP: c_int = 6;
pub const IPPROTO_IP: c_int = 0;
pub const IPPROTO_IPV6: c_int = 41;
pub const IP_MULTICAST_TTL: c_int = 3;
pub const IP_MULTICAST_LOOP: c_int = 4;
pub const IP_ADD_MEMBERSHIP: c_int = 5;
pub const IP_DROP_MEMBERSHIP: c_int = 6;
            pub const IPV6_ADD_MEMBERSHIP: c_int = 12;
            pub const IPV6_DROP_MEMBERSHIP: c_int = 13;
pub const IP_TTL: c_int = 4;
pub const IP_HDRINCL: c_int = 2;
pub const TCP_NODELAY: c_int = 0x0001;
pub const SOL_SOCKET: c_int = 0xffff;
pub const SO_KEEPALIVE: c_int = 8;
pub const SO_BROADCAST: c_int = 32;
pub const SO_REUSEADDR: c_int = 4;
pub const SO_ERROR: c_int = 0x1007;
pub const IFF_LOOPBACK: c_int = 4;
pub const SHUT_RD: c_int = 0;
pub const SHUT_WR: c_int = 1;
pub const SHUT_RDWR: c_int = 2;
}
pub mod extra {
use types::os::arch::c95::{c_int, c_long};
use types::os::arch::extra::{WORD, DWORD, BOOL, HANDLE};
pub const TRUE : BOOL = 1;
pub const FALSE : BOOL = 0;
pub const O_TEXT : c_int = 16384;
pub const O_BINARY : c_int = 32768;
pub const O_NOINHERIT: c_int = 128;
pub const ERROR_SUCCESS : c_int = 0;
pub const ERROR_INVALID_FUNCTION: c_int = 1;
pub const ERROR_FILE_NOT_FOUND: c_int = 2;
pub const ERROR_ACCESS_DENIED: c_int = 5;
pub const ERROR_INVALID_HANDLE : c_int = 6;
pub const ERROR_BROKEN_PIPE: c_int = 109;
pub const ERROR_DISK_FULL : c_int = 112;
pub const ERROR_CALL_NOT_IMPLEMENTED : c_int = 120;
pub const ERROR_INSUFFICIENT_BUFFER : c_int = 122;
pub const ERROR_INVALID_NAME : c_int = 123;
pub const ERROR_ALREADY_EXISTS : c_int = 183;
pub const ERROR_PIPE_BUSY: c_int = 231;
pub const ERROR_NO_DATA: c_int = 232;
pub const ERROR_INVALID_ADDRESS : c_int = 487;
pub const ERROR_PIPE_CONNECTED: c_int = 535;
pub const ERROR_NOTHING_TO_TERMINATE: c_int = 758;
pub const ERROR_OPERATION_ABORTED: c_int = 995;
pub const ERROR_IO_PENDING: c_int = 997;
pub const ERROR_FILE_INVALID : c_int = 1006;
pub const ERROR_NOT_FOUND: c_int = 1168;
pub const INVALID_HANDLE_VALUE: HANDLE = -1 as HANDLE;
pub const DELETE : DWORD = 0x00010000;
pub const READ_CONTROL : DWORD = 0x00020000;
pub const SYNCHRONIZE : DWORD = 0x00100000;
pub const WRITE_DAC : DWORD = 0x00040000;
pub const WRITE_OWNER : DWORD = 0x00080000;
pub const PROCESS_CREATE_PROCESS : DWORD = 0x0080;
pub const PROCESS_CREATE_THREAD : DWORD = 0x0002;
pub const PROCESS_DUP_HANDLE : DWORD = 0x0040;
pub const PROCESS_QUERY_INFORMATION : DWORD = 0x0400;
pub const PROCESS_QUERY_LIMITED_INFORMATION : DWORD = 0x1000;
pub const PROCESS_SET_INFORMATION : DWORD = 0x0200;
pub const PROCESS_SET_QUOTA : DWORD = 0x0100;
pub const PROCESS_SUSPEND_RESUME : DWORD = 0x0800;
pub const PROCESS_TERMINATE : DWORD = 0x0001;
pub const PROCESS_VM_OPERATION : DWORD = 0x0008;
pub const PROCESS_VM_READ : DWORD = 0x0010;
pub const PROCESS_VM_WRITE : DWORD = 0x0020;
pub const STARTF_FORCEONFEEDBACK : DWORD = 0x00000040;
pub const STARTF_FORCEOFFFEEDBACK : DWORD = 0x00000080;
pub const STARTF_PREVENTPINNING : DWORD = 0x00002000;
pub const STARTF_RUNFULLSCREEN : DWORD = 0x00000020;
pub const STARTF_TITLEISAPPID : DWORD = 0x00001000;
pub const STARTF_TITLEISLINKNAME : DWORD = 0x00000800;
pub const STARTF_USECOUNTCHARS : DWORD = 0x00000008;
pub const STARTF_USEFILLATTRIBUTE : DWORD = 0x00000010;
pub const STARTF_USEHOTKEY : DWORD = 0x00000200;
pub const STARTF_USEPOSITION : DWORD = 0x00000004;
pub const STARTF_USESHOWWINDOW : DWORD = 0x00000001;
pub const STARTF_USESIZE : DWORD = 0x00000002;
pub const STARTF_USESTDHANDLES : DWORD = 0x00000100;
pub const WAIT_ABANDONED : DWORD = 0x00000080;
pub const WAIT_OBJECT_0 : DWORD = 0x00000000;
pub const WAIT_TIMEOUT : DWORD = 0x00000102;
            pub const WAIT_FAILED : DWORD = !0;
pub const DUPLICATE_CLOSE_SOURCE : DWORD = 0x00000001;
pub const DUPLICATE_SAME_ACCESS : DWORD = 0x00000002;
            pub const INFINITE : DWORD = !0;
pub const STILL_ACTIVE : DWORD = 259;
pub const MEM_COMMIT : DWORD = 0x00001000;
pub const MEM_RESERVE : DWORD = 0x00002000;
pub const MEM_DECOMMIT : DWORD = 0x00004000;
pub const MEM_RELEASE : DWORD = 0x00008000;
pub const MEM_RESET : DWORD = 0x00080000;
pub const MEM_RESET_UNDO : DWORD = 0x1000000;
pub const MEM_LARGE_PAGES : DWORD = 0x20000000;
pub const MEM_PHYSICAL : DWORD = 0x00400000;
pub const MEM_TOP_DOWN : DWORD = 0x00100000;
pub const MEM_WRITE_WATCH : DWORD = 0x00200000;
pub const PAGE_EXECUTE : DWORD = 0x10;
pub const PAGE_EXECUTE_READ : DWORD = 0x20;
pub const PAGE_EXECUTE_READWRITE : DWORD = 0x40;
pub const PAGE_EXECUTE_WRITECOPY : DWORD = 0x80;
pub const PAGE_NOACCESS : DWORD = 0x01;
pub const PAGE_READONLY : DWORD = 0x02;
pub const PAGE_READWRITE : DWORD = 0x04;
pub const PAGE_WRITECOPY : DWORD = 0x08;
pub const PAGE_GUARD : DWORD = 0x100;
pub const PAGE_NOCACHE : DWORD = 0x200;
pub const PAGE_WRITECOMBINE : DWORD = 0x400;
pub const SEC_COMMIT : DWORD = 0x8000000;
pub const SEC_IMAGE : DWORD = 0x1000000;
pub const SEC_IMAGE_NO_EXECUTE : DWORD = 0x11000000;
pub const SEC_LARGE_PAGES : DWORD = 0x80000000;
pub const SEC_NOCACHE : DWORD = 0x10000000;
pub const SEC_RESERVE : DWORD = 0x4000000;
pub const SEC_WRITECOMBINE : DWORD = 0x40000000;
pub const FILE_MAP_ALL_ACCESS : DWORD = 0xf001f;
pub const FILE_MAP_READ : DWORD = 0x4;
pub const FILE_MAP_WRITE : DWORD = 0x2;
pub const FILE_MAP_COPY : DWORD = 0x1;
pub const FILE_MAP_EXECUTE : DWORD = 0x20;
pub const PROCESSOR_ARCHITECTURE_INTEL : WORD = 0;
pub const PROCESSOR_ARCHITECTURE_ARM : WORD = 5;
pub const PROCESSOR_ARCHITECTURE_IA64 : WORD = 6;
pub const PROCESSOR_ARCHITECTURE_AMD64 : WORD = 9;
pub const PROCESSOR_ARCHITECTURE_UNKNOWN : WORD = 0xffff;
pub const MOVEFILE_COPY_ALLOWED: DWORD = 2;
pub const MOVEFILE_CREATE_HARDLINK: DWORD = 16;
pub const MOVEFILE_DELAY_UNTIL_REBOOT: DWORD = 4;
pub const MOVEFILE_FAIL_IF_NOT_TRACKABLE: DWORD = 32;
pub const MOVEFILE_REPLACE_EXISTING: DWORD = 1;
pub const MOVEFILE_WRITE_THROUGH: DWORD = 8;
pub const SYMBOLIC_LINK_FLAG_DIRECTORY: DWORD = 1;
pub const FILE_SHARE_DELETE: DWORD = 0x4;
pub const FILE_SHARE_READ: DWORD = 0x1;
pub const FILE_SHARE_WRITE: DWORD = 0x2;
pub const CREATE_ALWAYS: DWORD = 2;
pub const CREATE_NEW: DWORD = 1;
pub const OPEN_ALWAYS: DWORD = 4;
pub const OPEN_EXISTING: DWORD = 3;
pub const TRUNCATE_EXISTING: DWORD = 5;
pub const FILE_APPEND_DATA: DWORD = 0x00000004;
pub const FILE_READ_DATA: DWORD = 0x00000001;
pub const FILE_WRITE_DATA: DWORD = 0x00000002;
pub const FILE_ATTRIBUTE_ARCHIVE: DWORD = 0x20;
pub const FILE_ATTRIBUTE_COMPRESSED: DWORD = 0x800;
pub const FILE_ATTRIBUTE_DEVICE: DWORD = 0x40;
pub const FILE_ATTRIBUTE_DIRECTORY: DWORD = 0x10;
pub const FILE_ATTRIBUTE_ENCRYPTED: DWORD = 0x4000;
pub const FILE_ATTRIBUTE_HIDDEN: DWORD = 0x2;
pub const FILE_ATTRIBUTE_INTEGRITY_STREAM: DWORD = 0x8000;
pub const FILE_ATTRIBUTE_NORMAL: DWORD = 0x80;
pub const FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: DWORD = 0x2000;
pub const FILE_ATTRIBUTE_NO_SCRUB_DATA: DWORD = 0x20000;
pub const FILE_ATTRIBUTE_OFFLINE: DWORD = 0x1000;
pub const FILE_ATTRIBUTE_READONLY: DWORD = 0x1;
pub const FILE_ATTRIBUTE_REPARSE_POINT: DWORD = 0x400;
pub const FILE_ATTRIBUTE_SPARSE_FILE: DWORD = 0x200;
pub const FILE_ATTRIBUTE_SYSTEM: DWORD = 0x4;
pub const FILE_ATTRIBUTE_TEMPORARY: DWORD = 0x100;
pub const FILE_ATTRIBUTE_VIRTUAL: DWORD = 0x10000;
pub const FILE_FLAG_BACKUP_SEMANTICS: DWORD = 0x02000000;
pub const FILE_FLAG_DELETE_ON_CLOSE: DWORD = 0x04000000;
pub const FILE_FLAG_NO_BUFFERING: DWORD = 0x20000000;
pub const FILE_FLAG_OPEN_NO_RECALL: DWORD = 0x00100000;
pub const FILE_FLAG_OPEN_REPARSE_POINT: DWORD = 0x00200000;
pub const FILE_FLAG_OVERLAPPED: DWORD = 0x40000000;
            pub const FILE_FLAG_POSIX_SEMANTICS: DWORD = 0x01000000;
pub const FILE_FLAG_RANDOM_ACCESS: DWORD = 0x10000000;
pub const FILE_FLAG_SESSION_AWARE: DWORD = 0x00800000;
pub const FILE_FLAG_SEQUENTIAL_SCAN: DWORD = 0x08000000;
pub const FILE_FLAG_WRITE_THROUGH: DWORD = 0x80000000;
pub const FILE_FLAG_FIRST_PIPE_INSTANCE: DWORD = 0x00080000;
pub const FILE_NAME_NORMALIZED: DWORD = 0x0;
pub const FILE_NAME_OPENED: DWORD = 0x8;
pub const VOLUME_NAME_DOS: DWORD = 0x0;
pub const VOLUME_NAME_GUID: DWORD = 0x1;
pub const VOLUME_NAME_NONE: DWORD = 0x4;
pub const VOLUME_NAME_NT: DWORD = 0x2;
pub const GENERIC_READ: DWORD = 0x80000000;
pub const GENERIC_WRITE: DWORD = 0x40000000;
pub const GENERIC_EXECUTE: DWORD = 0x20000000;
pub const GENERIC_ALL: DWORD = 0x10000000;
pub const FILE_WRITE_ATTRIBUTES: DWORD = 0x00000100;
pub const FILE_READ_ATTRIBUTES: DWORD = 0x00000080;
pub const STANDARD_RIGHTS_READ: DWORD = 0x20000;
pub const STANDARD_RIGHTS_WRITE: DWORD = 0x20000;
pub const FILE_WRITE_EA: DWORD = 0x00000010;
pub const FILE_READ_EA: DWORD = 0x00000008;
pub const FILE_GENERIC_READ: DWORD =
STANDARD_RIGHTS_READ | FILE_READ_DATA |
FILE_READ_ATTRIBUTES | FILE_READ_EA | SYNCHRONIZE;
pub const FILE_GENERIC_WRITE: DWORD =
STANDARD_RIGHTS_WRITE | FILE_WRITE_DATA |
FILE_WRITE_ATTRIBUTES | FILE_WRITE_EA | FILE_APPEND_DATA |
SYNCHRONIZE;
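            // Illustrative sketch (not part of the bindings): these composites
            // mirror the winnt.h FILE_GENERIC_* macros, so a read/write handle
            // is usually requested with their union:
            //
            //     let desired_access = FILE_GENERIC_READ | FILE_GENERIC_WRITE;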
pub const FILE_BEGIN: DWORD = 0;
pub const FILE_CURRENT: DWORD = 1;
pub const FILE_END: DWORD = 2;
pub const MAX_PROTOCOL_CHAIN: DWORD = 7;
pub const WSAPROTOCOL_LEN: DWORD = 255;
pub const INVALID_SOCKET: DWORD = !0;
pub const DETACHED_PROCESS: DWORD = 0x00000008;
pub const CREATE_NEW_PROCESS_GROUP: DWORD = 0x00000200;
pub const CREATE_UNICODE_ENVIRONMENT: DWORD = 0x00000400;
pub const PIPE_ACCESS_DUPLEX: DWORD = 0x00000003;
pub const PIPE_ACCESS_INBOUND: DWORD = 0x00000001;
pub const PIPE_ACCESS_OUTBOUND: DWORD = 0x00000002;
pub const PIPE_TYPE_BYTE: DWORD = 0x00000000;
pub const PIPE_TYPE_MESSAGE: DWORD = 0x00000004;
pub const PIPE_READMODE_BYTE: DWORD = 0x00000000;
pub const PIPE_READMODE_MESSAGE: DWORD = 0x00000002;
pub const PIPE_WAIT: DWORD = 0x00000000;
pub const PIPE_NOWAIT: DWORD = 0x00000001;
pub const PIPE_ACCEPT_REMOTE_CLIENTS: DWORD = 0x00000000;
pub const PIPE_REJECT_REMOTE_CLIENTS: DWORD = 0x00000008;
pub const PIPE_UNLIMITED_INSTANCES: DWORD = 255;
pub const IPPROTO_RAW: c_int = 255;
pub const FIONBIO: c_long = -0x7FFB9982;
}
pub mod sysconf {
}
}
#[cfg(any(target_os = "linux", target_os = "android"))]
pub mod os {
pub mod c95 {
use types::os::arch::c95::{c_int, c_uint};
pub const EXIT_FAILURE : c_int = 1;
pub const EXIT_SUCCESS : c_int = 0;
pub const RAND_MAX : c_int = 2147483647;
pub const EOF : c_int = -1;
pub const SEEK_SET : c_int = 0;
pub const SEEK_CUR : c_int = 1;
pub const SEEK_END : c_int = 2;
pub const _IOFBF : c_int = 0;
pub const _IONBF : c_int = 2;
pub const _IOLBF : c_int = 1;
pub const BUFSIZ : c_uint = 8192_u32;
pub const FOPEN_MAX : c_uint = 16_u32;
pub const FILENAME_MAX : c_uint = 4096_u32;
pub const L_tmpnam : c_uint = 20_u32;
pub const TMP_MAX : c_uint = 238328_u32;
}
pub mod c99 {
}
#[cfg(any(target_arch = "x86",
target_arch = "x86_64",
target_arch = "arm"))]
pub mod posix88 {
use types::os::arch::c95::c_int;
use types::common::c95::c_void;
use types::os::arch::posix88::mode_t;
pub const O_RDONLY : c_int = 0;
pub const O_WRONLY : c_int = 1;
pub const O_RDWR : c_int = 2;
pub const O_APPEND : c_int = 1024;
pub const O_CREAT : c_int = 64;
pub const O_EXCL : c_int = 128;
pub const O_TRUNC : c_int = 512;
pub const S_IFIFO : mode_t = 4096;
pub const S_IFCHR : mode_t = 8192;
pub const S_IFBLK : mode_t = 24576;
pub const S_IFDIR : mode_t = 16384;
pub const S_IFREG : mode_t = 32768;
pub const S_IFLNK : mode_t = 40960;
pub const S_IFMT : mode_t = 61440;
pub const S_IEXEC : mode_t = 64;
pub const S_IWRITE : mode_t = 128;
pub const S_IREAD : mode_t = 256;
pub const S_IRWXU : mode_t = 448;
pub const S_IXUSR : mode_t = 64;
pub const S_IWUSR : mode_t = 128;
pub const S_IRUSR : mode_t = 256;
pub const F_OK : c_int = 0;
pub const R_OK : c_int = 4;
pub const W_OK : c_int = 2;
pub const X_OK : c_int = 1;
pub const STDIN_FILENO : c_int = 0;
pub const STDOUT_FILENO : c_int = 1;
pub const STDERR_FILENO : c_int = 2;
pub const F_LOCK : c_int = 1;
pub const F_TEST : c_int = 3;
pub const F_TLOCK : c_int = 2;
pub const F_ULOCK : c_int = 0;
pub const SIGHUP : c_int = 1;
pub const SIGINT : c_int = 2;
pub const SIGQUIT : c_int = 3;
pub const SIGILL : c_int = 4;
pub const SIGABRT : c_int = 6;
pub const SIGFPE : c_int = 8;
pub const SIGKILL : c_int = 9;
pub const SIGSEGV : c_int = 11;
pub const SIGPIPE : c_int = 13;
pub const SIGALRM : c_int = 14;
pub const SIGTERM : c_int = 15;
pub const PROT_NONE : c_int = 0;
pub const PROT_READ : c_int = 1;
pub const PROT_WRITE : c_int = 2;
pub const PROT_EXEC : c_int = 4;
pub const MAP_FILE : c_int = 0x0000;
pub const MAP_SHARED : c_int = 0x0001;
pub const MAP_PRIVATE : c_int = 0x0002;
pub const MAP_FIXED : c_int = 0x0010;
pub const MAP_ANON : c_int = 0x0020;
pub const MAP_FAILED : *mut c_void = -1 as *mut c_void;
pub const MCL_CURRENT : c_int = 0x0001;
pub const MCL_FUTURE : c_int = 0x0002;
pub const MS_ASYNC : c_int = 0x0001;
pub const MS_INVALIDATE : c_int = 0x0002;
pub const MS_SYNC : c_int = 0x0004;
pub const EPERM : c_int = 1;
pub const ENOENT : c_int = 2;
pub const ESRCH : c_int = 3;
pub const EINTR : c_int = 4;
pub const EIO : c_int = 5;
pub const ENXIO : c_int = 6;
pub const E2BIG : c_int = 7;
pub const ENOEXEC : c_int = 8;
pub const EBADF : c_int = 9;
pub const ECHILD : c_int = 10;
pub const EAGAIN : c_int = 11;
pub const ENOMEM : c_int = 12;
pub const EACCES : c_int = 13;
pub const EFAULT : c_int = 14;
pub const ENOTBLK : c_int = 15;
pub const EBUSY : c_int = 16;
pub const EEXIST : c_int = 17;
pub const EXDEV : c_int = 18;
pub const ENODEV : c_int = 19;
pub const ENOTDIR : c_int = 20;
pub const EISDIR : c_int = 21;
pub const EINVAL : c_int = 22;
pub const ENFILE : c_int = 23;
pub const EMFILE : c_int = 24;
pub const ENOTTY : c_int = 25;
pub const ETXTBSY : c_int = 26;
pub const EFBIG : c_int = 27;
pub const ENOSPC : c_int = 28;
pub const ESPIPE : c_int = 29;
pub const EROFS : c_int = 30;
pub const EMLINK : c_int = 31;
pub const EPIPE : c_int = 32;
pub const EDOM : c_int = 33;
pub const ERANGE : c_int = 34;
pub const EDEADLK: c_int = 35;
pub const ENAMETOOLONG: c_int = 36;
pub const ENOLCK: c_int = 37;
pub const ENOSYS: c_int = 38;
pub const ENOTEMPTY: c_int = 39;
pub const ELOOP: c_int = 40;
pub const EWOULDBLOCK: c_int = EAGAIN;
pub const ENOMSG: c_int = 42;
pub const EIDRM: c_int = 43;
pub const ECHRNG: c_int = 44;
pub const EL2NSYNC: c_int = 45;
pub const EL3HLT: c_int = 46;
pub const EL3RST: c_int = 47;
pub const ELNRNG: c_int = 48;
pub const EUNATCH: c_int = 49;
pub const ENOCSI: c_int = 50;
pub const EL2HLT: c_int = 51;
pub const EBADE: c_int = 52;
pub const EBADR: c_int = 53;
pub const EXFULL: c_int = 54;
pub const ENOANO: c_int = 55;
pub const EBADRQC: c_int = 56;
pub const EBADSLT: c_int = 57;
pub const EDEADLOCK: c_int = EDEADLK;
pub const EBFONT: c_int = 59;
pub const ENOSTR: c_int = 60;
pub const ENODATA: c_int = 61;
pub const ETIME: c_int = 62;
pub const ENOSR: c_int = 63;
pub const ENONET: c_int = 64;
pub const ENOPKG: c_int = 65;
pub const EREMOTE: c_int = 66;
pub const ENOLINK: c_int = 67;
pub const EADV: c_int = 68;
pub const ESRMNT: c_int = 69;
pub const ECOMM: c_int = 70;
pub const EPROTO: c_int = 71;
pub const EMULTIHOP: c_int = 72;
pub const EDOTDOT: c_int = 73;
pub const EBADMSG: c_int = 74;
pub const EOVERFLOW: c_int = 75;
pub const ENOTUNIQ: c_int = 76;
pub const EBADFD: c_int = 77;
pub const EREMCHG: c_int = 78;
pub const ELIBACC: c_int = 79;
pub const ELIBBAD: c_int = 80;
pub const ELIBSCN: c_int = 81;
pub const ELIBMAX: c_int = 82;
pub const ELIBEXEC: c_int = 83;
pub const EILSEQ: c_int = 84;
pub const ERESTART: c_int = 85;
pub const ESTRPIPE: c_int = 86;
pub const EUSERS: c_int = 87;
pub const ENOTSOCK: c_int = 88;
pub const EDESTADDRREQ: c_int = 89;
pub const EMSGSIZE: c_int = 90;
pub const EPROTOTYPE: c_int = 91;
pub const ENOPROTOOPT: c_int = 92;
pub const EPROTONOSUPPORT: c_int = 93;
pub const ESOCKTNOSUPPORT: c_int = 94;
pub const EOPNOTSUPP: c_int = 95;
pub const EPFNOSUPPORT: c_int = 96;
pub const EAFNOSUPPORT: c_int = 97;
pub const EADDRINUSE: c_int = 98;
pub const EADDRNOTAVAIL: c_int = 99;
pub const ENETDOWN: c_int = 100;
pub const ENETUNREACH: c_int = 101;
pub const ENETRESET: c_int = 102;
pub const ECONNABORTED: c_int = 103;
pub const ECONNRESET: c_int = 104;
pub const ENOBUFS: c_int = 105;
pub const EISCONN: c_int = 106;
pub const ENOTCONN: c_int = 107;
pub const ESHUTDOWN: c_int = 108;
pub const ETOOMANYREFS: c_int = 109;
pub const ETIMEDOUT: c_int = 110;
pub const ECONNREFUSED: c_int = 111;
pub const EHOSTDOWN: c_int = 112;
pub const EHOSTUNREACH: c_int = 113;
pub const EALREADY: c_int = 114;
pub const EINPROGRESS: c_int = 115;
pub const ESTALE: c_int = 116;
pub const EUCLEAN: c_int = 117;
pub const ENOTNAM: c_int = 118;
pub const ENAVAIL: c_int = 119;
pub const EISNAM: c_int = 120;
pub const EREMOTEIO: c_int = 121;
pub const EDQUOT: c_int = 122;
pub const ENOMEDIUM: c_int = 123;
pub const EMEDIUMTYPE: c_int = 124;
pub const ECANCELED: c_int = 125;
pub const ENOKEY: c_int = 126;
pub const EKEYEXPIRED: c_int = 127;
pub const EKEYREVOKED: c_int = 128;
pub const EKEYREJECTED: c_int = 129;
pub const EOWNERDEAD: c_int = 130;
pub const ENOTRECOVERABLE: c_int = 131;
pub const ERFKILL: c_int = 132;
pub const EHWPOISON: c_int = 133;
}
#[cfg(any(target_arch = "mips", target_arch = "mipsel"))]
pub mod posix88 {
use types::os::arch::c95::c_int;
use types::common::c95::c_void;
use types::os::arch::posix88::mode_t;
pub const O_RDONLY : c_int = 0;
pub const O_WRONLY : c_int = 1;
pub const O_RDWR : c_int = 2;
pub const O_APPEND : c_int = 8;
pub const O_CREAT : c_int = 256;
pub const O_EXCL : c_int = 1024;
pub const O_TRUNC : c_int = 512;
pub const S_IFIFO : mode_t = 4096;
pub const S_IFCHR : mode_t = 8192;
pub const S_IFBLK : mode_t = 24576;
pub const S_IFDIR : mode_t = 16384;
pub const S_IFREG : mode_t = 32768;
pub const S_IFLNK : mode_t = 40960;
pub const S_IFMT : mode_t = 61440;
pub const S_IEXEC : mode_t = 64;
pub const S_IWRITE : mode_t = 128;
pub const S_IREAD : mode_t = 256;
pub const S_IRWXU : mode_t = 448;
pub const S_IXUSR : mode_t = 64;
pub const S_IWUSR : mode_t = 128;
pub const S_IRUSR : mode_t = 256;
pub const F_OK : c_int = 0;
pub const R_OK : c_int = 4;
pub const W_OK : c_int = 2;
pub const X_OK : c_int = 1;
pub const STDIN_FILENO : c_int = 0;
pub const STDOUT_FILENO : c_int = 1;
pub const STDERR_FILENO : c_int = 2;
pub const F_LOCK : c_int = 1;
pub const F_TEST : c_int = 3;
pub const F_TLOCK : c_int = 2;
pub const F_ULOCK : c_int = 0;
pub const SIGHUP : c_int = 1;
pub const SIGINT : c_int = 2;
pub const SIGQUIT : c_int = 3;
pub const SIGILL : c_int = 4;
pub const SIGABRT : c_int = 6;
pub const SIGFPE : c_int = 8;
pub const SIGKILL : c_int = 9;
pub const SIGSEGV : c_int = 11;
pub const SIGPIPE : c_int = 13;
pub const SIGALRM : c_int = 14;
pub const SIGTERM : c_int = 15;
pub const PROT_NONE : c_int = 0;
pub const PROT_READ : c_int = 1;
pub const PROT_WRITE : c_int = 2;
pub const PROT_EXEC : c_int = 4;
pub const MAP_FILE : c_int = 0x0000;
pub const MAP_SHARED : c_int = 0x0001;
pub const MAP_PRIVATE : c_int = 0x0002;
pub const MAP_FIXED : c_int = 0x0010;
pub const MAP_ANON : c_int = 0x0800;
pub const MAP_FAILED : *mut c_void = -1 as *mut c_void;
pub const MCL_CURRENT : c_int = 0x0001;
pub const MCL_FUTURE : c_int = 0x0002;
pub const MS_ASYNC : c_int = 0x0001;
pub const MS_INVALIDATE : c_int = 0x0002;
pub const MS_SYNC : c_int = 0x0004;
pub const EPERM : c_int = 1;
pub const ENOENT : c_int = 2;
pub const ESRCH : c_int = 3;
pub const EINTR : c_int = 4;
pub const EIO : c_int = 5;
pub const ENXIO : c_int = 6;
pub const E2BIG : c_int = 7;
pub const ENOEXEC : c_int = 8;
pub const EBADF : c_int = 9;
pub const ECHILD : c_int = 10;
pub const EAGAIN : c_int = 11;
pub const ENOMEM : c_int = 12;
pub const EACCES : c_int = 13;
pub const EFAULT : c_int = 14;
pub const ENOTBLK : c_int = 15;
pub const EBUSY : c_int = 16;
pub const EEXIST : c_int = 17;
pub const EXDEV : c_int = 18;
pub const ENODEV : c_int = 19;
pub const ENOTDIR : c_int = 20;
pub const EISDIR : c_int = 21;
pub const EINVAL : c_int = 22;
pub const ENFILE : c_int = 23;
pub const EMFILE : c_int = 24;
pub const ENOTTY : c_int = 25;
pub const ETXTBSY : c_int = 26;
pub const EFBIG : c_int = 27;
pub const ENOSPC : c_int = 28;
pub const ESPIPE : c_int = 29;
pub const EROFS : c_int = 30;
pub const EMLINK : c_int = 31;
pub const EPIPE : c_int = 32;
pub const EDOM : c_int = 33;
pub const ERANGE : c_int = 34;
pub const ENOMSG: c_int = 35;
pub const EIDRM: c_int = 36;
pub const ECHRNG: c_int = 37;
pub const EL2NSYNC: c_int = 38;
pub const EL3HLT: c_int = 39;
pub const EL3RST: c_int = 40;
pub const ELNRNG: c_int = 41;
pub const EUNATCH: c_int = 42;
pub const ENOCSI: c_int = 43;
pub const EL2HLT: c_int = 44;
pub const EDEADLK: c_int = 45;
pub const ENOLCK: c_int = 46;
pub const EBADE: c_int = 50;
pub const EBADR: c_int = 51;
pub const EXFULL: c_int = 52;
            pub const EBADRQC: c_int = 54;
            pub const EBADSLT: c_int = 55;
            pub const EDEADLOCK: c_int = 56;
pub const EBFONT: c_int = 59;
pub const ENOSTR: c_int = 60;
pub const ENODATA: c_int = 61;
pub const ETIME: c_int = 62;
pub const ENOSR: c_int = 63;
pub const ENONET: c_int = 64;
pub const ENOPKG: c_int = 65;
pub const EREMOTE: c_int = 66;
pub const ENOLINK: c_int = 67;
pub const EADV: c_int = 68;
pub const ESRMNT: c_int = 69;
pub const ECOMM: c_int = 70;
pub const EPROTO: c_int = 71;
pub const EDOTDOT: c_int = 73;
pub const EMULTIHOP: c_int = 74;
pub const EBADMSG: c_int = 77;
pub const ENAMETOOLONG: c_int = 78;
pub const EOVERFLOW: c_int = 79;
pub const ENOTUNIQ: c_int = 80;
pub const EBADFD: c_int = 81;
pub const EREMCHG: c_int = 82;
pub const ELIBACC: c_int = 83;
pub const ELIBBAD: c_int = 84;
            pub const ELIBSCN: c_int = 85;
pub const ELIBMAX: c_int = 86;
pub const ELIBEXEC: c_int = 87;
pub const EILSEQ: c_int = 88;
pub const ENOSYS: c_int = 89;
pub const ELOOP: c_int = 90;
pub const ERESTART: c_int = 91;
pub const ESTRPIPE: c_int = 92;
pub const ENOTEMPTY: c_int = 93;
pub const EUSERS: c_int = 94;
pub const ENOTSOCK: c_int = 95;
pub const EDESTADDRREQ: c_int = 96;
pub const EMSGSIZE: c_int = 97;
pub const EPROTOTYPE: c_int = 98;
pub const ENOPROTOOPT: c_int = 99;
pub const EPROTONOSUPPORT: c_int = 120;
pub const ESOCKTNOSUPPORT: c_int = 121;
pub const EOPNOTSUPP: c_int = 122;
pub const EPFNOSUPPORT: c_int = 123;
pub const EAFNOSUPPORT: c_int = 124;
pub const EADDRINUSE: c_int = 125;
pub const EADDRNOTAVAIL: c_int = 126;
pub const ENETDOWN: c_int = 127;
pub const ENETUNREACH: c_int = 128;
pub const ENETRESET: c_int = 129;
pub const ECONNABORTED: c_int = 130;
pub const ECONNRESET: c_int = 131;
pub const ENOBUFS: c_int = 132;
pub const EISCONN: c_int = 133;
pub const ENOTCONN: c_int = 134;
pub const EUCLEAN: c_int = 135;
pub const ENOTNAM: c_int = 137;
pub const ENAVAIL: c_int = 138;
pub const EISNAM: c_int = 139;
pub const EREMOTEIO: c_int = 140;
pub const ESHUTDOWN: c_int = 143;
pub const ETOOMANYREFS: c_int = 144;
pub const ETIMEDOUT: c_int = 145;
pub const ECONNREFUSED: c_int = 146;
pub const EHOSTDOWN: c_int = 147;
pub const EHOSTUNREACH: c_int = 148;
pub const EWOULDBLOCK: c_int = EAGAIN;
pub const EALREADY: c_int = 149;
pub const EINPROGRESS: c_int = 150;
pub const ESTALE: c_int = 151;
pub const ECANCELED: c_int = 158;
pub const ENOMEDIUM: c_int = 159;
pub const EMEDIUMTYPE: c_int = 160;
pub const ENOKEY: c_int = 161;
pub const EKEYEXPIRED: c_int = 162;
pub const EKEYREVOKED: c_int = 163;
pub const EKEYREJECTED: c_int = 164;
pub const EOWNERDEAD: c_int = 165;
pub const ENOTRECOVERABLE: c_int = 166;
pub const ERFKILL: c_int = 167;
pub const EHWPOISON: c_int = 168;
pub const EDQUOT: c_int = 1133;
}
pub mod posix01 {
use types::os::arch::c95::{c_int, size_t};
pub const F_DUPFD : c_int = 0;
pub const F_GETFD : c_int = 1;
pub const F_SETFD : c_int = 2;
pub const F_GETFL : c_int = 3;
pub const F_SETFL : c_int = 4;
pub const SIGTRAP : c_int = 5;
pub const SIGPIPE: c_int = 13;
pub const SIG_IGN: size_t = 1;
pub const GLOB_ERR : c_int = 1 << 0;
pub const GLOB_MARK : c_int = 1 << 1;
pub const GLOB_NOSORT : c_int = 1 << 2;
pub const GLOB_DOOFFS : c_int = 1 << 3;
pub const GLOB_NOCHECK : c_int = 1 << 4;
pub const GLOB_APPEND : c_int = 1 << 5;
pub const GLOB_NOESCAPE : c_int = 1 << 6;
pub const GLOB_NOSPACE : c_int = 1;
pub const GLOB_ABORTED : c_int = 2;
pub const GLOB_NOMATCH : c_int = 3;
pub const POSIX_MADV_NORMAL : c_int = 0;
pub const POSIX_MADV_RANDOM : c_int = 1;
pub const POSIX_MADV_SEQUENTIAL : c_int = 2;
pub const POSIX_MADV_WILLNEED : c_int = 3;
pub const POSIX_MADV_DONTNEED : c_int = 4;
pub const _SC_MQ_PRIO_MAX : c_int = 28;
pub const _SC_IOV_MAX : c_int = 60;
pub const _SC_GETGR_R_SIZE_MAX : c_int = 69;
pub const _SC_GETPW_R_SIZE_MAX : c_int = 70;
pub const _SC_LOGIN_NAME_MAX : c_int = 71;
pub const _SC_TTY_NAME_MAX : c_int = 72;
pub const _SC_THREADS : c_int = 67;
pub const _SC_THREAD_SAFE_FUNCTIONS : c_int = 68;
pub const _SC_THREAD_DESTRUCTOR_ITERATIONS : c_int = 73;
pub const _SC_THREAD_KEYS_MAX : c_int = 74;
pub const _SC_THREAD_STACK_MIN : c_int = 75;
pub const _SC_THREAD_THREADS_MAX : c_int = 76;
pub const _SC_THREAD_ATTR_STACKADDR : c_int = 77;
pub const _SC_THREAD_ATTR_STACKSIZE : c_int = 78;
pub const _SC_THREAD_PRIORITY_SCHEDULING : c_int = 79;
pub const _SC_THREAD_PRIO_INHERIT : c_int = 80;
pub const _SC_THREAD_PRIO_PROTECT : c_int = 81;
pub const _SC_THREAD_PROCESS_SHARED : c_int = 82;
pub const _SC_ATEXIT_MAX : c_int = 87;
pub const _SC_XOPEN_VERSION : c_int = 89;
pub const _SC_XOPEN_XCU_VERSION : c_int = 90;
pub const _SC_XOPEN_UNIX : c_int = 91;
pub const _SC_XOPEN_CRYPT : c_int = 92;
pub const _SC_XOPEN_ENH_I18N : c_int = 93;
pub const _SC_XOPEN_SHM : c_int = 94;
pub const _SC_XOPEN_LEGACY : c_int = 129;
pub const _SC_XOPEN_REALTIME : c_int = 130;
pub const _SC_XOPEN_REALTIME_THREADS : c_int = 131;
pub const PTHREAD_CREATE_JOINABLE: c_int = 0;
pub const PTHREAD_CREATE_DETACHED: c_int = 1;
#[cfg(target_os = "android")]
pub const PTHREAD_STACK_MIN: size_t = 8192;
#[cfg(all(target_os = "linux",
any(target_arch = "arm",
target_arch = "x86",
target_arch = "x86_64")))]
pub const PTHREAD_STACK_MIN: size_t = 16384;
#[cfg(all(target_os = "linux",
any(target_arch = "mips", target_arch = "mipsel")))]
pub const PTHREAD_STACK_MIN: size_t = 131072;
pub const CLOCK_REALTIME: c_int = 0;
pub const CLOCK_MONOTONIC: c_int = 1;
}
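        // Illustrative sketch (not part of the bindings): CLOCK_MONOTONIC above
        // is the usual argument to clock_gettime(2). Assuming an extern
        // `clock_gettime` binding and a matching `timespec` type elsewhere in
        // this crate, a monotonic reading would look like:
        //
        //     let mut ts = timespec { tv_sec: 0, tv_nsec: 0 };
        //     unsafe { clock_gettime(CLOCK_MONOTONIC, &mut ts); }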
pub mod posix08 {
}
#[cfg(any(target_arch = "arm",
target_arch = "x86",
target_arch = "x86_64"))]
pub mod bsd44 {
use types::os::arch::c95::c_int;
pub const MADV_NORMAL : c_int = 0;
pub const MADV_RANDOM : c_int = 1;
pub const MADV_SEQUENTIAL : c_int = 2;
pub const MADV_WILLNEED : c_int = 3;
pub const MADV_DONTNEED : c_int = 4;
pub const MADV_REMOVE : c_int = 9;
pub const MADV_DONTFORK : c_int = 10;
pub const MADV_DOFORK : c_int = 11;
pub const MADV_MERGEABLE : c_int = 12;
pub const MADV_UNMERGEABLE : c_int = 13;
pub const MADV_HWPOISON : c_int = 100;
pub const IFF_LOOPBACK: c_int = 0x8;
pub const AF_UNIX: c_int = 1;
pub const AF_INET: c_int = 2;
pub const AF_INET6: c_int = 10;
pub const SOCK_STREAM: c_int = 1;
pub const SOCK_DGRAM: c_int = 2;
pub const SOCK_RAW: c_int = 3;
pub const IPPROTO_TCP: c_int = 6;
pub const IPPROTO_IP: c_int = 0;
pub const IPPROTO_IPV6: c_int = 41;
pub const IP_MULTICAST_TTL: c_int = 33;
pub const IP_MULTICAST_LOOP: c_int = 34;
pub const IP_TTL: c_int = 2;
pub const IP_HDRINCL: c_int = 3;
pub const IP_ADD_MEMBERSHIP: c_int = 35;
pub const IP_DROP_MEMBERSHIP: c_int = 36;
pub const IPV6_ADD_MEMBERSHIP: c_int = 20;
pub const IPV6_DROP_MEMBERSHIP: c_int = 21;
pub const TCP_NODELAY: c_int = 1;
pub const SOL_SOCKET: c_int = 1;
pub const SO_KEEPALIVE: c_int = 9;
pub const SO_BROADCAST: c_int = 6;
pub const SO_REUSEADDR: c_int = 2;
pub const SO_ERROR: c_int = 4;
pub const SHUT_RD: c_int = 0;
pub const SHUT_WR: c_int = 1;
pub const SHUT_RDWR: c_int = 2;
}
#[cfg(any(target_arch = "mips", target_arch = "mipsel"))]
pub mod bsd44 {
use types::os::arch::c95::c_int;
pub const MADV_NORMAL : c_int = 0;
pub const MADV_RANDOM : c_int = 1;
pub const MADV_SEQUENTIAL : c_int = 2;
pub const MADV_WILLNEED : c_int = 3;
pub const MADV_DONTNEED : c_int = 4;
pub const MADV_REMOVE : c_int = 9;
pub const MADV_DONTFORK : c_int = 10;
pub const MADV_DOFORK : c_int = 11;
pub const MADV_MERGEABLE : c_int = 12;
pub const MADV_UNMERGEABLE : c_int = 13;
pub const MADV_HWPOISON : c_int = 100;
pub const AF_UNIX: c_int = 1;
pub const AF_INET: c_int = 2;
pub const AF_INET6: c_int = 10;
pub const SOCK_STREAM: c_int = 2;
pub const SOCK_DGRAM: c_int = 1;
pub const SOCK_RAW: c_int = 3;
pub const IPPROTO_TCP: c_int = 6;
pub const IPPROTO_IP: c_int = 0;
pub const IPPROTO_IPV6: c_int = 41;
pub const IP_MULTICAST_TTL: c_int = 33;
pub const IP_MULTICAST_LOOP: c_int = 34;
pub const IP_TTL: c_int = 2;
pub const IP_HDRINCL: c_int = 3;
pub const IP_ADD_MEMBERSHIP: c_int = 35;
pub const IP_DROP_MEMBERSHIP: c_int = 36;
pub const IPV6_ADD_MEMBERSHIP: c_int = 20;
pub const IPV6_DROP_MEMBERSHIP: c_int = 21;
pub const TCP_NODELAY: c_int = 1;
pub const SOL_SOCKET: c_int = 65535;
pub const SO_KEEPALIVE: c_int = 8;
pub const SO_BROADCAST: c_int = 32;
pub const SO_REUSEADDR: c_int = 4;
pub const SO_ERROR: c_int = 4103;
pub const SHUT_RD: c_int = 0;
pub const SHUT_WR: c_int = 1;
pub const SHUT_RDWR: c_int = 2;
}
#[cfg(any(target_arch = "x86",
target_arch = "x86_64",
target_arch = "arm"))]
pub mod extra {
use types::os::arch::c95::c_int;
pub const AF_PACKET : c_int = 17;
pub const IPPROTO_RAW : c_int = 255;
pub const O_RSYNC : c_int = 1052672;
pub const O_DSYNC : c_int = 4096;
pub const O_NONBLOCK : c_int = 2048;
pub const O_SYNC : c_int = 1052672;
            pub const PROT_GROWSDOWN : c_int = 0x01000000;
            pub const PROT_GROWSUP : c_int = 0x02000000;
pub const MAP_TYPE : c_int = 0x000f;
pub const MAP_ANONYMOUS : c_int = 0x0020;
pub const MAP_32BIT : c_int = 0x0040;
pub const MAP_GROWSDOWN : c_int = 0x0100;
pub const MAP_DENYWRITE : c_int = 0x0800;
pub const MAP_EXECUTABLE : c_int = 0x01000;
pub const MAP_LOCKED : c_int = 0x02000;
            pub const MAP_NORESERVE : c_int = 0x04000;
pub const MAP_POPULATE : c_int = 0x08000;
pub const MAP_NONBLOCK : c_int = 0x010000;
pub const MAP_STACK : c_int = 0x020000;
}
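        // Illustrative sketch (not part of the bindings): the PROT_* and MAP_*
        // flags combine bitwise for mmap(2). Assuming an extern `mmap` binding
        // elsewhere in this crate, an anonymous private mapping would be:
        //
        //     let p = unsafe {
        //         mmap(0 as *mut c_void, 4096,
        //              PROT_READ | PROT_WRITE,
        //              MAP_PRIVATE | MAP_ANONYMOUS,
        //              -1, 0)
        //     };
        //     assert!(p != MAP_FAILED);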
#[cfg(any(target_arch = "mips", target_arch = "mipsel"))]
pub mod extra {
use types::os::arch::c95::c_int;
pub const AF_PACKET : c_int = 17;
pub const IPPROTO_RAW : c_int = 255;
pub const O_RSYNC : c_int = 16400;
pub const O_DSYNC : c_int = 16;
pub const O_NONBLOCK : c_int = 128;
pub const O_SYNC : c_int = 16400;
pub const PROT_GROWSDOWN : c_int = 0x01000000;
pub const PROT_GROWSUP : c_int = 0x02000000;
pub const MAP_TYPE : c_int = 0x000f;
pub const MAP_ANONYMOUS : c_int = 0x0800;
pub const MAP_GROWSDOWN : c_int = 0x01000;
pub const MAP_DENYWRITE : c_int = 0x02000;
pub const MAP_EXECUTABLE : c_int = 0x04000;
pub const MAP_LOCKED : c_int = 0x08000;
            pub const MAP_NORESERVE : c_int = 0x0400;
pub const MAP_POPULATE : c_int = 0x010000;
pub const MAP_NONBLOCK : c_int = 0x020000;
pub const MAP_STACK : c_int = 0x040000;
}
#[cfg(target_os = "linux")]
pub mod sysconf {
use types::os::arch::c95::c_int;
pub const _SC_ARG_MAX : c_int = 0;
pub const _SC_CHILD_MAX : c_int = 1;
pub const _SC_CLK_TCK : c_int = 2;
pub const _SC_NGROUPS_MAX : c_int = 3;
pub const _SC_OPEN_MAX : c_int = 4;
pub const _SC_STREAM_MAX : c_int = 5;
pub const _SC_TZNAME_MAX : c_int = 6;
pub const _SC_JOB_CONTROL : c_int = 7;
pub const _SC_SAVED_IDS : c_int = 8;
pub const _SC_REALTIME_SIGNALS : c_int = 9;
pub const _SC_PRIORITY_SCHEDULING : c_int = 10;
pub const _SC_TIMERS : c_int = 11;
pub const _SC_ASYNCHRONOUS_IO : c_int = 12;
pub const _SC_PRIORITIZED_IO : c_int = 13;
pub const _SC_SYNCHRONIZED_IO : c_int = 14;
pub const _SC_FSYNC : c_int = 15;
pub const _SC_MAPPED_FILES : c_int = 16;
pub const _SC_MEMLOCK : c_int = 17;
pub const _SC_MEMLOCK_RANGE : c_int = 18;
pub const _SC_MEMORY_PROTECTION : c_int = 19;
pub const _SC_MESSAGE_PASSING : c_int = 20;
pub const _SC_SEMAPHORES : c_int = 21;
pub const _SC_SHARED_MEMORY_OBJECTS : c_int = 22;
pub const _SC_AIO_LISTIO_MAX : c_int = 23;
pub const _SC_AIO_MAX : c_int = 24;
pub const _SC_AIO_PRIO_DELTA_MAX : c_int = 25;
pub const _SC_DELAYTIMER_MAX : c_int = 26;
pub const _SC_MQ_OPEN_MAX : c_int = 27;
pub const _SC_VERSION : c_int = 29;
pub const _SC_PAGESIZE : c_int = 30;
pub const _SC_RTSIG_MAX : c_int = 31;
pub const _SC_SEM_NSEMS_MAX : c_int = 32;
pub const _SC_SEM_VALUE_MAX : c_int = 33;
pub const _SC_SIGQUEUE_MAX : c_int = 34;
pub const _SC_TIMER_MAX : c_int = 35;
pub const _SC_BC_BASE_MAX : c_int = 36;
pub const _SC_BC_DIM_MAX : c_int = 37;
pub const _SC_BC_SCALE_MAX : c_int = 38;
pub const _SC_BC_STRING_MAX : c_int = 39;
pub const _SC_COLL_WEIGHTS_MAX : c_int = 40;
pub const _SC_EXPR_NEST_MAX : c_int = 42;
pub const _SC_LINE_MAX : c_int = 43;
pub const _SC_RE_DUP_MAX : c_int = 44;
pub const _SC_2_VERSION : c_int = 46;
pub const _SC_2_C_BIND : c_int = 47;
pub const _SC_2_C_DEV : c_int = 48;
pub const _SC_2_FORT_DEV : c_int = 49;
pub const _SC_2_FORT_RUN : c_int = 50;
pub const _SC_2_SW_DEV : c_int = 51;
pub const _SC_2_LOCALEDEF : c_int = 52;
pub const _SC_2_CHAR_TERM : c_int = 95;
pub const _SC_2_C_VERSION : c_int = 96;
pub const _SC_2_UPE : c_int = 97;
pub const _SC_XBS5_ILP32_OFF32 : c_int = 125;
pub const _SC_XBS5_ILP32_OFFBIG : c_int = 126;
pub const _SC_XBS5_LPBIG_OFFBIG : c_int = 128;
}
#[cfg(target_os = "android")]
pub mod sysconf {
use types::os::arch::c95::c_int;
pub const _SC_ARG_MAX : c_int = 0;
pub const _SC_BC_BASE_MAX : c_int = 1;
pub const _SC_BC_DIM_MAX : c_int = 2;
pub const _SC_BC_SCALE_MAX : c_int = 3;
pub const _SC_BC_STRING_MAX : c_int = 4;
pub const _SC_CHILD_MAX : c_int = 5;
pub const _SC_CLK_TCK : c_int = 6;
pub const _SC_COLL_WEIGHTS_MAX : c_int = 7;
pub const _SC_EXPR_NEST_MAX : c_int = 8;
pub const _SC_LINE_MAX : c_int = 9;
pub const _SC_NGROUPS_MAX : c_int = 10;
pub const _SC_OPEN_MAX : c_int = 11;
pub const _SC_2_C_BIND : c_int = 13;
pub const _SC_2_C_DEV : c_int = 14;
pub const _SC_2_C_VERSION : c_int = 15;
pub const _SC_2_CHAR_TERM : c_int = 16;
pub const _SC_2_FORT_DEV : c_int = 17;
pub const _SC_2_FORT_RUN : c_int = 18;
pub const _SC_2_LOCALEDEF : c_int = 19;
pub const _SC_2_SW_DEV : c_int = 20;
pub const _SC_2_UPE : c_int = 21;
pub const _SC_2_VERSION : c_int = 22;
pub const _SC_JOB_CONTROL : c_int = 23;
pub const _SC_SAVED_IDS : c_int = 24;
pub const _SC_VERSION : c_int = 25;
pub const _SC_RE_DUP_MAX : c_int = 26;
pub const _SC_STREAM_MAX : c_int = 27;
pub const _SC_TZNAME_MAX : c_int = 28;
pub const _SC_PAGESIZE : c_int = 39;
}
}
#[cfg(any(target_os = "freebsd", target_os = "dragonfly"))]
pub mod os {
pub mod c95 {
use types::os::arch::c95::{c_int, c_uint};
pub const EXIT_FAILURE : c_int = 1;
pub const EXIT_SUCCESS : c_int = 0;
pub const RAND_MAX : c_int = 2147483647;
pub const EOF : c_int = -1;
pub const SEEK_SET : c_int = 0;
pub const SEEK_CUR : c_int = 1;
pub const SEEK_END : c_int = 2;
pub const _IOFBF : c_int = 0;
pub const _IONBF : c_int = 2;
pub const _IOLBF : c_int = 1;
pub const BUFSIZ : c_uint = 1024_u32;
pub const FOPEN_MAX : c_uint = 20_u32;
pub const FILENAME_MAX : c_uint = 1024_u32;
pub const L_tmpnam : c_uint = 1024_u32;
pub const TMP_MAX : c_uint = 308915776_u32;
}
pub mod c99 {
}
pub mod posix88 {
use types::common::c95::c_void;
use types::os::arch::c95::c_int;
use types::os::arch::posix88::mode_t;
pub const O_RDONLY : c_int = 0;
pub const O_WRONLY : c_int = 1;
pub const O_RDWR : c_int = 2;
pub const O_APPEND : c_int = 8;
pub const O_CREAT : c_int = 512;
pub const O_EXCL : c_int = 2048;
pub const O_TRUNC : c_int = 1024;
pub const S_IFIFO : mode_t = 4096;
pub const S_IFCHR : mode_t = 8192;
pub const S_IFBLK : mode_t = 24576;
pub const S_IFDIR : mode_t = 16384;
pub const S_IFREG : mode_t = 32768;
pub const S_IFLNK : mode_t = 40960;
pub const S_IFMT : mode_t = 61440;
pub const S_IEXEC : mode_t = 64;
pub const S_IWRITE : mode_t = 128;
pub const S_IREAD : mode_t = 256;
pub const S_IRWXU : mode_t = 448;
pub const S_IXUSR : mode_t = 64;
pub const S_IWUSR : mode_t = 128;
pub const S_IRUSR : mode_t = 256;
pub const F_OK : c_int = 0;
pub const R_OK : c_int = 4;
pub const W_OK : c_int = 2;
pub const X_OK : c_int = 1;
pub const STDIN_FILENO : c_int = 0;
pub const STDOUT_FILENO : c_int = 1;
pub const STDERR_FILENO : c_int = 2;
pub const F_LOCK : c_int = 1;
pub const F_TEST : c_int = 3;
pub const F_TLOCK : c_int = 2;
pub const F_ULOCK : c_int = 0;
pub const SIGHUP : c_int = 1;
pub const SIGINT : c_int = 2;
pub const SIGQUIT : c_int = 3;
pub const SIGILL : c_int = 4;
pub const SIGABRT : c_int = 6;
pub const SIGFPE : c_int = 8;
pub const SIGKILL : c_int = 9;
pub const SIGSEGV : c_int = 11;
pub const SIGPIPE : c_int = 13;
pub const SIGALRM : c_int = 14;
pub const SIGTERM : c_int = 15;
pub const PROT_NONE : c_int = 0;
pub const PROT_READ : c_int = 1;
pub const PROT_WRITE : c_int = 2;
pub const PROT_EXEC : c_int = 4;
pub const MAP_FILE : c_int = 0x0000;
pub const MAP_SHARED : c_int = 0x0001;
pub const MAP_PRIVATE : c_int = 0x0002;
pub const MAP_FIXED : c_int = 0x0010;
pub const MAP_ANON : c_int = 0x1000;
pub const MAP_FAILED : *mut c_void = -1 as *mut c_void;
pub const MCL_CURRENT : c_int = 0x0001;
pub const MCL_FUTURE : c_int = 0x0002;
pub const MS_SYNC : c_int = 0x0000;
pub const MS_ASYNC : c_int = 0x0001;
pub const MS_INVALIDATE : c_int = 0x0002;
pub const EPERM : c_int = 1;
pub const ENOENT : c_int = 2;
pub const ESRCH : c_int = 3;
pub const EINTR : c_int = 4;
pub const EIO : c_int = 5;
pub const ENXIO : c_int = 6;
pub const E2BIG : c_int = 7;
pub const ENOEXEC : c_int = 8;
pub const EBADF : c_int = 9;
pub const ECHILD : c_int = 10;
pub const EDEADLK : c_int = 11;
pub const ENOMEM : c_int = 12;
pub const EACCES : c_int = 13;
pub const EFAULT : c_int = 14;
pub const ENOTBLK : c_int = 15;
pub const EBUSY : c_int = 16;
pub const EEXIST : c_int = 17;
pub const EXDEV : c_int = 18;
pub const ENODEV : c_int = 19;
pub const ENOTDIR : c_int = 20;
pub const EISDIR : c_int = 21;
pub const EINVAL : c_int = 22;
pub const ENFILE : c_int = 23;
pub const EMFILE : c_int = 24;
pub const ENOTTY : c_int = 25;
pub const ETXTBSY : c_int = 26;
pub const EFBIG : c_int = 27;
pub const ENOSPC : c_int = 28;
pub const ESPIPE : c_int = 29;
pub const EROFS : c_int = 30;
pub const EMLINK : c_int = 31;
pub const EPIPE : c_int = 32;
pub const EDOM : c_int = 33;
pub const ERANGE : c_int = 34;
pub const EAGAIN : c_int = 35;
pub const EWOULDBLOCK : c_int = 35;
pub const EINPROGRESS : c_int = 36;
pub const EALREADY : c_int = 37;
pub const ENOTSOCK : c_int = 38;
pub const EDESTADDRREQ : c_int = 39;
pub const EMSGSIZE : c_int = 40;
pub const EPROTOTYPE : c_int = 41;
pub const ENOPROTOOPT : c_int = 42;
pub const EPROTONOSUPPORT : c_int = 43;
pub const ESOCKTNOSUPPORT : c_int = 44;
pub const EOPNOTSUPP : c_int = 45;
pub const EPFNOSUPPORT : c_int = 46;
pub const EAFNOSUPPORT : c_int = 47;
pub const EADDRINUSE : c_int = 48;
pub const EADDRNOTAVAIL : c_int = 49;
pub const ENETDOWN : c_int = 50;
pub const ENETUNREACH : c_int = 51;
pub const ENETRESET : c_int = 52;
pub const ECONNABORTED : c_int = 53;
pub const ECONNRESET : c_int = 54;
pub const ENOBUFS : c_int = 55;
pub const EISCONN : c_int = 56;
pub const ENOTCONN : c_int = 57;
pub const ESHUTDOWN : c_int = 58;
pub const ETOOMANYREFS : c_int = 59;
pub const ETIMEDOUT : c_int = 60;
pub const ECONNREFUSED : c_int = 61;
pub const ELOOP : c_int = 62;
pub const ENAMETOOLONG : c_int = 63;
pub const EHOSTDOWN : c_int = 64;
pub const EHOSTUNREACH : c_int = 65;
pub const ENOTEMPTY : c_int = 66;
pub const EPROCLIM : c_int = 67;
pub const EUSERS : c_int = 68;
pub const EDQUOT : c_int = 69;
pub const ESTALE : c_int = 70;
pub const EREMOTE : c_int = 71;
pub const EBADRPC : c_int = 72;
pub const ERPCMISMATCH : c_int = 73;
pub const EPROGUNAVAIL : c_int = 74;
pub const EPROGMISMATCH : c_int = 75;
pub const EPROCUNAVAIL : c_int = 76;
pub const ENOLCK : c_int = 77;
pub const ENOSYS : c_int = 78;
pub const EFTYPE : c_int = 79;
pub const EAUTH : c_int = 80;
pub const ENEEDAUTH : c_int = 81;
pub const EIDRM : c_int = 82;
pub const ENOMSG : c_int = 83;
pub const EOVERFLOW : c_int = 84;
pub const ECANCELED : c_int = 85;
pub const EILSEQ : c_int = 86;
pub const ENOATTR : c_int = 87;
pub const EDOOFUS : c_int = 88;
pub const EBADMSG : c_int = 89;
pub const EMULTIHOP : c_int = 90;
pub const ENOLINK : c_int = 91;
pub const EPROTO : c_int = 92;
pub const ENOMEDIUM : c_int = 93;
pub const EUNUSED94 : c_int = 94;
pub const EUNUSED95 : c_int = 95;
pub const EUNUSED96 : c_int = 96;
pub const EUNUSED97 : c_int = 97;
pub const EUNUSED98 : c_int = 98;
pub const EASYNC : c_int = 99;
pub const ELAST : c_int = 99;
}
pub mod posix01 {
use types::os::arch::c95::{c_int, size_t};
pub const F_DUPFD : c_int = 0;
pub const F_GETFD : c_int = 1;
pub const F_SETFD : c_int = 2;
pub const F_GETFL : c_int = 3;
pub const F_SETFL : c_int = 4;
pub const SIGTRAP : c_int = 5;
pub const SIGPIPE: c_int = 13;
pub const SIG_IGN: size_t = 1;
pub const GLOB_APPEND : c_int = 0x0001;
pub const GLOB_DOOFFS : c_int = 0x0002;
pub const GLOB_ERR : c_int = 0x0004;
pub const GLOB_MARK : c_int = 0x0008;
pub const GLOB_NOCHECK : c_int = 0x0010;
pub const GLOB_NOSORT : c_int = 0x0020;
pub const GLOB_NOESCAPE : c_int = 0x2000;
pub const GLOB_NOSPACE : c_int = -1;
pub const GLOB_ABORTED : c_int = -2;
pub const GLOB_NOMATCH : c_int = -3;
pub const POSIX_MADV_NORMAL : c_int = 0;
pub const POSIX_MADV_RANDOM : c_int = 1;
pub const POSIX_MADV_SEQUENTIAL : c_int = 2;
pub const POSIX_MADV_WILLNEED : c_int = 3;
pub const POSIX_MADV_DONTNEED : c_int = 4;
pub const _SC_IOV_MAX : c_int = 56;
pub const _SC_GETGR_R_SIZE_MAX : c_int = 70;
pub const _SC_GETPW_R_SIZE_MAX : c_int = 71;
pub const _SC_LOGIN_NAME_MAX : c_int = 73;
pub const _SC_MQ_PRIO_MAX : c_int = 75;
pub const _SC_THREAD_ATTR_STACKADDR : c_int = 82;
pub const _SC_THREAD_ATTR_STACKSIZE : c_int = 83;
pub const _SC_THREAD_DESTRUCTOR_ITERATIONS : c_int = 85;
pub const _SC_THREAD_KEYS_MAX : c_int = 86;
pub const _SC_THREAD_PRIO_INHERIT : c_int = 87;
pub const _SC_THREAD_PRIO_PROTECT : c_int = 88;
pub const _SC_THREAD_PRIORITY_SCHEDULING : c_int = 89;
pub const _SC_THREAD_PROCESS_SHARED : c_int = 90;
pub const _SC_THREAD_SAFE_FUNCTIONS : c_int = 91;
pub const _SC_THREAD_STACK_MIN : c_int = 93;
pub const _SC_THREAD_THREADS_MAX : c_int = 94;
pub const _SC_THREADS : c_int = 96;
pub const _SC_TTY_NAME_MAX : c_int = 101;
pub const _SC_ATEXIT_MAX : c_int = 107;
pub const _SC_XOPEN_CRYPT : c_int = 108;
pub const _SC_XOPEN_ENH_I18N : c_int = 109;
pub const _SC_XOPEN_LEGACY : c_int = 110;
pub const _SC_XOPEN_REALTIME : c_int = 111;
pub const _SC_XOPEN_REALTIME_THREADS : c_int = 112;
pub const _SC_XOPEN_SHM : c_int = 113;
pub const _SC_XOPEN_UNIX : c_int = 115;
pub const _SC_XOPEN_VERSION : c_int = 116;
pub const _SC_XOPEN_XCU_VERSION : c_int = 117;
pub const PTHREAD_CREATE_JOINABLE: c_int = 0;
pub const PTHREAD_CREATE_DETACHED: c_int = 1;
#[cfg(target_arch = "arm")]
pub const PTHREAD_STACK_MIN: size_t = 4096;
#[cfg(all(target_os = "freebsd",
any(target_arch = "mips",
target_arch = "mipsel",
target_arch = "x86",
target_arch = "x86_64")))]
pub const PTHREAD_STACK_MIN: size_t = 2048;
#[cfg(target_os = "dragonfly")]
pub const PTHREAD_STACK_MIN: size_t = 1024;
pub const CLOCK_REALTIME: c_int = 0;
pub const CLOCK_MONOTONIC: c_int = 4;
}
pub mod posix08 {
}
pub mod bsd44 {
use types::os::arch::c95::c_int;
pub const MADV_NORMAL : c_int = 0;
pub const MADV_RANDOM : c_int = 1;
pub const MADV_SEQUENTIAL : c_int = 2;
pub const MADV_WILLNEED : c_int = 3;
pub const MADV_DONTNEED : c_int = 4;
pub const MADV_FREE : c_int = 5;
pub const MADV_NOSYNC : c_int = 6;
pub const MADV_AUTOSYNC : c_int = 7;
pub const MADV_NOCORE : c_int = 8;
pub const MADV_CORE : c_int = 9;
pub const MADV_PROTECT : c_int = 10;
pub const MINCORE_INCORE : c_int = 0x1;
pub const MINCORE_REFERENCED : c_int = 0x2;
pub const MINCORE_MODIFIED : c_int = 0x4;
pub const MINCORE_REFERENCED_OTHER : c_int = 0x8;
pub const MINCORE_MODIFIED_OTHER : c_int = 0x10;
pub const MINCORE_SUPER : c_int = 0x20;
pub const AF_INET: c_int = 2;
pub const AF_INET6: c_int = 28;
pub const AF_UNIX: c_int = 1;
pub const SOCK_STREAM: c_int = 1;
pub const SOCK_DGRAM: c_int = 2;
pub const SOCK_RAW: c_int = 3;
pub const IPPROTO_TCP: c_int = 6;
pub const IPPROTO_IP: c_int = 0;
pub const IPPROTO_IPV6: c_int = 41;
pub const IP_MULTICAST_TTL: c_int = 10;
pub const IP_MULTICAST_LOOP: c_int = 11;
pub const IP_TTL: c_int = 4;
pub const IP_HDRINCL: c_int = 2;
pub const IP_ADD_MEMBERSHIP: c_int = 12;
pub const IP_DROP_MEMBERSHIP: c_int = 13;
pub const IPV6_ADD_MEMBERSHIP: c_int = 12;
pub const IPV6_DROP_MEMBERSHIP: c_int = 13;
pub const TCP_NODELAY: c_int = 1;
pub const TCP_KEEPIDLE: c_int = 256;
pub const SOL_SOCKET: c_int = 0xffff;
pub const SO_KEEPALIVE: c_int = 0x0008;
pub const SO_BROADCAST: c_int = 0x0020;
pub const SO_REUSEADDR: c_int = 0x0004;
pub const SO_ERROR: c_int = 0x1007;
pub const IFF_LOOPBACK: c_int = 0x8;
pub const SHUT_RD: c_int = 0;
pub const SHUT_WR: c_int = 1;
pub const SHUT_RDWR: c_int = 2;
}
pub mod extra {
use types::os::arch::c95::c_int;
pub const O_SYNC : c_int = 128;
pub const O_NONBLOCK : c_int = 4;
pub const CTL_KERN: c_int = 1;
pub const KERN_PROC: c_int = 14;
#[cfg(target_os = "freebsd")]
pub const KERN_PROC_PATHNAME: c_int = 12;
#[cfg(target_os = "dragonfly")]
pub const KERN_PROC_PATHNAME: c_int = 9;
pub const MAP_COPY : c_int = 0x0002;
pub const MAP_RENAME : c_int = 0x0020;
pub const MAP_NORESERVE : c_int = 0x0040;
pub const MAP_HASSEMAPHORE : c_int = 0x0200;
pub const MAP_STACK : c_int = 0x0400;
pub const MAP_NOSYNC : c_int = 0x0800;
pub const MAP_NOCORE : c_int = 0x020000;
pub const IPPROTO_RAW : c_int = 255;
}
pub mod sysconf {
use types::os::arch::c95::c_int;
pub const _SC_ARG_MAX : c_int = 1;
pub const _SC_CHILD_MAX : c_int = 2;
pub const _SC_CLK_TCK : c_int = 3;
pub const _SC_NGROUPS_MAX : c_int = 4;
pub const _SC_OPEN_MAX : c_int = 5;
pub const _SC_JOB_CONTROL : c_int = 6;
pub const _SC_SAVED_IDS : c_int = 7;
pub const _SC_VERSION : c_int = 8;
pub const _SC_BC_BASE_MAX : c_int = 9;
pub const _SC_BC_DIM_MAX : c_int = 10;
pub const _SC_BC_SCALE_MAX : c_int = 11;
pub const _SC_BC_STRING_MAX : c_int = 12;
pub const _SC_COLL_WEIGHTS_MAX : c_int = 13;
pub const _SC_EXPR_NEST_MAX : c_int = 14;
pub const _SC_LINE_MAX : c_int = 15;
pub const _SC_RE_DUP_MAX : c_int = 16;
pub const _SC_2_VERSION : c_int = 17;
pub const _SC_2_C_BIND : c_int = 18;
pub const _SC_2_C_DEV : c_int = 19;
pub const _SC_2_CHAR_TERM : c_int = 20;
pub const _SC_2_FORT_DEV : c_int = 21;
pub const _SC_2_FORT_RUN : c_int = 22;
pub const _SC_2_LOCALEDEF : c_int = 23;
pub const _SC_2_SW_DEV : c_int = 24;
pub const _SC_2_UPE : c_int = 25;
pub const _SC_STREAM_MAX : c_int = 26;
pub const _SC_TZNAME_MAX : c_int = 27;
pub const _SC_ASYNCHRONOUS_IO : c_int = 28;
pub const _SC_MAPPED_FILES : c_int = 29;
pub const _SC_MEMLOCK : c_int = 30;
pub const _SC_MEMLOCK_RANGE : c_int = 31;
pub const _SC_MEMORY_PROTECTION : c_int = 32;
pub const _SC_MESSAGE_PASSING : c_int = 33;
pub const _SC_PRIORITIZED_IO : c_int = 34;
pub const _SC_PRIORITY_SCHEDULING : c_int = 35;
pub const _SC_REALTIME_SIGNALS : c_int = 36;
pub const _SC_SEMAPHORES : c_int = 37;
pub const _SC_FSYNC : c_int = 38;
pub const _SC_SHARED_MEMORY_OBJECTS : c_int = 39;
pub const _SC_SYNCHRONIZED_IO : c_int = 40;
pub const _SC_TIMERS : c_int = 41;
pub const _SC_AIO_LISTIO_MAX : c_int = 42;
pub const _SC_AIO_MAX : c_int = 43;
pub const _SC_AIO_PRIO_DELTA_MAX : c_int = 44;
pub const _SC_DELAYTIMER_MAX : c_int = 45;
pub const _SC_MQ_OPEN_MAX : c_int = 46;
pub const _SC_PAGESIZE : c_int = 47;
pub const _SC_RTSIG_MAX : c_int = 48;
pub const _SC_SEM_NSEMS_MAX : c_int = 49;
pub const _SC_SEM_VALUE_MAX : c_int = 50;
pub const _SC_SIGQUEUE_MAX : c_int = 51;
pub const _SC_TIMER_MAX : c_int = 52;
}
}
#[cfg(any(target_os = "macos", target_os = "ios"))]
pub mod os {
pub mod c95 {
use types::os::arch::c95::{c_int, c_uint};
pub const EXIT_FAILURE : c_int = 1;
pub const EXIT_SUCCESS : c_int = 0;
pub const RAND_MAX : c_int = 2147483647;
pub const EOF : c_int = -1;
pub const SEEK_SET : c_int = 0;
pub const SEEK_CUR : c_int = 1;
pub const SEEK_END : c_int = 2;
pub const _IOFBF : c_int = 0;
pub const _IONBF : c_int = 2;
pub const _IOLBF : c_int = 1;
pub const BUFSIZ : c_uint = 1024_u32;
pub const FOPEN_MAX : c_uint = 20_u32;
pub const FILENAME_MAX : c_uint = 1024_u32;
pub const L_tmpnam : c_uint = 1024_u32;
pub const TMP_MAX : c_uint = 308915776_u32;
}
pub mod c99 {
}
pub mod posix88 {
use types::common::c95::c_void;
use types::os::arch::c95::c_int;
use types::os::arch::posix88::mode_t;
pub const O_RDONLY : c_int = 0;
pub const O_WRONLY : c_int = 1;
pub const O_RDWR : c_int = 2;
pub const O_APPEND : c_int = 8;
pub const O_CREAT : c_int = 512;
pub const O_EXCL : c_int = 2048;
pub const O_TRUNC : c_int = 1024;
pub const S_IFIFO : mode_t = 4096;
pub const S_IFCHR : mode_t = 8192;
pub const S_IFBLK : mode_t = 24576;
pub const S_IFDIR : mode_t = 16384;
pub const S_IFREG : mode_t = 32768;
pub const S_IFLNK : mode_t = 40960;
pub const S_IFMT : mode_t = 61440;
pub const S_IEXEC : mode_t = 64;
pub const S_IWRITE : mode_t = 128;
pub const S_IREAD : mode_t = 256;
pub const S_IRWXU : mode_t = 448;
pub const S_IXUSR : mode_t = 64;
pub const S_IWUSR : mode_t = 128;
pub const S_IRUSR : mode_t = 256;
pub const F_OK : c_int = 0;
pub const R_OK : c_int = 4;
pub const W_OK : c_int = 2;
pub const X_OK : c_int = 1;
pub const STDIN_FILENO : c_int = 0;
pub const STDOUT_FILENO : c_int = 1;
pub const STDERR_FILENO : c_int = 2;
pub const F_LOCK : c_int = 1;
pub const F_TEST : c_int = 3;
pub const F_TLOCK : c_int = 2;
pub const F_ULOCK : c_int = 0;
pub const SIGHUP : c_int = 1;
pub const SIGINT : c_int = 2;
pub const SIGQUIT : c_int = 3;
pub const SIGILL : c_int = 4;
pub const SIGABRT : c_int = 6;
pub const SIGFPE : c_int = 8;
pub const SIGKILL : c_int = 9;
pub const SIGSEGV : c_int = 11;
pub const SIGPIPE : c_int = 13;
pub const SIGALRM : c_int = 14;
pub const SIGTERM : c_int = 15;
pub const PROT_NONE : c_int = 0;
pub const PROT_READ : c_int = 1;
pub const PROT_WRITE : c_int = 2;
pub const PROT_EXEC : c_int = 4;
pub const MAP_FILE : c_int = 0x0000;
pub const MAP_SHARED : c_int = 0x0001;
pub const MAP_PRIVATE : c_int = 0x0002;
pub const MAP_FIXED : c_int = 0x0010;
pub const MAP_ANON : c_int = 0x1000;
pub const MAP_FAILED : *mut c_void = -1 as *mut c_void;
pub const MCL_CURRENT : c_int = 0x0001;
pub const MCL_FUTURE : c_int = 0x0002;
pub const MS_ASYNC : c_int = 0x0001;
pub const MS_INVALIDATE : c_int = 0x0002;
pub const MS_SYNC : c_int = 0x0010;
pub const MS_KILLPAGES : c_int = 0x0004;
pub const MS_DEACTIVATE : c_int = 0x0008;
pub const EPERM : c_int = 1;
pub const ENOENT : c_int = 2;
pub const ESRCH : c_int = 3;
pub const EINTR : c_int = 4;
pub const EIO : c_int = 5;
pub const ENXIO : c_int = 6;
pub const E2BIG : c_int = 7;
pub const ENOEXEC : c_int = 8;
pub const EBADF : c_int = 9;
pub const ECHILD : c_int = 10;
pub const EDEADLK : c_int = 11;
pub const ENOMEM : c_int = 12;
pub const EACCES : c_int = 13;
pub const EFAULT : c_int = 14;
pub const ENOTBLK : c_int = 15;
pub const EBUSY : c_int = 16;
pub const EEXIST : c_int = 17;
pub const EXDEV : c_int = 18;
pub const ENODEV : c_int = 19;
pub const ENOTDIR : c_int = 20;
pub const EISDIR : c_int = 21;
pub const EINVAL : c_int = 22;
pub const ENFILE : c_int = 23;
pub const EMFILE : c_int = 24;
pub const ENOTTY : c_int = 25;
pub const ETXTBSY : c_int = 26;
pub const EFBIG : c_int = 27;
pub const ENOSPC : c_int = 28;
pub const ESPIPE : c_int = 29;
pub const EROFS : c_int = 30;
pub const EMLINK : c_int = 31;
pub const EPIPE : c_int = 32;
pub const EDOM : c_int = 33;
pub const ERANGE : c_int = 34;
pub const EAGAIN : c_int = 35;
pub const EWOULDBLOCK : c_int = EAGAIN;
pub const EINPROGRESS : c_int = 36;
pub const EALREADY : c_int = 37;
pub const ENOTSOCK : c_int = 38;
pub const EDESTADDRREQ : c_int = 39;
pub const EMSGSIZE : c_int = 40;
pub const EPROTOTYPE : c_int = 41;
pub const ENOPROTOOPT : c_int = 42;
pub const EPROTONOSUPPORT : c_int = 43;
pub const ESOCKTNOSUPPORT : c_int = 44;
pub const ENOTSUP : c_int = 45;
pub const EPFNOSUPPORT : c_int = 46;
pub const EAFNOSUPPORT : c_int = 47;
pub const EADDRINUSE : c_int = 48;
pub const EADDRNOTAVAIL : c_int = 49;
pub const ENETDOWN : c_int = 50;
pub const ENETUNREACH : c_int = 51;
pub const ENETRESET : c_int = 52;
pub const ECONNABORTED : c_int = 53;
pub const ECONNRESET : c_int = 54;
pub const ENOBUFS : c_int = 55;
pub const EISCONN : c_int = 56;
pub const ENOTCONN : c_int = 57;
pub const ESHUTDOWN : c_int = 58;
pub const ETOOMANYREFS : c_int = 59;
pub const ETIMEDOUT : c_int = 60;
pub const ECONNREFUSED : c_int = 61;
pub const ELOOP : c_int = 62;
pub const ENAMETOOLONG : c_int = 63;
pub const EHOSTDOWN : c_int = 64;
pub const EHOSTUNREACH : c_int = 65;
pub const ENOTEMPTY : c_int = 66;
pub const EPROCLIM : c_int = 67;
pub const EUSERS : c_int = 68;
pub const EDQUOT : c_int = 69;
pub const ESTALE : c_int = 70;
pub const EREMOTE : c_int = 71;
pub const EBADRPC : c_int = 72;
pub const ERPCMISMATCH : c_int = 73;
pub const EPROGUNAVAIL : c_int = 74;
pub const EPROGMISMATCH : c_int = 75;
pub const EPROCUNAVAIL : c_int = 76;
pub const ENOLCK : c_int = 77;
pub const ENOSYS : c_int = 78;
pub const EFTYPE : c_int = 79;
pub const EAUTH : c_int = 80;
pub const ENEEDAUTH : c_int = 81;
pub const EPWROFF : c_int = 82;
pub const EDEVERR : c_int = 83;
pub const EOVERFLOW : c_int = 84;
pub const EBADEXEC : c_int = 85;
pub const EBADARCH : c_int = 86;
pub const ESHLIBVERS : c_int = 87;
pub const EBADMACHO : c_int = 88;
pub const ECANCELED : c_int = 89;
pub const EIDRM : c_int = 90;
pub const ENOMSG : c_int = 91;
pub const EILSEQ : c_int = 92;
pub const ENOATTR : c_int = 93;
pub const EBADMSG : c_int = 94;
pub const EMULTIHOP : c_int = 95;
pub const ENODATA : c_int = 96;
pub const ENOLINK : c_int = 97;
pub const ENOSR : c_int = 98;
pub const ENOSTR : c_int = 99;
pub const EPROTO : c_int = 100;
pub const ETIME : c_int = 101;
pub const EOPNOTSUPP : c_int = 102;
pub const ENOPOLICY : c_int = 103;
pub const ENOTRECOVERABLE : c_int = 104;
pub const EOWNERDEAD : c_int = 105;
pub const EQFULL : c_int = 106;
pub const ELAST : c_int = 106;
}
pub mod posix01 {
use types::os::arch::c95::{c_int, size_t};
pub const F_DUPFD : c_int = 0;
pub const F_GETFD : c_int = 1;
pub const F_SETFD : c_int = 2;
pub const F_GETFL : c_int = 3;
pub const F_SETFL : c_int = 4;
pub const SIGTRAP : c_int = 5;
pub const SIGPIPE: c_int = 13;
pub const SIG_IGN: size_t = 1;
pub const GLOB_APPEND : c_int = 0x0001;
pub const GLOB_DOOFFS : c_int = 0x0002;
pub const GLOB_ERR : c_int = 0x0004;
pub const GLOB_MARK : c_int = 0x0008;
pub const GLOB_NOCHECK : c_int = 0x0010;
pub const GLOB_NOSORT : c_int = 0x0020;
pub const GLOB_NOESCAPE : c_int = 0x2000;
pub const GLOB_NOSPACE : c_int = -1;
pub const GLOB_ABORTED : c_int = -2;
pub const GLOB_NOMATCH : c_int = -3;
pub const POSIX_MADV_NORMAL : c_int = 0;
pub const POSIX_MADV_RANDOM : c_int = 1;
pub const POSIX_MADV_SEQUENTIAL : c_int = 2;
pub const POSIX_MADV_WILLNEED : c_int = 3;
pub const POSIX_MADV_DONTNEED : c_int = 4;
pub const _SC_IOV_MAX : c_int = 56;
pub const _SC_GETGR_R_SIZE_MAX : c_int = 70;
pub const _SC_GETPW_R_SIZE_MAX : c_int = 71;
pub const _SC_LOGIN_NAME_MAX : c_int = 73;
pub const _SC_MQ_PRIO_MAX : c_int = 75;
pub const _SC_THREAD_ATTR_STACKADDR : c_int = 82;
pub const _SC_THREAD_ATTR_STACKSIZE : c_int = 83;
pub const _SC_THREAD_DESTRUCTOR_ITERATIONS : c_int = 85;
pub const _SC_THREAD_KEYS_MAX : c_int = 86;
pub const _SC_THREAD_PRIO_INHERIT : c_int = 87;
pub const _SC_THREAD_PRIO_PROTECT : c_int = 88;
pub const _SC_THREAD_PRIORITY_SCHEDULING : c_int = 89;
pub const _SC_THREAD_PROCESS_SHARED : c_int = 90;
pub const _SC_THREAD_SAFE_FUNCTIONS : c_int = 91;
pub const _SC_THREAD_STACK_MIN : c_int = 93;
pub const _SC_THREAD_THREADS_MAX : c_int = 94;
pub const _SC_THREADS : c_int = 96;
pub const _SC_TTY_NAME_MAX : c_int = 101;
pub const _SC_ATEXIT_MAX : c_int = 107;
pub const _SC_XOPEN_CRYPT : c_int = 108;
pub const _SC_XOPEN_ENH_I18N : c_int = 109;
pub const _SC_XOPEN_LEGACY : c_int = 110;
pub const _SC_XOPEN_REALTIME : c_int = 111;
pub const _SC_XOPEN_REALTIME_THREADS : c_int = 112;
pub const _SC_XOPEN_SHM : c_int = 113;
pub const _SC_XOPEN_UNIX : c_int = 115;
pub const _SC_XOPEN_VERSION : c_int = 116;
pub const _SC_XOPEN_XCU_VERSION : c_int = 121;
pub const PTHREAD_CREATE_JOINABLE: c_int = 1;
pub const PTHREAD_CREATE_DETACHED: c_int = 2;
pub const PTHREAD_STACK_MIN: size_t = 8192;
}
pub mod posix08 {
}
pub mod bsd44 {
use types::os::arch::c95::c_int;
pub const MADV_NORMAL : c_int = 0;
pub const MADV_RANDOM : c_int = 1;
pub const MADV_SEQUENTIAL : c_int = 2;
pub const MADV_WILLNEED : c_int = 3;
pub const MADV_DONTNEED : c_int = 4;
pub const MADV_FREE : c_int = 5;
pub const MADV_ZERO_WIRED_PAGES : c_int = 6;
pub const MADV_FREE_REUSABLE : c_int = 7;
pub const MADV_FREE_REUSE : c_int = 8;
pub const MADV_CAN_REUSE : c_int = 9;
pub const MINCORE_INCORE : c_int = 0x1;
pub const MINCORE_REFERENCED : c_int = 0x2;
pub const MINCORE_MODIFIED : c_int = 0x4;
pub const MINCORE_REFERENCED_OTHER : c_int = 0x8;
pub const MINCORE_MODIFIED_OTHER : c_int = 0x10;
pub const AF_UNIX: c_int = 1;
pub const AF_INET: c_int = 2;
pub const AF_INET6: c_int = 30;
pub const SOCK_STREAM: c_int = 1;
pub const SOCK_DGRAM: c_int = 2;
pub const SOCK_RAW: c_int = 3;
pub const IPPROTO_TCP: c_int = 6;
pub const IPPROTO_IP: c_int = 0;
pub const IPPROTO_IPV6: c_int = 41;
pub const IP_MULTICAST_TTL: c_int = 10;
pub const IP_MULTICAST_LOOP: c_int = 11;
pub const IP_TTL: c_int = 4;
pub const IP_HDRINCL: c_int = 2;
pub const IP_ADD_MEMBERSHIP: c_int = 12;
pub const IP_DROP_MEMBERSHIP: c_int = 13;
pub const IPV6_ADD_MEMBERSHIP: c_int = 12;
pub const IPV6_DROP_MEMBERSHIP: c_int = 13;
pub const TCP_NODELAY: c_int = 0x01;
pub const TCP_KEEPALIVE: c_int = 0x10;
pub const SOL_SOCKET: c_int = 0xffff;
pub const SO_KEEPALIVE: c_int = 0x0008;
pub const SO_BROADCAST: c_int = 0x0020;
pub const SO_REUSEADDR: c_int = 0x0004;
pub const SO_ERROR: c_int = 0x1007;
pub const IFF_LOOPBACK: c_int = 0x8;
pub const SHUT_RD: c_int = 0;
pub const SHUT_WR: c_int = 1;
pub const SHUT_RDWR: c_int = 2;
}
pub mod extra {
use types::os::arch::c95::c_int;
pub const O_DSYNC : c_int = 4194304;
pub const O_SYNC : c_int = 128;
pub const O_NONBLOCK : c_int = 4;
pub const F_FULLFSYNC : c_int = 51;
pub const MAP_COPY : c_int = 0x0002;
pub const MAP_RENAME : c_int = 0x0020;
pub const MAP_NORESERVE : c_int = 0x0040;
pub const MAP_NOEXTEND : c_int = 0x0100;
pub const MAP_HASSEMAPHORE : c_int = 0x0200;
pub const MAP_NOCACHE : c_int = 0x0400;
pub const MAP_JIT : c_int = 0x0800;
pub const MAP_STACK : c_int = 0;
pub const IPPROTO_RAW : c_int = 255;
}
pub mod sysconf {
use types::os::arch::c95::c_int;
pub const _SC_ARG_MAX : c_int = 1;
pub const _SC_CHILD_MAX : c_int = 2;
pub const _SC_CLK_TCK : c_int = 3;
pub const _SC_NGROUPS_MAX : c_int = 4;
pub const _SC_OPEN_MAX : c_int = 5;
pub const _SC_JOB_CONTROL : c_int = 6;
pub const _SC_SAVED_IDS : c_int = 7;
pub const _SC_VERSION : c_int = 8;
pub const _SC_BC_BASE_MAX : c_int = 9;
pub const _SC_BC_DIM_MAX : c_int = 10;
pub const _SC_BC_SCALE_MAX : c_int = 11;
pub const _SC_BC_STRING_MAX : c_int = 12;
pub const _SC_COLL_WEIGHTS_MAX : c_int = 13;
pub const _SC_EXPR_NEST_MAX : c_int = 14;
pub const _SC_LINE_MAX : c_int = 15;
pub const _SC_RE_DUP_MAX : c_int = 16;
pub const _SC_2_VERSION : c_int = 17;
pub const _SC_2_C_BIND : c_int = 18;
pub const _SC_2_C_DEV : c_int = 19;
pub const _SC_2_CHAR_TERM : c_int = 20;
pub const _SC_2_FORT_DEV : c_int = 21;
pub const _SC_2_FORT_RUN : c_int = 22;
pub const _SC_2_LOCALEDEF : c_int = 23;
pub const _SC_2_SW_DEV : c_int = 24;
pub const _SC_2_UPE : c_int = 25;
pub const _SC_STREAM_MAX : c_int = 26;
pub const _SC_TZNAME_MAX : c_int = 27;
pub const _SC_ASYNCHRONOUS_IO : c_int = 28;
pub const _SC_PAGESIZE : c_int = 29;
pub const _SC_MEMLOCK : c_int = 30;
pub const _SC_MEMLOCK_RANGE : c_int = 31;
pub const _SC_MEMORY_PROTECTION : c_int = 32;
pub const _SC_MESSAGE_PASSING : c_int = 33;
pub const _SC_PRIORITIZED_IO : c_int = 34;
pub const _SC_PRIORITY_SCHEDULING : c_int = 35;
pub const _SC_REALTIME_SIGNALS : c_int = 36;
pub const _SC_SEMAPHORES : c_int = 37;
pub const _SC_FSYNC : c_int = 38;
pub const _SC_SHARED_MEMORY_OBJECTS : c_int = 39;
pub const _SC_SYNCHRONIZED_IO : c_int = 40;
pub const _SC_TIMERS : c_int = 41;
pub const _SC_AIO_LISTIO_MAX : c_int = 42;
pub const _SC_AIO_MAX : c_int = 43;
pub const _SC_AIO_PRIO_DELTA_MAX : c_int = 44;
pub const _SC_DELAYTIMER_MAX : c_int = 45;
pub const _SC_MQ_OPEN_MAX : c_int = 46;
pub const _SC_MAPPED_FILES : c_int = 47;
pub const _SC_RTSIG_MAX : c_int = 48;
pub const _SC_SEM_NSEMS_MAX : c_int = 49;
pub const _SC_SEM_VALUE_MAX : c_int = 50;
pub const _SC_SIGQUEUE_MAX : c_int = 51;
pub const _SC_TIMER_MAX : c_int = 52;
pub const _SC_XBS5_ILP32_OFF32 : c_int = 122;
pub const _SC_XBS5_ILP32_OFFBIG : c_int = 123;
pub const _SC_XBS5_LP64_OFF64 : c_int = 124;
pub const _SC_XBS5_LPBIG_OFFBIG : c_int = 125;
}
}
}
pub mod funcs {
// Thankfully most of c95 is universally available and does not vary by OS
// or anything. The same is not true of POSIX.
pub mod c95 {
pub mod ctype {
use types::os::arch::c95::{c_char, c_int};
extern {
pub fn isalnum(c: c_int) -> c_int;
pub fn isalpha(c: c_int) -> c_int;
pub fn iscntrl(c: c_int) -> c_int;
pub fn isdigit(c: c_int) -> c_int;
pub fn isgraph(c: c_int) -> c_int;
pub fn islower(c: c_int) -> c_int;
pub fn isprint(c: c_int) -> c_int;
pub fn ispunct(c: c_int) -> c_int;
pub fn isspace(c: c_int) -> c_int;
pub fn isupper(c: c_int) -> c_int;
pub fn isxdigit(c: c_int) -> c_int;
pub fn tolower(c: c_char) -> c_char;
pub fn toupper(c: c_char) -> c_char;
}
}
pub mod stdio {
use types::common::c95::{FILE, c_void, fpos_t};
use types::os::arch::c95::{c_char, c_int, c_long, size_t};
extern {
pub fn fopen(filename: *const c_char,
mode: *const c_char) -> *mut FILE;
pub fn freopen(filename: *const c_char, mode: *const c_char,
file: *mut FILE)
-> *mut FILE;
pub fn fflush(file: *mut FILE) -> c_int;
pub fn fclose(file: *mut FILE) -> c_int;
pub fn remove(filename: *const c_char) -> c_int;
pub fn rename(oldname: *const c_char,
newname: *const c_char) -> c_int;
pub fn tmpfile() -> *mut FILE;
pub fn setvbuf(stream: *mut FILE,
buffer: *mut c_char,
mode: c_int,
size: size_t)
-> c_int;
pub fn setbuf(stream: *mut FILE, buf: *mut c_char);
// Omitted: printf and scanf variants.
pub fn fgetc(stream: *mut FILE) -> c_int;
pub fn fgets(buf: *mut c_char, n: c_int, stream: *mut FILE)
-> *mut c_char;
pub fn fputc(c: c_int, stream: *mut FILE) -> c_int;
pub fn fputs(s: *const c_char, stream: *mut FILE)-> c_int;
// Omitted: getc, getchar (might be macros).
// Omitted: gets, so ridiculously unsafe that it should not
// survive.
// Omitted: putc, putchar (might be macros).
pub fn puts(s: *const c_char) -> c_int;
pub fn ungetc(c: c_int, stream: *mut FILE) -> c_int;
pub fn fread(ptr: *mut c_void,
size: size_t,
nobj: size_t,
stream: *mut FILE)
-> size_t;
pub fn fwrite(ptr: *const c_void,
size: size_t,
nobj: size_t,
stream: *mut FILE)
-> size_t;
pub fn fseek(stream: *mut FILE, offset: c_long, whence: c_int)
-> c_int;
pub fn ftell(stream: *mut FILE) -> c_long;
pub fn rewind(stream: *mut FILE);
pub fn fgetpos(stream: *mut FILE, ptr: *mut fpos_t) -> c_int;
pub fn fsetpos(stream: *mut FILE, ptr: *mut fpos_t) -> c_int;
pub fn feof(stream: *mut FILE) -> c_int;
pub fn ferror(stream: *mut FILE) -> c_int;
pub fn perror(s: *const c_char);
}
}
pub mod stdlib {
use types::common::c95::c_void;
use types::os::arch::c95::{c_char, c_double, c_int};
use types::os::arch::c95::{c_long, c_uint, c_ulong};
use types::os::arch::c95::{size_t};
extern {
pub fn abs(i: c_int) -> c_int;
pub fn labs(i: c_long) -> c_long;
// Omitted: div, ldiv (return pub type incomplete).
pub fn atof(s: *const c_char) -> c_double;
pub fn atoi(s: *const c_char) -> c_int;
pub fn strtod(s: *const c_char,
endp: *mut *mut c_char) -> c_double;
pub fn strtol(s: *const c_char,
endp: *mut *mut c_char, base: c_int) -> c_long;
pub fn strtoul(s: *const c_char, endp: *mut *mut c_char,
base: c_int) -> c_ulong;
pub fn calloc(nobj: size_t, size: size_t) -> *mut c_void;
pub fn malloc(size: size_t) -> *mut c_void;
pub fn realloc(p: *mut c_void, size: size_t) -> *mut c_void;
pub fn free(p: *mut c_void);
pub fn exit(status: c_int) -> !;
pub fn _exit(status: c_int) -> !;
// Omitted: atexit.
pub fn system(s: *const c_char) -> c_int;
pub fn getenv(s: *const c_char) -> *mut c_char;
// Omitted: bsearch, qsort
pub fn rand() -> c_int;
pub fn srand(seed: c_uint);
}
}
pub mod string {
use types::common::c95::c_void;
use types::os::arch::c95::{c_char, c_int, size_t};
use types::os::arch::c95::{wchar_t};
extern {
pub fn strcpy(dst: *mut c_char,
src: *const c_char) -> *mut c_char;
pub fn strncpy(dst: *mut c_char, src: *const c_char, n: size_t)
-> *mut c_char;
pub fn strcat(s: *mut c_char, ct: *const c_char) -> *mut c_char;
pub fn strncat(s: *mut c_char, ct: *const c_char,
n: size_t) -> *mut c_char;
pub fn strcmp(cs: *const c_char, ct: *const c_char) -> c_int;
pub fn strncmp(cs: *const c_char, ct: *const c_char,
n: size_t) -> c_int;
pub fn strcoll(cs: *const c_char, ct: *const c_char) -> c_int;
pub fn strchr(cs: *const c_char, c: c_int) -> *mut c_char;
pub fn strrchr(cs: *const c_char, c: c_int) -> *mut c_char;
pub fn strspn(cs: *const c_char, ct: *const c_char) -> size_t;
pub fn strcspn(cs: *const c_char, ct: *const c_char) -> size_t;
pub fn strpbrk(cs: *const c_char,
ct: *const c_char) -> *mut c_char;
pub fn strstr(cs: *const c_char,
ct: *const c_char) -> *mut c_char;
pub fn strlen(cs: *const c_char) -> size_t;
pub fn strerror(n: c_int) -> *mut c_char;
pub fn strtok(s: *mut c_char, t: *const c_char) -> *mut c_char;
pub fn strxfrm(s: *mut c_char, ct: *const c_char,
n: size_t) -> size_t;
pub fn wcslen(buf: *const wchar_t) -> size_t;
// Omitted: memcpy, memmove, memset (provided by LLVM)
// These are fine to execute on the Rust stack. They must be,
// in fact, because LLVM generates calls to them!
pub fn memcmp(cx: *const c_void, ct: *const c_void,
n: size_t) -> c_int;
pub fn memchr(cx: *const c_void, c: c_int,
n: size_t) -> *mut c_void;
}
}
}
// Microsoft helpfully underscore-qualifies all of its POSIX-like symbols
// to make sure you don't use them accidentally. It also randomly deviates
// from the exact signatures you might otherwise expect, and omits much,
// so be careful when trying to write portable code; it won't always work
// with the same POSIX functions and types as other platforms.
#[cfg(target_os = "windows")]
pub mod posix88 {
pub mod stat_ {
use types::os::common::posix01::{stat, utimbuf};
use types::os::arch::c95::{c_int, c_char, wchar_t};
extern {
#[link_name = "_chmod"]
pub fn chmod(path: *const c_char, mode: c_int) -> c_int;
#[link_name = "_wchmod"]
pub fn wchmod(path: *const wchar_t, mode: c_int) -> c_int;
#[link_name = "_mkdir"]
pub fn mkdir(path: *const c_char) -> c_int;
#[link_name = "_wrmdir"]
pub fn wrmdir(path: *const wchar_t) -> c_int;
#[link_name = "_fstat64"]
pub fn fstat(fildes: c_int, buf: *mut stat) -> c_int;
#[link_name = "_stat64"]
pub fn stat(path: *const c_char, buf: *mut stat) -> c_int;
#[link_name = "_wstat64"]
pub fn wstat(path: *const wchar_t, buf: *mut stat) -> c_int;
#[link_name = "_wutime64"]
pub fn wutime(file: *const wchar_t, buf: *mut utimbuf) -> c_int;
}
}
pub mod stdio {
use types::common::c95::FILE;
use types::os::arch::c95::{c_int, c_char};
extern {
#[link_name = "_popen"]
pub fn popen(command: *const c_char,
mode: *const c_char) -> *mut FILE;
#[link_name = "_pclose"]
pub fn pclose(stream: *mut FILE) -> c_int;
#[link_name = "_fdopen"]
pub fn fdopen(fd: c_int, mode: *const c_char) -> *mut FILE;
#[link_name = "_fileno"]
pub fn fileno(stream: *mut FILE) -> c_int;
}
}
pub mod fcntl {
use types::os::arch::c95::{c_int, c_char, wchar_t};
extern {
#[link_name = "_open"]
pub fn open(path: *const c_char, oflag: c_int, mode: c_int)
-> c_int;
#[link_name = "_wopen"]
pub fn wopen(path: *const wchar_t, oflag: c_int, mode: c_int)
-> c_int;
#[link_name = "_creat"]
pub fn creat(path: *const c_char, mode: c_int) -> c_int;
}
}
pub mod dirent {
// Not supplied at all.
}
pub mod unistd {
use types::common::c95::c_void;
use types::os::arch::c95::{c_int, c_uint, c_char,
c_long, size_t};
use types::os::arch::c99::intptr_t;
extern {
#[link_name = "_access"]
pub fn access(path: *const c_char, amode: c_int) -> c_int;
#[link_name = "_chdir"]
pub fn chdir(dir: *const c_char) -> c_int;
#[link_name = "_close"]
pub fn close(fd: c_int) -> c_int;
#[link_name = "_dup"]
pub fn dup(fd: c_int) -> c_int;
#[link_name = "_dup2"]
pub fn dup2(src: c_int, dst: c_int) -> c_int;
#[link_name = "_execv"]
pub fn execv(prog: *const c_char,
argv: *mut *const c_char) -> intptr_t;
#[link_name = "_execve"]
pub fn execve(prog: *const c_char, argv: *mut *const c_char,
envp: *mut *const c_char)
-> c_int;
#[link_name = "_execvp"]
pub fn execvp(c: *const c_char,
argv: *mut *const c_char) -> c_int;
#[link_name = "_execvpe"]
pub fn execvpe(c: *const c_char, argv: *mut *const c_char,
envp: *mut *const c_char) -> c_int;
#[link_name = "_getcwd"]
pub fn getcwd(buf: *mut c_char, size: size_t) -> *mut c_char;
#[link_name = "_getpid"]
pub fn getpid() -> c_int;
#[link_name = "_isatty"]
pub fn isatty(fd: c_int) -> c_int;
#[link_name = "_lseek"]
pub fn lseek(fd: c_int, offset: c_long, origin: c_int)
-> c_long;
#[link_name = "_pipe"]
pub fn pipe(fds: *mut c_int, psize: c_uint, textmode: c_int)
-> c_int;
#[link_name = "_read"]
pub fn read(fd: c_int, buf: *mut c_void, count: c_uint)
-> c_int;
#[link_name = "_rmdir"]
pub fn rmdir(path: *const c_char) -> c_int;
#[link_name = "_unlink"]
pub fn unlink(c: *const c_char) -> c_int;
#[link_name = "_write"]
pub fn write(fd: c_int, buf: *const c_void,
count: c_uint) -> c_int;
}
}
pub mod mman {
}
}
#[cfg(any(target_os = "linux",
target_os = "android",
target_os = "macos",
target_os = "ios",
target_os = "freebsd",
target_os = "dragonfly"))]
pub mod posix88 {
pub mod stat_ {
use types::os::arch::c95::{c_char, c_int};
use types::os::arch::posix01::stat;
use types::os::arch::posix88::mode_t;
extern {
pub fn chmod(path: *const c_char, mode: mode_t) -> c_int;
pub fn fchmod(fd: c_int, mode: mode_t) -> c_int;
#[cfg(any(target_os = "linux",
target_os = "freebsd",
target_os = "dragonfly",
target_os = "android",
target_os = "ios"))]
pub fn fstat(fildes: c_int, buf: *mut stat) -> c_int;
#[cfg(target_os = "macos")]
#[link_name = "fstat64"]
pub fn fstat(fildes: c_int, buf: *mut stat) -> c_int;
pub fn mkdir(path: *const c_char, mode: mode_t) -> c_int;
pub fn mkfifo(path: *const c_char, mode: mode_t) -> c_int;
#[cfg(any(target_os = "linux",
target_os = "freebsd",
target_os = "dragonfly",
target_os = "android",
target_os = "ios"))]
pub fn stat(path: *const c_char, buf: *mut stat) -> c_int;
#[cfg(target_os = "macos")]
#[link_name = "stat64"]
pub fn stat(path: *const c_char, buf: *mut stat) -> c_int;
}
}
pub mod stdio {
use types::common::c95::FILE;
use types::os::arch::c95::{c_char, c_int};
extern {
pub fn popen(command: *const c_char,
mode: *const c_char) -> *mut FILE;
pub fn pclose(stream: *mut FILE) -> c_int;
pub fn fdopen(fd: c_int, mode: *const c_char) -> *mut FILE;
pub fn fileno(stream: *mut FILE) -> c_int;
}
}
pub mod fcntl {
use types::os::arch::c95::{c_char, c_int};
use types::os::arch::posix88::mode_t;
extern {
pub fn open(path: *const c_char, oflag: c_int, mode: mode_t)
-> c_int;
pub fn creat(path: *const c_char, mode: mode_t) -> c_int;
pub fn fcntl(fd: c_int, cmd: c_int, ...) -> c_int;
}
}
pub mod dirent {
use types::common::posix88::{DIR, dirent_t};
use types::os::arch::c95::{c_char, c_int, c_long};
// NB: On OS X opendir and readdir have two versions,
// one for 32-bit kernelspace and one for 64.
// We should be linking to the 64-bit ones, called
// opendir$INODE64, etc. but for some reason rustc
// doesn't link it correctly on i686, so we're going
// through a C function that mysteriously does work.
extern {
#[link_name="rust_opendir"]
pub fn opendir(dirname: *const c_char) -> *mut DIR;
#[link_name="rust_readdir_r"]
pub fn readdir_r(dirp: *mut DIR, entry: *mut dirent_t,
result: *mut *mut dirent_t) -> c_int;
}
extern {
pub fn closedir(dirp: *mut DIR) -> c_int;
pub fn rewinddir(dirp: *mut DIR);
pub fn seekdir(dirp: *mut DIR, loc: c_long);
pub fn telldir(dirp: *mut DIR) -> c_long;
}
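// Illustrative note (added commentary, not from the original source): because
// of the `link_name` shims above, unsafe callers keep using the plain libc
// names, e.g.
//     let dir = opendir(path_cstr);              // resolves to rust_opendir
//     readdir_r(dir, &mut entry, &mut result);   // resolves to rust_readdir_r
//     closedir(dir);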
}
pub mod unistd {
use types::common::c95::c_void;
use types::os::arch::c95::{c_char, c_int, c_long, c_uint};
use types::os::arch::c95::{size_t};
use types::os::common::posix01::timespec;
use types::os::arch::posix01::utimbuf;
use types::os::arch::posix88::{gid_t, off_t, pid_t};
use types::os::arch::posix88::{ssize_t, uid_t};
pub const _PC_NAME_MAX: c_int = 4;
extern {
pub fn access(path: *const c_char, amode: c_int) -> c_int;
pub fn alarm(seconds: c_uint) -> c_uint;
pub fn chdir(dir: *const c_char) -> c_int;
pub fn chown(path: *const c_char, uid: uid_t,
gid: gid_t) -> c_int;
pub fn close(fd: c_int) -> c_int;
pub fn dup(fd: c_int) -> c_int;
pub fn dup2(src: c_int, dst: c_int) -> c_int;
pub fn execv(prog: *const c_char,
argv: *mut *const c_char) -> c_int;
pub fn execve(prog: *const c_char, argv: *mut *const c_char,
envp: *mut *const c_char)
-> c_int;
pub fn execvp(c: *const c_char,
argv: *mut *const c_char) -> c_int;
pub fn fork() -> pid_t;
pub fn fpathconf(filedes: c_int, name: c_int) -> c_long;
pub fn getcwd(buf: *mut c_char, size: size_t) -> *mut c_char;
pub fn getegid() -> gid_t;
pub fn geteuid() -> uid_t;
pub fn getgid() -> gid_t ;
pub fn getgroups(ngroups_max: c_int, groups: *mut gid_t)
-> c_int;
pub fn getlogin() -> *mut c_char;
pub fn getopt(argc: c_int, argv: *mut *const c_char,
optstr: *const c_char) -> c_int;
pub fn getpgrp() -> pid_t;
pub fn getpid() -> pid_t;
pub fn getppid() -> pid_t;
pub fn getuid() -> uid_t;
pub fn getsid(pid: pid_t) -> pid_t;
pub fn isatty(fd: c_int) -> c_int;
pub fn link(src: *const c_char, dst: *const c_char) -> c_int;
pub fn lseek(fd: c_int, offset: off_t, whence: c_int)
-> off_t;
pub fn pathconf(path: *mut c_char, name: c_int) -> c_long;
pub fn pause() -> c_int;
pub fn pipe(fds: *mut c_int) -> c_int;
pub fn read(fd: c_int, buf: *mut c_void, count: size_t)
-> ssize_t;
pub fn rmdir(path: *const c_char) -> c_int;
pub fn setgid(gid: gid_t) -> c_int;
pub fn setpgid(pid: pid_t, pgid: pid_t) -> c_int;
pub fn setsid() -> pid_t;
pub fn setuid(uid: uid_t) -> c_int;
pub fn sleep(secs: c_uint) -> c_uint;
pub fn usleep(secs: c_uint) -> c_int;
pub fn nanosleep(rqtp: *const timespec,
rmtp: *mut timespec) -> c_int;
pub fn sysconf(name: c_int) -> c_long;
pub fn tcgetpgrp(fd: c_int) -> pid_t;
pub fn ttyname(fd: c_int) -> *mut c_char;
pub fn unlink(c: *const c_char) -> c_int;
pub fn write(fd: c_int, buf: *const c_void, count: size_t)
-> ssize_t;
pub fn pread(fd: c_int, buf: *mut c_void, count: size_t,
offset: off_t) -> ssize_t;
pub fn pwrite(fd: c_int, buf: *const c_void, count: size_t,
offset: off_t) -> ssize_t;
pub fn utime(file: *const c_char, buf: *const utimbuf) -> c_int;
}
}
pub mod signal {
use types::os::arch::c95::{c_int};
use types::os::arch::posix88::{pid_t};
extern {
pub fn kill(pid: pid_t, sig: c_int) -> c_int;
}
}
pub mod mman {
use types::common::c95::{c_void};
use types::os::arch::c95::{size_t, c_int, c_char};
use types::os::arch::posix88::{mode_t, off_t};
extern {
pub fn mlock(addr: *const c_void, len: size_t) -> c_int;
pub fn munlock(addr: *const c_void, len: size_t) -> c_int;
pub fn mlockall(flags: c_int) -> c_int;
pub fn munlockall() -> c_int;
pub fn mmap(addr: *mut c_void,
len: size_t,
prot: c_int,
flags: c_int,
fd: c_int,
offset: off_t)
-> *mut c_void;
pub fn munmap(addr: *mut c_void, len: size_t) -> c_int;
pub fn mprotect(addr: *mut c_void, len: size_t, prot: c_int)
-> c_int;
pub fn msync(addr: *mut c_void, len: size_t, flags: c_int)
-> c_int;
pub fn shm_open(name: *const c_char, oflag: c_int, mode: mode_t)
-> c_int;
pub fn shm_unlink(name: *const c_char) -> c_int;
}
}
pub mod net {
use types::os::arch::c95::{c_char, c_uint};
extern {
pub fn if_nametoindex(ifname: *const c_char) -> c_uint;
}
}
}
#[cfg(any(target_os = "linux",
target_os = "android",
target_os = "macos",
target_os = "ios",
target_os = "freebsd",
target_os = "dragonfly"))]
pub mod posix01 {
pub mod stat_ {
use types::os::arch::c95::{c_char, c_int};
use types::os::arch::posix01::stat;
extern {
#[cfg(any(target_os = "linux",
target_os = "freebsd",
target_os = "dragonfly",
target_os = "android",
target_os = "ios"))]
pub fn lstat(path: *const c_char, buf: *mut stat) -> c_int;
#[cfg(target_os = "macos")]
#[link_name = "lstat64"]
pub fn lstat(path: *const c_char, buf: *mut stat) -> c_int;
}
}
pub mod unistd {
use types::os::arch::c95::{c_char, c_int, size_t};
use types::os::arch::posix88::{ssize_t, off_t};
extern {
pub fn readlink(path: *const c_char,
buf: *mut c_char,
bufsz: size_t)
-> ssize_t;
pub fn fsync(fd: c_int) -> c_int;
#[cfg(any(target_os = "linux", target_os = "android"))]
pub fn fdatasync(fd: c_int) -> c_int;
pub fn setenv(name: *const c_char, val: *const c_char,
overwrite: c_int) -> c_int;
pub fn unsetenv(name: *const c_char) -> c_int;
pub fn putenv(string: *mut c_char) -> c_int;
pub fn symlink(path1: *const c_char,
path2: *const c_char) -> c_int;
pub fn ftruncate(fd: c_int, length: off_t) -> c_int;
}
}
pub mod signal {
use types::os::arch::c95::c_int;
use types::os::common::posix01::sighandler_t;
#[cfg(not(target_os = "android"))]
extern {
pub fn signal(signum: c_int,
handler: sighandler_t) -> sighandler_t;
}
#[cfg(target_os = "android")]
extern {
#[link_name = "bsd_signal"]
pub fn signal(signum: c_int,
handler: sighandler_t) -> sighandler_t;
}
}
pub mod glob {
use types::os::arch::c95::{c_char, c_int};
use types::os::common::posix01::{glob_t};
extern {
pub fn glob(pattern: *const c_char,
flags: c_int,
errfunc: ::Nullable<extern "C" fn(epath: *const c_char,
errno: c_int) -> c_int>,
pglob: *mut glob_t);
pub fn globfree(pglob: *mut glob_t);
}
}
pub mod mman {
use types::common::c95::{c_void};
use types::os::arch::c95::{c_int, size_t};
extern {
pub fn posix_madvise(addr: *mut c_void,
len: size_t,
advice: c_int)
-> c_int;
}
}
}
#[cfg(target_os = "windows")]
pub mod posix01 {
pub mod stat_ {
}
pub mod unistd {
}
pub mod glob {
}
pub mod mman {
}
pub mod net {
}
}
#[cfg(any(target_os = "windows",
target_os = "linux",
target_os = "android",
target_os = "macos",
target_os = "ios",
target_os = "freebsd",
target_os = "dragonfly"))]
pub mod posix08 {
pub mod unistd {
}
}
#[cfg(not(windows))]
pub mod bsd43 {
use types::common::c95::{c_void};
use types::os::common::bsd44::{socklen_t, sockaddr, ifaddrs};
use types::os::arch::c95::{c_int, size_t};
use types::os::arch::posix88::ssize_t;
extern "system" {
pub fn socket(domain: c_int, ty: c_int, protocol: c_int) -> c_int;
pub fn connect(socket: c_int, address: *const sockaddr,
len: socklen_t) -> c_int;
pub fn bind(socket: c_int, address: *const sockaddr,
address_len: socklen_t) -> c_int;
pub fn listen(socket: c_int, backlog: c_int) -> c_int;
pub fn accept(socket: c_int, address: *mut sockaddr,
address_len: *mut socklen_t) -> c_int;
pub fn getpeername(socket: c_int, address: *mut sockaddr,
address_len: *mut socklen_t) -> c_int;
pub fn getsockname(socket: c_int, address: *mut sockaddr,
address_len: *mut socklen_t) -> c_int;
pub fn setsockopt(socket: c_int, level: c_int, name: c_int,
value: *const c_void,
option_len: socklen_t) -> c_int;
pub fn recv(socket: c_int, buf: *mut c_void, len: size_t,
flags: c_int) -> ssize_t;
pub fn send(socket: c_int, buf: *const c_void, len: size_t,
flags: c_int) -> ssize_t;
pub fn recvfrom(socket: c_int, buf: *mut c_void, len: size_t,
flags: c_int, addr: *mut sockaddr,
addrlen: *mut socklen_t) -> ssize_t;
pub fn sendto(socket: c_int, buf: *const c_void, len: size_t,
flags: c_int, addr: *const sockaddr,
addrlen: socklen_t) -> ssize_t;
pub fn getifaddrs(ifap: *mut *mut ifaddrs) -> c_int;
pub fn freeifaddrs(ifa: *mut ifaddrs);
pub fn shutdown(socket: c_int, how: c_int) -> c_int;
}
}
#[cfg(windows)]
pub mod bsd43 {
use types::common::c95::{c_void};
use types::os::common::bsd44::{socklen_t, sockaddr, SOCKET};
use types::os::arch::c95::c_int;
use types::os::arch::posix88::ssize_t;
extern "system" {
pub fn socket(domain: c_int, ty: c_int, protocol: c_int) -> SOCKET;
pub fn connect(socket: SOCKET, address: *const sockaddr,
len: socklen_t) -> c_int;
pub fn bind(socket: SOCKET, address: *const sockaddr,
address_len: socklen_t) -> c_int;
pub fn listen(socket: SOCKET, backlog: c_int) -> c_int;
pub fn accept(socket: SOCKET, address: *mut sockaddr,
address_len: *mut socklen_t) -> SOCKET;
pub fn getpeername(socket: SOCKET, address: *mut sockaddr,
address_len: *mut socklen_t) -> c_int;
pub fn getsockname(socket: SOCKET, address: *mut sockaddr,
address_len: *mut socklen_t) -> c_int;
pub fn setsockopt(socket: SOCKET, level: c_int, name: c_int,
value: *const c_void,
option_len: socklen_t) -> c_int;
pub fn closesocket(socket: SOCKET) -> c_int;
pub fn recv(socket: SOCKET, buf: *mut c_void, len: c_int,
flags: c_int) -> c_int;
pub fn send(socket: SOCKET, buf: *const c_void, len: c_int,
flags: c_int) -> c_int;
pub fn recvfrom(socket: SOCKET, buf: *mut c_void, len: c_int,
flags: c_int, addr: *mut sockaddr,
addrlen: *mut c_int) -> ssize_t;
pub fn sendto(socket: SOCKET, buf: *const c_void, len: c_int,
flags: c_int, addr: *const sockaddr,
addrlen: c_int) -> c_int;
pub fn shutdown(socket: SOCKET, how: c_int) -> c_int;
}
}
#[cfg(any(target_os = "macos",
target_os = "ios",
target_os = "freebsd",
target_os = "dragonfly"))]
pub mod bsd44 {
use types::common::c95::{c_void};
use types::os::arch::c95::{c_char, c_uchar, c_int, c_uint, c_ulong, size_t};
extern {
pub fn ioctl(d: c_int, request: c_ulong, ...) -> c_int;
pub fn sysctl(name: *mut c_int,
namelen: c_uint,
oldp: *mut c_void,
oldlenp: *mut size_t,
newp: *mut c_void,
newlen: size_t)
-> c_int;
pub fn sysctlbyname(name: *const c_char,
oldp: *mut c_void,
oldlenp: *mut size_t,
newp: *mut c_void,
newlen: size_t)
-> c_int;
pub fn sysctlnametomib(name: *const c_char,
mibp: *mut c_int,
sizep: *mut size_t)
-> c_int;
pub fn getdtablesize() -> c_int;
pub fn madvise(addr: *mut c_void, len: size_t, advice: c_int)
-> c_int;
pub fn mincore(addr: *mut c_void, len: size_t, vec: *mut c_uchar)
-> c_int;
}
}
#[cfg(any(target_os = "linux", target_os = "android"))]
pub mod bsd44 {
use types::common::c95::{c_void};
use types::os::arch::c95::{c_uchar, c_int, size_t};
extern {
pub fn getdtablesize() -> c_int;
pub fn ioctl(d: c_int, request: c_int, ...) -> c_int;
pub fn madvise(addr: *mut c_void, len: size_t, advice: c_int)
-> c_int;
pub fn mincore(addr: *mut c_void, len: size_t, vec: *mut c_uchar)
-> c_int;
}
}
#[cfg(target_os = "windows")]
pub mod bsd44 {
}
#[cfg(any(target_os = "macos", target_os = "ios"))]
pub mod extra {
use types::os::arch::c95::{c_char, c_int};
extern {
pub fn _NSGetExecutablePath(buf: *mut c_char, bufsize: *mut u32)
-> c_int;
}
}
#[cfg(any(target_os = "freebsd", target_os = "dragonfly"))]
pub mod extra {
}
#[cfg(any(target_os = "linux", target_os = "android"))]
pub mod extra {
}
#[cfg(target_os = "windows")]
pub mod extra {
pub mod kernel32 {
use types::os::arch::c95::{c_uint};
use types::os::arch::extra::{BOOL, DWORD, SIZE_T, HMODULE,
LPCWSTR, LPWSTR,
LPWCH, LPDWORD, LPVOID,
LPCVOID, LPOVERLAPPED,
LPSECURITY_ATTRIBUTES,
LPSTARTUPINFO,
LPPROCESS_INFORMATION,
LPMEMORY_BASIC_INFORMATION,
LPSYSTEM_INFO, HANDLE, LPHANDLE,
LARGE_INTEGER, PLARGE_INTEGER,
LPFILETIME, LPWIN32_FIND_DATAW};
extern "system" {
pub fn GetEnvironmentVariableW(n: LPCWSTR,
v: LPWSTR,
nsize: DWORD)
-> DWORD;
pub fn SetEnvironmentVariableW(n: LPCWSTR, v: LPCWSTR)
-> BOOL;
pub fn GetEnvironmentStringsW() -> LPWCH;
pub fn FreeEnvironmentStringsW(env_ptr: LPWCH) -> BOOL;
pub fn GetModuleFileNameW(hModule: HMODULE,
lpFilename: LPWSTR,
nSize: DWORD)
-> DWORD;
pub fn CreateDirectoryW(lpPathName: LPCWSTR,
lpSecurityAttributes:
LPSECURITY_ATTRIBUTES)
-> BOOL;
pub fn CopyFileW(lpExistingFileName: LPCWSTR,
lpNewFileName: LPCWSTR,
bFailIfExists: BOOL)
-> BOOL;
pub fn DeleteFileW(lpPathName: LPCWSTR) -> BOOL;
pub fn RemoveDirectoryW(lpPathName: LPCWSTR) -> BOOL;
pub fn GetCurrentDirectoryW(nBufferLength: DWORD,
lpBuffer: LPWSTR)
-> DWORD;
pub fn SetCurrentDirectoryW(lpPathName: LPCWSTR) -> BOOL;
pub fn GetLastError() -> DWORD;
pub fn FindFirstFileW(fileName: LPCWSTR, findFileData: LPWIN32_FIND_DATAW)
-> HANDLE;
pub fn FindNextFileW(findFile: HANDLE, findFileData: LPWIN32_FIND_DATAW)
-> BOOL;
pub fn FindClose(findFile: HANDLE) -> BOOL;
pub fn DuplicateHandle(hSourceProcessHandle: HANDLE,
hSourceHandle: HANDLE,
hTargetProcessHandle: HANDLE,
lpTargetHandle: LPHANDLE,
dwDesiredAccess: DWORD,
bInheritHandle: BOOL,
dwOptions: DWORD)
-> BOOL;
pub fn CloseHandle(hObject: HANDLE) -> BOOL;
pub fn OpenProcess(dwDesiredAccess: DWORD,
bInheritHandle: BOOL,
dwProcessId: DWORD)
-> HANDLE;
pub fn GetCurrentProcess() -> HANDLE;
pub fn CreateProcessW(lpApplicationName: LPCWSTR,
lpCommandLine: LPWSTR,
lpProcessAttributes:
LPSECURITY_ATTRIBUTES,
lpThreadAttributes:
LPSECURITY_ATTRIBUTES,
bInheritHandles: BOOL,
dwCreationFlags: DWORD,
lpEnvironment: LPVOID,
lpCurrentDirectory: LPCWSTR,
lpStartupInfo: LPSTARTUPINFO,
lpProcessInformation:
LPPROCESS_INFORMATION)
-> BOOL;
pub fn WaitForSingleObject(hHandle: HANDLE,
dwMilliseconds: DWORD)
-> DWORD;
pub fn TerminateProcess(hProcess: HANDLE, uExitCode: c_uint)
-> BOOL;
pub fn GetExitCodeProcess(hProcess: HANDLE,
lpExitCode: LPDWORD)
-> BOOL;
pub fn GetSystemInfo(lpSystemInfo: LPSYSTEM_INFO);
pub fn VirtualAlloc(lpAddress: LPVOID,
dwSize: SIZE_T,
flAllocationType: DWORD,
flProtect: DWORD)
-> LPVOID;
pub fn VirtualFree(lpAddress: LPVOID,
dwSize: SIZE_T,
dwFreeType: DWORD)
-> BOOL;
pub fn VirtualLock(lpAddress: LPVOID, dwSize: SIZE_T) -> BOOL;
pub fn VirtualUnlock(lpAddress: LPVOID, dwSize: SIZE_T)
-> BOOL;
pub fn VirtualProtect(lpAddress: LPVOID,
dwSize: SIZE_T,
flNewProtect: DWORD,
lpflOldProtect: LPDWORD)
-> BOOL;
pub fn VirtualQuery(lpAddress: LPCVOID,
lpBuffer: LPMEMORY_BASIC_INFORMATION,
dwLength: SIZE_T)
-> SIZE_T;
pub fn CreateFileMappingW(hFile: HANDLE,
lpAttributes: LPSECURITY_ATTRIBUTES,
flProtect: DWORD,
dwMaximumSizeHigh: DWORD,
dwMaximumSizeLow: DWORD,
lpName: LPCWSTR)
-> HANDLE;
pub fn MapViewOfFile(hFileMappingObject: HANDLE,
dwDesiredAccess: DWORD,
dwFileOffsetHigh: DWORD,
dwFileOffsetLow: DWORD,
dwNumberOfBytesToMap: SIZE_T)
-> LPVOID;
pub fn UnmapViewOfFile(lpBaseAddress: LPCVOID) -> BOOL;
pub fn MoveFileExW(lpExistingFileName: LPCWSTR,
lpNewFileName: LPCWSTR,
dwFlags: DWORD) -> BOOL;
pub fn CreateHardLinkW(lpSymlinkFileName: LPCWSTR,
lpTargetFileName: LPCWSTR,
lpSecurityAttributes: LPSECURITY_ATTRIBUTES)
-> BOOL;
pub fn FlushFileBuffers(hFile: HANDLE) -> BOOL;
pub fn CreateFileW(lpFileName: LPCWSTR,
dwDesiredAccess: DWORD,
dwShareMode: DWORD,
lpSecurityAttributes: LPSECURITY_ATTRIBUTES,
dwCreationDisposition: DWORD,
dwFlagsAndAttributes: DWORD,
hTemplateFile: HANDLE) -> HANDLE;
pub fn ReadFile(hFile: HANDLE,
lpBuffer: LPVOID,
nNumberOfBytesToRead: DWORD,
lpNumberOfBytesRead: LPDWORD,
lpOverlapped: LPOVERLAPPED) -> BOOL;
pub fn WriteFile(hFile: HANDLE,
lpBuffer: LPVOID,
nNumberOfBytesToRead: DWORD,
lpNumberOfBytesRead: LPDWORD,
lpOverlapped: LPOVERLAPPED) -> BOOL;
pub fn SetFilePointerEx(hFile: HANDLE,
liDistanceToMove: LARGE_INTEGER,
lpNewFilePointer: PLARGE_INTEGER,
dwMoveMethod: DWORD) -> BOOL;
pub fn SetEndOfFile(hFile: HANDLE) -> BOOL;
pub fn GetSystemTimeAsFileTime(
lpSystemTimeAsFileTime: LPFILETIME);
pub fn QueryPerformanceFrequency(
lpFrequency: *mut LARGE_INTEGER) -> BOOL;
pub fn QueryPerformanceCounter(
lpPerformanceCount: *mut LARGE_INTEGER) -> BOOL;
pub fn GetCurrentProcessId() -> DWORD;
pub fn CreateNamedPipeW(
lpName: LPCWSTR,
dwOpenMode: DWORD,
dwPipeMode: DWORD,
nMaxInstances: DWORD,
nOutBufferSize: DWORD,
nInBufferSize: DWORD,
nDefaultTimeOut: DWORD,
lpSecurityAttributes: LPSECURITY_ATTRIBUTES
) -> HANDLE;
pub fn ConnectNamedPipe(hNamedPipe: HANDLE,
lpOverlapped: LPOVERLAPPED) -> BOOL;
pub fn WaitNamedPipeW(lpNamedPipeName: LPCWSTR,
nTimeOut: DWORD) -> BOOL;
pub fn SetNamedPipeHandleState(hNamedPipe: HANDLE,
lpMode: LPDWORD,
lpMaxCollectionCount: LPDWORD,
lpCollectDataTimeout: LPDWORD)
-> BOOL;
pub fn CreateEventW(lpEventAttributes: LPSECURITY_ATTRIBUTES,
bManualReset: BOOL,
bInitialState: BOOL,
lpName: LPCWSTR) -> HANDLE;
pub fn GetOverlappedResult(hFile: HANDLE,
lpOverlapped: LPOVERLAPPED,
lpNumberOfBytesTransferred: LPDWORD,
bWait: BOOL) -> BOOL;
pub fn DisconnectNamedPipe(hNamedPipe: HANDLE) -> BOOL;
}
}
pub mod msvcrt {
use types::os::arch::c95::{c_int, c_long};
use types::os::arch::c99::intptr_t;
extern {
#[link_name = "_commit"]
pub fn commit(fd: c_int) -> c_int;
#[link_name = "_get_osfhandle"]
pub fn get_osfhandle(fd: c_int) -> c_long;
#[link_name = "_open_osfhandle"]
pub fn open_osfhandle(osfhandle: intptr_t,
flags: c_int) -> c_int;
}
}
pub mod winsock {
use types::os::arch::c95::{c_int, c_long, c_ulong};
use types::os::common::bsd44::SOCKET;
extern "system" {
pub fn ioctlsocket(s: SOCKET, cmd: c_long, argp: *mut c_ulong) -> c_int;
}
}
}
}
#[doc(hidden)]
pub fn issue_14344_workaround() {} // FIXME #14344 force linkage to happen correctly
#[test] fn work_on_windows() { } // FIXME #10872 needed for a happy windows
#[doc(hidden)]
#[cfg(not(test))]
mod std {
pub use core::kinds;
}<|fim▁end|>
|
pub const EBADRQC: c_int = 54;
pub const EBADSLT: c_int = 55;
|
<|file_name|>OneWayLink.java<|end_file_name|><|fim▁begin|><|fim▁hole|>
import java.util.ArrayList;
import java.util.concurrent.Callable;
public class OneWayLink {
public OneWayLink(Location start, Location end) {
this(start, end, new ArrayList<Traversable>(), null);
}
public OneWayLink(Location start, Location end, ArrayList<Traversable> steps) {
this(start, end, steps, null);
}
public OneWayLink(Location start, Location end, ArrayList<Traversable> steps, Callable<Boolean> requirement) {
new Link(start, end, steps, requirement);
}
}<|fim▁end|>
|
package org.booleanfloat.traveler.links;
import org.booleanfloat.traveler.Location;
import org.booleanfloat.traveler.interfaces.Traversable;
|
<|file_name|>extendableevent.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::Bindings::ExtendableEventBinding;
use dom::bindings::error::{Error, ErrorResult, Fallible};
use dom::bindings::global::GlobalRef;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::Root;
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::str::DOMString;
use dom::event::Event;
use js::jsapi::{HandleValue, JSContext};
use string_cache::Atom;
// https://w3c.github.io/ServiceWorker/#extendable-event
#[dom_struct]
pub struct ExtendableEvent {
event: Event,
extensions_allowed: bool
}
impl ExtendableEvent {
pub fn new_inherited() -> ExtendableEvent {
ExtendableEvent {
event: Event::new_inherited(),
extensions_allowed: true
}
}
pub fn new(global: GlobalRef,
type_: Atom,
bubbles: bool,
cancelable: bool)
-> Root<ExtendableEvent> {
let ev = reflect_dom_object(box ExtendableEvent::new_inherited(), global, ExtendableEventBinding::Wrap);
{
let event = ev.upcast::<Event>();
event.init_event(type_, bubbles, cancelable);
}
ev
}
pub fn Constructor(global: GlobalRef,
type_: DOMString,
init: &ExtendableEventBinding::ExtendableEventInit) -> Fallible<Root<ExtendableEvent>> {
Ok(ExtendableEvent::new(global,
Atom::from(type_),
init.parent.bubbles,
init.parent.cancelable))
}
// https://w3c.github.io/ServiceWorker/#wait-until-method
pub fn WaitUntil(&self, _cx: *mut JSContext, val: HandleValue) -> ErrorResult {
// Step 1
if !self.extensions_allowed {<|fim▁hole|> // TODO add an extended_promises array to enqueue the `val`
Ok(())
}
// https://dom.spec.whatwg.org/#dom-event-istrusted
pub fn IsTrusted(&self) -> bool {
self.event.IsTrusted()
}
}<|fim▁end|>
|
return Err(Error::InvalidState);
}
// Step 2
|
<|file_name|>slam.py<|end_file_name|><|fim▁begin|># vim: set et sw=4 ts=4 ai:
import unittest
import utils
from testbin import TestBin
class TestBinSlam(TestBin, unittest.TestCase):
def setUp(self):
self.bin = 'slam'
<|fim▁hole|><|fim▁end|>
|
def tearDown(self):
pass
|
<|file_name|>main_ode.py<|end_file_name|><|fim▁begin|># Copyright 2019 DeepMind Technologies Limited and Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Training script."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import app<|fim▁hole|>import tensorflow.compat.v1 as tf
from cs_gan import file_utils
from cs_gan import gan
from cs_gan import image_metrics
from cs_gan import utils
flags.DEFINE_integer(
'num_training_iterations', 1200000,
'Number of training iterations.')
flags.DEFINE_string(
'ode_mode', 'rk4', 'Integration method.')
flags.DEFINE_integer(
'batch_size', 64, 'Training batch size.')
flags.DEFINE_float(
'grad_reg_weight', 0.02, 'Step size for latent optimisation.')
flags.DEFINE_string(
'opt_name', 'gd', 'Name of the optimiser (gd|adam).')
flags.DEFINE_bool(
'schedule_lr', True, 'The method to project z.')
flags.DEFINE_bool(
'reg_first_grad_only', True, 'Whether only to regularise the first grad.')
flags.DEFINE_integer(
'num_latents', 128, 'The number of latents')
flags.DEFINE_integer(
'summary_every_step', 1000,
'The interval at which to log debug ops.')
flags.DEFINE_integer(
'image_metrics_every_step', 1000,
'The interval at which to log (expensive) image metrics.')
flags.DEFINE_integer(
'export_every', 10,
'The interval at which to export samples.')
# Use 50k to reproduce scores from the paper. Default to 10k here to avoid the
# runtime error caused by a too-large graph when using 50k samples on some machines.
flags.DEFINE_integer(
'num_eval_samples', 10000,
'The number of samples used to evaluate FID/IS.')
flags.DEFINE_string(
'dataset', 'cifar', 'The dataset used for learning (cifar|mnist).')
flags.DEFINE_string(
'output_dir', '/tmp/ode_gan/gan', 'Location where to save output files.')
flags.DEFINE_float('disc_lr', 4e-2, 'Discriminator Learning rate.')
flags.DEFINE_float('gen_lr', 4e-2, 'Generator Learning rate.')
flags.DEFINE_bool(
'run_real_data_metrics', False,
'Whether or not to run image metrics on real data.')
flags.DEFINE_bool(
'run_sample_metrics', True,
'Whether or not to run image metrics on samples.')
FLAGS = flags.FLAGS
# Log info level (for Hooks).
tf.logging.set_verbosity(tf.logging.INFO)
def _copy_vars(v_list):
"""Copy variables in v_list."""
t_list = []
for v in v_list:
t_list.append(tf.identity(v))
return t_list
def _restore_vars(v_list, t_list):
"""Restore variables in v_list from t_list."""
ops = []
for v, t in zip(v_list, t_list):
ops.append(v.assign(t))
return ops
def _scale_vars(s, v_list):
"""Scale all variables in v_list by s."""
return [s * v for v in v_list]
def _acc_grads(g_sum, g_w, g):
"""Accumulate gradients in g, weighted by g_w."""
return [g_sum_i + g_w * g_i for g_sum_i, g_i in zip(g_sum, g)]
def _compute_reg_grads(gen_grads, disc_vars):
"""Compute gradients norm (this is an upper-bpund of the full-batch norm)."""
gen_norm = tf.accumulate_n([tf.reduce_sum(u * u) for u in gen_grads])
disc_reg_grads = tf.gradients(gen_norm, disc_vars)
return disc_reg_grads
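# Note (added commentary, not in the original): the regulariser above penalises
# R(theta_D) = sum_i ||g_i||^2 over the generator gradients g_i, and returns
# dR/dtheta_D so it can be folded into the discriminator update. Computed on a
# minibatch, the expected squared norm upper-bounds the squared norm of the
# full-batch gradient (Jensen's inequality), hence the "upper bound" wording in
# the docstring.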
def run_model(prior, images, model, disc_reg_weight):
"""Run the model with new data and samples.
Args:
prior: the noise source as the generator input.
images: images sampled from dataset.
model: a GAN model defined in gan.py.
disc_reg_weight: regularisation weight for discriminator gradients.
Returns:
debug_ops: statistics from the model, see gan.py for more detials.
disc_grads: discriminator gradients.
gen_grads: generator gradients.
"""
generator_inputs = prior.sample(FLAGS.batch_size)
model_output = model.connect(images, generator_inputs)
optimization_components = model_output.optimization_components
disc_grads = tf.gradients(
optimization_components['disc'].loss,
optimization_components['disc'].vars)
gen_grads = tf.gradients(
optimization_components['gen'].loss,
optimization_components['gen'].vars)
if disc_reg_weight > 0.0:
reg_grads = _compute_reg_grads(gen_grads,
optimization_components['disc'].vars)
disc_grads = _acc_grads(disc_grads, disc_reg_weight, reg_grads)
debug_ops = model_output.debug_ops
return debug_ops, disc_grads, gen_grads
def update_model(model, disc_grads, gen_grads, disc_opt, gen_opt,
global_step, update_scale):
"""Update model with gradients."""
disc_vars, gen_vars = model.get_variables()
with tf.control_dependencies(gen_grads + disc_grads):
disc_update_op = disc_opt.apply_gradients(
zip(_scale_vars(update_scale, disc_grads),
disc_vars))
gen_update_op = gen_opt.apply_gradients(
zip(_scale_vars(update_scale, gen_grads),
gen_vars),
global_step=global_step)
update_op = tf.group([disc_update_op, gen_update_op])
return update_op
def main(argv):
del argv
utils.make_output_dir(FLAGS.output_dir)
data_processor = utils.DataProcessor()
# Compute the batch-size multiplier
if FLAGS.ode_mode == 'rk2':
batch_mul = 2
elif FLAGS.ode_mode == 'rk4':
batch_mul = 4
else:
batch_mul = 1
images = utils.get_train_dataset(data_processor, FLAGS.dataset,
int(FLAGS.batch_size * batch_mul))
image_splits = tf.split(images, batch_mul)
logging.info('Generator learning rate: %g', FLAGS.gen_lr)
logging.info('Discriminator learning rate: %g', FLAGS.disc_lr)
global_step = tf.train.get_or_create_global_step()
# Construct optimizers.
if FLAGS.opt_name == 'adam':
disc_opt = tf.train.AdamOptimizer(FLAGS.disc_lr, beta1=0.5, beta2=0.999)
gen_opt = tf.train.AdamOptimizer(FLAGS.gen_lr, beta1=0.5, beta2=0.999)
elif FLAGS.opt_name == 'gd':
if FLAGS.schedule_lr:
gd_disc_lr = tf.train.piecewise_constant(
global_step,
values=[FLAGS.disc_lr / 4., FLAGS.disc_lr, FLAGS.disc_lr / 2.],
boundaries=[500, 400000])
gd_gen_lr = tf.train.piecewise_constant(
global_step,
values=[FLAGS.gen_lr / 4., FLAGS.gen_lr, FLAGS.gen_lr / 2.],
boundaries=[500, 400000])
else:
gd_disc_lr = FLAGS.disc_lr
gd_gen_lr = FLAGS.gen_lr
disc_opt = tf.train.GradientDescentOptimizer(gd_disc_lr)
gen_opt = tf.train.GradientDescentOptimizer(gd_gen_lr)
else:
raise ValueError('Unknown optimiser name!')
# Create the networks and models.
generator = utils.get_generator(FLAGS.dataset)
metric_net = utils.get_metric_net(FLAGS.dataset, use_sn=False)
model = gan.GAN(metric_net, generator)
prior = utils.make_prior(FLAGS.num_latents)
# Setup ODE parameters.
if FLAGS.ode_mode == 'rk2':
ode_grad_weights = [0.5, 0.5]
step_scale = [1.0]
elif FLAGS.ode_mode == 'rk4':
ode_grad_weights = [1. / 6., 1. / 3., 1. / 3., 1. / 6.]
step_scale = [0.5, 0.5, 1.]
elif FLAGS.ode_mode == 'euler':
# Euler update
ode_grad_weights = [1.0]
step_scale = []
else:
raise ValueError('Unknown ODE mode!')
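# Reference note (added commentary): with the step size h absorbed into the
# learning rate, the 'rk4' branch corresponds to the classical Runge-Kutta
# scheme -- four gradient evaluations combined with weights
# [1/6, 1/3, 1/3, 1/6], where the three intermediate evaluations are taken
# after trial updates scaled by h/2, h/2 and h (step_scale = [0.5, 0.5, 1.]).
# 'rk2' with weights [0.5, 0.5] is Heun's method, and 'euler' is a plain
# single-evaluation update.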
# Extra steps for RK updates.
num_extra_steps = len(step_scale)
if FLAGS.reg_first_grad_only:
first_reg_weight = FLAGS.grad_reg_weight / ode_grad_weights[0]
other_reg_weight = 0.0
else:
first_reg_weight = FLAGS.grad_reg_weight
other_reg_weight = FLAGS.grad_reg_weight
debug_ops, disc_grads, gen_grads = run_model(prior, image_splits[0],
model, first_reg_weight)
disc_vars, gen_vars = model.get_variables()
final_disc_grads = _scale_vars(ode_grad_weights[0], disc_grads)
final_gen_grads = _scale_vars(ode_grad_weights[0], gen_grads)
restore_ops = []
# Preparing for further RK steps.
if num_extra_steps > 0:
# copy the variables before they are changed by update_op
saved_disc_vars = _copy_vars(disc_vars)
saved_gen_vars = _copy_vars(gen_vars)
# Enter RK loop.
with tf.control_dependencies(saved_disc_vars + saved_gen_vars):
step_deps = []
for i_step in range(num_extra_steps):
with tf.control_dependencies(step_deps):
# Compute gradient steps for intermediate updates.
update_op = update_model(
model, disc_grads, gen_grads, disc_opt, gen_opt,
None, step_scale[i_step])
with tf.control_dependencies([update_op]):
_, disc_grads, gen_grads = run_model(
prior, image_splits[i_step + 1], model, other_reg_weight)
# Accumulate gradients for the final update.
final_disc_grads = _acc_grads(final_disc_grads,
ode_grad_weights[i_step + 1],
disc_grads)
final_gen_grads = _acc_grads(final_gen_grads,
ode_grad_weights[i_step + 1],
gen_grads)
# Make new restore_op for each step.
restore_ops = []
restore_ops += _restore_vars(disc_vars, saved_disc_vars)
restore_ops += _restore_vars(gen_vars, saved_gen_vars)
step_deps = restore_ops
with tf.control_dependencies(restore_ops):
update_op = update_model(
model, final_disc_grads, final_gen_grads, disc_opt, gen_opt,
global_step, 1.0)
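# Added commentary: by this point the loop above has evaluated gradients at
# the intermediate RK points (restoring the saved variables in between),
# accumulated them into final_disc_grads/final_gen_grads using
# ode_grad_weights, and update_op applies the single combined step.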
samples = generator(prior.sample(FLAGS.batch_size), is_training=False)
# Get data needed to compute FID. We also compute metrics on
# real data as a sanity check and as a reference point.
eval_real_data = utils.get_real_data_for_eval(FLAGS.num_eval_samples,
FLAGS.dataset,
split='train')
def sample_fn(x):
return utils.optimise_and_sample(x, module=model,
data=None, is_training=False)[0]
if FLAGS.run_sample_metrics:
sample_metrics = image_metrics.get_image_metrics_for_samples(
eval_real_data, sample_fn,
prior, data_processor,
num_eval_samples=FLAGS.num_eval_samples)
else:
sample_metrics = {}
if FLAGS.run_real_data_metrics:
data_metrics = image_metrics.get_image_metrics(
eval_real_data, eval_real_data)
else:
data_metrics = {}
sample_exporter = file_utils.FileExporter(
os.path.join(FLAGS.output_dir, 'samples'))
# Hooks.
debug_ops['it'] = global_step
# Abort training on Nans.
nan_disc_hook = tf.train.NanTensorHook(debug_ops['disc_loss'])
nan_gen_hook = tf.train.NanTensorHook(debug_ops['gen_loss'])
# Step counter.
step_counter_hook = tf.train.StepCounterHook()
checkpoint_saver_hook = tf.train.CheckpointSaverHook(
checkpoint_dir=utils.get_ckpt_dir(FLAGS.output_dir), save_secs=10 * 60)
loss_summary_saver_hook = tf.train.SummarySaverHook(
save_steps=FLAGS.summary_every_step,
output_dir=os.path.join(FLAGS.output_dir, 'summaries'),
summary_op=utils.get_summaries(debug_ops))
metrics_summary_saver_hook = tf.train.SummarySaverHook(
save_steps=FLAGS.image_metrics_every_step,
output_dir=os.path.join(FLAGS.output_dir, 'summaries'),
summary_op=utils.get_summaries(sample_metrics))
hooks = [checkpoint_saver_hook, metrics_summary_saver_hook,
nan_disc_hook, nan_gen_hook, step_counter_hook,
loss_summary_saver_hook]
# Start training.
with tf.train.MonitoredSession(hooks=hooks) as sess:
logging.info('starting training')
for key, value in sess.run(data_metrics).items():
logging.info('%s: %g', key, value)
for i in range(FLAGS.num_training_iterations):
sess.run(update_op)
if i % FLAGS.export_every == 0:
samples_np, data_np = sess.run([samples, image_splits[0]])
# Create an object which gets data and does the processing.
data_np = data_processor.postprocess(data_np)
samples_np = data_processor.postprocess(samples_np)
sample_exporter.save(samples_np, 'samples')
sample_exporter.save(data_np, 'data')
if __name__ == '__main__':
tf.enable_resource_variables()
app.run(main)<|fim▁end|>
|
from absl import flags
from absl import logging
|
<|file_name|>forms.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# @Date : 2016-01-21 13:15
# @Author : leiyue ([email protected])
# @Link : https://leiyue.wordpress.com/
from flask_wtf import Form
from wtforms import StringField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, length
from .models import User
class LoginForm(Form):
openid = StringField('openid', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
class EditForm(Form):
nickname = StringField('nickname', validators=[DataRequired()])
about_me = TextAreaField('about_me', validators=[length(min=0, max=140)])
def __init__(self, original_nickname, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
self.original_nickname = original_nickname
def validate(self):
if not Form.validate(self):
return False
if self.nickname.data == self.original_nickname:<|fim▁hole|> user = User.query.filter_by(nickname=self.nickname.data).first()
if user is not None:
self.nickname.errors.append('This nickname is already in use, Please choose another one.')
return False
return True
class PostForm(Form):
post = TextAreaField('post', validators=[DataRequired(), length(min=0, max=140)])
class SearchForm(Form):
search = StringField('search', validators=[DataRequired()])<|fim▁end|>
|
return True
|
<|file_name|>recommend.py<|end_file_name|><|fim▁begin|>"""
This script is responsible for generating recommendations for the users. The general flow is as follows:
The best_model saved in HDFS is loaded with the help of model_id which is fetched from model_metadata_df.
`spark_user_id` and `recording_id` are fetched from top_artist_candidate_set_df and are given as input to the
recommender. The recommender returns an RDD of `user`, `product` and `rating`, which is then converted to a
dataframe by keeping only the top X (an int supplied as an argument to the script) recommendations per user, sorted on
rating, with the fields renamed to `spark_user_id`, `recording_id` and `rating`. The ratings are scaled so that they lie between 0 and 1.
This dataframe is joined with recordings_df on recording_id to get the recording mbids which are then sent over the queue.
The same process is done for similar artist candidate set.
"""
import logging
import time
from py4j.protocol import Py4JJavaError
import listenbrainz_spark
from listenbrainz_spark import utils, path
from listenbrainz_spark.exceptions import (PathNotFoundException,
FileNotFetchedException,
SparkSessionNotInitializedException,
RecommendationsNotGeneratedException,
EmptyDataframeExcpetion)
from listenbrainz_spark.recommendations.recording.train_models import get_model_path
from listenbrainz_spark.recommendations.recording.candidate_sets import _is_empty_dataframe
from pyspark.sql import Row
import pyspark.sql.functions as func
from pyspark.sql.window import Window
from pyspark.sql.functions import col, udf, row_number
from pyspark.sql.types import DoubleType
from pyspark.mllib.recommendation import MatrixFactorizationModel
logger = logging.getLogger(__name__)
class RecommendationParams:
def __init__(self, recordings_df, model, top_artist_candidate_set_df, similar_artist_candidate_set_df,
recommendation_top_artist_limit, recommendation_similar_artist_limit):<|fim▁hole|> self.similar_artist_candidate_set_df = similar_artist_candidate_set_df
self.recommendation_top_artist_limit = recommendation_top_artist_limit
self.recommendation_similar_artist_limit = recommendation_similar_artist_limit
def get_most_recent_model_id():
""" Get model id of recently created model.
Returns:
model_id (str): Model identification string.
"""
try:
model_metadata = utils.read_files_from_HDFS(path.RECOMMENDATION_RECORDING_MODEL_METADATA)
except PathNotFoundException as err:
logger.error(str(err), exc_info=True)
raise
except FileNotFetchedException as err:
logger.error(str(err), exc_info=True)
raise
latest_ts = model_metadata.select(func.max('model_created').alias('model_created')).take(1)[0].model_created
model_id = model_metadata.select('model_id') \
.where(col('model_created') == latest_ts).take(1)[0].model_id
return model_id
def load_model():
""" Load model from given path in HDFS.
"""
model_id = get_most_recent_model_id()
dest_path = get_model_path(model_id)
try:
model = MatrixFactorizationModel.load(listenbrainz_spark.context, dest_path)
return model
except Py4JJavaError as err:
logger.error('Unable to load model "{}"\n{}\nAborting...'.format(model_id, str(err.java_exception)),
exc_info=True)
raise
def get_recording_mbids(params: RecommendationParams, recommendation_df, users_df):
""" Get recording mbids corresponding to recommended recording ids sorted on rating.
Args:
params: RecommendationParams class object.
recommendation_df: Dataframe of spark_user_id, recording id and rating.
users_df : user_id and spark_user_id of active users.
Returns:
dataframe of recommended recording mbids and related info.
"""
df = params.recordings_df.join(recommendation_df, 'recording_id', 'inner') \
.select('rating',
'recording_mbid',
'spark_user_id')
recording_mbids_df = df.join(users_df, 'spark_user_id', 'inner')
window = Window.partitionBy('user_id').orderBy(col('rating').desc())
df = recording_mbids_df.withColumn('rank', row_number().over(window)) \
.select('recording_mbid',
'rank',
'rating',
'spark_user_id',
'user_id')
return df
def filter_recommendations_on_rating(df, limit):
""" Filter top X recommendations for each user on rating where X = limit.
Args:
df: Dataframe of user, product and rating.
limit (int): Number of recommendations to be filtered for each user.
Returns:
recommendation_df: Dataframe of spark_user_id, recording_id and rating.
"""
window = Window.partitionBy('user').orderBy(col('rating').desc())
recommendation_df = df.withColumn('rank', row_number().over(window)) \
.where(col('rank') <= limit) \
.select(col('rating'),
col('product').alias('recording_id'),
col('user').alias('spark_user_id'))
return recommendation_df
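# Illustration (a sketch, not executed here): with limit=2 and input rows
# (user=1, product=10, rating=0.9), (user=1, product=11, rating=0.8) and
# (user=1, product=12, rating=0.7), only the first two rows survive, emitted
# as (spark_user_id=1, recording_id=10) and (spark_user_id=1, recording_id=11)
# together with their ratings.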
def generate_recommendations(candidate_set, params: RecommendationParams, limit):
""" Generate recommendations from the candidate set.
Args:
candidate_set (rdd): RDD of spark_user_id and recording_id.
params: RecommendationParams class object.
limit (int): Number of recommendations to be filtered for each user.
Returns:
recommendation_df: Dataframe of spark_user_id, recording_id and rating.
"""
recommendations = params.model.predictAll(candidate_set)
if recommendations.isEmpty():
raise RecommendationsNotGeneratedException('Recommendations not generated!')
df = listenbrainz_spark.session.createDataFrame(recommendations, schema=None)
recommendation_df = filter_recommendations_on_rating(df, limit)
return recommendation_df
def get_scale_rating_udf(rating):
""" Get user defined function (udf) to scale ratings so that they fall in the
range: 0.0 -> 1.0.
Args:
rating (float): score given to recordings by CF.
Returns:
rating udf.
"""
scaled_rating = (rating / 2.0) + 0.5
return round(min(max(scaled_rating, -1.0), 1.0), 3)
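# Worked examples for the scaling above: a raw CF score of 1.0 maps to
# (1.0 / 2.0) + 0.5 = 1.0, a score of 0.0 maps to 0.5 and a score of -1.0 maps
# to 0.0; the min/max clamp only has an effect for scores outside [-1, 1].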
def scale_rating(df):
""" Scale the ratings column of dataframe so that they fall in the
range: 0.0 -> 1.0.
Args:
df: Dataframe to scale.
Returns:
df: Dataframe with scaled rating.
"""
scaling_udf = udf(get_scale_rating_udf, DoubleType())
df = df.withColumn("scaled_rating", scaling_udf(df.rating)) \
.select(col('recording_id'),
col('spark_user_id'),
col('scaled_rating').alias('rating'))
return df
def get_candidate_set_rdd_for_user(candidate_set_df, users):
""" Get candidate set RDD for a given user.
Args:
candidate_set_df: A dataframe of spark_user_id and recording_id for all users.
users: list of user names to generate recommendations for.
Returns:
candidate_set_rdd: An RDD of spark_user_id and recording_id for a given user.
"""
if users:
candidate_set_user_df = candidate_set_df.select('spark_user_id', 'recording_id') \
.where(col('user_id').isin(users))
else:
candidate_set_user_df = candidate_set_df.select('spark_user_id', 'recording_id')
if _is_empty_dataframe(candidate_set_user_df):
raise EmptyDataframeExcpetion('Empty Candidate sets!')
candidate_set_rdd = candidate_set_user_df.rdd.map(lambda r: (r['spark_user_id'], r['recording_id']))
return candidate_set_rdd
def get_user_name_and_user_id(params: RecommendationParams, users):
""" Get users from top artist candidate set.
Args:
params: RecommendationParams class object.
            users: list of user names to generate recommendations for.
Returns:
users_df: dataframe of user id and user names.
"""
if len(users) == 0:
users_df = params.top_artist_candidate_set_df.select('spark_user_id', 'user_id').distinct()
else:
users_df = params.top_artist_candidate_set_df.select('spark_user_id', 'user_id') \
.where(params.top_artist_candidate_set_df.user_id.isin(users)) \
.distinct()
if _is_empty_dataframe(users_df):
raise EmptyDataframeExcpetion('No active users found!')
return users_df
def check_for_ratings_beyond_range(top_artist_rec_df, similar_artist_rec_df):
""" Check if rating in top_artist_rec_df and similar_artist_rec_df does not belong to [-1, 1].
Args:
top_artist_rec_df (dataframe): Top artist recommendations for all users.
similar_artist_rec_df (dataframe): Similar artist recommendations for all users.
Returns:
a tuple of booleans (max out of range, min out of range)
"""
max_rating = top_artist_rec_df.select(func.max('rating').alias('rating')).take(1)[0].rating
max_rating = max(similar_artist_rec_df.select(func.max('rating').alias('rating')).take(1)[0].rating, max_rating)
min_rating = top_artist_rec_df.select(func.min('rating').alias('rating')).take(1)[0].rating
min_rating = min(similar_artist_rec_df.select(func.min('rating').alias('rating')).take(1)[0].rating, min_rating)
if max_rating > 1.0:
logger.info('Some ratings are greater than 1 \nMax rating: {}'.format(max_rating))
if min_rating < -1.0:
logger.info('Some ratings are less than -1 \nMin rating: {}'.format(min_rating))
return max_rating > 1.0, min_rating < -1.0
def create_messages(top_artist_rec_mbid_df, similar_artist_rec_mbid_df, active_user_count, total_time,
top_artist_rec_user_count, similar_artist_rec_user_count):
""" Create messages to send the data to the webserver via RabbitMQ.
Args:
top_artist_rec_mbid_df (dataframe): Top artist recommendations.
similar_artist_rec_mbid_df (dataframe): Similar artist recommendations.
active_user_count (int): Number of users active in the last week.
            total_time (float): Time taken to execute the whole script.
top_artist_rec_user_count (int): Number of users for whom top artist recommendations were generated.
similar_artist_rec_user_count (int): Number of users for whom similar artist recommendations were generated.
Returns:
messages: A list of messages to be sent via RabbitMQ
"""
top_artist_rec_itr = top_artist_rec_mbid_df.toLocalIterator()
user_rec = {}
for row in top_artist_rec_itr:
if user_rec.get(row.user_id) is None:
user_rec[row.user_id] = {}
user_rec[row.user_id]['top_artist'] = [
{
"recording_mbid": row.recording_mbid,
"score": row.rating
}
]
user_rec[row.user_id]['similar_artist'] = []
else:
user_rec[row.user_id]['top_artist'].append(
{
"recording_mbid": row.recording_mbid,
"score": row.rating
}
)
similar_artist_rec_itr = similar_artist_rec_mbid_df.toLocalIterator()
for row in similar_artist_rec_itr:
if user_rec.get(row.user_id) is None:
user_rec[row.user_id] = {}
user_rec[row.user_id]['similar_artist'] = [
{
"recording_mbid": row.recording_mbid,
"score": row.rating
}
]
else:
user_rec[row.user_id]['similar_artist'].append(
{
"recording_mbid": row.recording_mbid,
"score": row.rating
}
)
for user_id, data in user_rec.items():
messages = {
'user_id': user_id,
'type': 'cf_recommendations_recording_recommendations',
'recommendations': {
'top_artist': data.get('top_artist', []),
'similar_artist': data.get('similar_artist', [])
}
}
yield messages
yield {
'type': 'cf_recommendations_recording_mail',
'active_user_count': active_user_count,
'top_artist_user_count': top_artist_rec_user_count,
'similar_artist_user_count': similar_artist_rec_user_count,
'total_time': '{:.2f}'.format(total_time / 3600)
}
def get_recommendations_for_all(params: RecommendationParams, users):
""" Get recommendations for all active users.
Args:
params: RecommendationParams class object.
            users: list of user names to generate recommendations for.
Returns:
top_artist_rec_df: Top artist recommendations.
similar_artist_rec_df: Similar artist recommendations.
"""
try:
top_artist_candidate_set_rdd = get_candidate_set_rdd_for_user(params.top_artist_candidate_set_df, users)
except EmptyDataframeExcpetion:
logger.error('Top artist candidate set not found for any user.', exc_info=True)
raise
try:
similar_artist_candidate_set_rdd = get_candidate_set_rdd_for_user(params.similar_artist_candidate_set_df, users)
except EmptyDataframeExcpetion:
logger.error('Similar artist candidate set not found for any user.', exc_info=True)
raise
try:
top_artist_rec_df = generate_recommendations(top_artist_candidate_set_rdd, params,
params.recommendation_top_artist_limit)
except RecommendationsNotGeneratedException:
logger.error('Top artist recommendations not generated for any user', exc_info=True)
raise
try:
similar_artist_rec_df = generate_recommendations(similar_artist_candidate_set_rdd, params,
params.recommendation_similar_artist_limit)
except RecommendationsNotGeneratedException:
logger.error('Similar artist recommendations not generated for any user', exc_info=True)
raise
return top_artist_rec_df, similar_artist_rec_df
def get_user_count(df):
""" Get distinct user count from the given dataframe.
"""
users_df = df.select('spark_user_id').distinct()
return users_df.count()
def main(recommendation_top_artist_limit=None, recommendation_similar_artist_limit=None, users=None):
try:
listenbrainz_spark.init_spark_session('Recommendations')
except SparkSessionNotInitializedException as err:
logger.error(str(err), exc_info=True)
raise
try:
recordings_df = utils.read_files_from_HDFS(path.RECOMMENDATION_RECORDINGS_DATAFRAME)
top_artist_candidate_set_df = utils.read_files_from_HDFS(path.RECOMMENDATION_RECORDING_TOP_ARTIST_CANDIDATE_SET)
similar_artist_candidate_set_df = utils.read_files_from_HDFS(path.RECOMMENDATION_RECORDING_SIMILAR_ARTIST_CANDIDATE_SET)
except PathNotFoundException as err:
logger.error(str(err), exc_info=True)
raise
except FileNotFetchedException as err:
logger.error(str(err), exc_info=True)
raise
logger.info('Loading model...')
model = load_model()
# an action must be called to persist data in memory
recordings_df.count()
recordings_df.persist()
params = RecommendationParams(recordings_df, model, top_artist_candidate_set_df,
similar_artist_candidate_set_df,
recommendation_top_artist_limit,
recommendation_similar_artist_limit)
try:
# timestamp when the script was invoked
ts_initial = time.monotonic()
users_df = get_user_name_and_user_id(params, users)
# Some users are excluded from the top_artist_candidate_set because of the limited data
# in the mapping. Therefore, active_user_count may or may not be equal to number of users
# active in the last week. Ideally, top_artist_candidate_set should give the active user count.
active_user_count = users_df.count()
users_df.persist()
logger.info('Took {:.2f}sec to get active user count'.format(time.monotonic() - ts_initial))
except EmptyDataframeExcpetion as err:
logger.error(str(err), exc_info=True)
raise
logger.info('Generating recommendations...')
ts = time.monotonic()
top_artist_rec_df, similar_artist_rec_df = get_recommendations_for_all(params, users)
logger.info('Recommendations generated!')
logger.info('Took {:.2f}sec to generate recommendations for all active users'.format(time.monotonic() - ts))
ts = time.monotonic()
top_artist_rec_user_count = get_user_count(top_artist_rec_df)
similar_artist_rec_user_count = get_user_count(similar_artist_rec_df)
logger.info('Took {:.2f}sec to get top artist and similar artist user count'.format(time.monotonic() - ts))
ts = time.monotonic()
check_for_ratings_beyond_range(top_artist_rec_df, similar_artist_rec_df)
top_artist_rec_scaled_df = scale_rating(top_artist_rec_df)
similar_artist_rec_scaled_df = scale_rating(similar_artist_rec_df)
logger.info('Took {:.2f}sec to scale the ratings'.format(time.monotonic() - ts))
ts = time.monotonic()
top_artist_rec_mbid_df = get_recording_mbids(params, top_artist_rec_scaled_df, users_df)
similar_artist_rec_mbid_df = get_recording_mbids(params, similar_artist_rec_scaled_df, users_df)
logger.info('Took {:.2f}sec to get mbids corresponding to recording ids'.format(time.monotonic() - ts))
# persisted data must be cleared from memory after usage to avoid OOM
recordings_df.unpersist()
total_time = time.monotonic() - ts_initial
logger.info('Total time: {:.2f}sec'.format(total_time))
result = create_messages(top_artist_rec_mbid_df, similar_artist_rec_mbid_df, active_user_count, total_time,
top_artist_rec_user_count, similar_artist_rec_user_count)
users_df.unpersist()
return result<|fim▁end|>
|
self.recordings_df = recordings_df
self.model = model
self.top_artist_candidate_set_df = top_artist_candidate_set_df
|
<|file_name|>api-creation-step1.component.ts<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import ApiCreationController from './api-creation.controller';
import { shouldDisplayHint } from './form.helper';
import ApiPrimaryOwnerModeService from '../../../../services/apiPrimaryOwnerMode.service';
const ApiCreationStep1Component: ng.IComponentOptions = {
require: {
parent: '^apiCreation',
},
template: require('./api-creation-step1.html'),
controller: class {
private parent: ApiCreationController;
private advancedMode: boolean;
private useGroupAsPrimaryOwner: boolean;<|fim▁hole|> this.advancedMode = false;
this.useGroupAsPrimaryOwner = this.ApiPrimaryOwnerModeService.isGroupOnly();
}
toggleAdvancedMode = () => {
this.advancedMode = !this.advancedMode;
if (!this.advancedMode) {
this.parent.api.groups = [];
}
};
canUseAdvancedMode = () => {
return (
(this.ApiPrimaryOwnerModeService.isHybrid() &&
((this.parent.attachableGroups && this.parent.attachableGroups.length > 0) ||
(this.parent.poGroups && this.parent.poGroups.length > 0))) ||
(this.ApiPrimaryOwnerModeService.isGroupOnly() && this.parent.attachableGroups && this.parent.attachableGroups.length > 0)
);
};
},
};
export default ApiCreationStep1Component;<|fim▁end|>
|
public shouldDisplayHint = shouldDisplayHint;
constructor(private ApiPrimaryOwnerModeService: ApiPrimaryOwnerModeService) {
'ngInject';
|
<|file_name|>tree_perf.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {runBenchmark, verifyNoBrowserErrors} from 'e2e_util/perf_util';
import {$} from 'protractor';
interface Worker {
id: string;
prepare?(): void;
work(): void;
}
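// A sketch of one more possible worker (hypothetical, not part of this suite):
// it would render a tree once and then repeatedly trigger change detection,
// reusing the same `#detectChanges` button that the change-detection specs
// below click.
//
// const DetectChangesWorker: Worker = {
//   id: 'detectChanges',
//   prepare: () => $('#createDom').click(),
//   work: () => $('#detectChanges').click()
// };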
const CreateOnlyWorker: Worker = {
id: 'createOnly',
prepare: () => $('#destroyDom').click(),
work: () => $('#createDom').click()
};
const CreateAndDestroyWorker: Worker = {
id: 'createDestroy',
work: () => {
$('#createDom').click();
$('#destroyDom').click();
}
};
const UpdateWorker: Worker = {
id: 'update',
work: () => $('#createDom').click()
};
describe('tree benchmark perf', () => {
afterEach(verifyNoBrowserErrors);
[CreateOnlyWorker, CreateAndDestroyWorker, UpdateWorker].forEach((worker) => {
describe(worker.id, () => {
it('should run for ng2', (done) => {
runTreeBenchmark({
id: `deepTree.ng2.${worker.id}`,
url: 'all/benchmarks/src/tree/ng2/index.html',
work: worker.work,
prepare: worker.prepare,
}).then(done, done.fail);
});
it('should run for ng2 next', (done) => {
runTreeBenchmark({
id: `deepTree.ng2.next.${worker.id}`,
url: 'all/benchmarks/src/tree/ng2_next/index.html',
ignoreBrowserSynchronization: true,
work: worker.work,
prepare: worker.prepare,
// Can't use bundles as we use non exported code
extraParams: [{name: 'bundles', value: false}]
}).then(done, done.fail);
});
it('should run for ng2 ftl', (done) => {
runTreeBenchmark({
id: `deepTree.ng2.ftl.${worker.id}`,
url: 'all/benchmarks/src/tree/ng2_ftl/index.html',
work: worker.work,
prepare: worker.prepare,
// Can't use bundles as we use AoT generated code
// which relies on deep imports
extraParams: [{name: 'bundles', value: false}]
}).then(done, done.fail);
});
it('should run for ng2 static', (done) => {
runTreeBenchmark({
id: `deepTree.ng2.static.${worker.id}`,
url: 'all/benchmarks/src/tree/ng2_static/index.html',
work: worker.work,
prepare: worker.prepare,
}).then(done, done.fail);
});
it('should run for ng2 static ftl', (done) => {
runTreeBenchmark({
          id: `deepTree.ng2.static.ftl.${worker.id}`,
url: 'all/benchmarks/src/tree/ng2_static_ftl/index.html',
work: worker.work,
prepare: worker.prepare,
// Can't use bundles as we use AoT generated code
// which relies on deep imports
extraParams: [{name: 'bundles', value: false}]
}).then(done, done.fail);
});
it('should run for ng2 switch', (done) => {
runTreeBenchmark({
id: `deepTree.ng2_switch.${worker.id}`,
url: 'all/benchmarks/src/tree/ng2_switch/index.html',
work: worker.work,
prepare: worker.prepare,
}).then(done, done.fail);
});
it('should run for the baseline', (done) => {
runTreeBenchmark({
id: `deepTree.baseline.${worker.id}`,
url: 'all/benchmarks/src/tree/baseline/index.html',
ignoreBrowserSynchronization: true,
work: worker.work,
prepare: worker.prepare,
}).then(done, done.fail);
});
it('should run for incremental-dom', (done) => {
runTreeBenchmark({
id: `deepTree.incremental_dom.${worker.id}`,
url: 'all/benchmarks/src/tree/incremental_dom/index.html',
ignoreBrowserSynchronization: true,
work: worker.work,
prepare: worker.prepare,
}).then(done, done.fail);
});
it('should run for polymer binary tree', (done) => {
runTreeBenchmark({
id: `deepTree.polymer.${worker.id}`,
url: 'all/benchmarks/src/tree/polymer/index.html',
ignoreBrowserSynchronization: true,
work: worker.work,
prepare: worker.prepare,
}).then(done, done.fail);
});
it('should run for polymer leaves', (done) => {
runTreeBenchmark({
id: `deepTree.polymer_leaves.${worker.id}`,
url: 'all/benchmarks/src/tree/polymer_leaves/index.html',
ignoreBrowserSynchronization: true,
work: worker.work,
prepare: worker.prepare,
}).then(done, done.fail);
});
});
});
it('should run ng2 changedetection', (done) => {
runTreeBenchmark({
id: `deepTree.ng2.changedetection`,<|fim▁hole|> url: 'all/benchmarks/src/tree/ng2/index.html',
work: () => $('#detectChanges').click(),
setup: () => $('#createDom').click(),
}).then(done, done.fail);
});
it('should run ng2 next changedetection', (done) => {
runTreeBenchmark({
id: `deepTree.ng2.next.changedetection`,
url: 'all/benchmarks/src/tree/ng2_next/index.html',
work: () => $('#detectChanges').click(),
setup: () => $('#createDom').click(),
ignoreBrowserSynchronization: true,
// Can't use bundles as we use non exported code
extraParams: [{name: 'bundles', value: false}]
}).then(done, done.fail);
});
function runTreeBenchmark(config: {
id: string,
url: string, ignoreBrowserSynchronization?: boolean,
work: () => any,
prepare?: () => any,
extraParams?: {name: string, value: any}[],
setup?: () => any
}) {
let params = [{name: 'depth', value: 11}];
if (config.extraParams) {
params = params.concat(config.extraParams);
}
return runBenchmark({
id: config.id,
url: config.url,
ignoreBrowserSynchronization: config.ignoreBrowserSynchronization,
params: params,
work: config.work,
prepare: config.prepare,
setup: config.setup
});
}
});<|fim▁end|>
| |
<|file_name|>gcp_healthcheck.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Copyright 2017 Google Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_healthcheck
version_added: "2.4"
short_description: Create, Update or Destroy a Healthcheck.
description:
- Create, Update or Destroy a Healthcheck. Currently only HTTP and
HTTPS Healthchecks are supported. Healthchecks are used to monitor
individual instances, managed instance groups and/or backend
    services. Healthchecks are reusable.
- Visit
U(https://cloud.google.com/compute/docs/load-balancing/health-checks)
for an overview of Healthchecks on GCP.
- See
U(https://cloud.google.com/compute/docs/reference/latest/httpHealthChecks) for
API details on HTTP Healthchecks.
- See
U(https://cloud.google.com/compute/docs/reference/latest/httpsHealthChecks)
    for more details on the HTTPS Healthcheck API.
requirements:
- "python >= 2.6"
- "google-api-python-client >= 1.6.2"
- "google-auth >= 0.9.0"
- "google-auth-httplib2 >= 0.0.2"
notes:
- Only supports HTTP and HTTPS Healthchecks currently.
author:
- "Tom Melendez (@supertom) <[email protected]>"
options:
check_interval:
description:
- How often (in seconds) to send a health check.
required: false
default: 5
healthcheck_name:
description:
- Name of the Healthcheck.
required: true
healthcheck_type:
description:
- Type of Healthcheck.
required: true
choices: ["HTTP", "HTTPS"]
host_header:
description:
- The value of the host header in the health check request. If left
empty, the public IP on behalf of which this health
check is performed will be used.
required: true
default: ""
port:
description:
- The TCP port number for the health check request. The default value is
443 for HTTPS and 80 for HTTP.
required: false
request_path:
description:
- The request path of the HTTPS health check request.
required: false
default: "/"
state:
description: State of the Healthcheck.
required: true
choices: ["present", "absent"]
timeout:
description:
- How long (in seconds) to wait for a response before claiming
failure. It is invalid for timeout
to have a greater value than check_interval.
required: false
default: 5
unhealthy_threshold:
description:
- A so-far healthy instance will be marked unhealthy after this
many consecutive failures.
required: false
default: 2
healthy_threshold:
description:
- A so-far unhealthy instance will be marked healthy after this
many consecutive successes.
required: false
default: 2
service_account_email:
description:
- service account email
required: false
default: null
service_account_permissions:
version_added: "2.0"
description:
- service account permissions (see
U(https://cloud.google.com/sdk/gcloud/reference/compute/instances/create),
--scopes section for detailed information)
required: false
default: null
choices: [
"bigquery", "cloud-platform", "compute-ro", "compute-rw",
"useraccounts-ro", "useraccounts-rw", "datastore", "logging-write",
"monitoring", "sql-admin", "storage-full", "storage-ro",
"storage-rw", "taskqueue", "userinfo-email"
]
credentials_file:
description:
- Path to the JSON file associated with the service account email
default: null
required: false
project_id:
description:
- Your GCP project ID
required: false
default: null
'''
EXAMPLES = '''
- name: Create Minimum HealthCheck
gcp_healthcheck:
service_account_email: "{{ service_account_email }}"
credentials_file: "{{ credentials_file }}"
project_id: "{{ project_id }}"
healthcheck_name: my-healthcheck
healthcheck_type: HTTP
state: present
- name: Create HTTP HealthCheck
gcp_healthcheck:
service_account_email: "{{ service_account_email }}"
credentials_file: "{{ credentials_file }}"
project_id: "{{ project_id }}"
healthcheck_name: my-healthcheck
healthcheck_type: HTTP
host: my-host
request_path: /hc
check_interval: 10
timeout: 30
    unhealthy_threshold: 2
    healthy_threshold: 1
state: present
- name: Create HTTPS HealthCheck
gcp_healthcheck:
service_account_email: "{{ service_account_email }}"
credentials_file: "{{ credentials_file }}"
project_id: "{{ project_id }}"
healthcheck_name: "{{ https_healthcheck }}"
healthcheck_type: HTTPS
host_header: my-host
request_path: /hc
check_interval: 5
timeout: 5
unhealthy_threshold: 2
healthy_threshold: 1
state: present
'''
RETURN = '''
state:
description: state of the Healthcheck
returned: Always.
type: str
sample: present
healthcheck_name:
description: Name of the Healthcheck
returned: Always
type: str
sample: my-url-map
healthcheck_type:
description: Type of the Healthcheck
returned: Always
type: str
sample: HTTP
healthcheck:
description: GCP Healthcheck dictionary
returned: Always. Refer to GCP documentation for detailed field descriptions.
type: dict
sample: { "name": "my-hc", "port": 443, "requestPath": "/foo" }
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.gcp import get_google_api_client, GCPUtils
USER_AGENT_PRODUCT = 'ansible-healthcheck'
USER_AGENT_VERSION = '0.0.1'
def _validate_healthcheck_params(params):
"""
Validate healthcheck params.
    Simple validation has already been done by AnsibleModule.
:param params: Ansible dictionary containing configuration.
:type params: ``dict``
:return: True or raises ValueError
:rtype: ``bool`` or `class:ValueError`
"""
if params['timeout'] > params['check_interval']:
raise ValueError("timeout (%s) is greater than check_interval (%s)" % (
params['timeout'], params['check_interval']))
return (True, '')
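# Example (sketch): params with timeout=30 and check_interval=10 would raise
# ValueError("timeout (30) is greater than check_interval (10)") above.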
def _build_healthcheck_dict(params):
"""
    Reformat healthcheck params from Ansible for GCP.
:param params: Params from AnsibleModule object
:type params: ``dict``
:return: dictionary suitable for submission to GCP
HealthCheck (HTTP/HTTPS) API.
:rtype ``dict``
"""
gcp_dict = GCPUtils.params_to_gcp_dict(params, 'healthcheck_name')
if 'timeout' in gcp_dict:
gcp_dict['timeoutSec'] = gcp_dict['timeout']
del gcp_dict['timeout']
if 'checkInterval' in gcp_dict:
gcp_dict['checkIntervalSec'] = gcp_dict['checkInterval']
del gcp_dict['checkInterval']
if 'hostHeader' in gcp_dict:
gcp_dict['host'] = gcp_dict['hostHeader']
del gcp_dict['hostHeader']
if 'healthcheckType' in gcp_dict:
del gcp_dict['healthcheckType']
return gcp_dict
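# Illustration (a sketch; assumes GCPUtils.params_to_gcp_dict camelCases the
# remaining underscore-separated keys): params such as
#     {'healthcheck_name': 'my-hc', 'check_interval': 5, 'timeout': 5,
#      'host_header': 'my-host', 'request_path': '/hc', 'port': 80}
# would come out of this function as
#     {'name': 'my-hc', 'checkIntervalSec': 5, 'timeoutSec': 5,
#      'host': 'my-host', 'requestPath': '/hc', 'port': 80}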
def _get_req_resource(client, resource_type):
if resource_type == 'HTTPS':
return (client.httpsHealthChecks(), 'httpsHealthCheck')
else:
return (client.httpHealthChecks(), 'httpHealthCheck')
def get_healthcheck(client, name, project_id=None, resource_type='HTTP'):
"""
Get a Healthcheck from GCP.
    :param client: An initialized GCE Compute Discovery resource.
:type client: :class: `googleapiclient.discovery.Resource`
    :param name: Name of the Healthcheck.
:type name: ``str``
:param project_id: The GCP project ID.
:type project_id: ``str``
:return: A dict resp from the respective GCP 'get' request.
:rtype: ``dict``
"""
try:
resource, entity_name = _get_req_resource(client, resource_type)
args = {'project': project_id, entity_name: name}
req = resource.get(**args)
return GCPUtils.execute_api_client_req(req, raise_404=False)
except:
raise
def create_healthcheck(client, params, project_id, resource_type='HTTP'):
"""
    Create a new Healthcheck.<|fim▁hole|> :param client: An initialized GCE Compute Discovery resource.
:type client: :class: `googleapiclient.discovery.Resource`
:param params: Dictionary of arguments from AnsibleModule.
:type params: ``dict``
:return: Tuple with changed status and response dict
:rtype: ``tuple`` in the format of (bool, dict)
"""
gcp_dict = _build_healthcheck_dict(params)
try:
resource, _ = _get_req_resource(client, resource_type)
args = {'project': project_id, 'body': gcp_dict}
req = resource.insert(**args)
return_data = GCPUtils.execute_api_client_req(req, client, raw=False)
if not return_data:
return_data = get_healthcheck(client,
name=params['healthcheck_name'],
project_id=project_id)
return (True, return_data)
except:
raise
def delete_healthcheck(client, name, project_id, resource_type='HTTP'):
"""
Delete a Healthcheck.
    :param client: An initialized GCE Compute Discovery resource.
:type client: :class: `googleapiclient.discovery.Resource`
    :param name: Name of the Healthcheck.
:type name: ``str``
:param project_id: The GCP project ID.
:type project_id: ``str``
:return: Tuple with changed status and response dict
:rtype: ``tuple`` in the format of (bool, dict)
"""
try:
resource, entity_name = _get_req_resource(client, resource_type)
args = {'project': project_id, entity_name: name}
req = resource.delete(**args)
return_data = GCPUtils.execute_api_client_req(req, client)
return (True, return_data)
except:
raise
def update_healthcheck(client, healthcheck, params, name, project_id,
resource_type='HTTP'):
"""
Update a Healthcheck.
If the healthcheck has not changed, the update will not occur.
    :param client: An initialized GCE Compute Discovery resource.
:type client: :class: `googleapiclient.discovery.Resource`
    :param healthcheck: Existing Healthcheck dictionary fetched from GCP.
:type healthcheck: ``dict``
:param params: Dictionary of arguments from AnsibleModule.
:type params: ``dict``
    :param name: Name of the Healthcheck.
:type name: ``str``
:param project_id: The GCP project ID.
:type project_id: ``str``
:return: Tuple with changed status and response dict
:rtype: ``tuple`` in the format of (bool, dict)
"""
gcp_dict = _build_healthcheck_dict(params)
ans = GCPUtils.are_params_equal(healthcheck, gcp_dict)
if ans:
return (False, 'no update necessary')
try:
resource, entity_name = _get_req_resource(client, resource_type)
args = {'project': project_id, entity_name: name, 'body': gcp_dict}
req = resource.update(**args)
return_data = GCPUtils.execute_api_client_req(
req, client=client, raw=False)
return (True, return_data)
except:
raise
def main():
module = AnsibleModule(argument_spec=dict(
healthcheck_name=dict(required=True),
healthcheck_type=dict(required=True,
choices=['HTTP', 'HTTPS']),
request_path=dict(required=False, default='/'),
check_interval=dict(required=False, type='int', default=5),
healthy_threshold=dict(required=False, type='int', default=2),
unhealthy_threshold=dict(required=False, type='int', default=2),
host_header=dict(required=False, type='str', default=''),
timeout=dict(required=False, type='int', default=5),
port=dict(required=False, type='int'),
state=dict(choices=['absent', 'present'], default='present'),
service_account_email=dict(),
service_account_permissions=dict(type='list'),
credentials_file=dict(),
project_id=dict(), ), )
client, conn_params = get_google_api_client(module, 'compute', user_agent_product=USER_AGENT_PRODUCT,
user_agent_version=USER_AGENT_VERSION)
params = {}
params['healthcheck_name'] = module.params.get('healthcheck_name')
params['healthcheck_type'] = module.params.get('healthcheck_type')
params['request_path'] = module.params.get('request_path')
params['check_interval'] = module.params.get('check_interval')
params['healthy_threshold'] = module.params.get('healthy_threshold')
params['unhealthy_threshold'] = module.params.get('unhealthy_threshold')
params['host_header'] = module.params.get('host_header')
params['timeout'] = module.params.get('timeout')
params['port'] = module.params.get('port', None)
params['state'] = module.params.get('state')
if not params['port']:
params['port'] = 80
if params['healthcheck_type'] == 'HTTPS':
params['port'] = 443
try:
_validate_healthcheck_params(params)
except Exception as e:
module.fail_json(msg=e.message, changed=False)
changed = False
json_output = {'state': params['state']}
healthcheck = get_healthcheck(client,
name=params['healthcheck_name'],
project_id=conn_params['project_id'],
resource_type=params['healthcheck_type'])
if not healthcheck:
if params['state'] == 'absent':
# Doesn't exist in GCE, and state==absent.
changed = False
module.fail_json(
msg="Cannot delete unknown healthcheck: %s" %
(params['healthcheck_name']))
else:
# Create
changed, json_output['healthcheck'] = create_healthcheck(client,
params=params,
project_id=conn_params['project_id'],
resource_type=params['healthcheck_type'])
elif params['state'] == 'absent':
# Delete
changed, json_output['healthcheck'] = delete_healthcheck(client,
name=params['healthcheck_name'],
project_id=conn_params['project_id'],
resource_type=params['healthcheck_type'])
else:
changed, json_output['healthcheck'] = update_healthcheck(client,
healthcheck=healthcheck,
params=params,
name=params['healthcheck_name'],
project_id=conn_params['project_id'],
resource_type=params['healthcheck_type'])
json_output['changed'] = changed
json_output.update(params)
module.exit_json(**json_output)
if __name__ == '__main__':
main()<|fim▁end|>
| |
<|file_name|>robotremoteserver.py<|end_file_name|><|fim▁begin|># Copyright 2008-2015 Nokia Networks
# Copyright 2016- Robot Framework Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from collections import Mapping
import inspect
import os
import re
import select
import signal
import sys
import threading
import traceback
if sys.version_info < (3,):
from SimpleXMLRPCServer import SimpleXMLRPCServer
from StringIO import StringIO
from xmlrpclib import Binary, ServerProxy
PY2, PY3 = True, False
else:
from io import StringIO
from xmlrpc.client import Binary, ServerProxy
from xmlrpc.server import SimpleXMLRPCServer
PY2, PY3 = False, True
unicode = str
long = int
__all__ = ['RobotRemoteServer', 'stop_remote_server', 'test_remote_server']
__version__ = 'devel'
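# Minimal usage sketch ("ExampleLibrary" is a hypothetical test library, not
# part of this module):
#
#     class ExampleLibrary(object):
#         def count_items(self, items):
#             """Return the number of items."""
#             return len(items)
#
#     if __name__ == '__main__':
#         RobotRemoteServer(ExampleLibrary(), port=8270)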
BINARY = re.compile('[\x00-\x08\x0B\x0C\x0E-\x1F]')
NON_ASCII = re.compile('[\x80-\xff]')
class RobotRemoteServer(object):
def __init__(self, library, host='127.0.0.1', port=8270, port_file=None,
allow_stop='DEPRECATED', serve=True, allow_remote_stop=True):
"""Configure and start-up remote server.
:param library: Test library instance or module to host.
:param host: Address to listen. Use ``'0.0.0.0'`` to listen
to all available interfaces.
:param port: Port to listen. Use ``0`` to select a free port
automatically. Can be given as an integer or as
a string.
:param port_file: File to write the port that is used. ``None`` means
no such file is written. Port file is created after
the server is started and removed automatically
after it has stopped.
:param allow_stop: DEPRECATED since version 1.1. Use
``allow_remote_stop`` instead.
:param serve: If ``True``, start the server automatically and
wait for it to be stopped.
:param allow_remote_stop: Allow/disallow stopping the server using
``Stop Remote Server`` keyword and
``stop_remote_server`` XML-RPC method.
"""
self._library = RemoteLibraryFactory(library)
self._server = StoppableXMLRPCServer(host, int(port))
self._register_functions(self._server)
self._port_file = port_file
self._allow_remote_stop = allow_remote_stop \
if allow_stop == 'DEPRECATED' else allow_stop
if serve:
self.serve()
def _register_functions(self, server):
server.register_function(self.get_keyword_names)
server.register_function(self.run_keyword)
server.register_function(self.get_keyword_arguments)
server.register_function(self.get_keyword_documentation)
server.register_function(self.stop_remote_server)
@property
def server_address(self):
"""Server address as a tuple ``(host, port)``."""
return self._server.server_address
@property
def server_port(self):
"""Server port as an integer.
If the initial given port is 0, also this property returns 0 until
the server is activated.
"""
return self._server.server_address[1]
def activate(self):
"""Bind port and activate the server but do not yet start serving.
        :return: Port number that the server is going to use. This is the
actual port to use, even if the initially given port is 0.
"""
return self._server.activate()
def serve(self, log=True):
"""Start the server and wait for it to be stopped.
:param log: When ``True``, print messages about start and stop to
the console.
Automatically activates the server if it is not activated already.
If this method is executed in the main thread, automatically registers
signals SIGINT, SIGTERM and SIGHUP to stop the server.
Using this method requires using ``serve=False`` when initializing the
server. Using ``serve=True`` is equal to first using ``serve=False``
and then calling this method.
In addition to signals, the server can be stopped with the ``Stop
        Remote Server`` keyword and the ``stop_remote_server`` XML-RPC method,
unless they are disabled when the server is initialized. If this method
is executed in a thread, then it is also possible to stop the server
using the :meth:`stop` method.
"""
self._server.activate()
self._announce_start(log, self._port_file)
with SignalHandler(self.stop):
self._server.serve()
self._announce_stop(log, self._port_file)
<|fim▁hole|> with open(port_file, 'w') as pf:
pf.write(str(self.server_port))
def _announce_stop(self, log, port_file):
self._log('stopped', log)
if port_file and os.path.exists(port_file):
os.remove(port_file)
def _log(self, action, log=True, warn=False):
if log:
address = '%s:%s' % self.server_address
if warn:
print('*WARN*', end=' ')
print('Robot Framework remote server at %s %s.' % (address, action))
def stop(self):
"""Stop server."""
self._server.stop()
# Exposed XML-RPC methods. Should they be moved to own class?
def stop_remote_server(self, log=True):
if not self._allow_remote_stop:
self._log('does not allow stopping', log, warn=True)
return False
self.stop()
return True
def get_keyword_names(self):
return self._library.get_keyword_names() + ['stop_remote_server']
def run_keyword(self, name, args, kwargs=None):
if name == 'stop_remote_server':
return KeywordRunner(self.stop_remote_server).run_keyword(args, kwargs)
return self._library.run_keyword(name, args, kwargs)
def get_keyword_arguments(self, name):
if name == 'stop_remote_server':
return []
return self._library.get_keyword_arguments(name)
def get_keyword_documentation(self, name):
if name == 'stop_remote_server':
return ('Stop the remote server unless stopping is disabled.\n\n'
'Return ``True/False`` depending was server stopped or not.')
return self._library.get_keyword_documentation(name)
def get_keyword_tags(self, name):
if name == 'stop_remote_server':
return []
return self._library.get_keyword_tags(name)
class StoppableXMLRPCServer(SimpleXMLRPCServer):
allow_reuse_address = True
def __init__(self, host, port):
SimpleXMLRPCServer.__init__(self, (host, port), logRequests=False,
bind_and_activate=False)
self._activated = False
self._stopper_thread = None
def activate(self):
if not self._activated:
self.server_bind()
self.server_activate()
self._activated = True
return self.server_address[1]
def serve(self):
self.activate()
try:
self.serve_forever()
except select.error:
# Signals seem to cause this error with Python 2.6.
if sys.version_info[:2] > (2, 6):
raise
self.server_close()
if self._stopper_thread:
self._stopper_thread.join()
self._stopper_thread = None
def stop(self):
self._stopper_thread = threading.Thread(target=self.shutdown)
self._stopper_thread.daemon = True
self._stopper_thread.start()
class SignalHandler(object):
def __init__(self, handler):
self._handler = lambda signum, frame: handler()
self._original = {}
def __enter__(self):
for name in 'SIGINT', 'SIGTERM', 'SIGHUP':
if hasattr(signal, name):
try:
orig = signal.signal(getattr(signal, name), self._handler)
except ValueError: # Not in main thread
return
self._original[name] = orig
def __exit__(self, *exc_info):
while self._original:
name, handler = self._original.popitem()
signal.signal(getattr(signal, name), handler)
def RemoteLibraryFactory(library):
if inspect.ismodule(library):
return StaticRemoteLibrary(library)
get_keyword_names = dynamic_method(library, 'get_keyword_names')
if not get_keyword_names:
return StaticRemoteLibrary(library)
run_keyword = dynamic_method(library, 'run_keyword')
if not run_keyword:
return HybridRemoteLibrary(library, get_keyword_names)
return DynamicRemoteLibrary(library, get_keyword_names, run_keyword)
def dynamic_method(library, underscore_name):
tokens = underscore_name.split('_')
camelcase_name = tokens[0] + ''.join(t.title() for t in tokens[1:])
for name in underscore_name, camelcase_name:
method = getattr(library, name, None)
if method and is_function_or_method(method):
return method
return None
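# For example, dynamic_method(library, 'get_keyword_names') returns the first
# of `library.get_keyword_names` / `library.getKeywordNames` that exists and
# is a plain function or method, or None if neither does.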
def is_function_or_method(item):
return inspect.isfunction(item) or inspect.ismethod(item)
class StaticRemoteLibrary(object):
def __init__(self, library):
self._library = library
self._names, self._robot_name_index = self._get_keyword_names(library)
def _get_keyword_names(self, library):
names = []
robot_name_index = {}
for name, kw in inspect.getmembers(library):
if is_function_or_method(kw):
if getattr(kw, 'robot_name', None):
names.append(kw.robot_name)
robot_name_index[kw.robot_name] = name
elif name[0] != '_':
names.append(name)
return names, robot_name_index
def get_keyword_names(self):
return self._names
def run_keyword(self, name, args, kwargs=None):
kw = self._get_keyword(name)
return KeywordRunner(kw).run_keyword(args, kwargs)
def _get_keyword(self, name):
if name in self._robot_name_index:
name = self._robot_name_index[name]
return getattr(self._library, name)
def get_keyword_arguments(self, name):
        if name == '__init__':
return []
kw = self._get_keyword(name)
args, varargs, kwargs, defaults = inspect.getargspec(kw)
if inspect.ismethod(kw):
args = args[1:] # drop 'self'
if defaults:
args, names = args[:-len(defaults)], args[-len(defaults):]
args += ['%s=%s' % (n, d) for n, d in zip(names, defaults)]
if varargs:
args.append('*%s' % varargs)
if kwargs:
args.append('**%s' % kwargs)
return args
def get_keyword_documentation(self, name):
if name == '__intro__':
source = self._library
elif name == '__init__':
source = self._get_init(self._library)
else:
source = self._get_keyword(name)
return inspect.getdoc(source) or ''
def _get_init(self, library):
if inspect.ismodule(library):
return None
init = getattr(library, '__init__', None)
return init if self._is_valid_init(init) else None
def _is_valid_init(self, init):
if not init:
return False
# https://bitbucket.org/pypy/pypy/issues/2462/
if 'PyPy' in sys.version:
if PY2:
return init.__func__ is not object.__init__.__func__
return init is not object.__init__
return is_function_or_method(init)
def get_keyword_tags(self, name):
keyword = self._get_keyword(name)
return getattr(keyword, 'robot_tags', [])
class HybridRemoteLibrary(StaticRemoteLibrary):
def __init__(self, library, get_keyword_names):
StaticRemoteLibrary.__init__(self, library)
self.get_keyword_names = get_keyword_names
class DynamicRemoteLibrary(HybridRemoteLibrary):
def __init__(self, library, get_keyword_names, run_keyword):
HybridRemoteLibrary.__init__(self, library, get_keyword_names)
self._run_keyword = run_keyword
self._supports_kwargs = self._get_kwargs_support(run_keyword)
self._get_keyword_arguments \
= dynamic_method(library, 'get_keyword_arguments')
self._get_keyword_documentation \
= dynamic_method(library, 'get_keyword_documentation')
self._get_keyword_tags \
= dynamic_method(library, 'get_keyword_tags')
def _get_kwargs_support(self, run_keyword):
spec = inspect.getargspec(run_keyword)
return len(spec.args) > 3 # self, name, args, kwargs=None
def run_keyword(self, name, args, kwargs=None):
args = [name, args, kwargs] if kwargs else [name, args]
return KeywordRunner(self._run_keyword).run_keyword(args)
def get_keyword_arguments(self, name):
if self._get_keyword_arguments:
return self._get_keyword_arguments(name)
if self._supports_kwargs:
return ['*varargs', '**kwargs']
return ['*varargs']
def get_keyword_documentation(self, name):
if self._get_keyword_documentation:
return self._get_keyword_documentation(name)
return ''
def get_keyword_tags(self, name):
if self._get_keyword_tags:
return self._get_keyword_tags(name)
return []
class KeywordRunner(object):
def __init__(self, keyword):
self._keyword = keyword
def run_keyword(self, args, kwargs=None):
args = self._handle_binary(args)
kwargs = self._handle_binary(kwargs or {})
result = KeywordResult()
with StandardStreamInterceptor() as interceptor:
try:
return_value = self._keyword(*args, **kwargs)
except Exception:
result.set_error(*sys.exc_info())
else:
try:
result.set_return(return_value)
except Exception:
result.set_error(*sys.exc_info()[:2])
else:
result.set_status('PASS')
result.set_output(interceptor.output)
return result.data
def _handle_binary(self, arg):
# No need to compare against other iterables or mappings because we
# only get actual lists and dicts over XML-RPC. Binary cannot be
# a dictionary key either.
if isinstance(arg, list):
return [self._handle_binary(item) for item in arg]
if isinstance(arg, dict):
return dict((key, self._handle_binary(arg[key])) for key in arg)
if isinstance(arg, Binary):
return arg.data
return arg
class StandardStreamInterceptor(object):
def __init__(self):
self.output = ''
self.origout = sys.stdout
self.origerr = sys.stderr
sys.stdout = StringIO()
sys.stderr = StringIO()
def __enter__(self):
return self
def __exit__(self, *exc_info):
stdout = sys.stdout.getvalue()
stderr = sys.stderr.getvalue()
close = [sys.stdout, sys.stderr]
sys.stdout = self.origout
sys.stderr = self.origerr
for stream in close:
stream.close()
if stdout and stderr:
if not stderr.startswith(('*TRACE*', '*DEBUG*', '*INFO*', '*HTML*',
'*WARN*', '*ERROR*')):
stderr = '*INFO* %s' % stderr
if not stdout.endswith('\n'):
stdout += '\n'
self.output = stdout + stderr
class KeywordResult(object):
_generic_exceptions = (AssertionError, RuntimeError, Exception)
def __init__(self):
self.data = {'status': 'FAIL'}
def set_error(self, exc_type, exc_value, exc_tb=None):
self.data['error'] = self._get_message(exc_type, exc_value)
if exc_tb:
self.data['traceback'] = self._get_traceback(exc_tb)
continuable = self._get_error_attribute(exc_value, 'CONTINUE')
if continuable:
self.data['continuable'] = continuable
fatal = self._get_error_attribute(exc_value, 'EXIT')
if fatal:
self.data['fatal'] = fatal
def _get_message(self, exc_type, exc_value):
name = exc_type.__name__
message = self._get_message_from_exception(exc_value)
if not message:
return name
if exc_type in self._generic_exceptions \
or getattr(exc_value, 'ROBOT_SUPPRESS_NAME', False):
return message
return '%s: %s' % (name, message)
def _get_message_from_exception(self, value):
# UnicodeError occurs if message contains non-ASCII bytes
try:
msg = unicode(value)
except UnicodeError:
msg = ' '.join(self._str(a, handle_binary=False) for a in value.args)
return self._handle_binary_result(msg)
def _get_traceback(self, exc_tb):
# Latest entry originates from this module so it can be removed
entries = traceback.extract_tb(exc_tb)[1:]
trace = ''.join(traceback.format_list(entries))
return 'Traceback (most recent call last):\n' + trace
def _get_error_attribute(self, exc_value, name):
return bool(getattr(exc_value, 'ROBOT_%s_ON_FAILURE' % name, False))
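    # Example (sketch): a library exception opting into continue-on-failure
    # semantics would carry the attribute checked above, e.g.
    #
    #     class SoftFailure(AssertionError):
    #         ROBOT_CONTINUE_ON_FAILURE = True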
def set_return(self, value):
value = self._handle_return_value(value)
if value != '':
self.data['return'] = value
def _handle_return_value(self, ret):
if isinstance(ret, (str, unicode, bytes)):
return self._handle_binary_result(ret)
if isinstance(ret, (int, long, float)):
return ret
if isinstance(ret, Mapping):
return dict((self._str(key), self._handle_return_value(value))
for key, value in ret.items())
try:
return [self._handle_return_value(item) for item in ret]
except TypeError:
return self._str(ret)
def _handle_binary_result(self, result):
if not self._contains_binary(result):
return result
if not isinstance(result, bytes):
try:
result = result.encode('ASCII')
except UnicodeError:
raise ValueError("Cannot represent %r as binary." % result)
# With IronPython Binary cannot be sent if it contains "real" bytes.
if sys.platform == 'cli':
result = str(result)
return Binary(result)
def _contains_binary(self, result):
if PY3:
return isinstance(result, bytes) or BINARY.search(result)
return (isinstance(result, bytes) and NON_ASCII.search(result) or
BINARY.search(result))
def _str(self, item, handle_binary=True):
if item is None:
return ''
if not isinstance(item, (str, unicode, bytes)):
item = unicode(item)
if handle_binary:
item = self._handle_binary_result(item)
return item
def set_status(self, status):
self.data['status'] = status
def set_output(self, output):
if output:
self.data['output'] = self._handle_binary_result(output)
def test_remote_server(uri, log=True):
"""Test is remote server running.
:param uri: Server address.
:param log: Log status message or not.
    :return: ``True`` if server is running, ``False`` otherwise.
"""
logger = print if log else lambda message: None
try:
ServerProxy(uri).get_keyword_names()
except Exception:
logger('No remote server running at %s.' % uri)
return False
logger('Remote server running at %s.' % uri)
return True
def stop_remote_server(uri, log=True):
"""Stop remote server unless server has disabled stopping.
:param uri: Server address.
:param log: Log status message or not.
    :return: ``True`` if server was stopped or it was not running in
the first place, ``False`` otherwise.
"""
logger = print if log else lambda message: None
if not test_remote_server(uri, log=False):
logger('No remote server running at %s.' % uri)
return True
logger('Stopping remote server at %s.' % uri)
if not ServerProxy(uri).stop_remote_server():
logger('Stopping not allowed!')
return False
return True
if __name__ == '__main__':
def parse_args(script, *args):
actions = {'stop': stop_remote_server, 'test': test_remote_server}
if not (0 < len(args) < 3) or args[0] not in actions:
sys.exit('Usage: %s {test|stop} [uri]' % os.path.basename(script))
uri = args[1] if len(args) == 2 else 'http://127.0.0.1:8270'
if '://' not in uri:
uri = 'http://' + uri
return actions[args[0]], uri
action, uri = parse_args(*sys.argv)
success = action(uri)
sys.exit(0 if success else 1)<|fim▁end|>
|
def _announce_start(self, log, port_file):
self._log('started', log)
if port_file:
|
<|file_name|>content_script.js<|end_file_name|><|fim▁begin|>console.log("VS: loading content_script.js..." + new Date());
<|fim▁hole|>// Check if the communication between page and background.js has broken.
var last_message_time = new Date().getTime();
new Promise((resolve) => setTimeout(resolve, 1000000)).then(() => {
var now = new Date().getTime();
if (now - last_message_time > 500000) {
    sendAlert('No message from background for at least 500s, force reloading');
reloadPage();
}
});
chrome.runtime.onMessage.addListener(function(request, sender, sendResponse) {
// Update timestamp first.
last_message_time = new Date().getTime();
console.log("VS: received data from content_script.js" + new Date());
console.log(request);
var action = request["action"];
takeAction(action, request);
});
var already_logging_in = false;
function takeAction(action, request) {
var url = window.location.href;
console.log("VS: Taking action: " + action + " in " + url);
if (action === ACTION_FOR_HOMEPAGE) {
homePage(request);
} else if (action === ACTION_FOR_LOGIN_PAGE) {
loginPage(request);
} else if (action === ACTION_FOR_ASYNC_LOGIN) {
loginPage(request);
} else if (action === ACTION_FOR_DASHBOARD_PAGE) {
dashboardPage(request);
} else {
// Other cases.
console.log("VS: unknown action:" + new Date());
console.log(action);
return;
}
}
function dashboardPage(request) {
console.log("VS: In dashboard page" + new Date());
//var val = $('[data-reactid=".0.0.3.0.0.0.0.0.1.0.0.1.0"]');
//if (val) {
// var ts = new Date().getTime();
// var amount = val.text();
// if (!amount) {
// console.log("Failed to parse data from html page. " + new Date());
// } else {
// saveGenerationData({'amount': amount, 'time': ts});
// }
//} else {
// sendAlert('Failed to read data from Dashboard page' + window.location.href);
//}
//console.log("VS: setting to reload page in 60s: " + new Date());
//window.setInterval(function() {
console.log("VS: polling account data" + new Date());
$.ajax({url: "/api/fusion/accounts"}).done(function(msg) {
console.log("VS: got account data" + new Date());
var j = msg;
if (typeof(j) === "object" && 'accounts' in j) {
console.log(j['accounts']);
var acct = j['accounts'][0]['account_no'];
var newUrl = '/api/fusion/accounts/' + acct;
console.log("VS: polling account detail data" + new Date());
$.ajax({url: newUrl}).done(function(msg) {
console.log("VS: got account detail data" + new Date());
var j = msg;
if (typeof(j) === "object" && 'energyToday' in j) {
var ts = new Date().getTime();
var amount = j['energyToday'] / 1000.0;
console.log("VS: saveing energy data" + new Date());
saveGenerationData({'time': ts, 'amount': amount});
return;
}
sendAlert("Failed parse detailed account info from AJAX for: " + textStatus);
reloadPage();
}).fail(function(jqXHR, textStatus) {
sendAlert("Request failed for loading detailed account info from AJAX for: " + textStatus);
reloadPage();
});
return;
}
sendAlert('Failed to parse account data');
reloadPage();
}).fail(function(jqXHR, textStatus) {
sendAlert("Request failed for loading accounts AJAX for: " + textStatus);
reloadPage();
});
//}, 60000);
}
function loginPage(request) {
if (request) {
asyncLogin(request);
} else {
chrome.runtime.sendMessage({"action": ACTION_FOR_ASYNC_LOGIN});
}
}
function homePage(request) {
var links = $('A');
for (var i in links) {
var link = links[i];
if (link.href == LOGIN_PAGE) {
link.click();
}
}
}
function asyncLogin(request) {
if (already_logging_in) {
console.log("VS: already logging in. This is possible, ignoring.." + new Date());
return;
}
already_logging_in = true;
console.log("VS: gettting new data to login" + new Date());
console.log(request);
  var context = request['data'];
if ($("INPUT[data-reactid='.0.0.0.0.0.1.1']").val(context.username).length > 0
&& $("INPUT[data-reactid='.0.0.0.0.0.2.0']").val(context.passwd).length > 0) {
$("BUTTON[data-reactid='.0.0.0.0.0.4.0']").click();
new Promise((resolve) => setTimeout(resolve, 100000)).then(() => {
sendAlert('Login failed for username' + context.username + ' and passwd: ' + context.passwd);
});
}
$('.email-input.js-initial-focus').val(context.username);
$('.js-password-field').val(context.passwd);
new Promise((resolve) => setTimeout(resolve, 1500)).then(() => {
$('button.submit').click();
});
}
var action = urlToAction(window.location.href);
console.log("VS: intercepted action:" + action + " at " + new Date());
if (action != '') {
takeAction(action, null);
}
console.log("VS: loaded:" + window.location.href);
console.log("VS: registered on load event here handler in content_script.js" + new Date());<|fim▁end|>
| |
<|file_name|>Catalogs.py<|end_file_name|><|fim▁begin|>#
# Catalogs.py -- Catalogs plugin for fits viewer
#
# Eric Jeschke ([email protected])
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
from ginga.misc import Bunch, Future
from ginga.gtkw import FitsImageCanvasTypesGtk as CanvasTypes
from ginga.gtkw import ColorBar
from ginga import GingaPlugin
from ginga import cmap, imap
from ginga import wcs
import gobject
import gtk
import pango
from ginga.gtkw import GtkHelp
class Catalogs(GingaPlugin.LocalPlugin):
def __init__(self, fv, fitsimage):
super(Catalogs, self).__init__(fv, fitsimage)
self.mycolor = 'skyblue'
self.color_cursor = 'red'
self.limit_stars_to_area = False
self.use_dss_channel = False
self.plot_max = 500
self.plot_limit = 100
self.plot_start = 0
# star list
self.starlist = []
# catalog listing
self.table = None
canvas = CanvasTypes.DrawingCanvas()
canvas.enable_draw(True)
canvas.set_drawtype('rectangle', color='cyan', linestyle='dash',
drawdims=True)
canvas.set_callback('button-release', self.btnup)
canvas.set_callback('draw-event', self.getarea)
canvas.setSurface(self.fitsimage)
self.canvas = canvas
self.layertag = 'catalog-canvas'
self.areatag = None
self.curstar = None
self.image_server_options = []
self.image_server_params = None
self.catalog_server_options = []
self.catalog_server_params = None
self.tooltips = self.fv.w.tooltips
def build_gui(self, container, future=None):
vbox1 = gtk.VBox()
self.msgFont = pango.FontDescription("Sans 12")
tw = gtk.TextView()
tw.set_wrap_mode(gtk.WRAP_WORD)
tw.set_left_margin(4)
tw.set_right_margin(4)
tw.set_editable(False)
tw.modify_font(self.msgFont)
self.tw = tw
fr = gtk.Frame(" Instructions ")
fr.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
fr.set_label_align(0.1, 0.5)
fr.add(tw)
vbox1.pack_start(fr, padding=4, fill=True, expand=False)
nb = gtk.Notebook()
#nb.set_group_id(group)
#nb.connect("create-window", self.detach_page, group)
nb.set_tab_pos(gtk.POS_BOTTOM)
nb.set_scrollable(True)
nb.set_show_tabs(True)
nb.set_show_border(False)
vbox1.pack_start(nb, padding=4, fill=True, expand=True)
vbox0 = gtk.VBox()
hbox = gtk.HBox(spacing=4)
vbox = gtk.VBox()
fr = gtk.Frame(" Image Server ")
fr.set_shadow_type(gtk.SHADOW_ETCHED_IN)
fr.set_label_align(0.5, 0.5)
fr.add(vbox)
captions = (('Server', 'xlabel'),
('@Server', 'combobox'),
('Use DSS channel', 'checkbutton'),
('Get Image', 'button'))
w, self.w = GtkHelp.build_info(captions)
self.w.nb = nb
self.w.get_image.connect('clicked', lambda w: self.getimage_cb())
self.w.use_dss_channel.set_active(self.use_dss_channel)
self.w.use_dss_channel.connect('toggled', self.use_dss_channel_cb)
vbox.pack_start(w, padding=4, fill=True, expand=False)
self.w.img_params = gtk.VBox()
vbox.pack_start(self.w.img_params, padding=4, fill=True, expand=False)
combobox = self.w.server
index = 0
self.image_server_options = self.fv.imgsrv.getServerNames(kind='image')
for name in self.image_server_options:
combobox.insert_text(index, name)
index += 1
index = 0
combobox.set_active(index)
combobox.sconnect('changed', self.setup_params_image)
if len(self.image_server_options) > 0:
self.setup_params_image(combobox, redo=False)
hbox.pack_start(fr, fill=True, expand=True)
vbox = gtk.VBox()
fr = gtk.Frame(" Catalog Server ")
fr.set_shadow_type(gtk.SHADOW_ETCHED_IN)
fr.set_label_align(0.5, 0.5)
fr.add(vbox)
captions = (('Server', 'xlabel'),
('@Server', 'combobox'),
('Limit stars to area', 'checkbutton'),
('Search', 'button'))
w, self.w2 = GtkHelp.build_info(captions)
self.w2.search.connect('clicked', lambda w: self.getcatalog_cb())
self.w2.limit_stars_to_area.set_active(self.limit_stars_to_area)
self.w2.limit_stars_to_area.connect('toggled', self.limit_area_cb)
vbox.pack_start(w, padding=4, fill=True, expand=False)
self.w2.cat_params = gtk.VBox()
vbox.pack_start(self.w2.cat_params, padding=4, fill=True, expand=False)
combobox = self.w2.server
index = 0
self.catalog_server_options = self.fv.imgsrv.getServerNames(kind='catalog')
for name in self.catalog_server_options:
combobox.insert_text(index, name)
index += 1
index = 0
combobox.set_active(index)
combobox.sconnect('changed', self.setup_params_catalog)
if len(self.catalog_server_options) > 0:
self.setup_params_catalog(combobox, redo=False)
hbox.pack_start(fr, fill=True, expand=True)
vbox0.pack_start(hbox, fill=True, expand=True)
btns = gtk.HButtonBox()
btns.set_layout(gtk.BUTTONBOX_CENTER)
btns.set_spacing(5)
btn = gtk.Button("Set parameters from entire image")
btn.connect('clicked', lambda w: self.setfromimage())
btns.add(btn)
vbox0.pack_start(btns, padding=4, fill=True, expand=False)
lbl = gtk.Label("Params")
self.w.params = vbox0
nb.append_page(vbox0, lbl)
vbox = gtk.VBox()
self.table = CatalogListing(self.logger, vbox)
hbox = gtk.HBox()
scale = gtk.HScrollbar()
adj = scale.get_adjustment()
adj.configure(0, 0, 0, 1, 10, self.plot_limit)
#scale.set_size_request(200, -1)
self.tooltips.set_tip(scale, "Choose subset of stars plotted")
#scale.set_update_policy(gtk.UPDATE_DELAYED)
scale.set_update_policy(gtk.UPDATE_CONTINUOUS)
self.w.plotgrp = scale
scale.connect('value-changed', self.plot_pct_cb)
hbox.pack_start(scale, padding=2, fill=True, expand=True)
sb = GtkHelp.SpinButton()
adj = sb.get_adjustment()
adj.configure(self.plot_limit, 10, self.plot_max, 10, 100, 100)
self.w.plotnum = sb
self.tooltips.set_tip(sb, "Adjust size of subset of stars plotted")
sb.connect('value-changed', self.plot_limit_cb)
hbox.pack_start(sb, padding=2, fill=False, expand=False)
vbox.pack_start(hbox, padding=0, fill=False, expand=False)
#vbox1.pack_start(vbox, padding=4, fill=True, expand=True)
lbl = gtk.Label("Listing")
self.w.listing = vbox
nb.append_page(vbox, lbl)
btns = gtk.HButtonBox()
btns.set_layout(gtk.BUTTONBOX_START)
btns.set_spacing(3)
btns.set_child_size(15, -1)
self.w.buttons = btns
btn = gtk.Button("Close")
btn.connect('clicked', lambda w: self.close())
btns.add(btn)
if future:
btn = gtk.Button('Ok')
btn.connect('clicked', lambda w: self.ok())
btns.add(btn)
btn = gtk.Button('Cancel')
btn.connect('clicked', lambda w: self.cancel())
btns.add(btn)
vbox1.pack_start(btns, padding=4, fill=True, expand=False)
vbox1.show_all()
container.pack_start(vbox1, padding=0, fill=True, expand=True)
def limit_area_cb(self, w):
self.limit_stars_to_area = w.get_active()
return True
def use_dss_channel_cb(self, w):
self.use_dss_channel = w.get_active()
return True
def plot_pct_cb(self, rng):
val = rng.get_value()
self.plot_start = int(val)
self.replot_stars()
return True
def _update_plotscroll(self):
num_stars = len(self.starlist)
if num_stars > 0:
adj = self.w.plotgrp.get_adjustment()
page_size = self.plot_limit
self.plot_start = min(self.plot_start, num_stars-1)
adj.configure(self.plot_start, 0, num_stars, 1,
page_size, page_size)
self.replot_stars()
def plot_limit_cb(self, rng):
val = rng.get_value()
self.plot_limit = int(val)
self._update_plotscroll()
return True
def set_message(self, msg):
buf = self.tw.get_buffer()
buf.set_text(msg)
self.tw.modify_font(self.msgFont)
def ok(self):
return self.close()
def cancel(self):
return self.close()
def update_gui(self):
self.fv.update_pending()
def close(self):
chname = self.fv.get_channelName(self.fitsimage)
self.fv.stop_operation_channel(chname, str(self))
return True
def _setup_params(self, obj, container):
params = obj.getParams()
captions = []
for key, bnch in params.items():
text = key
if bnch.has_key('label'):
text = bnch.label
captions.append((text, 'entry'))
# TODO: put RA/DEC first, and other stuff not in random orders
w, b = GtkHelp.build_info(captions)
# remove old widgets
children = container.get_children()
for child in children:
container.remove(child)
# add new widgets
container.pack_start(w, fill=False, expand=False)
container.show_all()
return b
def setup_params_image(self, combobox, redo=True):
index = combobox.get_active()
key = self.image_server_options[index]
# Get the parameter list and adjust the widget
obj = self.fv.imgsrv.getImageServer(key)
b = self._setup_params(obj, self.w.img_params)
self.image_server_params = b
if redo:
self.redo()
def setup_params_catalog(self, combobox, redo=True):
index = combobox.get_active()
key = self.catalog_server_options[index]
# Get the parameter list and adjust the widget
obj = self.fv.imgsrv.getCatalogServer(key)
b = self._setup_params(obj, self.w2.cat_params)
self.catalog_server_params = b
if redo:
self.redo()
def instructions(self):
self.set_message("""TBD.""")
def start(self, future=None):
self.instructions()
# start catalog operation
try:
obj = self.fitsimage.getObjectByTag(self.layertag)
except KeyError:
# Add canvas layer
self.fitsimage.add(self.canvas, tag=self.layertag)
# Raise the params tab
num = self.w.nb.page_num(self.w.params)
self.w.nb.set_current_page(num)
self.setfromimage()
self.resume()
def pause(self):
self.canvas.ui_setActive(False)
def resume(self):
self.canvas.ui_setActive(True)
#self.fv.showStatus("Draw a rectangle with the right mouse button")
def stop(self):
# stop catalog operation
self.clearAll()
# remove the canvas from the image
self.canvas.ui_setActive(False)
try:
self.fitsimage.deleteObjectByTag(self.layertag)
except:
pass
try:
self.table.close()
except:
pass
self.fv.showStatus("")
def redo(self):
obj = self.canvas.getObjectByTag(self.areatag)
if obj.kind != 'rectangle':
self.stop()
return True
try:
image = self.fitsimage.get_image()
# calculate center of bbox
wd = obj.x2 - obj.x1
dw = wd // 2
ht = obj.y2 - obj.y1
dh = ht // 2
ctr_x, ctr_y = obj.x1 + dw, obj.y1 + dh
ra_ctr, dec_ctr = image.pixtoradec(ctr_x, ctr_y, format='str')
# Calculate RA and DEC for the three points
# origination point
ra_org, dec_org = image.pixtoradec(obj.x1, obj.y1)
# destination point
ra_dst, dec_dst = image.pixtoradec(obj.x2, obj.y2)
# "heel" point making a right triangle
ra_heel, dec_heel = image.pixtoradec(obj.x1, obj.y2)
ht_deg = image.deltaStarsRaDecDeg(ra_org, dec_org, ra_heel, dec_heel)
wd_deg = image.deltaStarsRaDecDeg(ra_heel, dec_heel, ra_dst, dec_dst)
radius_deg = image.deltaStarsRaDecDeg(ra_heel, dec_heel, ra_dst, dec_dst)
# width and height are specified in arcmin
sgn, deg, mn, sec = wcs.degToDms(wd_deg)
wd = deg*60.0 + float(mn) + sec/60.0
sgn, deg, mn, sec = wcs.degToDms(ht_deg)
ht = deg*60.0 + float(mn) + sec/60.0
sgn, deg, mn, sec = wcs.degToDms(radius_deg)
radius = deg*60.0 + float(mn) + sec/60.0
except Exception, e:
self.fv.showStatus('BAD WCS: %s' % str(e))
return True
# Copy the image parameters out to the widget
d = { 'ra': ra_ctr, 'dec': dec_ctr, 'width': str(wd),
'height': ht, 'r': radius, 'r2': radius,
'r1': 0.0,
}
for bnch in (self.image_server_params,
self.catalog_server_params):
if bnch != None:
for key in bnch.keys():
if d.has_key(key):
bnch[key].set_text(str(d[key]))
return True
def btndown(self, canvas, button, data_x, data_y):
pass
def btnup(self, canvas, button, data_x, data_y):
if not (button == 0x1):
return
objs = self.canvas.getItemsAt(data_x, data_y)
for obj in objs:
if (obj.tag != None) and obj.tag.startswith('star'):
info = obj.get_data()
self.table.show_selection(info.star)
return True
def highlight_object(self, obj, tag, color, redraw=True):
x = obj.objects[0].x
y = obj.objects[0].y
delta = 10
radius = obj.objects[0].radius + delta
hilite = CanvasTypes.Circle(x, y, radius,
linewidth=4, color=color)
obj.add(hilite, tag=tag, redraw=redraw)
def highlight_objects(self, objs, tag, color, redraw=True):
for obj in objs:
self.highlight_object(obj, tag, color, redraw=False)
if redraw:
self.canvas.redraw()
def unhighlight_object(self, obj, tag):
# delete the highlight ring of the former cursor object
try:
#hilite = obj.objects[2]
obj.deleteObjectByTag(tag)
except:
pass
def highlight_cursor(self, obj):
if self.curstar:
bnch = self.curstar
if bnch.obj == obj:
# <-- we are already highlighting this object
return True
# delete the highlight ring of the former cursor object
self.unhighlight_object(bnch.obj, 'cursor')
self.highlight_object(obj, 'cursor', self.color_cursor)
self.curstar = Bunch.Bunch(obj=obj)
self.canvas.redraw()
def setfromimage(self):
x1, y1 = 0, 0
x2, y2 = self.fitsimage.get_data_size()
tag = self.canvas.add(CanvasTypes.Rectangle(x1, y1, x2, y2,
color=self.mycolor))
self.getarea(self.canvas, tag)
def getarea(self, canvas, tag):
obj = canvas.getObjectByTag(tag)
if obj.kind != 'rectangle':
return True
if self.areatag:
try:
canvas.deleteObjectByTag(self.areatag)
except:
pass
obj.color = self.mycolor
obj.linestyle = 'solid'
canvas.redraw(whence=3)
self.areatag = tag
# Raise the params tab
num = self.w.nb.page_num(self.w.params)
self.w.nb.set_current_page(num)
return self.redo()
def get_params(self, bnch):
params = {}
for key in bnch.keys():
params[key] = bnch[key].get_text()
return params
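    # For reference, get_params() flattens the Bunch of gtk.Entry widgets into a
    # plain dict of strings, e.g. (hypothetical values):
    #   {'ra': '12:34:56.7', 'dec': '-01:23:45', 'width': '30', 'height': '30'}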
def getimage_cb(self):
params = self.get_params(self.image_server_params)
index = self.w.server.get_active()
server = self.image_server_options[index]
self.clearAll()
if self.use_dss_channel:
chname = 'DSS'
if not self.fv.has_channel(chname):
self.fv.add_channel(chname)
else:
chname = self.fv.get_channelName(self.fitsimage)
self.fitsimage.onscreen_message("Querying image db...",
delay=1.0)
# Offload this network task to a non-gui thread
self.fv.nongui_do(self.getimage, server, params, chname)
def getimage(self, server, params, chname):
fitspath = self.fv.get_sky_image(server, params)
self.fv.load_file(fitspath, chname=chname)
# Update the GUI
        def getimage_update():
self.setfromimage()
self.redo()
self.fv.gui_do(getimage_update)
def getcatalog_cb(self):
params = self.get_params(self.catalog_server_params)
index = self.w2.server.get_active()
server = self.catalog_server_options[index]
obj = None
if self.limit_stars_to_area:
# Look for the defining object to filter stars
# If none, then use the visible image area
try:
obj = self.canvas.getObjectByTag(self.areatag)
except KeyError:
pass
self.reset()
self.fitsimage.onscreen_message("Querying catalog db...",
delay=1.0)
# Offload this network task to a non-gui thread
self.fv.nongui_do(self.getcatalog, server, params, obj)
def getcatalog(self, server, params, obj):
starlist, info = self.fv.get_catalog(server, params)
self.logger.debug("starlist=%s" % str(starlist))
starlist = self.filter_results(starlist, obj)
# Update the GUI
self.fv.gui_do(self.update_catalog, starlist, info)
def update_catalog(self, starlist, info):
self.starlist = starlist
self.table.show_table(self, info, starlist)
# Raise the listing tab
num = self.w.nb.page_num(self.w.listing)
self.w.nb.set_current_page(num)
self._update_plotscroll()
def filter_results(self, starlist, filter_obj):
image = self.fitsimage.get_image()
        # Filter stars by a containing object, if provided
if filter_obj:
stars = []
for star in starlist:
x, y = image.radectopix(star['ra_deg'], star['dec_deg'])
if filter_obj.contains(x, y):
stars.append(star)
starlist = stars
return starlist
def clear(self):
objects = self.canvas.getObjectsByTagpfx('star')
self.canvas.deleteObjects(objects)
def clearAll(self):
self.canvas.deleteAllObjects()
def reset(self):
#self.clear()
self.clearAll()
self.table.clear()
def plot_star(self, obj, image=None):
if not image:
image = self.fitsimage.get_image()
x, y = image.radectopix(obj['ra_deg'], obj['dec_deg'])
#print "STAR at %d,%d" % (x, y)
# TODO: auto-pick a decent radius
radius = 10
color = self.table.get_color(obj)
#print "color is %s" % str(color)
circle = CanvasTypes.Circle(x, y, radius, color=color)
point = CanvasTypes.Point(x, y, radius, color=color)
## What is this from?
if obj.has_key('pick'):
# Some objects returned from the Gen2 star catalog are marked
# with the attribute 'pick'. If present then we show the
# star with or without the cross, otherwise we always show the
# cross
if not obj['pick']:
star = CanvasTypes.Canvas(circle, point)
else:
star = CanvasTypes.Canvas(circle)
else:
star = CanvasTypes.Canvas(circle, point)
star.set_data(star=obj)
obj.canvobj = star
self.canvas.add(star, tagpfx='star', redraw=False)
def replot_stars(self, selected=[]):
self.clear()
image = self.fitsimage.get_image()
canvas = self.canvas
length = len(self.starlist)
if length <= self.plot_limit:
i = 0
else:
i = self.plot_start
i = int(min(i, length - self.plot_limit))
length = self.plot_limit
# remove references to old objects before this range
for j in xrange(i):
obj = self.starlist[j]
obj.canvobj = None
# plot stars in range
for j in xrange(length):
obj = self.starlist[i]
i += 1
self.plot_star(obj, image=image)
# remove references to old objects after this range
        for j in xrange(i, len(self.starlist)):
obj = self.starlist[j]
obj.canvobj = None
# plot stars in selected list even if they are not in the range
#for obj in selected:
selected = self.table.get_selected()
for obj in selected:
if (not obj.has_key('canvobj')) or (obj.canvobj == None):
self.plot_star(obj, image=image)
self.highlight_object(obj.canvobj, 'selected', 'skyblue')
canvas.redraw(whence=3)
def __str__(self):
return 'catalogs'
class CatalogListing(object):
def __init__(self, logger, container):
self.logger = logger
self.tag = None
self.mycolor = 'skyblue'
self.magmap = 'stairs8'
self.mag_max = 25.0
self.mag_min = 0.0
# keys: are name, ra, dec, mag, flag, b_r, preference, priority, dst
# TODO: automate this generation
self.columns = [('Name', 'name'),
('RA', 'ra'),
('DEC', 'dec'),
('Mag', 'mag'),
('Preference', 'preference'),
('Priority', 'priority'),
('Flag', 'flag'),
('b-r', 'b_r'),
('Dst', 'dst'),
('Description', 'description'),
]
self.cell_sort_funcs = []
for kwd, key in self.columns:
self.cell_sort_funcs.append(self._mksrtfnN(key))
self.catalog = None
self.cursor = 0
self.color_cursor = 'red'
self.color_selected = 'skyblue'
self.selection_mode = 'single'
self.selected = []
self.moving_cursor = False
self.btn = Bunch.Bunch()
self.mframe = container
vbox = gtk.VBox()
sw = gtk.ScrolledWindow()
sw.set_border_width(2)
sw.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
#self.font = pango.FontDescription('Monospace 10')
# create the TreeView
treeview = gtk.TreeView()
self.treeview = treeview
# create the TreeViewColumns to display the data
tvcolumn = [None] * len(self.columns)
for n in range(0, len(self.columns)):
cell = gtk.CellRendererText()
cell.set_padding(2, 0)
header, kwd = self.columns[n]
tvc = gtk.TreeViewColumn(header, cell)
tvc.set_spacing(4)
tvc.set_resizable(True)
tvc.connect('clicked', self.sort_cb, n)
tvc.set_clickable(True)
tvcolumn[n] = tvc
fn_data = self._mkcolfnN(kwd)
tvcolumn[n].set_cell_data_func(cell, fn_data)
treeview.append_column(tvcolumn[n])
sw.add(treeview)
self.treeview.connect('cursor-changed', self.select_star)
sw.show_all()
vbox.pack_start(sw, fill=True, expand=True)
self.cbar = ColorBar.ColorBar(self.logger)
self.cmap = cmap.get_cmap(self.magmap)
self.imap = imap.get_imap('ramp')
self.cbar.set_cmap(self.cmap)
self.cbar.set_imap(self.imap)
self.cbar.set_size_request(-1, 20)
vbox.pack_start(self.cbar, padding=4, fill=True, expand=False)
btns = gtk.HButtonBox()
btns.set_layout(gtk.BUTTONBOX_CENTER)
btns.set_spacing(5)
for name in ('Plot', 'Clear', #'Close'
):
btn = gtk.Button(name)
btns.add(btn)
self.btn[name.lower()] = btn
self.btn.plot.connect('clicked', lambda w: self.replot_stars())
self.btn.clear.connect('clicked', lambda w: self.clear())
#self.btn.close.connect('clicked', lambda w: self.close())
vbox.pack_start(btns, padding=4, fill=True, expand=False)
vbox.show_all()
self.mframe.pack_start(vbox, expand=True, fill=True)
self.mframe.show_all()
def _mkcolfnN(self, kwd):
def fn(column, cell, model, iter):
bnch = model.get_value(iter, 0)
cell.set_property('text', bnch[kwd])
return fn
def sort_cb(self, column, idx):
treeview = column.get_tree_view()
model = treeview.get_model()
model.set_sort_column_id(idx, gtk.SORT_ASCENDING)
fn = self.cell_sort_funcs[idx]
model.set_sort_func(idx, fn)
return True
def _mksrtfnN(self, key):
def fn(model, iter1, iter2):
bnch1 = model.get_value(iter1, 0)
bnch2 = model.get_value(iter2, 0)
val1, val2 = bnch1[key], bnch2[key]
if isinstance(val1, str):
val1 = val1.lower()
val2 = val2.lower()
res = cmp(val1, val2)
return res
return fn
def show_table(self, catalog, info, starlist):
self.starlist = starlist
self.catalog = catalog
# info is ignored, for now
#self.info = info
self.selected = []
# Update the starlist info
listmodel = gtk.ListStore(object)
for star in starlist:
# TODO: find mag range
listmodel.append([star])
self.treeview.set_model(listmodel)
self.cbar.set_range(self.mag_min, self.mag_max)
def get_color(self, obj):
try:
mag = obj['mag']
except:
return self.mycolor
# clip magnitude to the range we have defined
mag = max(self.mag_min, mag)
mag = min(self.mag_max, mag)
# calculate percentage in range
point = float(mag) / float(self.mag_max - self.mag_min)
# invert
#point = 1.0 - point
# map to a 8-bit color range
point = int(point * 255.0)
# Apply colormap.
rgbmap = self.cbar.get_rgbmap()
(r, g, b) = rgbmap.get_rgbval(point)
r = float(r) / 255.0
g = float(g) / 255.0
b = float(b) / 255.0
return (r, g, b)
def mark_selection(self, star, fromtable=False):
"""Mark or unmark a star as selected. (fromtable)==True if the
selection action came from the table (instead of the star plot).
"""
self.logger.debug("star selected name=%s ra=%s dec=%s" % (
star['name'], star['ra'], star['dec']))
if star in self.selected:
# Item is already selected--so unselect it
self.selected.remove(star)
try:
self._unselect_tv(star)
self.catalog.unhighlight_object(star.canvobj, 'selected')
except Exception, e:
self.logger.warn("Error unhilighting star: %s" % (str(e)))
return False
else:
if self.selection_mode == 'single':
# if selection mode is 'single' unselect any existing selections
for star2 in self.selected:
self.selected.remove(star2)
try:
self._unselect_tv(star2)
self.catalog.unhighlight_object(star2.canvobj, 'selected')
except Exception, e:
self.logger.warn("Error unhilighting star: %s" % (str(e)))
self.selected.append(star)
try:
# If this star is not plotted, then plot it
if (not star.has_key('canvobj')) or (star.canvobj == None):
self.catalog.plot_star(star)
self._select_tv(star, fromtable=fromtable)
self.catalog.highlight_object(star.canvobj, 'selected', 'skyblue')
except Exception, e:
self.logger.warn("Error hilighting star: %s" % (str(e)))
return True
def show_selection(self, star):
"""This method is called when the user clicks on a plotted star in the
fitsviewer.
"""
self.mark_selection(star)
def _select_tv(self, star, fromtable=False):
treeselection = self.treeview.get_selection()
star_idx = self.starlist.index(star)
treeselection.select_path(star_idx)
if not fromtable:
# If the user did not select the star from the table, scroll
# the table so they can see the selection
self.treeview.scroll_to_cell(star_idx, use_align=True, row_align=0.5)
def _unselect_tv(self, star):
treeselection = self.treeview.get_selection()
star_idx = self.starlist.index(star)
treeselection.unselect_path(star_idx)
def clear(self):
try:
self.catalog.clear()
except Exception, e:
# may not have generated a catalog yet
self.logger.warn("Error clearing star table: %s" % (str(e)))
def get_selected(self):
return self.selected
def replot_stars(self):
self.catalog.replot_stars()
canvobjs = map(lambda star: star.canvobj, self.selected)
self.catalog.highlight_objects(canvobjs, 'selected', 'skyblue')
def select_star(self, treeview):
"""This method is called when the user selects a star from the table.
"""
path, column = treeview.get_cursor()
model = treeview.get_model()
iter = model.get_iter(path)
star = model.get_value(iter, 0)
self.logger.debug("selected star: %s" % (str(star)))
self.mark_selection(star, fromtable=True)
return True
def motion_notify_event(self, widget, event):
if event.is_hint:
x, y, state = event.window.get_pointer()
else:
x, y, state = event.x, event.y, event.state
buf_x1, buf_y1 = self.tw.window_to_buffer_coords(gtk.TEXT_WINDOW_TEXT,
x, y)
txtiter = self.tw.get_iter_at_location(buf_x1, buf_y1)
line = txtiter.get_line()
star = self.line_to_object(line)
if star == self.cursor:
return True
self._mark_cursor(star)
try:
self.catalog.highlight_cursor(star.canvobj)
except:
pass
return True<|fim▁hole|># END<|fim▁end|>
| |
<|file_name|>lexer.re2c.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2018 Nagisa Sekiguchi
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "json.h"
// helper macro definition.
#define RET(k) \
do { \
kind = JSONTokenKind::k; \
goto END; \
} while (false)
#define REACH_EOS() \
do { \
if (this->isEnd()) { \
goto EOS; \
} else { \
ERROR(); \
} \
} while (false)
#define UPDATE_LN() this->updateNewline(startPos)
#define SKIP() goto INIT
#define ERROR() \
do { \
RET(INVALID); \
} while (false)
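
// For reference, RET(COMMA) expands to roughly:
//   kind = JSONTokenKind::COMMA; goto END;
// so every re2c rule below funnels through the single END label that fills the token.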
namespace ydsh::json {
JSONTokenKind JSONLexer::nextToken(Token &token) {
/*!re2c
re2c:define:YYCTYPE = "unsigned char";
re2c:define:YYCURSOR = this->cursor;
re2c:define:YYLIMIT = this->limit;
re2c:define:YYMARKER = this->marker;
re2c:define:YYCTXMARKER = this->ctxMarker;
re2c:define:YYFILL:naked = 1;
re2c:define:YYFILL@len = #;
re2c:define:YYFILL = "if(!this->fill(#)) { REACH_EOS(); }";<|fim▁hole|>
INT = "0" | [1-9] [0-9]*;
FRAC = "." [0-9]+;
      EXP = [eE] [+-]? [0-9]+;
UNESCAPED = [\x20\x21\x23-\x5B\x5D-\U0010FFFF];
HEX = [0-9a-fA-F];
CHAR = UNESCAPED | "\\" ( ["\\/bfnrt] | "u" HEX{4} );
*/
INIT:
unsigned int startPos = this->getPos();
JSONTokenKind kind = JSONTokenKind::INVALID;
/*!re2c
"true" { RET(TRUE); }
"false" { RET(FALSE); }
"null" { RET(NIL); }
"-"? INT FRAC? EXP? { RET(NUMBER); }
["] CHAR* ["] { RET(STRING); }
"[" { RET(ARRAY_OPEN); }
"]" { RET(ARRAY_CLOSE); }
"{" { RET(OBJECT_OPEN); }
"}" { RET(OBJECT_CLOSE); }
"," { RET(COMMA); }
":" { RET(COLON); }
[ \t\r\n]+ { UPDATE_LN(); SKIP(); }
"\000" { REACH_EOS(); }
* { RET(INVALID); }
*/
END:
token.pos = startPos;
token.size = this->getPos() - startPos;
return kind;
EOS:
token.pos = this->getUsedSize();
token.size = 0;
this->cursor--;
return JSONTokenKind::EOS;
}
} // namespace ydsh::json<|fim▁end|>
|
re2c:yyfill:enable = 0;
re2c:indent:top = 1;
re2c:indent:string = " ";
|
<|file_name|>peer_status.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package rafthttp
import (
"errors"
"fmt"
"sync"
"time"
"go.etcd.io/etcd/pkg/types"
"go.uber.org/zap"
)
type failureType struct {
source string
action string
}
type peerStatus struct {
lg *zap.Logger
local types.ID
id types.ID
mu sync.Mutex // protect variables below
active bool
since time.Time
}
func newPeerStatus(lg *zap.Logger, local, id types.ID) *peerStatus {
return &peerStatus{lg: lg, local: local, id: id}
}
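
// Sketch of assumed usage (callers live elsewhere in rafthttp):
//
//	st := newPeerStatus(lg, localID, peerID)
//	st.activate()
//	st.deactivate(failureType{source: "pipeline", action: "write"}, "broken pipe")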
func (s *peerStatus) activate() {
s.mu.Lock()
defer s.mu.Unlock()
if !s.active {
if s.lg != nil {
s.lg.Info("peer became active", zap.String("peer-id", s.id.String()))
} else {
plog.Infof("peer %s became active", s.id)
}
s.active = true
s.since = time.Now()
activePeers.WithLabelValues(s.local.String(), s.id.String()).Inc()
}
}
func (s *peerStatus) deactivate(failure failureType, reason string) {
s.mu.Lock()
defer s.mu.Unlock()
msg := fmt.Sprintf("failed to %s %s on %s (%s)", failure.action, s.id, failure.source, reason)
if s.active {<|fim▁hole|> plog.Errorf(msg)
plog.Infof("peer %s became inactive (message send to peer failed)", s.id)
}
s.active = false
s.since = time.Time{}
activePeers.WithLabelValues(s.local.String(), s.id.String()).Dec()
disconnectedPeers.WithLabelValues(s.local.String(), s.id.String()).Inc()
return
}
if s.lg != nil {
s.lg.Debug("peer deactivated again", zap.String("peer-id", s.id.String()), zap.Error(errors.New(msg)))
}
}
func (s *peerStatus) isActive() bool {
s.mu.Lock()
defer s.mu.Unlock()
return s.active
}
func (s *peerStatus) activeSince() time.Time {
s.mu.Lock()
defer s.mu.Unlock()
return s.since
}<|fim▁end|>
|
if s.lg != nil {
s.lg.Warn("peer became inactive (message send to peer failed)", zap.String("peer-id", s.id.String()), zap.Error(errors.New(msg)))
} else {
|
<|file_name|>test_ipv4_strategy.py<|end_file_name|><|fim▁begin|>import sys
import pytest
from netaddr import INET_PTON, AddrFormatError
from netaddr.strategy import ipv4
def test_strategy_ipv4():
b = '11000000.00000000.00000010.00000001'
i = 3221225985
t = (192, 0, 2, 1)
s = '192.0.2.1'
bin_val = '0b11000000000000000000001000000001'
assert ipv4.bits_to_int(b) == i
assert ipv4.int_to_bits(i) == b
assert ipv4.int_to_str(i) == s
assert ipv4.int_to_words(i) == t
assert ipv4.int_to_bin(i) == bin_val
assert ipv4.int_to_bin(i) == bin_val
assert ipv4.bin_to_int(bin_val) == i
assert ipv4.words_to_int(t) == i
assert ipv4.words_to_int(list(t)) == i
assert ipv4.valid_bin(bin_val)
@pytest.mark.skipif(sys.version_info > (3,), reason="requires python 2.x")
def test_strategy_ipv4_py2():
i = 3221225985
p = '\xc0\x00\x02\x01'
assert ipv4.int_to_packed(i) == p
assert ipv4.packed_to_int(p) == i
@pytest.mark.skipif(sys.version_info < (3,), reason="requires python 3.x")
def test_strategy_ipv4_py3():
i = 3221225985
p = b'\xc0\x00\x02\x01'
assert ipv4.int_to_packed(i) == p
assert ipv4.packed_to_int(p) == i
def test_strategy_inet_aton_behaviour():
# inet_aton() is a very old system call and is very permissive with
    # regard to what it assumes is a valid IPv4 address. Unfortunately, it
    # is also the most widely used by system software today,
# so netaddr supports this behaviour by default.
assert ipv4.str_to_int('127') == 127
assert ipv4.str_to_int('0x7f') == 127
assert ipv4.str_to_int('0177') == 127
assert ipv4.str_to_int('127.1') == 2130706433
assert ipv4.str_to_int('0x7f.1') == 2130706433
assert ipv4.str_to_int('0177.1') == 2130706433
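    # i.e. (working the arithmetic): 0o177 == 0x7f == 127, so each of the three
    # forms above maps to (127 << 24) | 1 == 2130706433.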
assert ipv4.str_to_int('127.0.0.1') == 2130706433<|fim▁hole|>def test_strategy_inet_pton_behaviour():
# inet_pton() is a newer system call that supports both IPv4 and IPv6.
# It is a lot more strict about what it deems to be a valid IPv4 address
# and doesn't support many of the features found in inet_aton() such as
# support for non- decimal octets, partial numbers of octets, etc.
with pytest.raises(AddrFormatError):
ipv4.str_to_int('127', flags=INET_PTON)
with pytest.raises(AddrFormatError):
ipv4.str_to_int('0x7f', flags=INET_PTON)
with pytest.raises(AddrFormatError):
ipv4.str_to_int('0177', flags=INET_PTON)
with pytest.raises(AddrFormatError):
ipv4.str_to_int('127.1', flags=INET_PTON)
with pytest.raises(AddrFormatError):
ipv4.str_to_int('0x7f.1', flags=INET_PTON)
with pytest.raises(AddrFormatError):
ipv4.str_to_int('0177.1', flags=INET_PTON)
assert ipv4.str_to_int('127.0.0.1', flags=INET_PTON) == 2130706433<|fim▁end|>
| |
<|file_name|>IWebRequestPayload.ts<|end_file_name|><|fim▁begin|>export const WebRequestType = 'web-request';
export interface IWebRequestPayload {
protocol: {
identifier: string;
version: string;
};
url: string;
method: string;<|fim▁hole|> body: {
size: number;
form: { [key: string]: string };
files: { fileName: string, contentType: string, contentLength: number }[];
content: string;
encoding: string;
isTruncated: boolean;
};
startTime: string;
isAjax: boolean;
clientIp: string;
}<|fim▁end|>
|
headers: { [key: string]: string };
|
<|file_name|>episodes.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
'''
Exodus Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from resources.lib.modules import trakt
from resources.lib.modules import cleantitle
from resources.lib.modules import cleangenre
from resources.lib.modules import control
from resources.lib.modules import client
from resources.lib.modules import cache
from resources.lib.modules import playcount
from resources.lib.modules import workers
from resources.lib.modules import views
from resources.lib.modules import utils
import os,sys,re,json,zipfile,StringIO,urllib,urllib2,urlparse,datetime
params = dict(urlparse.parse_qsl(sys.argv[2].replace('?',''))) if len(sys.argv) > 1 else dict()
action = params.get('action')
control.moderator()
class seasons:
def __init__(self):
self.list = []
self.lang = control.apiLanguage()['tvdb']
self.showunaired = control.setting('showunaired') or 'true'
self.datetime = (datetime.datetime.utcnow() - datetime.timedelta(hours = 5))
self.today_date = (self.datetime).strftime('%Y-%m-%d')
self.tvdb_key = 'MUQ2MkYyRjkwMDMwQzQ0NA=='
self.tvdb_info_link = 'http://thetvdb.com/api/%s/series/%s/all/%s.zip' % (self.tvdb_key.decode('base64'), '%s', '%s')
self.tvdb_by_imdb = 'http://thetvdb.com/api/GetSeriesByRemoteID.php?imdbid=%s'
self.tvdb_by_query = 'http://thetvdb.com/api/GetSeries.php?seriesname=%s'
self.tvdb_image = 'http://thetvdb.com/banners/'
self.tvdb_poster = 'http://thetvdb.com/banners/_cache/'
def get(self, tvshowtitle, year, imdb, tvdb, idx=True, create_directory=True):
if control.window.getProperty('PseudoTVRunning') == 'True':
return episodes().get(tvshowtitle, year, imdb, tvdb)
if idx == True:
self.list = cache.get(self.tvdb_list, 24, tvshowtitle, year, imdb, tvdb, self.lang)
if create_directory == True: self.seasonDirectory(self.list)
return self.list
else:
self.list = self.tvdb_list(tvshowtitle, year, imdb, tvdb, 'en')
return self.list
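    # Example call (hypothetical ids):
    #   seasons().get('Some Show', '2010', 'tt0000000', '12345')
    # returns the cached season list and, with create_directory=True (the
    # default), also renders it as a Kodi directory.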
def tvdb_list(self, tvshowtitle, year, imdb, tvdb, lang, limit=''):
try:
if imdb == '0':
try:
imdb = trakt.SearchTVShow(tvshowtitle, year, full=False)[0]
imdb = imdb.get('show', '0')
imdb = imdb.get('ids', {}).get('imdb', '0')
imdb = 'tt' + re.sub('[^0-9]', '', str(imdb))
if not imdb: imdb = '0'
except:
imdb = '0'
if tvdb == '0' and not imdb == '0':
url = self.tvdb_by_imdb % imdb
result = client.request(url, timeout='10')
try: tvdb = client.parseDOM(result, 'seriesid')[0]
except: tvdb = '0'
try: name = client.parseDOM(result, 'SeriesName')[0]
except: name = '0'
dupe = re.compile('[***]Duplicate (\d*)[***]').findall(name)
if len(dupe) > 0: tvdb = str(dupe[0])
if tvdb == '': tvdb = '0'
if tvdb == '0':
url = self.tvdb_by_query % (urllib.quote_plus(tvshowtitle))
years = [str(year), str(int(year)+1), str(int(year)-1)]
tvdb = client.request(url, timeout='10')
tvdb = re.sub(r'[^\x00-\x7F]+', '', tvdb)
tvdb = client.replaceHTMLCodes(tvdb)
tvdb = client.parseDOM(tvdb, 'Series')
tvdb = [(x, client.parseDOM(x, 'SeriesName'), client.parseDOM(x, 'FirstAired')) for x in tvdb]
tvdb = [(x, x[1][0], x[2][0]) for x in tvdb if len(x[1]) > 0 and len(x[2]) > 0]
tvdb = [x for x in tvdb if cleantitle.get(tvshowtitle) == cleantitle.get(x[1])]
tvdb = [x[0][0] for x in tvdb if any(y in x[2] for y in years)][0]
tvdb = client.parseDOM(tvdb, 'seriesid')[0]
if tvdb == '': tvdb = '0'
except:
return
try:
if tvdb == '0': return
url = self.tvdb_info_link % (tvdb, 'en')
data = urllib2.urlopen(url, timeout=30).read()
zip = zipfile.ZipFile(StringIO.StringIO(data))
result = zip.read('%s.xml' % 'en')
artwork = zip.read('banners.xml')
zip.close()
dupe = client.parseDOM(result, 'SeriesName')[0]
dupe = re.compile('[***]Duplicate (\d*)[***]').findall(dupe)
if len(dupe) > 0:
tvdb = str(dupe[0]).encode('utf-8')
url = self.tvdb_info_link % (tvdb, 'en')
data = urllib2.urlopen(url, timeout=30).read()
zip = zipfile.ZipFile(StringIO.StringIO(data))
result = zip.read('%s.xml' % 'en')
artwork = zip.read('banners.xml')
zip.close()
if not lang == 'en':
url = self.tvdb_info_link % (tvdb, lang)
data = urllib2.urlopen(url, timeout=30).read()
zip = zipfile.ZipFile(StringIO.StringIO(data))
result2 = zip.read('%s.xml' % lang)
zip.close()
else:
result2 = result
artwork = artwork.split('<Banner>')
artwork = [i for i in artwork if '<Language>en</Language>' in i and '<BannerType>season</BannerType>' in i]
artwork = [i for i in artwork if not 'seasonswide' in re.findall('<BannerPath>(.+?)</BannerPath>', i)[0]]
result = result.split('<Episode>')
result2 = result2.split('<Episode>')
item = result[0] ; item2 = result2[0]
episodes = [i for i in result if '<EpisodeNumber>' in i]
episodes = [i for i in episodes if not '<SeasonNumber>0</SeasonNumber>' in i]
episodes = [i for i in episodes if not '<EpisodeNumber>0</EpisodeNumber>' in i]
seasons = [i for i in episodes if '<EpisodeNumber>1</EpisodeNumber>' in i]
locals = [i for i in result2 if '<EpisodeNumber>' in i]
result = '' ; result2 = ''
if limit == '':
episodes = []
elif limit == '-1':
seasons = []
else:
episodes = [i for i in episodes if '<SeasonNumber>%01d</SeasonNumber>' % int(limit) in i]
seasons = []
try: poster = client.parseDOM(item, 'poster')[0]
except: poster = ''
if not poster == '': poster = self.tvdb_image + poster
else: poster = '0'
poster = client.replaceHTMLCodes(poster)
poster = poster.encode('utf-8')
try: banner = client.parseDOM(item, 'banner')[0]
except: banner = ''
if not banner == '': banner = self.tvdb_image + banner
else: banner = '0'
banner = client.replaceHTMLCodes(banner)
banner = banner.encode('utf-8')
try: fanart = client.parseDOM(item, 'fanart')[0]
except: fanart = ''
if not fanart == '': fanart = self.tvdb_image + fanart
else: fanart = '0'
fanart = client.replaceHTMLCodes(fanart)
fanart = fanart.encode('utf-8')
if not poster == '0': pass
elif not fanart == '0': poster = fanart
elif not banner == '0': poster = banner
if not banner == '0': pass
elif not fanart == '0': banner = fanart
elif not poster == '0': banner = poster
try: status = client.parseDOM(item, 'Status')[0]
except: status = ''
if status == '': status = 'Ended'
status = client.replaceHTMLCodes(status)
status = status.encode('utf-8')
try: studio = client.parseDOM(item, 'Network')[0]
except: studio = ''
if studio == '': studio = '0'
studio = client.replaceHTMLCodes(studio)
studio = studio.encode('utf-8')
try: genre = client.parseDOM(item, 'Genre')[0]
except: genre = ''
genre = [x for x in genre.split('|') if not x == '']
genre = ' / '.join(genre)
if genre == '': genre = '0'
genre = client.replaceHTMLCodes(genre)
genre = genre.encode('utf-8')
try: duration = client.parseDOM(item, 'Runtime')[0]
except: duration = ''
if duration == '': duration = '0'
duration = client.replaceHTMLCodes(duration)
duration = duration.encode('utf-8')
try: rating = client.parseDOM(item, 'Rating')[0]
except: rating = ''
if rating == '': rating = '0'
rating = client.replaceHTMLCodes(rating)
rating = rating.encode('utf-8')
try: votes = client.parseDOM(item, 'RatingCount')[0]
except: votes = '0'
if votes == '': votes = '0'
votes = client.replaceHTMLCodes(votes)
votes = votes.encode('utf-8')
try: mpaa = client.parseDOM(item, 'ContentRating')[0]
except: mpaa = ''
if mpaa == '': mpaa = '0'
mpaa = client.replaceHTMLCodes(mpaa)
mpaa = mpaa.encode('utf-8')
try: cast = client.parseDOM(item, 'Actors')[0]
except: cast = ''
cast = [x for x in cast.split('|') if not x == '']
try: cast = [(x.encode('utf-8'), '') for x in cast]
except: cast = []
try: label = client.parseDOM(item2, 'SeriesName')[0]
except: label = '0'
label = client.replaceHTMLCodes(label)
label = label.encode('utf-8')
try: plot = client.parseDOM(item2, 'Overview')[0]
except: plot = ''
if plot == '': plot = '0'
plot = client.replaceHTMLCodes(plot)
plot = plot.encode('utf-8')
unaired = ''
except:
pass
for item in seasons:
try:
premiered = client.parseDOM(item, 'FirstAired')[0]
if premiered == '' or '-00' in premiered: premiered = '0'
premiered = client.replaceHTMLCodes(premiered)
premiered = premiered.encode('utf-8')
if status == 'Ended': pass
elif premiered == '0': raise Exception()
elif int(re.sub('[^0-9]', '', str(premiered))) > int(re.sub('[^0-9]', '', str(self.today_date))):
unaired = 'true'
if self.showunaired != 'true': raise Exception()
season = client.parseDOM(item, 'SeasonNumber')[0]
season = '%01d' % int(season)
season = season.encode('utf-8')
thumb = [i for i in artwork if client.parseDOM(i, 'Season')[0] == season]
try: thumb = client.parseDOM(thumb[0], 'BannerPath')[0]
except: thumb = ''
if not thumb == '': thumb = self.tvdb_image + thumb
else: thumb = '0'
thumb = client.replaceHTMLCodes(thumb)
thumb = thumb.encode('utf-8')
if thumb == '0': thumb = poster
self.list.append({'season': season, 'tvshowtitle': tvshowtitle, 'label': label, 'year': year, 'premiered': premiered, 'status': status, 'studio': studio, 'genre': genre, 'duration': duration, 'rating': rating, 'votes': votes, 'mpaa': mpaa, 'cast': cast, 'plot': plot, 'imdb': imdb, 'tvdb': tvdb, 'poster': poster, 'banner': banner, 'fanart': fanart, 'thumb': thumb, 'unaired': unaired})
except:
pass
for item in episodes:
try:
premiered = client.parseDOM(item, 'FirstAired')[0]
if premiered == '' or '-00' in premiered: premiered = '0'
premiered = client.replaceHTMLCodes(premiered)
premiered = premiered.encode('utf-8')
if status == 'Ended': pass
elif premiered == '0': raise Exception()
elif int(re.sub('[^0-9]', '', str(premiered))) > int(re.sub('[^0-9]', '', str(self.today_date))):
unaired = 'true'
if self.showunaired != 'true': raise Exception()
season = client.parseDOM(item, 'SeasonNumber')[0]
season = '%01d' % int(season)
season = season.encode('utf-8')
episode = client.parseDOM(item, 'EpisodeNumber')[0]
episode = re.sub('[^0-9]', '', '%01d' % int(episode))
episode = episode.encode('utf-8')
title = client.parseDOM(item, 'EpisodeName')[0]
if title == '': title = '0'
title = client.replaceHTMLCodes(title)
title = title.encode('utf-8')
try: thumb = client.parseDOM(item, 'filename')[0]
except: thumb = ''
if not thumb == '': thumb = self.tvdb_image + thumb
else: thumb = '0'
thumb = client.replaceHTMLCodes(thumb)
thumb = thumb.encode('utf-8')
if not thumb == '0': pass
elif not fanart == '0': thumb = fanart.replace(self.tvdb_image, self.tvdb_poster)
elif not poster == '0': thumb = poster
try: rating = client.parseDOM(item, 'Rating')[0]
except: rating = ''
if rating == '': rating = '0'
rating = client.replaceHTMLCodes(rating)
rating = rating.encode('utf-8')
try: director = client.parseDOM(item, 'Director')[0]
except: director = ''
director = [x for x in director.split('|') if not x == '']
director = ' / '.join(director)
if director == '': director = '0'
director = client.replaceHTMLCodes(director)
director = director.encode('utf-8')
try: writer = client.parseDOM(item, 'Writer')[0]
except: writer = ''
writer = [x for x in writer.split('|') if not x == '']
writer = ' / '.join(writer)
if writer == '': writer = '0'
writer = client.replaceHTMLCodes(writer)
writer = writer.encode('utf-8')
try:
local = client.parseDOM(item, 'id')[0]
local = [x for x in locals if '<id>%s</id>' % str(local) in x][0]
except:
local = item
label = client.parseDOM(local, 'EpisodeName')[0]
if label == '': label = '0'
label = client.replaceHTMLCodes(label)
label = label.encode('utf-8')
try: episodeplot = client.parseDOM(local, 'Overview')[0]
except: episodeplot = ''
if episodeplot == '': episodeplot = '0'
if episodeplot == '0': episodeplot = plot
episodeplot = client.replaceHTMLCodes(episodeplot)
try: episodeplot = episodeplot.encode('utf-8')
except: pass
self.list.append({'title': title, 'label': label, 'season': season, 'episode': episode, 'tvshowtitle': tvshowtitle, 'year': year, 'premiered': premiered, 'status': status, 'studio': studio, 'genre': genre, 'duration': duration, 'rating': rating, 'votes': votes, 'mpaa': mpaa, 'director': director, 'writer': writer, 'cast': cast, 'plot': episodeplot, 'imdb': imdb, 'tvdb': tvdb, 'poster': poster, 'banner': banner, 'fanart': fanart, 'thumb': thumb, 'unaired': unaired})
except:
pass
return self.list
def seasonDirectory(self, items):
if items == None or len(items) == 0: control.idle() ; sys.exit()
sysaddon = sys.argv[0]
syshandle = int(sys.argv[1])
addonPoster, addonBanner = control.addonPoster(), control.addonBanner()
addonFanart, settingFanart = control.addonFanart(), control.setting('fanart')
traktCredentials = trakt.getTraktCredentialsInfo()
try: isOld = False ; control.item().getArt('type')
except: isOld = True
try: indicators = playcount.getSeasonIndicators(items[0]['imdb'])
except: pass
watchedMenu = control.lang(32068).encode('utf-8') if trakt.getTraktIndicatorsInfo() == True else control.lang(32066).encode('utf-8')
unwatchedMenu = control.lang(32069).encode('utf-8') if trakt.getTraktIndicatorsInfo() == True else control.lang(32067).encode('utf-8')
queueMenu = control.lang(32065).encode('utf-8')
traktManagerMenu = control.lang(32070).encode('utf-8')
labelMenu = control.lang(32055).encode('utf-8')
playRandom = control.lang(32535).encode('utf-8')
addToLibrary = control.lang(32551).encode('utf-8')
for i in items:
try:
label = '%s %s' % (labelMenu, i['season'])
try:
if i['unaired'] == 'true':
label = '[COLOR darkred][I]%s[/I][/COLOR]' % label
except:
pass
systitle = sysname = urllib.quote_plus(i['tvshowtitle'])
imdb, tvdb, year, season = i['imdb'], i['tvdb'], i['year'], i['season']
meta = dict((k,v) for k, v in i.iteritems() if not v == '0')
meta.update({'code': imdb, 'imdbnumber': imdb, 'imdb_id': imdb})
meta.update({'tvdb_id': tvdb})
meta.update({'mediatype': 'tvshow'})
meta.update({'trailer': '%s?action=trailer&name=%s' % (sysaddon, sysname)})
if not 'duration' in i: meta.update({'duration': '60'})
elif i['duration'] == '0': meta.update({'duration': '60'})
try: meta.update({'duration': str(int(meta['duration']) * 60)})
except: pass
try: meta.update({'genre': cleangenre.lang(meta['genre'], self.lang)})
except: pass
try: meta.update({'tvshowtitle': i['label']})
except: pass
try:
if season in indicators: meta.update({'playcount': 1, 'overlay': 7})
else: meta.update({'playcount': 0, 'overlay': 6})
except:
pass
url = '%s?action=episodes&tvshowtitle=%s&year=%s&imdb=%s&tvdb=%s&season=%s' % (sysaddon, systitle, year, imdb, tvdb, season)
cm = []
cm.append((playRandom, 'RunPlugin(%s?action=random&rtype=episode&tvshowtitle=%s&year=%s&imdb=%s&tvdb=%s&season=%s)' % (sysaddon, urllib.quote_plus(systitle), urllib.quote_plus(year), urllib.quote_plus(imdb), urllib.quote_plus(tvdb), urllib.quote_plus(season))))
cm.append((queueMenu, 'RunPlugin(%s?action=queueItem)' % sysaddon))
cm.append((watchedMenu, 'RunPlugin(%s?action=tvPlaycount&name=%s&imdb=%s&tvdb=%s&season=%s&query=7)' % (sysaddon, systitle, imdb, tvdb, season)))
cm.append((unwatchedMenu, 'RunPlugin(%s?action=tvPlaycount&name=%s&imdb=%s&tvdb=%s&season=%s&query=6)' % (sysaddon, systitle, imdb, tvdb, season)))
if traktCredentials == True:
cm.append((traktManagerMenu, 'RunPlugin(%s?action=traktManager&name=%s&tvdb=%s&content=tvshow)' % (sysaddon, sysname, tvdb)))
if isOld == True:
cm.append((control.lang2(19033).encode('utf-8'), 'Action(Info)'))
cm.append((addToLibrary, 'RunPlugin(%s?action=tvshowToLibrary&tvshowtitle=%s&year=%s&imdb=%s&tvdb=%s)' % (sysaddon, systitle, year, imdb, tvdb)))
item = control.item(label=label)
art = {}
if 'thumb' in i and not i['thumb'] == '0':
art.update({'icon': i['thumb'], 'thumb': i['thumb'], 'poster': i['thumb']})
elif 'poster' in i and not i['poster'] == '0':
art.update({'icon': i['poster'], 'thumb': i['poster'], 'poster': i['poster']})
else:
art.update({'icon': addonPoster, 'thumb': addonPoster, 'poster': addonPoster})
if 'banner' in i and not i['banner'] == '0':
art.update({'banner': i['banner']})
elif 'fanart' in i and not i['fanart'] == '0':
art.update({'banner': i['fanart']})
else:
art.update({'banner': addonBanner})
if settingFanart == 'true' and 'fanart' in i and not i['fanart'] == '0':
item.setProperty('Fanart_Image', i['fanart'])
elif not addonFanart == None:
item.setProperty('Fanart_Image', addonFanart)
item.setArt(art)
item.addContextMenuItems(cm)
item.setInfo(type='Video', infoLabels = meta)
video_streaminfo = {'codec': 'h264'}
item.addStreamInfo('video', video_streaminfo)
control.addItem(handle=syshandle, url=url, listitem=item, isFolder=True)
except:
pass
try: control.property(syshandle, 'showplot', items[0]['plot'])
except: pass
control.content(syshandle, 'seasons')
control.directory(syshandle, cacheToDisc=True)
views.setView('seasons', {'skin.estuary': 55, 'skin.confluence': 500})
class episodes:
def __init__(self):
self.list = []
self.trakt_link = 'http://api.trakt.tv'
self.tvmaze_link = 'http://api.tvmaze.com'
self.tvdb_key = 'MUQ2MkYyRjkwMDMwQzQ0NA=='
self.datetime = (datetime.datetime.utcnow() - datetime.timedelta(hours = 5))
self.systime = (self.datetime).strftime('%Y%m%d%H%M%S%f')
self.today_date = (self.datetime).strftime('%Y-%m-%d')
self.trakt_user = control.setting('trakt.user').strip()
self.lang = control.apiLanguage()['tvdb']
self.showunaired = control.setting('showunaired') or 'true'
self.tvdb_info_link = 'http://thetvdb.com/api/%s/series/%s/all/%s.zip' % (self.tvdb_key.decode('base64'), '%s', '%s')
self.tvdb_image = 'http://thetvdb.com/banners/'
self.tvdb_poster = 'http://thetvdb.com/banners/_cache/'
self.added_link = 'http://api.tvmaze.com/schedule'
#https://api.trakt.tv/calendars/all/shows/date[30]/31 #use this for new episodes?
#self.mycalendar_link = 'http://api.trakt.tv/calendars/my/shows/date[29]/60/'
self.mycalendar_link = 'http://api.trakt.tv/calendars/my/shows/date[30]/31/' #go back 30 and show all shows aired until tomorrow
self.trakthistory_link = 'http://api.trakt.tv/users/me/history/shows?limit=300'
self.progress_link = 'http://api.trakt.tv/users/me/watched/shows'
self.hiddenprogress_link = 'http://api.trakt.tv/users/hidden/progress_watched?limit=1000&type=show'
self.calendar_link = 'http://api.tvmaze.com/schedule?date=%s'
self.traktlists_link = 'http://api.trakt.tv/users/me/lists'
self.traktlikedlists_link = 'http://api.trakt.tv/users/likes/lists?limit=1000000'
self.traktlist_link = 'http://api.trakt.tv/users/%s/lists/%s/items'
def get(self, tvshowtitle, year, imdb, tvdb, season=None, episode=None, idx=True, create_directory=True):
try:
if idx == True:
if season == None and episode == None:
self.list = cache.get(seasons().tvdb_list, 1, tvshowtitle, year, imdb, tvdb, self.lang, '-1')
elif episode == None:
self.list = cache.get(seasons().tvdb_list, 1, tvshowtitle, year, imdb, tvdb, self.lang, season)
else:
self.list = cache.get(seasons().tvdb_list, 1, tvshowtitle, year, imdb, tvdb, self.lang, '-1')
num = [x for x,y in enumerate(self.list) if y['season'] == str(season) and y['episode'] == str(episode)][-1]
self.list = [y for x,y in enumerate(self.list) if x >= num]
if create_directory == True: self.episodeDirectory(self.list)
return self.list
else:
self.list = seasons().tvdb_list(tvshowtitle, year, imdb, tvdb, 'en', '-1')
return self.list
except:
pass
def calendar(self, url):
try:
try: url = getattr(self, url + '_link')
except: pass
if self.trakt_link in url and url == self.progress_link:
self.blist = cache.get(self.trakt_progress_list, 720, url, self.trakt_user, self.lang)
self.list = []
self.list = cache.get(self.trakt_progress_list, 0, url, self.trakt_user, self.lang)
elif self.trakt_link in url and url == self.mycalendar_link:
self.blist = cache.get(self.trakt_episodes_list, 720, url, self.trakt_user, self.lang)
self.list = []
self.list = cache.get(self.trakt_episodes_list, 0, url, self.trakt_user, self.lang)
elif self.trakt_link in url and '/users/' in url:
self.list = cache.get(self.trakt_list, 0, url, self.trakt_user)
self.list = self.list[::-1]
elif self.trakt_link in url:
self.list = cache.get(self.trakt_list, 1, url, self.trakt_user)
elif self.tvmaze_link in url and url == self.added_link:
urls = [i['url'] for i in self.calendars(idx=False)][:5]
self.list = []
for url in urls:
self.list += cache.get(self.tvmaze_list, 720, url, True)
elif self.tvmaze_link in url:
self.list = cache.get(self.tvmaze_list, 1, url, False)
self.episodeDirectory(self.list)
return self.list
except:
pass
def widget(self):
if trakt.getTraktIndicatorsInfo() == True:
setting = control.setting('tv.widget.alt')
else:
setting = control.setting('tv.widget')
if setting == '2':
self.calendar(self.progress_link)
elif setting == '3':
self.calendar(self.mycalendar_link)
else:
self.calendar(self.added_link)
def calendars(self, idx=True):
m = control.lang(32060).encode('utf-8').split('|')
try: months = [(m[0], 'January'), (m[1], 'February'), (m[2], 'March'), (m[3], 'April'), (m[4], 'May'), (m[5], 'June'), (m[6], 'July'), (m[7], 'August'), (m[8], 'September'), (m[9], 'October'), (m[10], 'November'), (m[11], 'December')]
except: months = []
d = control.lang(32061).encode('utf-8').split('|')
try: days = [(d[0], 'Monday'), (d[1], 'Tuesday'), (d[2], 'Wednesday'), (d[3], 'Thursday'), (d[4], 'Friday'), (d[5], 'Saturday'), (d[6], 'Sunday')]
except: days = []
for i in range(0, 30):
try:
name = (self.datetime - datetime.timedelta(days = i))
name = (control.lang(32062) % (name.strftime('%A'), name.strftime('%d %B'))).encode('utf-8')
for m in months: name = name.replace(m[1], m[0])
for d in days: name = name.replace(d[1], d[0])
try: name = name.encode('utf-8')
except: pass
url = self.calendar_link % (self.datetime - datetime.timedelta(days = i)).strftime('%Y-%m-%d')
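                # e.g. (hypothetical date) 'http://api.tvmaze.com/schedule?date=2018-01-31'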
self.list.append({'name': name, 'url': url, 'image': 'calendar.png', 'action': 'calendar'})
except:
pass
if idx == True: self.addDirectory(self.list)
return self.list
def userlists(self):
try:
userlists = []
if trakt.getTraktCredentialsInfo() == False: raise Exception()
activity = trakt.getActivity()
except:
pass
try:
if trakt.getTraktCredentialsInfo() == False: raise Exception()
try:
if activity > cache.timeout(self.trakt_user_list, self.traktlists_link, self.trakt_user): raise Exception()
userlists += cache.get(self.trakt_user_list, 720, self.traktlists_link, self.trakt_user)
except:
userlists += cache.get(self.trakt_user_list, 0, self.traktlists_link, self.trakt_user)
except:
pass
try:
self.list = []
if trakt.getTraktCredentialsInfo() == False: raise Exception()
try:
if activity > cache.timeout(self.trakt_user_list, self.traktlikedlists_link, self.trakt_user): raise Exception()
userlists += cache.get(self.trakt_user_list, 720, self.traktlikedlists_link, self.trakt_user)
except:
userlists += cache.get(self.trakt_user_list, 0, self.traktlikedlists_link, self.trakt_user)
except:
pass
self.list = userlists
for i in range(0, len(self.list)): self.list[i].update({'image': 'userlists.png', 'action': 'calendar'})
self.addDirectory(self.list, queue=True)
return self.list
def trakt_list(self, url, user):
try:
for i in re.findall('date\[(\d+)\]', url):
url = url.replace('date[%s]' % i, (self.datetime - datetime.timedelta(days = int(i))).strftime('%Y-%m-%d'))
q = dict(urlparse.parse_qsl(urlparse.urlsplit(url).query))
q.update({'extended': 'full'})
q = (urllib.urlencode(q)).replace('%2C', ',')
u = url.replace('?' + urlparse.urlparse(url).query, '') + '?' + q
itemlist = []
items = trakt.getTraktAsJson(u)
except:
return
for item in items:
try:
title = item['episode']['title']
if title == None or title == '': raise Exception()
title = client.replaceHTMLCodes(title)
season = item['episode']['season']
season = re.sub('[^0-9]', '', '%01d' % int(season))
if season == '0': raise Exception()
episode = item['episode']['number']
episode = re.sub('[^0-9]', '', '%01d' % int(episode))
if episode == '0': raise Exception()
tvshowtitle = item['show']['title']
if tvshowtitle == None or tvshowtitle == '': raise Exception()
tvshowtitle = client.replaceHTMLCodes(tvshowtitle)
year = item['show']['year']
year = re.sub('[^0-9]', '', str(year))
imdb = item['show']['ids']['imdb']
if imdb == None or imdb == '': imdb = '0'
else: imdb = 'tt' + re.sub('[^0-9]', '', str(imdb))
tvdb = item['show']['ids']['tvdb']
if tvdb == None or tvdb == '': raise Exception()
tvdb = re.sub('[^0-9]', '', str(tvdb))
premiered = item['episode']['first_aired']
try: premiered = re.compile('(\d{4}-\d{2}-\d{2})').findall(premiered)[0]
except: premiered = '0'
studio = item['show']['network']
if studio == None: studio = '0'
genre = item['show']['genres']
genre = [i.title() for i in genre]
if genre == []: genre = '0'
genre = ' / '.join(genre)
try: duration = str(item['show']['runtime'])
except: duration = '0'
if duration == None: duration = '0'
try: rating = str(item['episode']['rating'])
except: rating = '0'
if rating == None or rating == '0.0': rating = '0'
try: votes = str(item['show']['votes'])
except: votes = '0'
try: votes = str(format(int(votes),',d'))
except: pass
if votes == None: votes = '0'
mpaa = item['show']['certification']
if mpaa == None: mpaa = '0'
plot = item['episode']['overview']
if plot == None or plot == '': plot = item['show']['overview']
if plot == None or plot == '': plot = '0'
plot = client.replaceHTMLCodes(plot)
try:
if self.lang == 'en': raise Exception()
item = trakt.getTVShowTranslation(imdb, lang=self.lang, season=season, episode=episode, full=True)
title = item.get('title') or title
plot = item.get('overview') or plot
tvshowtitle = trakt.getTVShowTranslation(imdb, lang=self.lang) or tvshowtitle
except:
pass
itemlist.append({'title': title, 'season': season, 'episode': episode, 'tvshowtitle': tvshowtitle, 'year': year, 'premiered': premiered, 'status': 'Continuing', 'studio': studio, 'genre': genre, 'duration': duration, 'rating': rating, 'votes': votes, 'mpaa': mpaa, 'plot': plot, 'imdb': imdb, 'tvdb': tvdb, 'poster': '0', 'thumb': '0'})
except:
pass
itemlist = itemlist[::-1]
return itemlist
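# Watched-progress list: for every show with unwatched aired episodes, resolve the next episode's metadata and artwork from the TVDb zip bundle, one worker thread per show (capped at 100).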
def trakt_progress_list(self, url, user, lang):
try:
url += '?extended=full'
result = trakt.getTraktAsJson(url)
items = []
except:
return
sortorder = control.setting('prgr.sortorder')
for item in result:
try:
num_1 = 0
for i in range(0, len(item['seasons'])):
if item['seasons'][i]['number'] > 0: num_1 += len(item['seasons'][i]['episodes'])
num_2 = int(item['show']['aired_episodes'])
if num_1 >= num_2: raise Exception()
season = str(item['seasons'][-1]['number'])
episode = [x for x in item['seasons'][-1]['episodes'] if 'number' in x]
episode = sorted(episode, key=lambda x: x['number'])
episode = str(episode[-1]['number'])
tvshowtitle = item['show']['title']
if tvshowtitle == None or tvshowtitle == '': raise Exception()
tvshowtitle = client.replaceHTMLCodes(tvshowtitle)
year = item['show']['year']
year = re.sub('[^0-9]', '', str(year))
if int(year) > int(self.datetime.strftime('%Y')): raise Exception()
imdb = item['show']['ids']['imdb']
if imdb == None or imdb == '': imdb = '0'
tvdb = item['show']['ids']['tvdb']
if tvdb == None or tvdb == '': raise Exception()
tvdb = re.sub('[^0-9]', '', str(tvdb))
last_watched = item['last_watched_at']
if last_watched == None or last_watched == '': last_watched = '0'
items.append({'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'year': year, 'snum': season, 'enum': episode, '_last_watched': last_watched})
except:
pass
try:
result = trakt.getTraktAsJson(self.hiddenprogress_link)
result = [str(i['show']['ids']['tvdb']) for i in result]
items = [i for i in items if not i['tvdb'] in result]
except:
pass
def items_list(i):
try:
item = [x for x in self.blist if x['tvdb'] == i['tvdb'] and x['snum'] == i['snum'] and x['enum'] == i['enum']][0]
item['action'] = 'episodes'
self.list.append(item)
return
except:
pass
try:
url = self.tvdb_info_link % (i['tvdb'], lang)
data = urllib2.urlopen(url, timeout=10).read()
archive = zipfile.ZipFile(StringIO.StringIO(data))
result = archive.read('%s.xml' % lang)
artwork = archive.read('banners.xml')
archive.close()
result = result.split('<Episode>')
item = [x for x in result if '<EpisodeNumber>' in x]
item2 = result[0]
num = [x for x,y in enumerate(item) if re.compile('<SeasonNumber>(.+?)</SeasonNumber>').findall(y)[0] == str(i['snum']) and re.compile('<EpisodeNumber>(.+?)</EpisodeNumber>').findall(y)[0] == str(i['enum'])][-1]
item = [y for x,y in enumerate(item) if x > num][0]
premiered = client.parseDOM(item, 'FirstAired')[0]
if premiered == '' or '-00' in premiered: premiered = '0'
premiered = client.replaceHTMLCodes(premiered)
premiered = premiered.encode('utf-8')
try: status = client.parseDOM(item2, 'Status')[0]
except: status = ''
if status == '': status = 'Ended'
status = client.replaceHTMLCodes(status)
status = status.encode('utf-8')
unaired = ''
if status == 'Ended': pass
elif premiered == '0': raise Exception()
elif int(re.sub('[^0-9]', '', str(premiered))) > int(re.sub('[^0-9]', '', str(self.today_date))):
unaired = 'true'
if self.showunaired != 'true': raise Exception()
title = client.parseDOM(item, 'EpisodeName')[0]
if title == '': title = '0'
title = client.replaceHTMLCodes(title)
title = title.encode('utf-8')
season = client.parseDOM(item, 'SeasonNumber')[0]
season = '%01d' % int(season)
season = season.encode('utf-8')
episode = client.parseDOM(item, 'EpisodeNumber')[0]
episode = re.sub('[^0-9]', '', '%01d' % int(episode))
episode = episode.encode('utf-8')
tvshowtitle = i['tvshowtitle']
imdb, tvdb = i['imdb'], i['tvdb']
year = i['year']
try: year = year.encode('utf-8')
except: pass
try: poster = client.parseDOM(item2, 'poster')[0]
except: poster = ''
if not poster == '': poster = self.tvdb_image + poster
else: poster = '0'
poster = client.replaceHTMLCodes(poster)
poster = poster.encode('utf-8')
try: banner = client.parseDOM(item2, 'banner')[0]
except: banner = ''
if not banner == '': banner = self.tvdb_image + banner
else: banner = '0'
banner = client.replaceHTMLCodes(banner)
banner = banner.encode('utf-8')
try: fanart = client.parseDOM(item2, 'fanart')[0]
except: fanart = ''
if not fanart == '': fanart = self.tvdb_image + fanart
else: fanart = '0'
fanart = client.replaceHTMLCodes(fanart)
fanart = fanart.encode('utf-8')
try: thumb = client.parseDOM(item, 'filename')[0]
except: thumb = ''
if not thumb == '': thumb = self.tvdb_image + thumb
else: thumb = '0'
thumb = client.replaceHTMLCodes(thumb)
thumb = thumb.encode('utf-8')
if not poster == '0': pass
elif not fanart == '0': poster = fanart
elif not banner == '0': poster = banner
if not banner == '0': pass
elif not fanart == '0': banner = fanart
elif not poster == '0': banner = poster
if not thumb == '0': pass
elif not fanart == '0': thumb = fanart.replace(self.tvdb_image, self.tvdb_poster)
elif not poster == '0': thumb = poster
try: studio = client.parseDOM(item2, 'Network')[0]
except: studio = ''
if studio == '': studio = '0'
studio = client.replaceHTMLCodes(studio)
studio = studio.encode('utf-8')
try: genre = client.parseDOM(item2, 'Genre')[0]
except: genre = ''
genre = [x for x in genre.split('|') if not x == '']
genre = ' / '.join(genre)
if genre == '': genre = '0'
genre = client.replaceHTMLCodes(genre)
genre = genre.encode('utf-8')
try: duration = client.parseDOM(item2, 'Runtime')[0]
except: duration = ''
if duration == '': duration = '0'
duration = client.replaceHTMLCodes(duration)
duration = duration.encode('utf-8')
try: rating = client.parseDOM(item, 'Rating')[0]
except: rating = ''
if rating == '': rating = '0'
rating = client.replaceHTMLCodes(rating)
rating = rating.encode('utf-8')
try: votes = client.parseDOM(item2, 'RatingCount')[0]
except: votes = '0'
if votes == '': votes = '0'
votes = client.replaceHTMLCodes(votes)
votes = votes.encode('utf-8')
try: mpaa = client.parseDOM(item2, 'ContentRating')[0]
except: mpaa = ''
if mpaa == '': mpaa = '0'
mpaa = client.replaceHTMLCodes(mpaa)
mpaa = mpaa.encode('utf-8')
try: director = client.parseDOM(item, 'Director')[0]
except: director = ''
director = [x for x in director.split('|') if not x == '']
director = ' / '.join(director)
if director == '': director = '0'
director = client.replaceHTMLCodes(director)
director = director.encode('utf-8')
try: writer = client.parseDOM(item, 'Writer')[0]
except: writer = ''
writer = [x for x in writer.split('|') if not x == '']
writer = ' / '.join(writer)
if writer == '': writer = '0'
writer = client.replaceHTMLCodes(writer)
writer = writer.encode('utf-8')
try: cast = client.parseDOM(item2, 'Actors')[0]
except: cast = ''
cast = [x for x in cast.split('|') if not x == '']
try: cast = [(x.encode('utf-8'), '') for x in cast]
except: cast = []
try: plot = client.parseDOM(item, 'Overview')[0]
except: plot = ''
if plot == '':
try: plot = client.parseDOM(item2, 'Overview')[0]
except: plot = ''
if plot == '': plot = '0'
plot = client.replaceHTMLCodes(plot)
plot = plot.encode('utf-8')
self.list.append({'title': title, 'season': season, 'episode': episode, 'tvshowtitle': tvshowtitle, 'year': year, 'premiered': premiered, 'status': status, 'studio': studio, 'genre': genre, 'duration': duration, 'rating': rating, 'votes': votes, 'mpaa': mpaa, 'director': director, 'writer': writer, 'cast': cast, 'plot': plot, 'imdb': imdb, 'tvdb': tvdb, 'poster': poster, 'banner': banner, 'fanart': fanart, 'thumb': thumb, 'snum': i['snum'], 'enum': i['enum'], 'action': 'episodes', 'unaired': unaired, '_last_watched': i['_last_watched'], '_sort_key': max(i['_last_watched'],premiered)})<|fim▁hole|> except:
pass
items = items[:100]
threads = []
for i in items: threads.append(workers.Thread(items_list, i))
[i.start() for i in threads]
[i.join() for i in threads]
try:
if sortorder == '0':
self.list = sorted(self.list, key=lambda k: k['premiered'], reverse=True)
else:
self.list = sorted(self.list, key=lambda k: k['_sort_key'], reverse=True)
except: pass
return self.list
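# Same feed as trakt_list, but each episode is enriched with TVDb artwork and extended metadata in worker threads (capped at 100 items).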
def trakt_episodes_list(self, url, user, lang):
items = self.trakt_list(url, user)
if items == None: items = [] # trakt_list returns None on failure; avoid slicing None below
def items_list(i):
try:
item = [x for x in self.blist if x['tvdb'] == i['tvdb'] and x['season'] == i['season'] and x['episode'] == i['episode']][0]
if item['poster'] == '0': raise Exception()
self.list.append(item)
return
except:
pass
try:
url = self.tvdb_info_link % (i['tvdb'], lang)
data = urllib2.urlopen(url, timeout=10).read()
archive = zipfile.ZipFile(StringIO.StringIO(data))
result = archive.read('%s.xml' % lang)
artwork = archive.read('banners.xml')
archive.close()
result = result.split('<Episode>')
item = [(re.findall('<SeasonNumber>%01d</SeasonNumber>' % int(i['season']), x), re.findall('<EpisodeNumber>%01d</EpisodeNumber>' % int(i['episode']), x), x) for x in result]
item = [x[2] for x in item if len(x[0]) > 0 and len(x[1]) > 0][0]
item2 = result[0]
premiered = client.parseDOM(item, 'FirstAired')[0]
if premiered == '' or '-00' in premiered: premiered = '0'
premiered = client.replaceHTMLCodes(premiered)
premiered = premiered.encode('utf-8')
try: status = client.parseDOM(item2, 'Status')[0]
except: status = ''
if status == '': status = 'Ended'
status = client.replaceHTMLCodes(status)
status = status.encode('utf-8')
title = client.parseDOM(item, 'EpisodeName')[0]
if title == '': title = '0'
title = client.replaceHTMLCodes(title)
title = title.encode('utf-8')
season = client.parseDOM(item, 'SeasonNumber')[0]
season = '%01d' % int(season)
season = season.encode('utf-8')
episode = client.parseDOM(item, 'EpisodeNumber')[0]
episode = re.sub('[^0-9]', '', '%01d' % int(episode))
episode = episode.encode('utf-8')
tvshowtitle = i['tvshowtitle']
imdb, tvdb = i['imdb'], i['tvdb']
year = i['year']
try: year = year.encode('utf-8')
except: pass
try: poster = client.parseDOM(item2, 'poster')[0]
except: poster = ''
if not poster == '': poster = self.tvdb_image + poster
else: poster = '0'
poster = client.replaceHTMLCodes(poster)
poster = poster.encode('utf-8')
try: banner = client.parseDOM(item2, 'banner')[0]
except: banner = ''
if not banner == '': banner = self.tvdb_image + banner
else: banner = '0'
banner = client.replaceHTMLCodes(banner)
banner = banner.encode('utf-8')
try: fanart = client.parseDOM(item2, 'fanart')[0]
except: fanart = ''
if not fanart == '': fanart = self.tvdb_image + fanart
else: fanart = '0'
fanart = client.replaceHTMLCodes(fanart)
fanart = fanart.encode('utf-8')
try: thumb = client.parseDOM(item, 'filename')[0]
except: thumb = ''
if not thumb == '': thumb = self.tvdb_image + thumb
else: thumb = '0'
thumb = client.replaceHTMLCodes(thumb)
thumb = thumb.encode('utf-8')
if not poster == '0': pass
elif not fanart == '0': poster = fanart
elif not banner == '0': poster = banner
if not banner == '0': pass
elif not fanart == '0': banner = fanart
elif not poster == '0': banner = poster
if not thumb == '0': pass
elif not fanart == '0': thumb = fanart.replace(self.tvdb_image, self.tvdb_poster)
elif not poster == '0': thumb = poster
try: studio = client.parseDOM(item2, 'Network')[0]
except: studio = ''
if studio == '': studio = '0'
studio = client.replaceHTMLCodes(studio)
studio = studio.encode('utf-8')
try: genre = client.parseDOM(item2, 'Genre')[0]
except: genre = ''
genre = [x for x in genre.split('|') if not x == '']
genre = ' / '.join(genre)
if genre == '': genre = '0'
genre = client.replaceHTMLCodes(genre)
genre = genre.encode('utf-8')
try: duration = client.parseDOM(item2, 'Runtime')[0]
except: duration = ''
if duration == '': duration = '0'
duration = client.replaceHTMLCodes(duration)
duration = duration.encode('utf-8')
try: rating = client.parseDOM(item, 'Rating')[0]
except: rating = ''
if rating == '': rating = '0'
rating = client.replaceHTMLCodes(rating)
rating = rating.encode('utf-8')
try: votes = client.parseDOM(item2, 'RatingCount')[0]
except: votes = '0'
if votes == '': votes = '0'
votes = client.replaceHTMLCodes(votes)
votes = votes.encode('utf-8')
try: mpaa = client.parseDOM(item2, 'ContentRating')[0]
except: mpaa = ''
if mpaa == '': mpaa = '0'
mpaa = client.replaceHTMLCodes(mpaa)
mpaa = mpaa.encode('utf-8')
try: director = client.parseDOM(item, 'Director')[0]
except: director = ''
director = [x for x in director.split('|') if not x == '']
director = ' / '.join(director)
if director == '': director = '0'
director = client.replaceHTMLCodes(director)
director = director.encode('utf-8')
try: writer = client.parseDOM(item, 'Writer')[0]
except: writer = ''
writer = [x for x in writer.split('|') if not x == '']
writer = ' / '.join(writer)
if writer == '': writer = '0'
writer = client.replaceHTMLCodes(writer)
writer = writer.encode('utf-8')
try: cast = client.parseDOM(item2, 'Actors')[0]
except: cast = ''
cast = [x for x in cast.split('|') if not x == '']
try: cast = [(x.encode('utf-8'), '') for x in cast]
except: cast = []
try: plot = client.parseDOM(item, 'Overview')[0]
except: plot = ''
if plot == '':
try: plot = client.parseDOM(item2, 'Overview')[0]
except: plot = ''
if plot == '': plot = '0'
plot = client.replaceHTMLCodes(plot)
plot = plot.encode('utf-8')
self.list.append({'title': title, 'season': season, 'episode': episode, 'tvshowtitle': tvshowtitle, 'year': year, 'premiered': premiered, 'status': status, 'studio': studio, 'genre': genre, 'duration': duration, 'rating': rating, 'votes': votes, 'mpaa': mpaa, 'director': director, 'writer': writer, 'cast': cast, 'plot': plot, 'imdb': imdb, 'tvdb': tvdb, 'poster': poster, 'banner': banner, 'fanart': fanart, 'thumb': thumb})
except:
pass
items = items[:100]
threads = []
for i in items: threads.append(workers.Thread(items_list, i))
[i.start() for i in threads]
[i.join() for i in threads]
return self.list
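# Convert a Trakt lists response into named menu entries, sorted by title.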
def trakt_user_list(self, url, user):
try:
items = trakt.getTraktAsJson(url)
except:
return # bail out like trakt_list does; otherwise 'items' is undefined below
for item in items:
try:
try: name = item['list']['name']
except: name = item['name']
name = client.replaceHTMLCodes(name)
try: url = (trakt.slug(item['list']['user']['username']), item['list']['ids']['slug'])
except: url = ('me', item['ids']['slug'])
url = self.traktlist_link % url
url = url.encode('utf-8')
self.list.append({'name': name, 'url': url, 'context': url})
except:
pass
self.list = sorted(self.list, key=lambda k: utils.title_key(k['name']))
return self.list
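# Parse a TVMaze schedule feed; with limit=True only English, scripted shows are kept.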
def tvmaze_list(self, url, limit):
try:
result = client.request(url)
itemlist = []
items = json.loads(result)
except:
return
for item in items:
try:
if not 'english' in item['show']['language'].lower(): raise Exception()
if limit == True and not 'scripted' in item['show']['type'].lower(): raise Exception()
title = item['name']
if title == None or title == '': raise Exception()
title = client.replaceHTMLCodes(title)
title = title.encode('utf-8')
season = item['season']
season = re.sub('[^0-9]', '', '%01d' % int(season))
if season == '0': raise Exception()
season = season.encode('utf-8')
episode = item['number']
episode = re.sub('[^0-9]', '', '%01d' % int(episode))
if episode == '0': raise Exception()
episode = episode.encode('utf-8')
tvshowtitle = item['show']['name']
if tvshowtitle == None or tvshowtitle == '': raise Exception()
tvshowtitle = client.replaceHTMLCodes(tvshowtitle)
tvshowtitle = tvshowtitle.encode('utf-8')
year = item['show']['premiered']
year = re.findall('(\d{4})', year)[0]
year = year.encode('utf-8')
imdb = item['show']['externals']['imdb']
if imdb == None or imdb == '': imdb = '0'
else: imdb = 'tt' + re.sub('[^0-9]', '', str(imdb))
imdb = imdb.encode('utf-8')
tvdb = item['show']['externals']['thetvdb']
if tvdb == None or tvdb == '': raise Exception()
tvdb = re.sub('[^0-9]', '', str(tvdb))
tvdb = tvdb.encode('utf-8')
poster = '0'
try: poster = item['show']['image']['original']
except: poster = '0'
if poster == None or poster == '': poster = '0'
poster = poster.encode('utf-8')
try: thumb1 = item['show']['image']['original']
except: thumb1 = '0'
try: thumb2 = item['image']['original']
except: thumb2 = '0'
if thumb2 == None or thumb2 == '0': thumb = thumb1
else: thumb = thumb2
if thumb == None or thumb == '': thumb = '0'
thumb = thumb.encode('utf-8')
premiered = item['airdate']
try: premiered = re.findall('(\d{4}-\d{2}-\d{2})', premiered)[0]
except: premiered = '0'
premiered = premiered.encode('utf-8')
try: studio = item['show']['network']['name']
except: studio = '0'
if studio == None: studio = '0'
studio = studio.encode('utf-8')
try: genre = item['show']['genres']
except: genre = '0'
genre = [i.title() for i in genre]
if genre == []: genre = '0'
genre = ' / '.join(genre)
genre = genre.encode('utf-8')
try: duration = item['show']['runtime']
except: duration = '0'
if duration == None: duration = '0'
duration = str(duration)
duration = duration.encode('utf-8')
try: rating = item['show']['rating']['average']
except: rating = '0'
if rating == None or rating == '0.0': rating = '0'
rating = str(rating)
rating = rating.encode('utf-8')
try: plot = item['show']['summary']
except: plot = '0'
if plot == None: plot = '0'
plot = re.sub('<.+?>|</.+?>|\n', '', plot)
plot = client.replaceHTMLCodes(plot)
plot = plot.encode('utf-8')
itemlist.append({'title': title, 'season': season, 'episode': episode, 'tvshowtitle': tvshowtitle, 'year': year, 'premiered': premiered, 'status': 'Continuing', 'studio': studio, 'genre': genre, 'duration': duration, 'rating': rating, 'plot': plot, 'imdb': imdb, 'tvdb': tvdb, 'poster': poster, 'thumb': thumb})
except:
pass
itemlist = itemlist[::-1]
return itemlist
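# Render episode dicts as Kodi list items: labels, artwork fallbacks, context menus, watched overlays and playable/folder flags.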
def episodeDirectory(self, items):
if items == None or len(items) == 0: control.idle() ; sys.exit()
sysaddon = sys.argv[0]
syshandle = int(sys.argv[1])
addonPoster, addonBanner = control.addonPoster(), control.addonBanner()
addonFanart, settingFanart = control.addonFanart(), control.setting('fanart')
traktCredentials = trakt.getTraktCredentialsInfo()
try: isOld = False ; control.item().getArt('type')
except: isOld = True
isPlayable = 'true' if not 'plugin' in control.infoLabel('Container.PluginName') else 'false'
indicators = playcount.getTVShowIndicators(refresh=True)
try: multi = [i['tvshowtitle'] for i in items]
except: multi = []
multi = len([x for y,x in enumerate(multi) if x not in multi[:y]])
multi = True if multi > 1 else False
try: sysaction = items[0]['action']
except: sysaction = ''
isFolder = False if not sysaction == 'episodes' else True
playbackMenu = control.lang(32063).encode('utf-8') if control.setting('hosts.mode') == '2' else control.lang(32064).encode('utf-8')
watchedMenu = control.lang(32068).encode('utf-8') if trakt.getTraktIndicatorsInfo() == True else control.lang(32066).encode('utf-8')
unwatchedMenu = control.lang(32069).encode('utf-8') if trakt.getTraktIndicatorsInfo() == True else control.lang(32067).encode('utf-8')
queueMenu = control.lang(32065).encode('utf-8')
traktManagerMenu = control.lang(32070).encode('utf-8')
tvshowBrowserMenu = control.lang(32071).encode('utf-8')
addToLibrary = control.lang(32551).encode('utf-8')
for i in items:
try:
if not 'label' in i: i['label'] = i['title']
if i['label'] == '0':
label = '%sx%02d . %s %s' % (i['season'], int(i['episode']), 'Episode', i['episode'])
else:
label = '%sx%02d . %s' % (i['season'], int(i['episode']), i['label'])
if multi == True:
label = '%s - %s' % (i['tvshowtitle'], label)
try:
if i['unaired'] == 'true':
label = '[COLOR darkred][I]%s[/I][/COLOR]' % label
except:
pass
imdb, tvdb, year, season, episode = i['imdb'], i['tvdb'], i['year'], i['season'], i['episode']
systitle = urllib.quote_plus(i['title'])
systvshowtitle = urllib.quote_plus(i['tvshowtitle'])
syspremiered = urllib.quote_plus(i['premiered'])
meta = dict((k,v) for k, v in i.iteritems() if not v == '0')
meta.update({'mediatype': 'episode'})
meta.update({'trailer': '%s?action=trailer&name=%s' % (sysaddon, systvshowtitle)})
if not 'duration' in i: meta.update({'duration': '60'})
elif i['duration'] == '0': meta.update({'duration': '60'})
try: meta.update({'duration': str(int(meta['duration']) * 60)})
except: pass
try: meta.update({'genre': cleangenre.lang(meta['genre'], self.lang)})
except: pass
try: meta.update({'year': re.findall('(\d{4})', i['premiered'])[0]})
except: pass
try: meta.update({'title': i['label']})
except: pass
sysmeta = urllib.quote_plus(json.dumps(meta))
url = '%s?action=play&title=%s&year=%s&imdb=%s&tvdb=%s&season=%s&episode=%s&tvshowtitle=%s&premiered=%s&meta=%s&t=%s' % (sysaddon, systitle, year, imdb, tvdb, season, episode, systvshowtitle, syspremiered, sysmeta, self.systime)
sysurl = urllib.quote_plus(url)
path = '%s?action=play&title=%s&year=%s&imdb=%s&tvdb=%s&season=%s&episode=%s&tvshowtitle=%s&premiered=%s' % (sysaddon, systitle, year, imdb, tvdb, season, episode, systvshowtitle, syspremiered)
if isFolder == True:
url = '%s?action=episodes&tvshowtitle=%s&year=%s&imdb=%s&tvdb=%s&season=%s&episode=%s' % (sysaddon, systvshowtitle, year, imdb, tvdb, season, episode)
cm = []
cm.append((queueMenu, 'RunPlugin(%s?action=queueItem)' % sysaddon))
if multi == True:
cm.append((tvshowBrowserMenu, 'Container.Update(%s?action=seasons&tvshowtitle=%s&year=%s&imdb=%s&tvdb=%s,return)' % (sysaddon, systvshowtitle, year, imdb, tvdb)))
try:
overlay = int(playcount.getEpisodeOverlay(indicators, imdb, tvdb, season, episode))
if overlay == 7:
cm.append((unwatchedMenu, 'RunPlugin(%s?action=episodePlaycount&imdb=%s&tvdb=%s&season=%s&episode=%s&query=6)' % (sysaddon, imdb, tvdb, season, episode)))
meta.update({'playcount': 1, 'overlay': 7})
else:
cm.append((watchedMenu, 'RunPlugin(%s?action=episodePlaycount&imdb=%s&tvdb=%s&season=%s&episode=%s&query=7)' % (sysaddon, imdb, tvdb, season, episode)))
meta.update({'playcount': 0, 'overlay': 6})
except:
pass
if traktCredentials == True:
cm.append((traktManagerMenu, 'RunPlugin(%s?action=traktManager&name=%s&tvdb=%s&content=tvshow)' % (sysaddon, systvshowtitle, tvdb)))
if isFolder == False:
cm.append((playbackMenu, 'RunPlugin(%s?action=alterSources&url=%s&meta=%s)' % (sysaddon, sysurl, sysmeta)))
if isOld == True:
cm.append((control.lang2(19033).encode('utf-8'), 'Action(Info)'))
cm.append((addToLibrary, 'RunPlugin(%s?action=tvshowToLibrary&tvshowtitle=%s&year=%s&imdb=%s&tvdb=%s)' % (sysaddon, systvshowtitle, year, imdb, tvdb)))
item = control.item(label=label)
art = {}
if 'poster' in i and not i['poster'] == '0':
art.update({'poster': i['poster'], 'tvshow.poster': i['poster'], 'season.poster': i['poster']})
else:
art.update({'poster': addonPoster})
if 'thumb' in i and not i['thumb'] == '0':
art.update({'icon': i['thumb'], 'thumb': i['thumb']})
elif 'fanart' in i and not i['fanart'] == '0':
art.update({'icon': i['fanart'], 'thumb': i['fanart']})
elif 'poster' in i and not i['poster'] == '0':
art.update({'icon': i['poster'], 'thumb': i['poster']})
else:
art.update({'icon': addonFanart, 'thumb': addonFanart})
if 'banner' in i and not i['banner'] == '0':
art.update({'banner': i['banner']})
elif 'fanart' in i and not i['fanart'] == '0':
art.update({'banner': i['fanart']})
else:
art.update({'banner': addonBanner})
if settingFanart == 'true' and 'fanart' in i and not i['fanart'] == '0':
item.setProperty('Fanart_Image', i['fanart'])
elif not addonFanart == None:
item.setProperty('Fanart_Image', addonFanart)
item.setArt(art)
item.addContextMenuItems(cm)
item.setProperty('IsPlayable', isPlayable)
item.setInfo(type='Video', infoLabels = meta)
video_streaminfo = {'codec': 'h264'}
item.addStreamInfo('video', video_streaminfo)
control.addItem(handle=syshandle, url=url, listitem=item, isFolder=isFolder)
except:
pass
control.content(syshandle, 'episodes')
control.directory(syshandle, cacheToDisc=True)
views.setView('episodes', {'skin.estuary': 55, 'skin.confluence': 504})
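# Render plain navigation entries (name/url/image dicts) as Kodi folder items.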
def addDirectory(self, items, queue=False):
if items == None or len(items) == 0: control.idle() ; sys.exit()
sysaddon = sys.argv[0]
syshandle = int(sys.argv[1])
addonFanart, addonThumb, artPath = control.addonFanart(), control.addonThumb(), control.artPath()
queueMenu = control.lang(32065).encode('utf-8')
for i in items:
try:
name = i['name']
if i['image'].startswith('http'): thumb = i['image']
elif not artPath == None: thumb = os.path.join(artPath, i['image'])
else: thumb = addonThumb
url = '%s?action=%s' % (sysaddon, i['action'])
try: url += '&url=%s' % urllib.quote_plus(i['url'])
except: pass
cm = []
if queue == True:
cm.append((queueMenu, 'RunPlugin(%s?action=queueItem)' % sysaddon))
item = control.item(label=name)
item.setArt({'icon': thumb, 'thumb': thumb})
if not addonFanart == None: item.setProperty('Fanart_Image', addonFanart)
item.addContextMenuItems(cm)
control.addItem(handle=syshandle, url=url, listitem=item, isFolder=True)
except:
pass
control.content(syshandle, 'addons')
control.directory(syshandle, cacheToDisc=True)<|fim▁end|>
| |
<|file_name|>test_iconpacker.py<|end_file_name|><|fim▁begin|>import os
from iconpacker import IconList
test_icon = "/media/hda7/Graphics/png/Classic_Truck/128.png"
icon_theme = IconList()
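# Load the icon mapping into the treeview and point every entry at the sample icon.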
def initialization():
treestore = icon_theme.setup_treeview('data/legacy-icon-mapping.xml')
if treestore != None:
for i in icon_theme.icon_list:
icon_theme.set_item(i, test_icon)<|fim▁hole|> return True
return False
def test_read_theme_file(fname):
treestore = icon_theme.setup_treeview('data/legacy-icon-mapping.xml')
if treestore != None:
return icon_theme.read_theme_file(fname)
return False
"""
def path_generation():
test_path = os.tempnam()
os.mkdir(test_path)
for i in range(0,101):
os.system("mkdir %s/test_dir_%s>/dev/null" % (test_path, i))
if get_unique_path (test_path, "test_dir"):
return False
else:
os.rmdir(test_path+"/test_dir_50")
fpath, name = icon_theme.get_unique_path (test_path, "test_dir")
if name=="test_dir_50":
os.system("rm -rf %s" % test_path)
return True
else:
return False
"""
def build_icon_paths ():
icon_theme.build_paths()
return True
def test_icon_copying():
if icon_theme.copy_icon_files():
d = os.stat(test_icon)
expected_size = d[6] * len(icon_theme.icon_list) - (4096 * len(icon_theme.categories_list))
print expected_size
os.system ('du -c -b ' + icon_theme.tmp_ico_path)
return True
return False
def test_resizing():
if icon_theme.resize_icons():
return True
else:
return False
def test_make_theme():
if icon_theme.make_icon_theme('TestTheme'):
return True
else:
return False
def test_write_icon_theme():
icon_theme.write_theme_file()
return True # the bare call returned None, so the harness below always printed FAILED
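# Drive the whole pipeline in order; each stage prints PASSED/FAILED and a failure aborts the run.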
def start_testing():
print "Testing treeview initialization ..."
if initialization():
print "treeview initialization [PASSED]"
else:
print "treeview initialization [FAILED]"
return False
"""
print "Testing Unique path generator ..."
if path_generation():
print "Unique Path generation [PASSED]"
else:
print "Unique Path generation [FAILED]"
return False
"""
print "Testing directory generation ..."
if build_icon_paths():
print "building icon paths [PASSED]"
else:
print "building icon paths [FAILED]"
return False
print "Testing Icon copying ..."
if test_icon_copying():
print "Icon copying [PASSED]"
else:
print "Icon copying [FAILED]"
return False
print "Testing icon resizing ..."
if test_resizing():
print "Resizing [PASSED]"
else:
print "Resizing [FAILED]"
return False
print "Testing Theme creation ..."
if test_make_theme():
print "Theme creation [PASSES]"
else:
print "Theme Creation [FAILED]"
return False
print "Testing index file creation ..."
if test_write_icon_theme():
print "Index file creation [PASSED]"
else:
print "Index file creation [FAILED]"
return False
#os.system("rm -rf %s/*" % icon_theme.build_path)
def test_writing_themes():
initialization()
icon_theme.theme_name = "TestTheme"
if icon_theme.write_icon_theme():
print "Theme Written"
else:
print "Failed"
if __name__=="__main__":
start_testing()<|fim▁end|>
|