prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
---|---|
<|file_name|>ExpenseReport.java<|end_file_name|><|fim▁begin|>package cz.muni.fi.pv243.et.model;
import org.hibernate.annotations.LazyCollection;
import org.hibernate.annotations.LazyCollectionOption;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.IndexedEmbedded;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
import javax.persistence.*;
import javax.validation.constraints.NotNull;
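/**
 * JPA entity representing a single expense report, indexed for Hibernate Search
 * full-text queries via the {@code @Indexed} annotation below.
 */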
@Entity
@Indexed
public class ExpenseReport implements Serializable {
@Id
@GeneratedValue
private Long id;
@NotNull
private String name;
private String description;
private Boolean selected;
public ExpenseReport() {
}
public ExpenseReport(String name, String description, Person submitter, Person verifier, ReportStatus status) {
this.name = name;
this.description = description;
this.submitter = submitter;
this.verifier = verifier;
this.status = status;
}
@NotNull
@ManyToOne
@IndexedEmbedded
private Person submitter;
@ManyToOne(cascade= CascadeType.MERGE)
@IndexedEmbedded
private Person verifier;
@OneToMany(mappedBy = "report", cascade = CascadeType.MERGE)
@LazyCollection(LazyCollectionOption.FALSE)
private List<Payment> payments;
@OneToMany(mappedBy = "report", cascade = CascadeType.MERGE)
@LazyCollection(LazyCollectionOption.FALSE)
private List<MoneyTransfer> moneyTransfers;
@Temporal(TemporalType.TIMESTAMP)
private Date lastSubmittedDate;
@Temporal(TemporalType.TIMESTAMP)
private Date approvedDate;
@Temporal(TemporalType.TIMESTAMP)
private Date lastChangeDate;
@Field
@Enumerated(EnumType.ORDINAL)
@IndexedEmbedded
private ReportStatus status;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Person getSubmitter() {
return submitter;
}
public void setSubmitter(Person submitter) {
this.submitter = submitter;
}
public Person getVerifier() {
return verifier;
}
public void setVerifier(Person verifier) {
this.verifier = verifier;
}
public List<Payment> getPayments() {
return payments;
}
public void setPayments(List<Payment> payments) {
this.payments = payments;
}
public List<MoneyTransfer> getMoneyTransfers() {
return moneyTransfers;
}
public void setMoneyTransfers(List<MoneyTransfer> moneyTransfers) {
this.moneyTransfers = moneyTransfers;
}
public Date getLastSubmittedDate() {
return lastSubmittedDate;
}
public void setLastSubmittedDate(Date lastSubmittedDate) {
this.lastSubmittedDate = lastSubmittedDate;
}
public Date getApprovedDate() {
return approvedDate;
}
public void setApprovedDate(Date approvedDate) {
this.approvedDate = approvedDate;
}
public ReportStatus getStatus() {
return status;
}
public void setStatus(ReportStatus status) {
this.status = status;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Date getLastChangeDate() {
return lastChangeDate;
}
public void setLastChangeDate(Date lastChangeDate) {
this.lastChangeDate = lastChangeDate;
}
public Boolean getSelected() {
return selected;
}
public void setSelected(Boolean selected) {
this.selected = selected;
}
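// Identity is defined solely by the database id; equals is declared final so
// subclasses (e.g. persistence proxies) cannot weaken the contract.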
@Override
public final boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof ExpenseReport)) return false;
ExpenseReport report = (ExpenseReport) o;
if (getId() != null ? !getId().equals(report.getId()) : report.getId() != null) return false;
return true;
}
@Override<|fim▁hole|> @Override
public String toString() {
return "ExpenseReport{" +
"id=" + id +
", name='" + name + '\'' +
", description='" + description + '\'' +
", submitter=" + submitter +
", verifier=" + verifier +
", payments=" + payments +
", moneyTransfers=" + moneyTransfers +
", lastSubmittedDate=" + lastSubmittedDate +
", approvedDate=" + approvedDate +
", lastChangeDate=" + lastChangeDate +
", status=" + status +
", selected=" + selected +
'}';
}
}<|fim▁end|> | public int hashCode() {
return getId() != null ? getId().hashCode() : 0;
}
|
<|file_name|>windows.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//
// ignore-lexer-test FIXME #15883
//! Windows file path handling
use self::PathPrefix::*;
use ascii::AsciiExt;
use char::CharExt;
use clone::Clone;
use cmp::{Ordering, Eq, Ord, PartialEq, PartialOrd};
use fmt;
use hash;
use old_io::Writer;
use iter::{AdditiveIterator, Extend};
use iter::{Iterator, IteratorExt, Map, repeat};
use mem;
use option::Option::{self, Some, None};
use result::Result::{self, Ok, Err};
use slice::{SliceExt, SliceConcatExt};
use str::{SplitTerminator, FromStr, StrExt};
use string::{String, ToString};
use vec::Vec;
use super::{contains_nul, BytesContainer, GenericPath, GenericPathUnsafe};
/// Iterator that yields successive components of a Path as &str
///
/// Each component is yielded as Option<&str> for compatibility with PosixPath, but
/// every component in WindowsPath is guaranteed to be Some.
pub type StrComponents<'a> =
Map<SplitTerminator<'a, char>, fn(&'a str) -> Option<&'a str>>;
/// Iterator that yields successive components of a Path as &[u8]
pub type Components<'a> =
Map<StrComponents<'a>, fn(Option<&str>) -> &[u8]>;
/// Represents a Windows path
// Notes for Windows path impl:
// The MAX_PATH is 260, but 253 is the practical limit due to some API bugs
// See http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247.aspx for good information
// about windows paths.
// That same page puts a bunch of restrictions on allowed characters in a path.
// `\foo.txt` means "relative to current drive", but will not be considered to be absolute here
// as `∃P | P.join("\foo.txt") != "\foo.txt"`.
// `C:` is interesting, that means "the current directory on drive C".
// Long absolute paths need to have \\?\ prefix (or, for UNC, \\?\UNC\). I think that can be
// ignored for now, though, and only added in a hypothetical .to_pwstr() function.
// However, if a path is parsed that has \\?\, this needs to be preserved as it disables the
// processing of "." and ".." components and / as a separator.
// Experimentally, \\?\foo is not the same thing as \foo.
// Also, \\foo is not valid either (certainly not equivalent to \foo).
// Similarly, C:\\Users is not equivalent to C:\Users, although C:\Users\\foo is equivalent
// to C:\Users\foo. In fact the command prompt treats C:\\foo\bar as UNC path. But it might be
// best to just ignore that and normalize it to C:\foo\bar.
//
// Based on all this, I think the right approach is to do the following:
// * Require valid utf-8 paths. Windows API may use WCHARs, but we don't, and utf-8 is convertible
// to UTF-16 anyway (though does Windows use UTF-16 or UCS-2? Not sure).
// * Parse the prefixes \\?\UNC\, \\?\, and \\.\ explicitly.
// * If \\?\UNC\, treat following two path components as server\share. Don't error for missing
// server\share.
// * If \\?\, parse disk from following component, if present. Don't error for missing disk.
// * If \\.\, treat rest of path as just regular components. I don't know how . and .. are handled
// here, they probably aren't, but I'm not going to worry about that.
// * Else if starts with \\, treat following two components as server\share. Don't error for missing
// server\share.
// * Otherwise, attempt to parse drive from start of path.
//
// The only error condition imposed here is valid utf-8. All other invalid paths are simply
// preserved by the data structure; let the Windows API error out on them.
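// A rough sketch of how the rules above map onto prefixes (paths made up for
// illustration; see parse_prefix and the tests below):
//   \\?\UNC\server\share\a  => VerbatimUNCPrefix(6, 5)
//   \\?\pictures\foo        => VerbatimPrefix(8)
//   \\.\PIPE\name           => DeviceNSPrefix(4)
//   \\server\share\a        => UNCPrefix(6, 5)
//   C:\users\a              => DiskPrefix (absolute)
//   C:users\a               => DiskPrefix (cwd-relative)
//   \users\a                => no prefix (volume-relative)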
#[derive(Clone)]
pub struct Path {
repr: String, // assumed to never be empty
prefix: Option<PathPrefix>,
sepidx: Option<uint> // index of the final separator in the non-prefix portion of repr
}
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Debug for Path {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self.display(), f)
}
}
impl PartialEq for Path {
#[inline]
fn eq(&self, other: &Path) -> bool {
self.repr == other.repr
}
}
impl Eq for Path {}
impl PartialOrd for Path {
fn partial_cmp(&self, other: &Path) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for Path {
fn cmp(&self, other: &Path) -> Ordering {
self.repr.cmp(&other.repr)
}
}
impl FromStr for Path {
type Err = ParsePathError;
fn from_str(s: &str) -> Result<Path, ParsePathError> {
match Path::new_opt(s) {
Some(p) => Ok(p),
None => Err(ParsePathError),
}
}
}
/// Value indicating that a path could not be parsed from a string.
#[derive(Debug, Clone, PartialEq, Copy)]
pub struct ParsePathError;
impl<S: hash::Writer + hash::Hasher> hash::Hash<S> for Path {
#[cfg(not(test))]
#[inline]
fn hash(&self, state: &mut S) {
self.repr.hash(state)
}
#[cfg(test)]
#[inline]
fn hash(&self, _: &mut S) {
// No-op because the `hash` implementation will be wrong.
}
}
impl BytesContainer for Path {
#[inline]
fn container_as_bytes<'a>(&'a self) -> &'a [u8] {
self.as_vec()
}
#[inline]
fn container_as_str<'a>(&'a self) -> Option<&'a str> {
self.as_str()
}
#[inline]
fn is_str(_: Option<&Path>) -> bool { true }
}
impl GenericPathUnsafe for Path {
/// See `GenericPathUnsafe::from_vec_unchecked`.
///
/// # Panics
///
/// Panics if not valid UTF-8.
#[inline]
unsafe fn new_unchecked<T: BytesContainer>(path: T) -> Path {
let (prefix, path) = Path::normalize_(path.container_as_str().unwrap());
assert!(!path.is_empty());
let mut ret = Path{ repr: path, prefix: prefix, sepidx: None };
ret.update_sepidx();
ret
}
/// See `GenericPathUnsafe::set_filename_unchecked`.
///
/// # Panics
///
/// Panics if not valid UTF-8.
unsafe fn set_filename_unchecked<T: BytesContainer>(&mut self, filename: T) {
let filename = filename.container_as_str().unwrap();
match self.sepidx_or_prefix_len() {
None if ".." == self.repr => {
let mut s = String::with_capacity(3 + filename.len());
s.push_str("..");
s.push(SEP);
s.push_str(filename);
self.update_normalized(&s[]);
}
None => {
self.update_normalized(filename);
}
Some((_,idxa,end)) if &self.repr[idxa..end] == ".." => {
let mut s = String::with_capacity(end + 1 + filename.len());
s.push_str(&self.repr[..end]);
s.push(SEP);
s.push_str(filename);
self.update_normalized(&s[]);
}
Some((idxb,idxa,_)) if self.prefix == Some(DiskPrefix) && idxa == self.prefix_len() => {
let mut s = String::with_capacity(idxb + filename.len());
s.push_str(&self.repr[..idxb]);
s.push_str(filename);
self.update_normalized(&s[]);
}
Some((idxb,_,_)) => {
let mut s = String::with_capacity(idxb + 1 + filename.len());
s.push_str(&self.repr[..idxb]);
s.push(SEP);
s.push_str(filename);
self.update_normalized(&s[]);
}
}
}
/// See `GenericPathUnsafe::push_unchecked`.
///
/// Concatenating two Windows Paths is rather complicated.
/// For the most part, it will behave as expected, except in the case of
/// pushing a volume-relative path, e.g. `C:foo.txt`. Because we have no
/// concept of per-volume cwds like Windows does, we can't behave exactly
/// like Windows will. Instead, if the receiver is an absolute path on
/// the same volume as the new path, it will be treated as the cwd that
/// the new path is relative to. Otherwise, the new path will be treated
/// as if it were absolute and will replace the receiver outright.
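    /// A rough illustration with made-up paths (mirroring `test_push_path` below):
    ///
    /// ```
    /// let mut p = Path::new(r"C:\users\alice");
    /// p.push("C:report.txt"); // cwd-relative, same volume: resolved against the receiver
    /// assert_eq!(p.as_str(), Some(r"C:\users\alice\report.txt"));
    ///
    /// let mut p = Path::new(r"C:\users\alice");
    /// p.push("D:report.txt"); // different volume: replaces the receiver outright
    /// assert_eq!(p.as_str(), Some("D:report.txt"));
    /// ```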
unsafe fn push_unchecked<T: BytesContainer>(&mut self, path: T) {
let path = path.container_as_str().unwrap();
fn is_vol_abs(path: &str, prefix: Option<PathPrefix>) -> bool {
// assume prefix is Some(DiskPrefix)
let rest = &path[prefix_len(prefix)..];
!rest.is_empty() && rest.as_bytes()[0].is_ascii() && is_sep(rest.as_bytes()[0] as char)
}
fn shares_volume(me: &Path, path: &str) -> bool {
// path is assumed to have a prefix of Some(DiskPrefix)
let repr = &me.repr[];
match me.prefix {
Some(DiskPrefix) => {
repr.as_bytes()[0] == path.as_bytes()[0].to_ascii_uppercase()
}
Some(VerbatimDiskPrefix) => {
repr.as_bytes()[4] == path.as_bytes()[0].to_ascii_uppercase()
}
_ => false
}
}
fn is_sep_(prefix: Option<PathPrefix>, u: u8) -> bool {
if prefix_is_verbatim(prefix) { is_sep_verbatim(u as char) }
else { is_sep(u as char) }
}
fn replace_path(me: &mut Path, path: &str, prefix: Option<PathPrefix>) {
let newpath = Path::normalize__(path, prefix);
me.repr = match newpath {
Some(p) => p,
None => String::from_str(path)
};
me.prefix = prefix;
me.update_sepidx();
}
fn append_path(me: &mut Path, path: &str) {
// appends a path that has no prefix
// if me is verbatim, we need to pre-normalize the new path
let path_ = if is_verbatim(me) { Path::normalize__(path, None) }
else { None };
let pathlen = path_.as_ref().map_or(path.len(), |p| p.len());
let mut s = String::with_capacity(me.repr.len() + 1 + pathlen);
s.push_str(&me.repr[]);
let plen = me.prefix_len();
// if me is "C:" we don't want to add a path separator
match me.prefix {
Some(DiskPrefix) if me.repr.len() == plen => (),
_ if !(me.repr.len() > plen && me.repr.as_bytes()[me.repr.len()-1] == SEP_BYTE) => {
s.push(SEP);
}
_ => ()
}
match path_ {
None => s.push_str(path),
Some(p) => s.push_str(&p[]),
};
me.update_normalized(&s[])
}
if !path.is_empty() {
let prefix = parse_prefix(path);
match prefix {
Some(DiskPrefix) if !is_vol_abs(path, prefix) && shares_volume(self, path) => {
// cwd-relative path, self is on the same volume
append_path(self, &path[prefix_len(prefix)..]);
}
Some(_) => {
// absolute path, or cwd-relative and self is not same volume
replace_path(self, path, prefix);
}
None if !path.is_empty() && is_sep_(self.prefix, path.as_bytes()[0]) => {
// volume-relative path
if self.prefix.is_some() {
// truncate self down to the prefix, then append
let n = self.prefix_len();
self.repr.truncate(n);
append_path(self, path);
} else {
// we have no prefix, so nothing to be relative to
replace_path(self, path, prefix);
}
}
None => {
// relative path
append_path(self, path);
}
}
}
}
}
impl GenericPath for Path {
#[inline]
fn new_opt<T: BytesContainer>(path: T) -> Option<Path> {
match path.container_as_str() {
None => None,
Some(ref s) => {
if contains_nul(s) {
None
} else {
Some(unsafe { GenericPathUnsafe::new_unchecked(*s) })
}
}
}
}
/// See `GenericPath::as_str` for info.
/// Always returns a `Some` value.
#[inline]
fn as_str<'a>(&'a self) -> Option<&'a str> {
Some(&self.repr[])
}
#[inline]
fn as_vec<'a>(&'a self) -> &'a [u8] {
self.repr.as_bytes()
}
#[inline]
fn into_vec(self) -> Vec<u8> {
self.repr.into_bytes()
}
#[inline]
fn dirname<'a>(&'a self) -> &'a [u8] {
self.dirname_str().unwrap().as_bytes()
}
/// See `GenericPath::dirname_str` for info.
/// Always returns a `Some` value.
fn dirname_str<'a>(&'a self) -> Option<&'a str> {
Some(match self.sepidx_or_prefix_len() {
None if ".." == self.repr => &self.repr[],
None => ".",
Some((_,idxa,end)) if &self.repr[idxa..end] == ".." => {
&self.repr[]
}
Some((idxb,_,end)) if &self.repr[idxb..end] == "\\" => {
&self.repr[]
}
Some((0,idxa,_)) => &self.repr[..idxa],
Some((idxb,idxa,_)) => {
match self.prefix {
Some(DiskPrefix) | Some(VerbatimDiskPrefix) if idxb == self.prefix_len() => {
&self.repr[..idxa]
}
_ => &self.repr[..idxb]
}
}
})
}
#[inline]
fn filename<'a>(&'a self) -> Option<&'a [u8]> {
self.filename_str().map(|x| x.as_bytes())
}
/// See `GenericPath::filename_str` for info.
/// Always returns a `Some` value if `filename` returns a `Some` value.
fn filename_str<'a>(&'a self) -> Option<&'a str> {
let repr = &self.repr[];
match self.sepidx_or_prefix_len() {
None if "." == repr || ".." == repr => None,
None => Some(repr),
Some((_,idxa,end)) if &repr[idxa..end] == ".." => None,
Some((_,idxa,end)) if idxa == end => None,
Some((_,idxa,end)) => Some(&repr[idxa..end])
}
}
/// See `GenericPath::filestem_str` for info.
/// Always returns a `Some` value if `filestem` returns a `Some` value.
#[inline]
fn filestem_str<'a>(&'a self) -> Option<&'a str> {
// filestem() returns a byte vector that's guaranteed valid UTF-8
self.filestem().map(|t| unsafe { mem::transmute(t) })
}
#[inline]
fn extension_str<'a>(&'a self) -> Option<&'a str> {
// extension() returns a byte vector that's guaranteed valid UTF-8
self.extension().map(|t| unsafe { mem::transmute(t) })
}
fn dir_path(&self) -> Path {
unsafe { GenericPathUnsafe::new_unchecked(self.dirname_str().unwrap()) }
}
#[inline]
fn pop(&mut self) -> bool {
match self.sepidx_or_prefix_len() {
None if "." == self.repr => false,
None => {
self.repr = String::from_str(".");
self.sepidx = None;
true
}
Some((idxb,idxa,end)) if idxb == idxa && idxb == end => false,
Some((idxb,_,end)) if &self.repr[idxb..end] == "\\" => false,
Some((idxb,idxa,_)) => {
let trunc = match self.prefix {
Some(DiskPrefix) | Some(VerbatimDiskPrefix) | None => {
let plen = self.prefix_len();
if idxb == plen { idxa } else { idxb }
}
_ => idxb
};
self.repr.truncate(trunc);
self.update_sepidx();
true
}
}
}
fn root_path(&self) -> Option<Path> {
if self.prefix.is_some() {
Some(Path::new(match self.prefix {
Some(DiskPrefix) if self.is_absolute() => {
&self.repr[..self.prefix_len()+1]
}
Some(VerbatimDiskPrefix) => {
&self.repr[..self.prefix_len()+1]
}
_ => &self.repr[..self.prefix_len()]
}))
} else if is_vol_relative(self) {
Some(Path::new(&self.repr[..1]))
} else {
None
}
}
/// See `GenericPath::is_absolute` for info.
///
/// A Windows Path is considered absolute only if it has a non-volume prefix,
/// or if it has a volume prefix and the path starts with '\'.
/// A path of `\foo` is not considered absolute because it's actually
/// relative to the "current volume". A separate method `Path::is_vol_relative`
/// is provided to indicate this case. Similarly a path of `C:foo` is not
/// considered absolute because it's relative to the cwd on volume C:. A
/// separate method `Path::is_cwd_relative` is provided to indicate this case.
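    /// A few illustrative cases (paths made up):
    ///
    /// ```
    /// assert!(Path::new(r"C:\foo").is_absolute());
    /// assert!(Path::new(r"\\server\share").is_absolute());
    /// assert!(!Path::new(r"\foo").is_absolute());  // volume-relative
    /// assert!(!Path::new("C:foo").is_absolute());  // cwd-relative
    /// ```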
#[inline]
fn is_absolute(&self) -> bool {
match self.prefix {
Some(DiskPrefix) => {
let rest = &self.repr[self.prefix_len()..];
rest.len() > 0 && rest.as_bytes()[0] == SEP_BYTE
}
Some(_) => true,
None => false
}
}
#[inline]
fn is_relative(&self) -> bool {
self.prefix.is_none() && !is_vol_relative(self)
}
fn is_ancestor_of(&self, other: &Path) -> bool {
if !self.equiv_prefix(other) {
false
} else if self.is_absolute() != other.is_absolute() ||
is_vol_relative(self) != is_vol_relative(other) {
false
} else {
let mut ita = self.str_components().map(|x|x.unwrap());
let mut itb = other.str_components().map(|x|x.unwrap());
if "." == self.repr {
return itb.next() != Some("..");
}
loop {
match (ita.next(), itb.next()) {
(None, _) => break,
(Some(a), Some(b)) if a == b => { continue },
(Some(a), _) if a == ".." => {
// if ita contains only .. components, it's an ancestor
return ita.all(|x| x == "..");
}
_ => return false
}
}
true
}
}
fn path_relative_from(&self, base: &Path) -> Option<Path> {
fn comp_requires_verbatim(s: &str) -> bool {
s == "." || s == ".." || s.contains_char(SEP2)
}
if !self.equiv_prefix(base) {
// prefixes differ
if self.is_absolute() {
Some(self.clone())
} else if self.prefix == Some(DiskPrefix) && base.prefix == Some(DiskPrefix) {
// both drives, drive letters must differ or they'd be equiv
Some(self.clone())
} else {
None
}
} else if self.is_absolute() != base.is_absolute() {
if self.is_absolute() {
Some(self.clone())
} else {
None
}
} else if is_vol_relative(self) != is_vol_relative(base) {
if is_vol_relative(self) {
Some(self.clone())
} else {
None
}
} else {
let mut ita = self.str_components().map(|x|x.unwrap());
let mut itb = base.str_components().map(|x|x.unwrap());
let mut comps = vec![];
let a_verb = is_verbatim(self);
let b_verb = is_verbatim(base);
loop {
match (ita.next(), itb.next()) {
(None, None) => break,
(Some(a), None) if a_verb && comp_requires_verbatim(a) => {
return Some(self.clone())
}
(Some(a), None) => {
comps.push(a);
if !a_verb {
comps.extend(ita.by_ref());
break;
}
}
(None, _) => comps.push(".."),
(Some(a), Some(b)) if comps.is_empty() && a == b => (),
(Some(a), Some(b)) if !b_verb && b == "." => {
if a_verb && comp_requires_verbatim(a) {
return Some(self.clone())
} else { comps.push(a) }
}
(Some(_), Some(b)) if !b_verb && b == ".." => return None,
(Some(a), Some(_)) if a_verb && comp_requires_verbatim(a) => {
return Some(self.clone())
}
(Some(a), Some(_)) => {
comps.push("..");
for _ in itb.by_ref() {
comps.push("..");
}
comps.push(a);
if !a_verb {
comps.extend(ita.by_ref());
break;
}
}
}
}
Some(Path::new(comps.connect("\\")))
}
}
fn ends_with_path(&self, child: &Path) -> bool {
if !child.is_relative() { return false; }
let mut selfit = self.str_components().rev();
let mut childit = child.str_components().rev();
loop {
match (selfit.next(), childit.next()) {
(Some(a), Some(b)) => if a != b { return false; },
(Some(_), None) => break,
(None, Some(_)) => return false,
(None, None) => break
}
}
true
}
}
impl Path {
/// Returns a new `Path` from a `BytesContainer`.
///
/// # Panics
///
/// Panics if the vector contains a `NUL`, or if it contains invalid UTF-8.
///
/// # Example
///
/// ```
/// println!("{}", Path::new(r"C:\some\path").display());
/// ```
#[inline]
pub fn new<T: BytesContainer>(path: T) -> Path {
GenericPath::new(path)
}
/// Returns a new `Some(Path)` from a `BytesContainer`.
///
/// Returns `None` if the vector contains a `NUL`, or if it contains invalid UTF-8.
///
/// # Example
///
/// ```
/// let path = Path::new_opt(r"C:\some\path");
///
/// match path {
/// Some(path) => println!("{}", path.display()),
/// None => println!("There was a problem with your path."),
/// }
/// ```
#[inline]
pub fn new_opt<T: BytesContainer>(path: T) -> Option<Path> {
GenericPath::new_opt(path)
}
/// Returns an iterator that yields each component of the path in turn as a Option<&str>.
/// Every component is guaranteed to be Some.
/// Does not yield the path prefix (including server/share components in UNC paths).
/// Does not distinguish between volume-relative and relative paths, e.g.
/// \a\b\c and a\b\c.
/// Does not distinguish between absolute and cwd-relative paths, e.g.
/// C:\foo and C:foo.
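    /// An illustrative example (path made up):
    ///
    /// ```
    /// let p = Path::new(r"C:\users\alice\notes.txt");
    /// let comps: Vec<&str> = p.str_components().map(|c| c.unwrap()).collect();
    /// assert_eq!(comps, vec!["users", "alice", "notes.txt"]);
    /// ```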
pub fn str_components<'a>(&'a self) -> StrComponents<'a> {
let repr = &self.repr[];
let s = match self.prefix {
Some(_) => {
let plen = self.prefix_len();
if repr.len() > plen && repr.as_bytes()[plen] == SEP_BYTE {
&repr[plen+1..]
} else { &repr[plen..] }
}
None if repr.as_bytes()[0] == SEP_BYTE => &repr[1..],
None => repr
};
let some: fn(&'a str) -> Option<&'a str> = Some; // coerce to fn ptr
let ret = s.split_terminator(SEP).map(some);
ret
}
/// Returns an iterator that yields each component of the path in turn as a &[u8].
/// See str_components() for details.
pub fn components<'a>(&'a self) -> Components<'a> {
fn convert<'a>(x: Option<&'a str>) -> &'a [u8] {
#![inline]
x.unwrap().as_bytes()
}
let convert: for<'b> fn(Option<&'b str>) -> &'b [u8] = convert; // coerce to fn ptr
self.str_components().map(convert)
}
fn equiv_prefix(&self, other: &Path) -> bool {
let s_repr = &self.repr[];
let o_repr = &other.repr[];
match (self.prefix, other.prefix) {
(Some(DiskPrefix), Some(VerbatimDiskPrefix)) => {
self.is_absolute() &&
s_repr.as_bytes()[0].to_ascii_lowercase() ==
o_repr.as_bytes()[4].to_ascii_lowercase()
}
(Some(VerbatimDiskPrefix), Some(DiskPrefix)) => {
other.is_absolute() &&
s_repr.as_bytes()[4].to_ascii_lowercase() ==
o_repr.as_bytes()[0].to_ascii_lowercase()
}
(Some(VerbatimDiskPrefix), Some(VerbatimDiskPrefix)) => {
s_repr.as_bytes()[4].to_ascii_lowercase() ==
o_repr.as_bytes()[4].to_ascii_lowercase()
}
(Some(UNCPrefix(_,_)), Some(VerbatimUNCPrefix(_,_))) => {
&s_repr[2..self.prefix_len()] == &o_repr[8..other.prefix_len()]
}
(Some(VerbatimUNCPrefix(_,_)), Some(UNCPrefix(_,_))) => {
&s_repr[8..self.prefix_len()] == &o_repr[2..other.prefix_len()]
}
(None, None) => true,
(a, b) if a == b => {
&s_repr[..self.prefix_len()] == &o_repr[..other.prefix_len()]
}
_ => false
}
}
fn normalize_(s: &str) -> (Option<PathPrefix>, String) {
// make borrowck happy
let (prefix, val) = {
let prefix = parse_prefix(s);
let path = Path::normalize__(s, prefix);
(prefix, path)
};
(prefix, match val {
None => s.to_string(),
Some(val) => val
})
}
fn normalize__(s: &str, prefix: Option<PathPrefix>) -> Option<String> {
if prefix_is_verbatim(prefix) {
// don't do any normalization
match prefix {
Some(VerbatimUNCPrefix(x, 0)) if s.len() == 8 + x => {
// the server component has no trailing '\'
let mut s = String::from_str(s);
s.push(SEP);
Some(s)
}
_ => None
}
} else {
let (is_abs, comps) = normalize_helper(s, prefix);
let mut comps = comps;
match (comps.is_some(),prefix) {
(false, Some(DiskPrefix)) => {
if s.as_bytes()[0] >= b'a' && s.as_bytes()[0] <= b'z' {
comps = Some(vec![]);
}
}
(false, Some(VerbatimDiskPrefix)) => {
if s.as_bytes()[4] >= b'a' && s.as_bytes()[4] <= b'z' {
comps = Some(vec![]);
}
}
_ => ()
}
match comps {
None => None,
Some(comps) => {
if prefix.is_some() && comps.is_empty() {
match prefix.unwrap() {
DiskPrefix => {
let len = prefix_len(prefix) + is_abs as uint;
let mut s = String::from_str(&s[..len]);
unsafe {
let v = s.as_mut_vec();
v[0] = (*v)[0].to_ascii_uppercase();
}
if is_abs {
// normalize C:/ to C:\
unsafe {
s.as_mut_vec()[2] = SEP_BYTE;
}
}
Some(s)
}
VerbatimDiskPrefix => {
let len = prefix_len(prefix) + is_abs as uint;
let mut s = String::from_str(&s[..len]);
unsafe {
let v = s.as_mut_vec();
v[4] = (*v)[4].to_ascii_uppercase();
}
Some(s)
}
_ => {
let plen = prefix_len(prefix);
if s.len() > plen {
Some(String::from_str(&s[..plen]))
} else { None }
}
}
} else if is_abs && comps.is_empty() {
Some(repeat(SEP).take(1).collect())
} else {
let prefix_ = &s[..prefix_len(prefix)];
let n = prefix_.len() +
if is_abs { comps.len() } else { comps.len() - 1} +
comps.iter().map(|v| v.len()).sum();
let mut s = String::with_capacity(n);
match prefix {
Some(DiskPrefix) => {
s.push(prefix_.as_bytes()[0].to_ascii_uppercase() as char);
s.push(':');
}
Some(VerbatimDiskPrefix) => {
s.push_str(&prefix_[..4]);
s.push(prefix_.as_bytes()[4].to_ascii_uppercase() as char);
s.push_str(&prefix_[5..]);
}
Some(UNCPrefix(a,b)) => {
s.push_str("\\\\");
s.push_str(&prefix_[2..a+2]);
s.push(SEP);
s.push_str(&prefix_[3+a..3+a+b]);
}
Some(_) => s.push_str(prefix_),
None => ()
}
let mut it = comps.into_iter();
if !is_abs {
match it.next() {
None => (),
Some(comp) => s.push_str(comp)
}
}
for comp in it {
s.push(SEP);
s.push_str(comp);
}
Some(s)
}
}
}
}
}
fn update_sepidx(&mut self) {
let s = if self.has_nonsemantic_trailing_slash() {
&self.repr[..self.repr.len()-1]
} else { &self.repr[] };
let sep_test: fn(char) -> bool = if !prefix_is_verbatim(self.prefix) {
is_sep
} else {
is_sep_verbatim
};
let idx = s.rfind(sep_test);
let prefixlen = self.prefix_len();
self.sepidx = idx.and_then(|x| if x < prefixlen { None } else { Some(x) });
}
fn prefix_len(&self) -> uint {
prefix_len(self.prefix)
}
// Returns a tuple (before, after, end) where before is the index of the separator
// and after is the index just after the separator.
// end is the length of the string, normally, or the index of the final character if it is
// a non-semantic trailing separator in a verbatim string.
// If the prefix is considered the separator, before and after are the same.
fn sepidx_or_prefix_len(&self) -> Option<(uint,uint,uint)> {
match self.sepidx {
None => match self.prefix_len() { 0 => None, x => Some((x,x,self.repr.len())) },
Some(x) => {
if self.has_nonsemantic_trailing_slash() {
Some((x,x+1,self.repr.len()-1))
} else { Some((x,x+1,self.repr.len())) }
}
}
}
fn has_nonsemantic_trailing_slash(&self) -> bool {
is_verbatim(self) && self.repr.len() > self.prefix_len()+1 &&
self.repr.as_bytes()[self.repr.len()-1] == SEP_BYTE
}
fn update_normalized(&mut self, s: &str) {
let (prefix, path) = Path::normalize_(s);
self.repr = path;
self.prefix = prefix;
self.update_sepidx();
}
}
/// Returns whether the path is considered "volume-relative", which means a path
/// that looks like "\foo". Paths of this form are relative to the current volume,
/// but absolute within that volume.
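/// For example (illustrative):
///
/// ```
/// assert!(is_vol_relative(&Path::new(r"\foo")));
/// assert!(!is_vol_relative(&Path::new(r"C:\foo")));
/// ```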
#[inline]
pub fn is_vol_relative(path: &Path) -> bool {
path.prefix.is_none() && is_sep_byte(&path.repr.as_bytes()[0])
}
/// Returns whether the path is considered "cwd-relative", which means a path
/// with a volume prefix that is not absolute. This look like "C:foo.txt". Paths
/// of this form are relative to the cwd on the given volume.
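/// For example (illustrative):
///
/// ```
/// assert!(is_cwd_relative(&Path::new("C:foo.txt")));
/// assert!(!is_cwd_relative(&Path::new(r"C:\foo.txt")));
/// ```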
#[inline]
pub fn is_cwd_relative(path: &Path) -> bool {
path.prefix == Some(DiskPrefix) && !path.is_absolute()
}
/// Returns the PathPrefix for this Path
#[inline]
pub fn prefix(path: &Path) -> Option<PathPrefix> {
path.prefix
}
/// Returns whether the Path's prefix is a verbatim prefix, i.e. `\\?\`
#[inline]
pub fn is_verbatim(path: &Path) -> bool {
prefix_is_verbatim(path.prefix)
}
/// Returns the non-verbatim equivalent of the input path, if possible.
/// If the input path is a device namespace path, None is returned.
/// If the input path is not verbatim, it is returned as-is.
/// If the input path is verbatim, but the same path can be expressed as
/// non-verbatim, the non-verbatim version is returned.
/// Otherwise, None is returned.
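/// For example (illustrative paths):
///
/// ```
/// assert_eq!(make_non_verbatim(&Path::new(r"\\?\C:\foo")), Some(Path::new(r"C:\foo")));
/// assert_eq!(make_non_verbatim(&Path::new(r"\\.\COM1")), None); // device namespace path
/// ```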
pub fn make_non_verbatim(path: &Path) -> Option<Path> {
let repr = &path.repr[];
let new_path = match path.prefix {
Some(VerbatimPrefix(_)) | Some(DeviceNSPrefix(_)) => return None,
Some(UNCPrefix(_,_)) | Some(DiskPrefix) | None => return Some(path.clone()),
Some(VerbatimDiskPrefix) => {
// \\?\D:\
Path::new(&repr[4..])
}
Some(VerbatimUNCPrefix(_,_)) => {
// \\?\UNC\server\share
Path::new(format!(r"\{}", &repr[7..]))
}
};
if new_path.prefix.is_none() {
// \\?\UNC\server is a VerbatimUNCPrefix
// but \\server is nothing
return None;
}
// now ensure normalization didn't change anything
if &repr[path.prefix_len()..] == &new_path.repr[new_path.prefix_len()..] {
Some(new_path)
} else {
None
}
}
/// The standard path separator character
pub const SEP: char = '\\';
/// The standard path separator byte
pub const SEP_BYTE: u8 = SEP as u8;
/// The alternative path separator character
pub const SEP2: char = '/';
/// The alternative path separator character
pub const SEP2_BYTE: u8 = SEP2 as u8;
/// Returns whether the given char is a path separator.
/// Allows both the primary separator '\' and the alternative separator '/'.
#[inline]
pub fn is_sep(c: char) -> bool {
c == SEP || c == SEP2
}
/// Returns whether the given char is a path separator.
/// Only allows the primary separator '\'; use is_sep to allow '/'.
#[inline]
pub fn is_sep_verbatim(c: char) -> bool {
c == SEP
}
/// Returns whether the given byte is a path separator.
/// Allows both the primary separator '\' and the alternative separator '/'.
#[inline]
pub fn is_sep_byte(u: &u8) -> bool {
*u == SEP_BYTE || *u == SEP2_BYTE
}
/// Returns whether the given byte is a path separator.
/// Only allows the primary separator '\'; use is_sep_byte to allow '/'.
#[inline]
pub fn is_sep_byte_verbatim(u: &u8) -> bool {
*u == SEP_BYTE
}
/// Prefix types for Path
#[derive(Copy, PartialEq, Clone, Debug)]
pub enum PathPrefix {
/// Prefix `\\?\`, uint is the length of the following component
VerbatimPrefix(uint),
/// Prefix `\\?\UNC\`, uints are the lengths of the UNC components
VerbatimUNCPrefix(uint, uint),
/// Prefix `\\?\C:\` (for any alphabetic character)
VerbatimDiskPrefix,
/// Prefix `\\.\`, uint is the length of the following component
DeviceNSPrefix(uint),
/// UNC prefix `\\server\share`, uints are the lengths of the server/share
UNCPrefix(uint, uint),
/// Prefix `C:` for any alphabetic character
DiskPrefix
}
fn parse_prefix<'a>(mut path: &'a str) -> Option<PathPrefix> {
if path.starts_with("\\\\") {
// \\
path = &path[2..];
if path.starts_with("?\\") {
// \\?\
path = &path[2..];
if path.starts_with("UNC\\") {
// \\?\UNC\server\share
path = &path[4..];
let (idx_a, idx_b) = match parse_two_comps(path, is_sep_verbatim) {
Some(x) => x,
None => (path.len(), 0)
};
return Some(VerbatimUNCPrefix(idx_a, idx_b));
} else {
// \\?\path
let idx = path.find('\\');
if idx == Some(2) && path.as_bytes()[1] == b':' {
let c = path.as_bytes()[0];
if c.is_ascii() && (c as char).is_alphabetic() {
// \\?\C:\ path
return Some(VerbatimDiskPrefix);
}
}
let idx = idx.unwrap_or(path.len());
return Some(VerbatimPrefix(idx));
}
} else if path.starts_with(".\\") {
// \\.\path
path = &path[2..];
let idx = path.find('\\').unwrap_or(path.len());
return Some(DeviceNSPrefix(idx));
}
match parse_two_comps(path, is_sep) {
Some((idx_a, idx_b)) if idx_a > 0 && idx_b > 0 => {
// \\server\share
return Some(UNCPrefix(idx_a, idx_b));
}
_ => ()
}
} else if path.len() > 1 && path.as_bytes()[1] == b':' {
// C:
let c = path.as_bytes()[0];
if c.is_ascii() && (c as char).is_alphabetic() {
return Some(DiskPrefix);
}
}
return None;
fn parse_two_comps(mut path: &str, f: fn(char) -> bool) -> Option<(uint, uint)> {
let idx_a = match path.find(f) {
None => return None,
Some(x) => x
};
path = &path[idx_a+1..];
let idx_b = path.find(f).unwrap_or(path.len());
Some((idx_a, idx_b))
}
}
// None result means the string didn't need normalizing
fn normalize_helper<'a>(s: &'a str, prefix: Option<PathPrefix>) -> (bool, Option<Vec<&'a str>>) {
let f: fn(char) -> bool = if !prefix_is_verbatim(prefix) {
is_sep
} else {
is_sep_verbatim
};
let is_abs = s.len() > prefix_len(prefix) && f(s.char_at(prefix_len(prefix)));
let s_ = &s[prefix_len(prefix)..];
let s_ = if is_abs { &s_[1..] } else { s_ };
if is_abs && s_.is_empty() {
return (is_abs, match prefix {
Some(DiskPrefix) | None => (if is_sep_verbatim(s.char_at(prefix_len(prefix))) { None }
else { Some(vec![]) }),
Some(_) => Some(vec![]), // need to trim the trailing separator
});
}
let mut comps: Vec<&'a str> = vec![];
let mut n_up = 0u;
let mut changed = false;
for comp in s_.split(f) {
if comp.is_empty() { changed = true }
else if comp == "." { changed = true }
else if comp == ".." {
let has_abs_prefix = match prefix {
Some(DiskPrefix) => false,
Some(_) => true,
None => false
};
if (is_abs || has_abs_prefix) && comps.is_empty() { changed = true }
else if comps.len() == n_up { comps.push(".."); n_up += 1 }
else { comps.pop().unwrap(); changed = true }
} else { comps.push(comp) }
}
if !changed && !prefix_is_verbatim(prefix) {
changed = s.find(is_sep).is_some();
}
if changed {
if comps.is_empty() && !is_abs && prefix.is_none() {
if s == "." {
return (is_abs, None);
}
comps.push(".");
}
(is_abs, Some(comps))
} else {
(is_abs, None)
}
}<|fim▁hole|>
fn prefix_is_verbatim(p: Option<PathPrefix>) -> bool {
match p {
Some(VerbatimPrefix(_)) | Some(VerbatimUNCPrefix(_,_)) | Some(VerbatimDiskPrefix) => true,
Some(DeviceNSPrefix(_)) => true, // not really sure, but I think so
_ => false
}
}
fn prefix_len(p: Option<PathPrefix>) -> uint {
match p {
None => 0,
Some(VerbatimPrefix(x)) => 4 + x,
Some(VerbatimUNCPrefix(x,y)) => 8 + x + 1 + y,
Some(VerbatimDiskPrefix) => 6,
Some(UNCPrefix(x,y)) => 2 + x + 1 + y,
Some(DeviceNSPrefix(x)) => 4 + x,
Some(DiskPrefix) => 2
}
}
#[cfg(test)]
mod tests {
use super::PathPrefix::*;
use super::parse_prefix;
use super::*;
use clone::Clone;
use iter::IteratorExt;
use option::Option::{self, Some, None};
use old_path::GenericPath;
use slice::{AsSlice, SliceExt};
use str::Str;
use string::ToString;
use vec::Vec;
macro_rules! t {
(s: $path:expr, $exp:expr) => (
{
let path = $path;
assert_eq!(path.as_str(), Some($exp));
}
);
(v: $path:expr, $exp:expr) => (
{
let path = $path;
assert_eq!(path.as_vec(), $exp);
}
)
}
#[test]
fn test_parse_prefix() {
macro_rules! t {
($path:expr, $exp:expr) => (
{
let path = $path;
let exp = $exp;
let res = parse_prefix(path);
assert_eq!(res, exp);
}
)
}
t!("\\\\SERVER\\share\\foo", Some(UNCPrefix(6,5)));
t!("\\\\", None);
t!("\\\\SERVER", None);
t!("\\\\SERVER\\", None);
t!("\\\\SERVER\\\\", None);
t!("\\\\SERVER\\\\foo", None);
t!("\\\\SERVER\\share", Some(UNCPrefix(6,5)));
t!("\\\\SERVER/share/foo", Some(UNCPrefix(6,5)));
t!("\\\\SERVER\\share/foo", Some(UNCPrefix(6,5)));
t!("//SERVER/share/foo", None);
t!("\\\\\\a\\b\\c", None);
t!("\\\\?\\a\\b\\c", Some(VerbatimPrefix(1)));
t!("\\\\?\\a/b/c", Some(VerbatimPrefix(5)));
t!("//?/a/b/c", None);
t!("\\\\.\\a\\b", Some(DeviceNSPrefix(1)));
t!("\\\\.\\a/b", Some(DeviceNSPrefix(3)));
t!("//./a/b", None);
t!("\\\\?\\UNC\\server\\share\\foo", Some(VerbatimUNCPrefix(6,5)));
t!("\\\\?\\UNC\\\\share\\foo", Some(VerbatimUNCPrefix(0,5)));
t!("\\\\?\\UNC\\", Some(VerbatimUNCPrefix(0,0)));
t!("\\\\?\\UNC\\server/share/foo", Some(VerbatimUNCPrefix(16,0)));
t!("\\\\?\\UNC\\server", Some(VerbatimUNCPrefix(6,0)));
t!("\\\\?\\UNC\\server\\", Some(VerbatimUNCPrefix(6,0)));
t!("\\\\?\\UNC/server/share", Some(VerbatimPrefix(16)));
t!("\\\\?\\UNC", Some(VerbatimPrefix(3)));
t!("\\\\?\\C:\\a\\b.txt", Some(VerbatimDiskPrefix));
t!("\\\\?\\z:\\", Some(VerbatimDiskPrefix));
t!("\\\\?\\C:", Some(VerbatimPrefix(2)));
t!("\\\\?\\C:a.txt", Some(VerbatimPrefix(7)));
t!("\\\\?\\C:a\\b.txt", Some(VerbatimPrefix(3)));
t!("\\\\?\\C:/a", Some(VerbatimPrefix(4)));
t!("C:\\foo", Some(DiskPrefix));
t!("z:/foo", Some(DiskPrefix));
t!("d:", Some(DiskPrefix));
t!("ab:", None);
t!("ü:\\foo", None);
t!("3:\\foo", None);
t!(" :\\foo", None);
t!("::\\foo", None);
t!("\\\\?\\C:", Some(VerbatimPrefix(2)));
t!("\\\\?\\z:\\", Some(VerbatimDiskPrefix));
t!("\\\\?\\ab:\\", Some(VerbatimPrefix(3)));
t!("\\\\?\\C:\\a", Some(VerbatimDiskPrefix));
t!("\\\\?\\C:/a", Some(VerbatimPrefix(4)));
t!("\\\\?\\C:\\a/b", Some(VerbatimDiskPrefix));
}
#[test]
fn test_paths() {
let empty: &[u8] = &[];
t!(v: Path::new(empty), b".");
t!(v: Path::new(b"\\"), b"\\");
t!(v: Path::new(b"a\\b\\c"), b"a\\b\\c");
t!(s: Path::new(""), ".");
t!(s: Path::new("\\"), "\\");
t!(s: Path::new("hi"), "hi");
t!(s: Path::new("hi\\"), "hi");
t!(s: Path::new("\\lib"), "\\lib");
t!(s: Path::new("\\lib\\"), "\\lib");
t!(s: Path::new("hi\\there"), "hi\\there");
t!(s: Path::new("hi\\there.txt"), "hi\\there.txt");
t!(s: Path::new("/"), "\\");
t!(s: Path::new("hi/"), "hi");
t!(s: Path::new("/lib"), "\\lib");
t!(s: Path::new("/lib/"), "\\lib");
t!(s: Path::new("hi/there"), "hi\\there");
t!(s: Path::new("hi\\there\\"), "hi\\there");
t!(s: Path::new("hi\\..\\there"), "there");
t!(s: Path::new("hi/../there"), "there");
t!(s: Path::new("..\\hi\\there"), "..\\hi\\there");
t!(s: Path::new("\\..\\hi\\there"), "\\hi\\there");
t!(s: Path::new("/../hi/there"), "\\hi\\there");
t!(s: Path::new("foo\\.."), ".");
t!(s: Path::new("\\foo\\.."), "\\");
t!(s: Path::new("\\foo\\..\\.."), "\\");
t!(s: Path::new("\\foo\\..\\..\\bar"), "\\bar");
t!(s: Path::new("\\.\\hi\\.\\there\\."), "\\hi\\there");
t!(s: Path::new("\\.\\hi\\.\\there\\.\\.."), "\\hi");
t!(s: Path::new("foo\\..\\.."), "..");
t!(s: Path::new("foo\\..\\..\\.."), "..\\..");
t!(s: Path::new("foo\\..\\..\\bar"), "..\\bar");
assert_eq!(Path::new(b"foo\\bar").into_vec(), b"foo\\bar");
assert_eq!(Path::new(b"\\foo\\..\\..\\bar").into_vec(), b"\\bar");
t!(s: Path::new("\\\\a"), "\\a");
t!(s: Path::new("\\\\a\\"), "\\a");
t!(s: Path::new("\\\\a\\b"), "\\\\a\\b");
t!(s: Path::new("\\\\a\\b\\"), "\\\\a\\b");
t!(s: Path::new("\\\\a\\b/"), "\\\\a\\b");
t!(s: Path::new("\\\\\\b"), "\\b");
t!(s: Path::new("\\\\a\\\\b"), "\\a\\b");
t!(s: Path::new("\\\\a\\b\\c"), "\\\\a\\b\\c");
t!(s: Path::new("\\\\server\\share/path"), "\\\\server\\share\\path");
t!(s: Path::new("\\\\server/share/path"), "\\\\server\\share\\path");
t!(s: Path::new("C:a\\b.txt"), "C:a\\b.txt");
t!(s: Path::new("C:a/b.txt"), "C:a\\b.txt");
t!(s: Path::new("z:\\a\\b.txt"), "Z:\\a\\b.txt");
t!(s: Path::new("z:/a/b.txt"), "Z:\\a\\b.txt");
t!(s: Path::new("ab:/a/b.txt"), "ab:\\a\\b.txt");
t!(s: Path::new("C:\\"), "C:\\");
t!(s: Path::new("C:"), "C:");
t!(s: Path::new("q:"), "Q:");
t!(s: Path::new("C:/"), "C:\\");
t!(s: Path::new("C:\\foo\\.."), "C:\\");
t!(s: Path::new("C:foo\\.."), "C:");
t!(s: Path::new("C:\\a\\"), "C:\\a");
t!(s: Path::new("C:\\a/"), "C:\\a");
t!(s: Path::new("C:\\a\\b\\"), "C:\\a\\b");
t!(s: Path::new("C:\\a\\b/"), "C:\\a\\b");
t!(s: Path::new("C:a\\"), "C:a");
t!(s: Path::new("C:a/"), "C:a");
t!(s: Path::new("C:a\\b\\"), "C:a\\b");
t!(s: Path::new("C:a\\b/"), "C:a\\b");
t!(s: Path::new("\\\\?\\z:\\a\\b.txt"), "\\\\?\\z:\\a\\b.txt");
t!(s: Path::new("\\\\?\\C:/a/b.txt"), "\\\\?\\C:/a/b.txt");
t!(s: Path::new("\\\\?\\C:\\a/b.txt"), "\\\\?\\C:\\a/b.txt");
t!(s: Path::new("\\\\?\\test\\a\\b.txt"), "\\\\?\\test\\a\\b.txt");
t!(s: Path::new("\\\\?\\foo\\bar\\"), "\\\\?\\foo\\bar\\");
t!(s: Path::new("\\\\.\\foo\\bar"), "\\\\.\\foo\\bar");
t!(s: Path::new("\\\\.\\"), "\\\\.\\");
t!(s: Path::new("\\\\?\\UNC\\server\\share\\foo"), "\\\\?\\UNC\\server\\share\\foo");
t!(s: Path::new("\\\\?\\UNC\\server/share"), "\\\\?\\UNC\\server/share\\");
t!(s: Path::new("\\\\?\\UNC\\server"), "\\\\?\\UNC\\server\\");
t!(s: Path::new("\\\\?\\UNC\\"), "\\\\?\\UNC\\\\");
t!(s: Path::new("\\\\?\\UNC"), "\\\\?\\UNC");
// I'm not sure whether \\.\foo/bar should normalize to \\.\foo\bar
// as information is sparse and this isn't really googleable.
// I'm going to err on the side of not normalizing it, as this skips the filesystem
t!(s: Path::new("\\\\.\\foo/bar"), "\\\\.\\foo/bar");
t!(s: Path::new("\\\\.\\foo\\bar"), "\\\\.\\foo\\bar");
}
#[test]
fn test_opt_paths() {
assert!(Path::new_opt(b"foo\\bar\0") == None);
assert!(Path::new_opt(b"foo\\bar\x80") == None);
t!(v: Path::new_opt(b"foo\\bar").unwrap(), b"foo\\bar");
assert!(Path::new_opt("foo\\bar\0") == None);
t!(s: Path::new_opt("foo\\bar").unwrap(), "foo\\bar");
}
#[test]
fn test_null_byte() {
use thread::Thread;
let result = Thread::scoped(move|| {
Path::new(b"foo/bar\0")
}).join();
assert!(result.is_err());
let result = Thread::scoped(move|| {
Path::new("test").set_filename(b"f\0o")
}).join();
assert!(result.is_err());
let result = Thread::scoped(move || {
Path::new("test").push(b"f\0o");
}).join();
assert!(result.is_err());
}
#[test]
#[should_fail]
fn test_not_utf8_panics() {
Path::new(b"hello\x80.txt");
}
#[test]
fn test_display_str() {
let path = Path::new("foo");
assert_eq!(path.display().to_string(), "foo");
let path = Path::new(b"\\");
assert_eq!(path.filename_display().to_string(), "");
let path = Path::new("foo");
let mo = path.display().as_cow();
assert_eq!(mo, "foo");
let path = Path::new(b"\\");
let mo = path.filename_display().as_cow();
assert_eq!(mo, "");
}
#[test]
fn test_display() {
macro_rules! t {
($path:expr, $exp:expr, $expf:expr) => (
{
let path = Path::new($path);
let f = format!("{}", path.display());
assert_eq!(f, $exp);
let f = format!("{}", path.filename_display());
assert_eq!(f, $expf);
}
)
}
t!("foo", "foo", "foo");
t!("foo\\bar", "foo\\bar", "bar");
t!("\\", "\\", "");
}
#[test]
fn test_components() {
macro_rules! t {
(s: $path:expr, $op:ident, $exp:expr) => (
{
let path = $path;
let path = Path::new(path);
assert_eq!(path.$op(), Some($exp));
}
);
(s: $path:expr, $op:ident, $exp:expr, opt) => (
{
let path = $path;
let path = Path::new(path);
let left = path.$op();
assert_eq!(left, $exp);
}
);
(v: $path:expr, $op:ident, $exp:expr) => (
{
let path = $path;
let path = Path::new(path);
assert_eq!(path.$op(), $exp);
}
)
}
t!(v: b"a\\b\\c", filename, Some(b"c"));
t!(s: "a\\b\\c", filename_str, "c");
t!(s: "\\a\\b\\c", filename_str, "c");
t!(s: "a", filename_str, "a");
t!(s: "\\a", filename_str, "a");
t!(s: ".", filename_str, None, opt);
t!(s: "\\", filename_str, None, opt);
t!(s: "..", filename_str, None, opt);
t!(s: "..\\..", filename_str, None, opt);
t!(s: "c:\\foo.txt", filename_str, "foo.txt");
t!(s: "C:\\", filename_str, None, opt);
t!(s: "C:", filename_str, None, opt);
t!(s: "\\\\server\\share\\foo.txt", filename_str, "foo.txt");
t!(s: "\\\\server\\share", filename_str, None, opt);
t!(s: "\\\\server", filename_str, "server");
t!(s: "\\\\?\\bar\\foo.txt", filename_str, "foo.txt");
t!(s: "\\\\?\\bar", filename_str, None, opt);
t!(s: "\\\\?\\", filename_str, None, opt);
t!(s: "\\\\?\\UNC\\server\\share\\foo.txt", filename_str, "foo.txt");
t!(s: "\\\\?\\UNC\\server", filename_str, None, opt);
t!(s: "\\\\?\\UNC\\", filename_str, None, opt);
t!(s: "\\\\?\\C:\\foo.txt", filename_str, "foo.txt");
t!(s: "\\\\?\\C:\\", filename_str, None, opt);
t!(s: "\\\\?\\C:", filename_str, None, opt);
t!(s: "\\\\?\\foo/bar", filename_str, None, opt);
t!(s: "\\\\?\\C:/foo", filename_str, None, opt);
t!(s: "\\\\.\\foo\\bar", filename_str, "bar");
t!(s: "\\\\.\\foo", filename_str, None, opt);
t!(s: "\\\\.\\foo/bar", filename_str, None, opt);
t!(s: "\\\\.\\foo\\bar/baz", filename_str, "bar/baz");
t!(s: "\\\\.\\", filename_str, None, opt);
t!(s: "\\\\?\\a\\b\\", filename_str, "b");
t!(v: b"a\\b\\c", dirname, b"a\\b");
t!(s: "a\\b\\c", dirname_str, "a\\b");
t!(s: "\\a\\b\\c", dirname_str, "\\a\\b");
t!(s: "a", dirname_str, ".");
t!(s: "\\a", dirname_str, "\\");
t!(s: ".", dirname_str, ".");
t!(s: "\\", dirname_str, "\\");
t!(s: "..", dirname_str, "..");
t!(s: "..\\..", dirname_str, "..\\..");
t!(s: "c:\\foo.txt", dirname_str, "C:\\");
t!(s: "C:\\", dirname_str, "C:\\");
t!(s: "C:", dirname_str, "C:");
t!(s: "C:foo.txt", dirname_str, "C:");
t!(s: "\\\\server\\share\\foo.txt", dirname_str, "\\\\server\\share");
t!(s: "\\\\server\\share", dirname_str, "\\\\server\\share");
t!(s: "\\\\server", dirname_str, "\\");
t!(s: "\\\\?\\bar\\foo.txt", dirname_str, "\\\\?\\bar");
t!(s: "\\\\?\\bar", dirname_str, "\\\\?\\bar");
t!(s: "\\\\?\\", dirname_str, "\\\\?\\");
t!(s: "\\\\?\\UNC\\server\\share\\foo.txt", dirname_str, "\\\\?\\UNC\\server\\share");
t!(s: "\\\\?\\UNC\\server", dirname_str, "\\\\?\\UNC\\server\\");
t!(s: "\\\\?\\UNC\\", dirname_str, "\\\\?\\UNC\\\\");
t!(s: "\\\\?\\C:\\foo.txt", dirname_str, "\\\\?\\C:\\");
t!(s: "\\\\?\\C:\\", dirname_str, "\\\\?\\C:\\");
t!(s: "\\\\?\\C:", dirname_str, "\\\\?\\C:");
t!(s: "\\\\?\\C:/foo/bar", dirname_str, "\\\\?\\C:/foo/bar");
t!(s: "\\\\?\\foo/bar", dirname_str, "\\\\?\\foo/bar");
t!(s: "\\\\.\\foo\\bar", dirname_str, "\\\\.\\foo");
t!(s: "\\\\.\\foo", dirname_str, "\\\\.\\foo");
t!(s: "\\\\?\\a\\b\\", dirname_str, "\\\\?\\a");
t!(v: b"hi\\there.txt", filestem, Some(b"there"));
t!(s: "hi\\there.txt", filestem_str, "there");
t!(s: "hi\\there", filestem_str, "there");
t!(s: "there.txt", filestem_str, "there");
t!(s: "there", filestem_str, "there");
t!(s: ".", filestem_str, None, opt);
t!(s: "\\", filestem_str, None, opt);
t!(s: "foo\\.bar", filestem_str, ".bar");
t!(s: ".bar", filestem_str, ".bar");
t!(s: "..bar", filestem_str, ".");
t!(s: "hi\\there..txt", filestem_str, "there.");
t!(s: "..", filestem_str, None, opt);
t!(s: "..\\..", filestem_str, None, opt);
// filestem is based on filename, so we don't need the full set of prefix tests
t!(v: b"hi\\there.txt", extension, Some(b"txt"));
t!(v: b"hi\\there", extension, None);
t!(s: "hi\\there.txt", extension_str, Some("txt"), opt);
t!(s: "hi\\there", extension_str, None, opt);
t!(s: "there.txt", extension_str, Some("txt"), opt);
t!(s: "there", extension_str, None, opt);
t!(s: ".", extension_str, None, opt);
t!(s: "\\", extension_str, None, opt);
t!(s: "foo\\.bar", extension_str, None, opt);
t!(s: ".bar", extension_str, None, opt);
t!(s: "..bar", extension_str, Some("bar"), opt);
t!(s: "hi\\there..txt", extension_str, Some("txt"), opt);
t!(s: "..", extension_str, None, opt);
t!(s: "..\\..", extension_str, None, opt);
// extension is based on filename, so we don't need the full set of prefix tests
}
#[test]
fn test_push() {
macro_rules! t {
(s: $path:expr, $join:expr) => (
{
let path = $path;
let join = $join;
let mut p1 = Path::new(path);
let p2 = p1.clone();
p1.push(join);
assert_eq!(p1, p2.join(join));
}
)
}
t!(s: "a\\b\\c", "..");
t!(s: "\\a\\b\\c", "d");
t!(s: "a\\b", "c\\d");
t!(s: "a\\b", "\\c\\d");
// this is just a sanity-check test. push and join share an implementation,
// so there's no need for the full set of prefix tests
// we do want to check one odd case though to ensure the prefix is re-parsed
let mut p = Path::new("\\\\?\\C:");
assert_eq!(prefix(&p), Some(VerbatimPrefix(2)));
p.push("foo");
assert_eq!(prefix(&p), Some(VerbatimDiskPrefix));
assert_eq!(p.as_str(), Some("\\\\?\\C:\\foo"));
// and another with verbatim non-normalized paths
let mut p = Path::new("\\\\?\\C:\\a\\");
p.push("foo");
assert_eq!(p.as_str(), Some("\\\\?\\C:\\a\\foo"));
}
#[test]
fn test_push_path() {
macro_rules! t {
(s: $path:expr, $push:expr, $exp:expr) => (
{
let mut p = Path::new($path);
let push = Path::new($push);
p.push(&push);
assert_eq!(p.as_str(), Some($exp));
}
)
}
t!(s: "a\\b\\c", "d", "a\\b\\c\\d");
t!(s: "\\a\\b\\c", "d", "\\a\\b\\c\\d");
t!(s: "a\\b", "c\\d", "a\\b\\c\\d");
t!(s: "a\\b", "\\c\\d", "\\c\\d");
t!(s: "a\\b", ".", "a\\b");
t!(s: "a\\b", "..\\c", "a\\c");
t!(s: "a\\b", "C:a.txt", "C:a.txt");
t!(s: "a\\b", "..\\..\\..\\c", "..\\c");
t!(s: "a\\b", "C:\\a.txt", "C:\\a.txt");
t!(s: "C:\\a", "C:\\b.txt", "C:\\b.txt");
t!(s: "C:\\a\\b\\c", "C:d", "C:\\a\\b\\c\\d");
t!(s: "C:a\\b\\c", "C:d", "C:a\\b\\c\\d");
t!(s: "C:a\\b", "..\\..\\..\\c", "C:..\\c");
t!(s: "C:\\a\\b", "..\\..\\..\\c", "C:\\c");
t!(s: "C:", r"a\b\c", r"C:a\b\c");
t!(s: "C:", r"..\a", r"C:..\a");
t!(s: "\\\\server\\share\\foo", "bar", "\\\\server\\share\\foo\\bar");
t!(s: "\\\\server\\share\\foo", "..\\..\\bar", "\\\\server\\share\\bar");
t!(s: "\\\\server\\share\\foo", "C:baz", "C:baz");
t!(s: "\\\\?\\C:\\a\\b", "C:c\\d", "\\\\?\\C:\\a\\b\\c\\d");
t!(s: "\\\\?\\C:a\\b", "C:c\\d", "C:c\\d");
t!(s: "\\\\?\\C:\\a\\b", "C:\\c\\d", "C:\\c\\d");
t!(s: "\\\\?\\foo\\bar", "baz", "\\\\?\\foo\\bar\\baz");
t!(s: "\\\\?\\C:\\a\\b", "..\\..\\..\\c", "\\\\?\\C:\\a\\b\\..\\..\\..\\c");
t!(s: "\\\\?\\foo\\bar", "..\\..\\c", "\\\\?\\foo\\bar\\..\\..\\c");
t!(s: "\\\\?\\", "foo", "\\\\?\\\\foo");
t!(s: "\\\\?\\UNC\\server\\share\\foo", "bar", "\\\\?\\UNC\\server\\share\\foo\\bar");
t!(s: "\\\\?\\UNC\\server\\share", "C:\\a", "C:\\a");
t!(s: "\\\\?\\UNC\\server\\share", "C:a", "C:a");
t!(s: "\\\\?\\UNC\\server", "foo", "\\\\?\\UNC\\server\\\\foo");
t!(s: "C:\\a", "\\\\?\\UNC\\server\\share", "\\\\?\\UNC\\server\\share");
t!(s: "\\\\.\\foo\\bar", "baz", "\\\\.\\foo\\bar\\baz");
t!(s: "\\\\.\\foo\\bar", "C:a", "C:a");
// again, not sure about the following, but I'm assuming \\.\ should be verbatim
t!(s: "\\\\.\\foo", "..\\bar", "\\\\.\\foo\\..\\bar");
t!(s: "\\\\?\\C:", "foo", "\\\\?\\C:\\foo"); // this is a weird one
}
#[test]
fn test_push_many() {
macro_rules! t {
(s: $path:expr, $push:expr, $exp:expr) => (
{
let mut p = Path::new($path);
p.push_many(&$push);
assert_eq!(p.as_str(), Some($exp));
}
);
(v: $path:expr, $push:expr, $exp:expr) => (
{
let mut p = Path::new($path);
p.push_many(&$push);
assert_eq!(p.as_vec(), $exp);
}
)
}
t!(s: "a\\b\\c", ["d", "e"], "a\\b\\c\\d\\e");
t!(s: "a\\b\\c", ["d", "\\e"], "\\e");
t!(s: "a\\b\\c", ["d", "\\e", "f"], "\\e\\f");
t!(s: "a\\b\\c", ["d".to_string(), "e".to_string()], "a\\b\\c\\d\\e");
t!(v: b"a\\b\\c", [b"d", b"e"], b"a\\b\\c\\d\\e");
t!(v: b"a\\b\\c", [b"d", b"\\e", b"f"], b"\\e\\f");
t!(v: b"a\\b\\c", [b"d".to_vec(), b"e".to_vec()],
b"a\\b\\c\\d\\e");
}
#[test]
fn test_pop() {
macro_rules! t {
(s: $path:expr, $left:expr, $right:expr) => (
{
let pstr = $path;
let mut p = Path::new(pstr);
let result = p.pop();
let left = $left;
assert_eq!(p.as_str(), Some(left));
assert_eq!(result, $right);
}
);
(b: $path:expr, $left:expr, $right:expr) => (
{
let mut p = Path::new($path);
let result = p.pop();
assert_eq!(p.as_vec(), $left);
assert_eq!(result, $right);
}
)
}
t!(s: "a\\b\\c", "a\\b", true);
t!(s: "a", ".", true);
t!(s: ".", ".", false);
t!(s: "\\a", "\\", true);
t!(s: "\\", "\\", false);
t!(b: b"a\\b\\c", b"a\\b", true);
t!(b: b"a", b".", true);
t!(b: b".", b".", false);
t!(b: b"\\a", b"\\", true);
t!(b: b"\\", b"\\", false);
t!(s: "C:\\a\\b", "C:\\a", true);
t!(s: "C:\\a", "C:\\", true);
t!(s: "C:\\", "C:\\", false);
t!(s: "C:a\\b", "C:a", true);
t!(s: "C:a", "C:", true);
t!(s: "C:", "C:", false);
t!(s: "\\\\server\\share\\a\\b", "\\\\server\\share\\a", true);
t!(s: "\\\\server\\share\\a", "\\\\server\\share", true);
t!(s: "\\\\server\\share", "\\\\server\\share", false);
t!(s: "\\\\?\\a\\b\\c", "\\\\?\\a\\b", true);
t!(s: "\\\\?\\a\\b", "\\\\?\\a", true);
t!(s: "\\\\?\\a", "\\\\?\\a", false);
t!(s: "\\\\?\\C:\\a\\b", "\\\\?\\C:\\a", true);
t!(s: "\\\\?\\C:\\a", "\\\\?\\C:\\", true);
t!(s: "\\\\?\\C:\\", "\\\\?\\C:\\", false);
t!(s: "\\\\?\\UNC\\server\\share\\a\\b", "\\\\?\\UNC\\server\\share\\a", true);
t!(s: "\\\\?\\UNC\\server\\share\\a", "\\\\?\\UNC\\server\\share", true);
t!(s: "\\\\?\\UNC\\server\\share", "\\\\?\\UNC\\server\\share", false);
t!(s: "\\\\.\\a\\b\\c", "\\\\.\\a\\b", true);
t!(s: "\\\\.\\a\\b", "\\\\.\\a", true);
t!(s: "\\\\.\\a", "\\\\.\\a", false);
t!(s: "\\\\?\\a\\b\\", "\\\\?\\a", true);
}
#[test]
fn test_root_path() {
assert_eq!(Path::new("a\\b\\c").root_path(), None);
assert_eq!(Path::new("\\a\\b\\c").root_path(), Some(Path::new("\\")));
assert_eq!(Path::new("C:a").root_path(), Some(Path::new("C:")));
assert_eq!(Path::new("C:\\a").root_path(), Some(Path::new("C:\\")));
assert_eq!(Path::new("\\\\a\\b\\c").root_path(), Some(Path::new("\\\\a\\b")));
assert_eq!(Path::new("\\\\?\\a\\b").root_path(), Some(Path::new("\\\\?\\a")));
assert_eq!(Path::new("\\\\?\\C:\\a").root_path(), Some(Path::new("\\\\?\\C:\\")));
assert_eq!(Path::new("\\\\?\\UNC\\a\\b\\c").root_path(),
Some(Path::new("\\\\?\\UNC\\a\\b")));
assert_eq!(Path::new("\\\\.\\a\\b").root_path(), Some(Path::new("\\\\.\\a")));
}
#[test]
fn test_join() {
t!(s: Path::new("a\\b\\c").join(".."), "a\\b");
t!(s: Path::new("\\a\\b\\c").join("d"), "\\a\\b\\c\\d");
t!(s: Path::new("a\\b").join("c\\d"), "a\\b\\c\\d");
t!(s: Path::new("a\\b").join("\\c\\d"), "\\c\\d");
t!(s: Path::new(".").join("a\\b"), "a\\b");
t!(s: Path::new("\\").join("a\\b"), "\\a\\b");
t!(v: Path::new(b"a\\b\\c").join(b".."), b"a\\b");
t!(v: Path::new(b"\\a\\b\\c").join(b"d"), b"\\a\\b\\c\\d");
// full join testing is covered under test_push_path, so no need for
// the full set of prefix tests
}
#[test]
fn test_join_path() {
macro_rules! t {
(s: $path:expr, $join:expr, $exp:expr) => (
{
let path = Path::new($path);
let join = Path::new($join);
let res = path.join(&join);
assert_eq!(res.as_str(), Some($exp));
}
)
}
t!(s: "a\\b\\c", "..", "a\\b");
t!(s: "\\a\\b\\c", "d", "\\a\\b\\c\\d");
t!(s: "a\\b", "c\\d", "a\\b\\c\\d");
t!(s: "a\\b", "\\c\\d", "\\c\\d");
t!(s: ".", "a\\b", "a\\b");
t!(s: "\\", "a\\b", "\\a\\b");
// join is implemented using push, so there's no need for
// the full set of prefix tests
}
#[test]
fn test_join_many() {
macro_rules! t {
(s: $path:expr, $join:expr, $exp:expr) => (
{
let path = Path::new($path);
let res = path.join_many(&$join);
assert_eq!(res.as_str(), Some($exp));
}
);
(v: $path:expr, $join:expr, $exp:expr) => (
{
let path = Path::new($path);
let res = path.join_many(&$join);
assert_eq!(res.as_vec(), $exp);
}
)
}
t!(s: "a\\b\\c", ["d", "e"], "a\\b\\c\\d\\e");
t!(s: "a\\b\\c", ["..", "d"], "a\\b\\d");
t!(s: "a\\b\\c", ["d", "\\e", "f"], "\\e\\f");
t!(s: "a\\b\\c", ["d".to_string(), "e".to_string()], "a\\b\\c\\d\\e");
t!(v: b"a\\b\\c", [b"d", b"e"], b"a\\b\\c\\d\\e");
t!(v: b"a\\b\\c", [b"d".to_vec(), b"e".to_vec()],
b"a\\b\\c\\d\\e");
}
#[test]
fn test_with_helpers() {
macro_rules! t {
(s: $path:expr, $op:ident, $arg:expr, $res:expr) => (
{
let pstr = $path;
let path = Path::new(pstr);
let arg = $arg;
let res = path.$op(arg);
let exp = Path::new($res);
assert_eq!(res, exp);
}
)
}
t!(s: "a\\b\\c", with_filename, "d", "a\\b\\d");
t!(s: ".", with_filename, "foo", "foo");
t!(s: "\\a\\b\\c", with_filename, "d", "\\a\\b\\d");
t!(s: "\\", with_filename, "foo", "\\foo");
t!(s: "\\a", with_filename, "foo", "\\foo");
t!(s: "foo", with_filename, "bar", "bar");
t!(s: "\\", with_filename, "foo\\", "\\foo");
t!(s: "\\a", with_filename, "foo\\", "\\foo");
t!(s: "a\\b\\c", with_filename, "", "a\\b");
t!(s: "a\\b\\c", with_filename, ".", "a\\b");
t!(s: "a\\b\\c", with_filename, "..", "a");
t!(s: "\\a", with_filename, "", "\\");
t!(s: "foo", with_filename, "", ".");
t!(s: "a\\b\\c", with_filename, "d\\e", "a\\b\\d\\e");
t!(s: "a\\b\\c", with_filename, "\\d", "a\\b\\d");
t!(s: "..", with_filename, "foo", "..\\foo");
t!(s: "..\\..", with_filename, "foo", "..\\..\\foo");
t!(s: "..", with_filename, "", "..");
t!(s: "..\\..", with_filename, "", "..\\..");
t!(s: "C:\\foo\\bar", with_filename, "baz", "C:\\foo\\baz");
t!(s: "C:\\foo", with_filename, "bar", "C:\\bar");
t!(s: "C:\\", with_filename, "foo", "C:\\foo");
t!(s: "C:foo\\bar", with_filename, "baz", "C:foo\\baz");
t!(s: "C:foo", with_filename, "bar", "C:bar");
t!(s: "C:", with_filename, "foo", "C:foo");
t!(s: "C:\\foo", with_filename, "", "C:\\");
t!(s: "C:foo", with_filename, "", "C:");
t!(s: "C:\\foo\\bar", with_filename, "..", "C:\\");
t!(s: "C:\\foo", with_filename, "..", "C:\\");
t!(s: "C:\\", with_filename, "..", "C:\\");
t!(s: "C:foo\\bar", with_filename, "..", "C:");
t!(s: "C:foo", with_filename, "..", "C:..");
t!(s: "C:", with_filename, "..", "C:..");
t!(s: "\\\\server\\share\\foo", with_filename, "bar", "\\\\server\\share\\bar");
t!(s: "\\\\server\\share", with_filename, "foo", "\\\\server\\share\\foo");
t!(s: "\\\\server\\share\\foo", with_filename, "", "\\\\server\\share");
t!(s: "\\\\server\\share", with_filename, "", "\\\\server\\share");
t!(s: "\\\\server\\share\\foo", with_filename, "..", "\\\\server\\share");
t!(s: "\\\\server\\share", with_filename, "..", "\\\\server\\share");
t!(s: "\\\\?\\C:\\foo\\bar", with_filename, "baz", "\\\\?\\C:\\foo\\baz");
t!(s: "\\\\?\\C:\\foo", with_filename, "bar", "\\\\?\\C:\\bar");
t!(s: "\\\\?\\C:\\", with_filename, "foo", "\\\\?\\C:\\foo");
t!(s: "\\\\?\\C:\\foo", with_filename, "..", "\\\\?\\C:\\..");
t!(s: "\\\\?\\foo\\bar", with_filename, "baz", "\\\\?\\foo\\baz");
t!(s: "\\\\?\\foo", with_filename, "bar", "\\\\?\\foo\\bar");
t!(s: "\\\\?\\", with_filename, "foo", "\\\\?\\\\foo");
t!(s: "\\\\?\\foo\\bar", with_filename, "..", "\\\\?\\foo\\..");
t!(s: "\\\\.\\foo\\bar", with_filename, "baz", "\\\\.\\foo\\baz");
t!(s: "\\\\.\\foo", with_filename, "bar", "\\\\.\\foo\\bar");
t!(s: "\\\\.\\foo\\bar", with_filename, "..", "\\\\.\\foo\\..");
t!(s: "hi\\there.txt", with_extension, "exe", "hi\\there.exe");
t!(s: "hi\\there.txt", with_extension, "", "hi\\there");
t!(s: "hi\\there.txt", with_extension, ".", "hi\\there..");
t!(s: "hi\\there.txt", with_extension, "..", "hi\\there...");
t!(s: "hi\\there", with_extension, "txt", "hi\\there.txt");
t!(s: "hi\\there", with_extension, ".", "hi\\there..");
t!(s: "hi\\there", with_extension, "..", "hi\\there...");
t!(s: "hi\\there.", with_extension, "txt", "hi\\there.txt");
t!(s: "hi\\.foo", with_extension, "txt", "hi\\.foo.txt");
t!(s: "hi\\there.txt", with_extension, ".foo", "hi\\there..foo");
t!(s: "\\", with_extension, "txt", "\\");
t!(s: "\\", with_extension, ".", "\\");
t!(s: "\\", with_extension, "..", "\\");
t!(s: ".", with_extension, "txt", ".");
// extension setter calls filename setter internally, no need for extended tests
}
#[test]
fn test_setters() {
macro_rules! t {
(s: $path:expr, $set:ident, $with:ident, $arg:expr) => (
{
let path = $path;
let arg = $arg;
let mut p1 = Path::new(path);
p1.$set(arg);
let p2 = Path::new(path);
assert_eq!(p1, p2.$with(arg));
}
);
(v: $path:expr, $set:ident, $with:ident, $arg:expr) => (
{
let path = $path;
let arg = $arg;
let mut p1 = Path::new(path);
p1.$set(arg);
let p2 = Path::new(path);
assert_eq!(p1, p2.$with(arg));
}
)
}
t!(v: b"a\\b\\c", set_filename, with_filename, b"d");
t!(v: b"\\", set_filename, with_filename, b"foo");
t!(s: "a\\b\\c", set_filename, with_filename, "d");
t!(s: "\\", set_filename, with_filename, "foo");
t!(s: ".", set_filename, with_filename, "foo");
t!(s: "a\\b", set_filename, with_filename, "");
t!(s: "a", set_filename, with_filename, "");
t!(v: b"hi\\there.txt", set_extension, with_extension, b"exe");
t!(s: "hi\\there.txt", set_extension, with_extension, "exe");
t!(s: "hi\\there.", set_extension, with_extension, "txt");
t!(s: "hi\\there", set_extension, with_extension, "txt");
t!(s: "hi\\there.txt", set_extension, with_extension, "");
t!(s: "hi\\there", set_extension, with_extension, "");
t!(s: ".", set_extension, with_extension, "txt");
// with_ helpers use the setter internally, so the tests for the with_ helpers
// will suffice. No need for the full set of prefix tests.
}
#[test]
fn test_getters() {
macro_rules! t {
(s: $path:expr, $filename:expr, $dirname:expr, $filestem:expr, $ext:expr) => (
{
let path = $path;
assert_eq!(path.filename_str(), $filename);
assert_eq!(path.dirname_str(), $dirname);
assert_eq!(path.filestem_str(), $filestem);
assert_eq!(path.extension_str(), $ext);
}
);
(v: $path:expr, $filename:expr, $dirname:expr, $filestem:expr, $ext:expr) => (
{
let path = $path;
assert_eq!(path.filename(), $filename);
assert_eq!(path.dirname(), $dirname);
assert_eq!(path.filestem(), $filestem);
assert_eq!(path.extension(), $ext);
}
)
}
t!(v: Path::new(b"a\\b\\c"), Some(b"c"), b"a\\b", Some(b"c"), None);
t!(s: Path::new("a\\b\\c"), Some("c"), Some("a\\b"), Some("c"), None);
t!(s: Path::new("."), None, Some("."), None, None);
t!(s: Path::new("\\"), None, Some("\\"), None, None);
t!(s: Path::new(".."), None, Some(".."), None, None);
t!(s: Path::new("..\\.."), None, Some("..\\.."), None, None);
t!(s: Path::new("hi\\there.txt"), Some("there.txt"), Some("hi"),
Some("there"), Some("txt"));
t!(s: Path::new("hi\\there"), Some("there"), Some("hi"), Some("there"), None);
t!(s: Path::new("hi\\there."), Some("there."), Some("hi"),
Some("there"), Some(""));
t!(s: Path::new("hi\\.there"), Some(".there"), Some("hi"), Some(".there"), None);
t!(s: Path::new("hi\\..there"), Some("..there"), Some("hi"),
Some("."), Some("there"));
// these are already tested in test_components, so no need for extended tests
}
#[test]
fn test_dir_path() {
t!(s: Path::new("hi\\there").dir_path(), "hi");
t!(s: Path::new("hi").dir_path(), ".");
t!(s: Path::new("\\hi").dir_path(), "\\");
t!(s: Path::new("\\").dir_path(), "\\");
t!(s: Path::new("..").dir_path(), "..");
t!(s: Path::new("..\\..").dir_path(), "..\\..");
// dir_path is just dirname interpreted as a path.
// No need for extended tests
}
#[test]
fn test_is_absolute() {
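        // Each case lists the expected results for is_absolute, is_vol_relative,
        // is_cwd_relative and is_relative, in that order.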
macro_rules! t {
($path:expr, $abs:expr, $vol:expr, $cwd:expr, $rel:expr) => (
{
let path = Path::new($path);
let (abs, vol, cwd, rel) = ($abs, $vol, $cwd, $rel);
assert_eq!(path.is_absolute(), abs);
assert_eq!(is_vol_relative(&path), vol);
assert_eq!(is_cwd_relative(&path), cwd);
assert_eq!(path.is_relative(), rel);
}
)
}
t!("a\\b\\c", false, false, false, true);
t!("\\a\\b\\c", false, true, false, false);
t!("a", false, false, false, true);
t!("\\a", false, true, false, false);
t!(".", false, false, false, true);
t!("\\", false, true, false, false);
t!("..", false, false, false, true);
t!("..\\..", false, false, false, true);
t!("C:a\\b.txt", false, false, true, false);
t!("C:\\a\\b.txt", true, false, false, false);
t!("\\\\server\\share\\a\\b.txt", true, false, false, false);
t!("\\\\?\\a\\b\\c.txt", true, false, false, false);
t!("\\\\?\\C:\\a\\b.txt", true, false, false, false);
t!("\\\\?\\C:a\\b.txt", true, false, false, false); // NB: not equivalent to C:a\b.txt
t!("\\\\?\\UNC\\server\\share\\a\\b.txt", true, false, false, false);
t!("\\\\.\\a\\b", true, false, false, false);
}
#[test]
fn test_is_ancestor_of() {
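        // A path is an ancestor of another when the other can be reached by descending
        // from it (equal paths count). Drive letters compare case-insensitively, and a
        // verbatim path is matched against its equivalent non-verbatim form.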
macro_rules! t {
(s: $path:expr, $dest:expr, $exp:expr) => (
{
let path = Path::new($path);
let dest = Path::new($dest);
let exp = $exp;
let res = path.is_ancestor_of(&dest);
assert_eq!(res, exp);
}
)
}
t!(s: "a\\b\\c", "a\\b\\c\\d", true);
t!(s: "a\\b\\c", "a\\b\\c", true);
t!(s: "a\\b\\c", "a\\b", false);
t!(s: "\\a\\b\\c", "\\a\\b\\c", true);
t!(s: "\\a\\b", "\\a\\b\\c", true);
t!(s: "\\a\\b\\c\\d", "\\a\\b\\c", false);
t!(s: "\\a\\b", "a\\b\\c", false);
t!(s: "a\\b", "\\a\\b\\c", false);
t!(s: "a\\b\\c", "a\\b\\d", false);
t!(s: "..\\a\\b\\c", "a\\b\\c", false);
t!(s: "a\\b\\c", "..\\a\\b\\c", false);
t!(s: "a\\b\\c", "a\\b\\cd", false);
t!(s: "a\\b\\cd", "a\\b\\c", false);
t!(s: "..\\a\\b", "..\\a\\b\\c", true);
t!(s: ".", "a\\b", true);
t!(s: ".", ".", true);
t!(s: "\\", "\\", true);
t!(s: "\\", "\\a\\b", true);
t!(s: "..", "a\\b", true);
t!(s: "..\\..", "a\\b", true);
t!(s: "foo\\bar", "foobar", false);
t!(s: "foobar", "foo\\bar", false);
t!(s: "foo", "C:foo", false);
t!(s: "C:foo", "foo", false);
t!(s: "C:foo", "C:foo\\bar", true);
t!(s: "C:foo\\bar", "C:foo", false);
t!(s: "C:\\foo", "C:\\foo\\bar", true);
t!(s: "C:", "C:", true);
t!(s: "C:", "C:\\", false);
t!(s: "C:\\", "C:", false);
t!(s: "C:\\", "C:\\", true);
t!(s: "C:\\foo\\bar", "C:\\foo", false);
t!(s: "C:foo\\bar", "C:foo", false);
t!(s: "C:\\foo", "\\foo", false);
t!(s: "\\foo", "C:\\foo", false);
t!(s: "\\\\server\\share\\foo", "\\\\server\\share\\foo\\bar", true);
t!(s: "\\\\server\\share", "\\\\server\\share\\foo", true);
t!(s: "\\\\server\\share\\foo", "\\\\server\\share", false);
t!(s: "C:\\foo", "\\\\server\\share\\foo", false);
t!(s: "\\\\server\\share\\foo", "C:\\foo", false);
t!(s: "\\\\?\\foo\\bar", "\\\\?\\foo\\bar\\baz", true);
t!(s: "\\\\?\\foo\\bar\\baz", "\\\\?\\foo\\bar", false);
t!(s: "\\\\?\\foo\\bar", "\\foo\\bar\\baz", false);
t!(s: "\\foo\\bar", "\\\\?\\foo\\bar\\baz", false);
t!(s: "\\\\?\\C:\\foo\\bar", "\\\\?\\C:\\foo\\bar\\baz", true);
t!(s: "\\\\?\\C:\\foo\\bar\\baz", "\\\\?\\C:\\foo\\bar", false);
t!(s: "\\\\?\\C:\\", "\\\\?\\C:\\foo", true);
t!(s: "\\\\?\\C:", "\\\\?\\C:\\", false); // this is a weird one
t!(s: "\\\\?\\C:\\", "\\\\?\\C:", false);
t!(s: "\\\\?\\C:\\a", "\\\\?\\c:\\a\\b", true);
t!(s: "\\\\?\\c:\\a", "\\\\?\\C:\\a\\b", true);
t!(s: "\\\\?\\C:\\a", "\\\\?\\D:\\a\\b", false);
t!(s: "\\\\?\\foo", "\\\\?\\foobar", false);
t!(s: "\\\\?\\a\\b", "\\\\?\\a\\b\\c", true);
t!(s: "\\\\?\\a\\b", "\\\\?\\a\\b\\", true);
t!(s: "\\\\?\\a\\b\\", "\\\\?\\a\\b", true);
t!(s: "\\\\?\\a\\b\\c", "\\\\?\\a\\b", false);
t!(s: "\\\\?\\a\\b\\c", "\\\\?\\a\\b\\", false);
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\?\\UNC\\a\\b\\c\\d", true);
t!(s: "\\\\?\\UNC\\a\\b\\c\\d", "\\\\?\\UNC\\a\\b\\c", false);
t!(s: "\\\\?\\UNC\\a\\b", "\\\\?\\UNC\\a\\b\\c", true);
t!(s: "\\\\.\\foo\\bar", "\\\\.\\foo\\bar\\baz", true);
t!(s: "\\\\.\\foo\\bar\\baz", "\\\\.\\foo\\bar", false);
t!(s: "\\\\.\\foo", "\\\\.\\foo\\bar", true);
t!(s: "\\\\.\\foo", "\\\\.\\foobar", false);
t!(s: "\\a\\b", "\\\\?\\a\\b", false);
t!(s: "\\\\?\\a\\b", "\\a\\b", false);
t!(s: "\\a\\b", "\\\\?\\C:\\a\\b", false);
t!(s: "\\\\?\\C:\\a\\b", "\\a\\b", false);
t!(s: "Z:\\a\\b", "\\\\?\\z:\\a\\b", true);
t!(s: "C:\\a\\b", "\\\\?\\D:\\a\\b", false);
t!(s: "a\\b", "\\\\?\\a\\b", false);
t!(s: "\\\\?\\a\\b", "a\\b", false);
t!(s: "C:\\a\\b", "\\\\?\\C:\\a\\b", true);
t!(s: "\\\\?\\C:\\a\\b", "C:\\a\\b", true);
t!(s: "C:a\\b", "\\\\?\\C:\\a\\b", false);
t!(s: "C:a\\b", "\\\\?\\C:a\\b", false);
t!(s: "\\\\?\\C:\\a\\b", "C:a\\b", false);
t!(s: "\\\\?\\C:a\\b", "C:a\\b", false);
t!(s: "C:\\a\\b", "\\\\?\\C:\\a\\b\\", true);
t!(s: "\\\\?\\C:\\a\\b\\", "C:\\a\\b", true);
t!(s: "\\\\a\\b\\c", "\\\\?\\UNC\\a\\b\\c", true);
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\a\\b\\c", true);
}
#[test]
fn test_ends_with_path() {
macro_rules! t {
(s: $path:expr, $child:expr, $exp:expr) => (
{
let path = Path::new($path);
let child = Path::new($child);
assert_eq!(path.ends_with_path(&child), $exp);
}
);
}
t!(s: "a\\b\\c", "c", true);
t!(s: "a\\b\\c", "d", false);
t!(s: "foo\\bar\\quux", "bar", false);
t!(s: "foo\\bar\\quux", "barquux", false);
t!(s: "a\\b\\c", "b\\c", true);
t!(s: "a\\b\\c", "a\\b\\c", true);
t!(s: "a\\b\\c", "foo\\a\\b\\c", false);
t!(s: "\\a\\b\\c", "a\\b\\c", true);
t!(s: "\\a\\b\\c", "\\a\\b\\c", false); // child must be relative
t!(s: "\\a\\b\\c", "foo\\a\\b\\c", false);
t!(s: "a\\b\\c", "", false);
t!(s: "", "", true);
t!(s: "\\a\\b\\c", "d\\e\\f", false);
t!(s: "a\\b\\c", "a\\b", false);
t!(s: "a\\b\\c", "b", false);
t!(s: "C:\\a\\b", "b", true);
t!(s: "C:\\a\\b", "C:b", false);
t!(s: "C:\\a\\b", "C:a\\b", false);
}
#[test]
fn test_path_relative_from() {
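        // path_relative_from answers: which path, joined onto the argument, yields `self`?
        // When the prefixes are incompatible it falls back to returning `self` unchanged,
        // and it returns None when no such path can be constructed at all.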
macro_rules! t {
(s: $path:expr, $other:expr, $exp:expr) => (
{
assert_eq!(Path::new($path).path_relative_from(&Path::new($other))
.as_ref().and_then(|x| x.as_str()), $exp);
}
)
}
t!(s: "a\\b\\c", "a\\b", Some("c"));
t!(s: "a\\b\\c", "a\\b\\d", Some("..\\c"));
t!(s: "a\\b\\c", "a\\b\\c\\d", Some(".."));
t!(s: "a\\b\\c", "a\\b\\c", Some("."));
t!(s: "a\\b\\c", "a\\b\\c\\d\\e", Some("..\\.."));
t!(s: "a\\b\\c", "a\\d\\e", Some("..\\..\\b\\c"));
t!(s: "a\\b\\c", "d\\e\\f", Some("..\\..\\..\\a\\b\\c"));
t!(s: "a\\b\\c", "\\a\\b\\c", None);
t!(s: "\\a\\b\\c", "a\\b\\c", Some("\\a\\b\\c"));
t!(s: "\\a\\b\\c", "\\a\\b\\c\\d", Some(".."));
t!(s: "\\a\\b\\c", "\\a\\b", Some("c"));
t!(s: "\\a\\b\\c", "\\a\\b\\c\\d\\e", Some("..\\.."));
t!(s: "\\a\\b\\c", "\\a\\d\\e", Some("..\\..\\b\\c"));
t!(s: "\\a\\b\\c", "\\d\\e\\f", Some("..\\..\\..\\a\\b\\c"));
t!(s: "hi\\there.txt", "hi\\there", Some("..\\there.txt"));
t!(s: ".", "a", Some(".."));
t!(s: ".", "a\\b", Some("..\\.."));
t!(s: ".", ".", Some("."));
t!(s: "a", ".", Some("a"));
t!(s: "a\\b", ".", Some("a\\b"));
t!(s: "..", ".", Some(".."));
t!(s: "a\\b\\c", "a\\b\\c", Some("."));
t!(s: "\\a\\b\\c", "\\a\\b\\c", Some("."));
t!(s: "\\", "\\", Some("."));
t!(s: "\\", ".", Some("\\"));
t!(s: "..\\..\\a", "b", Some("..\\..\\..\\a"));
t!(s: "a", "..\\..\\b", None);
t!(s: "..\\..\\a", "..\\..\\b", Some("..\\a"));
t!(s: "..\\..\\a", "..\\..\\a\\b", Some(".."));
t!(s: "..\\..\\a\\b", "..\\..\\a", Some("b"));
t!(s: "C:a\\b\\c", "C:a\\b", Some("c"));
t!(s: "C:a\\b", "C:a\\b\\c", Some(".."));
t!(s: "C:" ,"C:a\\b", Some("..\\.."));
t!(s: "C:a\\b", "C:c\\d", Some("..\\..\\a\\b"));
t!(s: "C:a\\b", "D:c\\d", Some("C:a\\b"));
t!(s: "C:a\\b", "C:..\\c", None);
t!(s: "C:..\\a", "C:b\\c", Some("..\\..\\..\\a"));
t!(s: "C:\\a\\b\\c", "C:\\a\\b", Some("c"));
t!(s: "C:\\a\\b", "C:\\a\\b\\c", Some(".."));
t!(s: "C:\\", "C:\\a\\b", Some("..\\.."));
t!(s: "C:\\a\\b", "C:\\c\\d", Some("..\\..\\a\\b"));
t!(s: "C:\\a\\b", "C:a\\b", Some("C:\\a\\b"));
t!(s: "C:a\\b", "C:\\a\\b", None);
t!(s: "\\a\\b", "C:\\a\\b", None);
t!(s: "\\a\\b", "C:a\\b", None);
t!(s: "a\\b", "C:\\a\\b", None);
t!(s: "a\\b", "C:a\\b", None);
t!(s: "\\\\a\\b\\c", "\\\\a\\b", Some("c"));
t!(s: "\\\\a\\b", "\\\\a\\b\\c", Some(".."));
t!(s: "\\\\a\\b\\c\\e", "\\\\a\\b\\c\\d", Some("..\\e"));
t!(s: "\\\\a\\c\\d", "\\\\a\\b\\d", Some("\\\\a\\c\\d"));
t!(s: "\\\\b\\c\\d", "\\\\a\\c\\d", Some("\\\\b\\c\\d"));
t!(s: "\\\\a\\b\\c", "\\d\\e", Some("\\\\a\\b\\c"));
t!(s: "\\d\\e", "\\\\a\\b\\c", None);
t!(s: "d\\e", "\\\\a\\b\\c", None);
t!(s: "C:\\a\\b\\c", "\\\\a\\b\\c", Some("C:\\a\\b\\c"));
t!(s: "C:\\c", "\\\\a\\b\\c", Some("C:\\c"));
t!(s: "\\\\?\\a\\b", "\\a\\b", Some("\\\\?\\a\\b"));
t!(s: "\\\\?\\a\\b", "a\\b", Some("\\\\?\\a\\b"));
t!(s: "\\\\?\\a\\b", "\\b", Some("\\\\?\\a\\b"));
t!(s: "\\\\?\\a\\b", "b", Some("\\\\?\\a\\b"));
t!(s: "\\\\?\\a\\b", "\\\\?\\a\\b\\c", Some(".."));
t!(s: "\\\\?\\a\\b\\c", "\\\\?\\a\\b", Some("c"));
t!(s: "\\\\?\\a\\b", "\\\\?\\c\\d", Some("\\\\?\\a\\b"));
t!(s: "\\\\?\\a", "\\\\?\\b", Some("\\\\?\\a"));
t!(s: "\\\\?\\C:\\a\\b", "\\\\?\\C:\\a", Some("b"));
t!(s: "\\\\?\\C:\\a", "\\\\?\\C:\\a\\b", Some(".."));
t!(s: "\\\\?\\C:\\a", "\\\\?\\C:\\b", Some("..\\a"));
t!(s: "\\\\?\\C:\\a", "\\\\?\\D:\\a", Some("\\\\?\\C:\\a"));
t!(s: "\\\\?\\C:\\a\\b", "\\\\?\\c:\\a", Some("b"));
t!(s: "\\\\?\\C:\\a\\b", "C:\\a", Some("b"));
t!(s: "\\\\?\\C:\\a", "C:\\a\\b", Some(".."));
t!(s: "C:\\a\\b", "\\\\?\\C:\\a", Some("b"));
t!(s: "C:\\a", "\\\\?\\C:\\a\\b", Some(".."));
t!(s: "\\\\?\\C:\\a", "D:\\a", Some("\\\\?\\C:\\a"));
t!(s: "\\\\?\\c:\\a\\b", "C:\\a", Some("b"));
t!(s: "\\\\?\\C:\\a\\b", "C:a\\b", Some("\\\\?\\C:\\a\\b"));
t!(s: "\\\\?\\C:\\a\\.\\b", "C:\\a", Some("\\\\?\\C:\\a\\.\\b"));
t!(s: "\\\\?\\C:\\a\\b/c", "C:\\a", Some("\\\\?\\C:\\a\\b/c"));
t!(s: "\\\\?\\C:\\a\\..\\b", "C:\\a", Some("\\\\?\\C:\\a\\..\\b"));
t!(s: "C:a\\b", "\\\\?\\C:\\a\\b", None);
t!(s: "\\\\?\\C:\\a\\.\\b", "\\\\?\\C:\\a", Some("\\\\?\\C:\\a\\.\\b"));
t!(s: "\\\\?\\C:\\a\\b/c", "\\\\?\\C:\\a", Some("\\\\?\\C:\\a\\b/c"));
t!(s: "\\\\?\\C:\\a\\..\\b", "\\\\?\\C:\\a", Some("\\\\?\\C:\\a\\..\\b"));
t!(s: "\\\\?\\C:\\a\\b\\", "\\\\?\\C:\\a", Some("b"));
t!(s: "\\\\?\\C:\\.\\b", "\\\\?\\C:\\.", Some("b"));
t!(s: "C:\\b", "\\\\?\\C:\\.", Some("..\\b"));
t!(s: "\\\\?\\a\\.\\b\\c", "\\\\?\\a\\.\\b", Some("c"));
t!(s: "\\\\?\\a\\b\\c", "\\\\?\\a\\.\\d", Some("..\\..\\b\\c"));
t!(s: "\\\\?\\a\\..\\b", "\\\\?\\a\\..", Some("b"));
t!(s: "\\\\?\\a\\b\\..", "\\\\?\\a\\b", Some("\\\\?\\a\\b\\.."));
t!(s: "\\\\?\\a\\b\\c", "\\\\?\\a\\..\\b", Some("..\\..\\b\\c"));
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\?\\UNC\\a\\b", Some("c"));
t!(s: "\\\\?\\UNC\\a\\b", "\\\\?\\UNC\\a\\b\\c", Some(".."));
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\?\\UNC\\a\\c\\d", Some("\\\\?\\UNC\\a\\b\\c"));
t!(s: "\\\\?\\UNC\\b\\c\\d", "\\\\?\\UNC\\a\\c\\d", Some("\\\\?\\UNC\\b\\c\\d"));
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\?\\a\\b\\c", Some("\\\\?\\UNC\\a\\b\\c"));
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\?\\C:\\a\\b\\c", Some("\\\\?\\UNC\\a\\b\\c"));
t!(s: "\\\\?\\UNC\\a\\b\\c/d", "\\\\?\\UNC\\a\\b", Some("\\\\?\\UNC\\a\\b\\c/d"));
t!(s: "\\\\?\\UNC\\a\\b\\.", "\\\\?\\UNC\\a\\b", Some("\\\\?\\UNC\\a\\b\\."));
t!(s: "\\\\?\\UNC\\a\\b\\..", "\\\\?\\UNC\\a\\b", Some("\\\\?\\UNC\\a\\b\\.."));
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\a\\b", Some("c"));
t!(s: "\\\\?\\UNC\\a\\b", "\\\\a\\b\\c", Some(".."));
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\a\\c\\d", Some("\\\\?\\UNC\\a\\b\\c"));
t!(s: "\\\\?\\UNC\\b\\c\\d", "\\\\a\\c\\d", Some("\\\\?\\UNC\\b\\c\\d"));
t!(s: "\\\\?\\UNC\\a\\b\\.", "\\\\a\\b", Some("\\\\?\\UNC\\a\\b\\."));
t!(s: "\\\\?\\UNC\\a\\b\\c/d", "\\\\a\\b", Some("\\\\?\\UNC\\a\\b\\c/d"));
t!(s: "\\\\?\\UNC\\a\\b\\..", "\\\\a\\b", Some("\\\\?\\UNC\\a\\b\\.."));
t!(s: "\\\\a\\b\\c", "\\\\?\\UNC\\a\\b", Some("c"));
t!(s: "\\\\a\\b\\c", "\\\\?\\UNC\\a\\c\\d", Some("\\\\a\\b\\c"));
}
#[test]
fn test_str_components() {
macro_rules! t {
(s: $path:expr, $exp:expr) => (
{
let path = Path::new($path);
let comps = path.str_components().map(|x|x.unwrap())
.collect::<Vec<&str>>();
let exp: &[&str] = &$exp;
assert_eq!(comps, exp);
let comps = path.str_components().rev().map(|x|x.unwrap())
.collect::<Vec<&str>>();
let exp = exp.iter().rev().map(|&x|x).collect::<Vec<&str>>();
assert_eq!(comps, exp);
}
);
}
t!(s: b"a\\b\\c", ["a", "b", "c"]);
t!(s: "a\\b\\c", ["a", "b", "c"]);
t!(s: "a\\b\\d", ["a", "b", "d"]);
t!(s: "a\\b\\cd", ["a", "b", "cd"]);
t!(s: "\\a\\b\\c", ["a", "b", "c"]);
t!(s: "a", ["a"]);
t!(s: "\\a", ["a"]);
t!(s: "\\", []);
t!(s: ".", ["."]);
t!(s: "..", [".."]);
t!(s: "..\\..", ["..", ".."]);
t!(s: "..\\..\\foo", ["..", "..", "foo"]);
t!(s: "C:foo\\bar", ["foo", "bar"]);
t!(s: "C:foo", ["foo"]);
t!(s: "C:", []);
t!(s: "C:\\foo\\bar", ["foo", "bar"]);
t!(s: "C:\\foo", ["foo"]);
t!(s: "C:\\", []);
t!(s: "\\\\server\\share\\foo\\bar", ["foo", "bar"]);
t!(s: "\\\\server\\share\\foo", ["foo"]);
t!(s: "\\\\server\\share", []);
t!(s: "\\\\?\\foo\\bar\\baz", ["bar", "baz"]);
t!(s: "\\\\?\\foo\\bar", ["bar"]);
t!(s: "\\\\?\\foo", []);
t!(s: "\\\\?\\", []);
t!(s: "\\\\?\\a\\b", ["b"]);
t!(s: "\\\\?\\a\\b\\", ["b"]);
t!(s: "\\\\?\\foo\\bar\\\\baz", ["bar", "", "baz"]);
t!(s: "\\\\?\\C:\\foo\\bar", ["foo", "bar"]);
t!(s: "\\\\?\\C:\\foo", ["foo"]);
t!(s: "\\\\?\\C:\\", []);
t!(s: "\\\\?\\C:\\foo\\", ["foo"]);
t!(s: "\\\\?\\UNC\\server\\share\\foo\\bar", ["foo", "bar"]);
t!(s: "\\\\?\\UNC\\server\\share\\foo", ["foo"]);
t!(s: "\\\\?\\UNC\\server\\share", []);
t!(s: "\\\\.\\foo\\bar\\baz", ["bar", "baz"]);
t!(s: "\\\\.\\foo\\bar", ["bar"]);
t!(s: "\\\\.\\foo", []);
}
#[test]
fn test_components_iter() {
macro_rules! t {
(s: $path:expr, $exp:expr) => (
{
let path = Path::new($path);
let comps = path.components().collect::<Vec<&[u8]>>();
let exp: &[&[u8]] = &$exp;
assert_eq!(comps, exp);
let comps = path.components().rev().collect::<Vec<&[u8]>>();
let exp = exp.iter().rev().map(|&x|x).collect::<Vec<&[u8]>>();
assert_eq!(comps, exp);
}
)
}
t!(s: "a\\b\\c", [b"a", b"b", b"c"]);
t!(s: ".", [b"."]);
// since this is really a wrapper around str_components, those tests suffice
}
#[test]
fn test_make_non_verbatim() {
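        // make_non_verbatim rewrites a verbatim (`\\?\`) path into its traditional
        // equivalent, returning None for device paths and for verbatim paths whose
        // meaning would change under normalization (e.g. `.` or `..` components).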
macro_rules! t {
($path:expr, $exp:expr) => (
{
let path = Path::new($path);
let exp: Option<&str> = $exp;
let exp = exp.map(|s| Path::new(s));
assert_eq!(make_non_verbatim(&path), exp);
}
)
}
t!(r"\a\b\c", Some(r"\a\b\c"));
t!(r"a\b\c", Some(r"a\b\c"));
t!(r"C:\a\b\c", Some(r"C:\a\b\c"));
t!(r"C:a\b\c", Some(r"C:a\b\c"));
t!(r"\\server\share\foo", Some(r"\\server\share\foo"));
t!(r"\\.\foo", None);
t!(r"\\?\foo", None);
t!(r"\\?\C:", None);
t!(r"\\?\C:foo", None);
t!(r"\\?\C:\", Some(r"C:\"));
t!(r"\\?\C:\foo", Some(r"C:\foo"));
t!(r"\\?\C:\foo\bar\baz", Some(r"C:\foo\bar\baz"));
t!(r"\\?\C:\foo\.\bar\baz", None);
t!(r"\\?\C:\foo\bar\..\baz", None);
t!(r"\\?\C:\foo\bar\..", None);
t!(r"\\?\UNC\server\share\foo", Some(r"\\server\share\foo"));
t!(r"\\?\UNC\server\share", Some(r"\\server\share"));
t!(r"\\?\UNC\server", None);
t!(r"\\?\UNC\server\", None);
}
}<|fim▁end|> | |
<|file_name|>[userId].tsx<|end_file_name|><|fim▁begin|>import { Fab, makeStyles, Toolbar } from '@material-ui/core';
import Container from '@material-ui/core/Container';
import Grid from '@material-ui/core/Grid';
import Paper from '@material-ui/core/Paper';
import Typography from '@material-ui/core/Typography';
import AddIcon from '@material-ui/icons/Add';
import { GetServerSideProps, NextPage } from 'next';
import React, { useEffect, useState } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { AddUserRoleModal } from '../../../../components/roles/add-user-role-modal';
import { UserRoleList } from '../../../../components/roles/user-role-list';
import { useAuthenticatedUser } from '../../../../src/use-authenticated-user.hook';
import { loadActivities } from '../../../../store/activities/activities.actions';
import { listVisible } from '../../../../store/common/list-state.models';
import { loadOrganization, loadOrganizationUsers } from '../../../../store/organizations/organizations.actions';
import { loadRoles } from '../../../../store/roles/roles.actions';
import { RootState } from '../../../../store/store';
import { loadUserRoles, UserRole } from '../../../../store/users/users.actions';
interface OrganizationUserProps {
organizationId: string;
userId: string;
}
const useStyles = makeStyles((theme) => ({
container: {
paddingTop: theme.spacing(4),
paddingBottom: theme.spacing(4),
},
paper: {
padding: theme.spacing(2),
},<|fim▁hole|> organizationUsersList: {
minHeight: theme.spacing(30),
maxHeight: theme.spacing(60),
overflow: 'auto',
},
organizationUsersListTitle: {
flex: '1 1 100%',
},
organizationUsersListToolbar: {
paddingLeft: theme.spacing(2),
paddingRight: theme.spacing(2),
},
}));
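// Back-office page for a single organization user: shows the user's details and the roles assigned to them.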
const OrganizationUserPage: NextPage<OrganizationUserProps> = ({ userId, organizationId }) => {
const classes = useStyles();
const dispatch = useDispatch();
const { user } = useAuthenticatedUser(['https://swetuggtixdev.onmicrosoft.com/tix-api/access_as_backoffice']);
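  // Once the authenticated backoffice user is known, load the organization, its users, this user's roles, and lookup data.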
useEffect(() => {
if (user.current?.userId) {
dispatch(loadOrganization(organizationId));
dispatch(loadOrganizationUsers(organizationId));
dispatch(loadUserRoles(userId, organizationId));
dispatch(loadActivities());
dispatch(loadRoles());
}
}, [user.current, organizationId, userId]);
const [addRoleModalOpen, setAddRoleModalOpen] = useState(false);
const onAddRoleButtonClick = () => {
setAddRoleModalOpen(true);
};
const organizations = useSelector((r: RootState) => r.organizations);
const roles = useSelector((r: RootState) => r.roles);
const users = useSelector((r: RootState) => r.users);
const organization = organizations.organizations.models[organizationId];
const orgUser = organizations.organizationUsers.models[userId];
const visibleUserRoles = listVisible(users.userRoles);
const visibleRoles = listVisible(roles.roles);
return (
<React.Fragment>
{orgUser && (
<Container maxWidth={false} className={classes.container}>
<Typography variant="h4" component="h1" gutterBottom>
{orgUser.name}
</Typography>
<Grid container spacing={3}>
<Grid item xs={12} md={5}>
<Grid container spacing={2}>
<Grid item xs={12}>
<Paper className={classes.paper}></Paper>
</Grid>
</Grid>
</Grid>
<Grid item xs={12}>
<Paper className={classes.paper}>
<Toolbar className={classes.organizationUsersListToolbar}>
<Typography className={classes.organizationUsersListTitle} variant="h6" component="div">
Roles
</Typography>
<Fab size="small" color="primary" onClick={onAddRoleButtonClick}>
<AddIcon />
</Fab>
{
<AddUserRoleModal
roles={visibleRoles}
userId={userId}
open={addRoleModalOpen}
setOpen={setAddRoleModalOpen}
/>
}
</Toolbar>
{<UserRoleList organization={organization} user={orgUser} userRoles={visibleUserRoles} />}
</Paper>
</Grid>
</Grid>
</Container>
)}
</React.Fragment>
);
};
export default OrganizationUserPage;
export const getServerSideProps: GetServerSideProps = async ({ params, query }) => {
const organizationId = params?.organizationId;
const userId = params?.userId;
return {
props: {
organizationId: organizationId,
userId: userId,
},
};
};<|fim▁end|> | |
<|file_name|>TransactionScopeListObject.java<|end_file_name|><|fim▁begin|>package nam.model.transactionScope;
import java.io.Serializable;
import org.aries.ui.AbstractListObject;
import nam.model.TransactionScope;
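/**
 * List object wrapping a {@link TransactionScope} enum value, exposing key and label
 * accessors and defining ordering/equality by the scope's textual key.
 */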
public class TransactionScopeListObject extends AbstractListObject<TransactionScope> implements Comparable<TransactionScopeListObject>, Serializable {
private TransactionScope transactionScope;
public TransactionScopeListObject(TransactionScope transactionScope) {
this.transactionScope = transactionScope;
}
public TransactionScope getTransactionScope() {
return transactionScope;
}
@Override
public Object getKey() {<|fim▁hole|> return transactionScope.value();
}
@Override
public String getLabel() {
return getLabel(transactionScope);
}
public String getLabel(TransactionScope transactionScope) {
return transactionScope.name();
}
@Override
public String toString() {
return toString(transactionScope);
}
@Override
public String toString(TransactionScope transactionScope) {
return transactionScope.name();
}
@Override
public int compareTo(TransactionScopeListObject other) {
Object thisKey = getKey(this.transactionScope);
Object otherKey = getKey(other.transactionScope);
String thisText = thisKey.toString();
String otherText = otherKey.toString();
if (thisText == null)
return -1;
if (otherText == null)
return 1;
return thisText.compareTo(otherText);
}
@Override
public boolean equals(Object object) {
TransactionScopeListObject other = (TransactionScopeListObject) object;
String thisText = toString(this.transactionScope);
String otherText = toString(other.transactionScope);
return thisText.equals(otherText);
}
}<|fim▁end|> | return getKey(transactionScope);
}
public Object getKey(TransactionScope transactionScope) { |
<|file_name|>curses_ui_test.py<|end_file_name|><|fim▁begin|># Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests of the curses-based CLI."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import curses
import numpy as np
from tensorflow.python.debug.cli import curses_ui
from tensorflow.python.debug.cli import debugger_cli_common
from tensorflow.python.debug.cli import tensor_format
from tensorflow.python.framework import test_util
from tensorflow.python.platform import googletest
def string_to_codes(cmd):
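  """Convert a command string to the list of key codes fed to the mock UI."""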
return [ord(c) for c in cmd]
def codes_to_string(cmd_code):
# Omit non-ASCII key codes.
return "".join([chr(code) for code in cmd_code if code < 256])
class MockCursesUI(curses_ui.CursesUI):
"""Mock subclass of CursesUI that bypasses actual terminal manipulations."""
def __init__(self, height, width, command_sequence=None):
self._height = height
self._width = width
self._command_sequence = command_sequence
self._command_counter = 0
# The mock class has no actual textbox. So use this variable to keep
# track of what's entered in the textbox on creation.
self._curr_existing_command = ""
# Observers for test.
# Observers of screen output.
self.unwrapped_outputs = []
self.wrapped_outputs = []
self.scroll_messages = []
self.output_array_pointer_indices = []
self.output_pad_rows = []
# Observers of command textbox.
self.existing_commands = []
# Observer for tab-completion candidates.
self.candidates_lists = []
# Observer for toast messages.
self.toasts = []
curses_ui.CursesUI.__init__(self)
# Below, override the _screen_ prefixed member methods that interact with the
# actual terminal, so that the mock can run in a terminal-less environment.
# TODO(cais): Search for a way to have a mock terminal object that behaves
# like the actual terminal, so that we can test the terminal interaction
# parts of the CursesUI class.
def _screen_init(self):
pass
def _screen_refresh_size(self):
self._max_y = self._height
self._max_x = self._width
def _screen_launch(self):
pass
def _screen_terminate(self):
pass
def _screen_refresh(self):
pass
def _screen_create_command_window(self):
pass
def _screen_create_command_textbox(self, existing_command):
"""Override to insert observer of existing commands.
Used in testing of history navigation and tab completion.
Args:
existing_command: Command string entered to the textbox at textbox
creation time. Note that the textbox does not actually exist in this
mock subclass. This method only keeps track of and records the state.
"""
self.existing_commands.append(existing_command)
self._curr_existing_command = existing_command
def _screen_new_output_pad(self, rows, cols):
return "mock_pad"
def _screen_add_line_to_output_pad(self, pad, row, txt, color_segments=None):
pass
def _screen_draw_text_line(self, row, line, attr=curses.A_NORMAL, color=None):
pass
def _screen_scroll_output_pad(self, pad, viewport_top, viewport_left,
screen_location_top, screen_location_left,
screen_location_bottom, screen_location_right):
pass
def _screen_get_user_command(self):
command = self._command_sequence[self._command_counter]
self._command_key_counter = 0
for c in command:
if c == curses.KEY_RESIZE:
# Special case for simulating a terminal resize event in curses.
self._height = command[1]
self._width = command[2]
self._on_textbox_keypress(c)
self._command_counter += 1
return ""
y = self._on_textbox_keypress(c)
self._command_key_counter += 1
if y == curses_ui.CursesUI.CLI_TERMINATOR_KEY:
break
self._command_counter += 1
# Take into account pre-existing string automatically entered on textbox
# creation.
return self._curr_existing_command + codes_to_string(command)
def _screen_gather_textbox_str(self):
return codes_to_string(self._command_sequence[self._command_counter]
[:self._command_key_counter])
def _scroll_output(self, direction, line_index=None):
"""Override to observe screen output.
This method is invoked after every command that generates a new screen
output and after every keyboard triggered screen scrolling. Therefore
it is a good place to insert the observer.
Args:
direction: which direction to scroll.
line_index: (int or None) Optional line index to scroll to. See doc string
of the overridden method for more information.
"""
curses_ui.CursesUI._scroll_output(self, direction, line_index=line_index)
self.unwrapped_outputs.append(self._curr_unwrapped_output)
self.wrapped_outputs.append(self._curr_wrapped_output)
self.scroll_messages.append(self._scroll_info)
self.output_array_pointer_indices.append(self._output_array_pointer_indices)
self.output_pad_rows.append(self._output_pad_row)
def _display_candidates(self, candidates):
curses_ui.CursesUI._display_candidates(self, candidates)
self.candidates_lists.append(candidates)
def _toast(self, message, color=None, line_index=None):
curses_ui.CursesUI._toast(self, message, color=color, line_index=line_index)
self.toasts.append(message)
class CursesTest(test_util.TensorFlowTestCase):
_EXIT = string_to_codes("exit\n")
def _babble(self, args, screen_info=None):
ap = argparse.ArgumentParser(
description="Do babble.", usage=argparse.SUPPRESS)
ap.add_argument(
"-n",
"--num_times",
dest="num_times",
type=int,
default=60,
help="How many times to babble")
ap.add_argument(
"-l",
"--line",
dest="line",
type=str,
default="bar",
help="The content of each line")
parsed = ap.parse_args(args)
return debugger_cli_common.RichTextLines([parsed.line] * parsed.num_times)
def _print_ones(self, args, screen_info=None):
ap = argparse.ArgumentParser(
description="Print all-one matrix.", usage=argparse.SUPPRESS)
ap.add_argument(
"-s",
"--size",
dest="size",
type=int,
default=3,
help="Size of the matrix. For example, of the value is 3, "
"the matrix will have shape (3, 3)")
parsed = ap.parse_args(args)
m = np.ones([parsed.size, parsed.size])
return tensor_format.format_tensor(m, "m")
def testInitialization(self):
ui = MockCursesUI(40, 80)
self.assertEqual(0, ui._command_pointer)
self.assertEqual([], ui._active_command_history)
self.assertEqual("", ui._pending_command)
def testRunUIExitImmediately(self):
"""Make sure that the UI can exit properly after launch."""
ui = MockCursesUI(40, 80, command_sequence=[self._EXIT])
ui.run_ui()
# No screen output should have happened.
self.assertEqual(0, len(ui.unwrapped_outputs))
def testRunUIEmptyCommand(self):
"""Issue an empty command then exit."""
ui = MockCursesUI(40, 80, command_sequence=[[], self._EXIT])
ui.run_ui()
# Empty command should not lead to any screen output.
self.assertEqual(0, len(ui.unwrapped_outputs))
def testRunUIInvalidCommandPrefix(self):
"""Handle an unregistered command prefix."""
ui = MockCursesUI(
40,
80,
command_sequence=[string_to_codes("foo\n"), self._EXIT])
ui.run_ui()
# Screen output/scrolling should have happened exactly once.
self.assertEqual(1, len(ui.unwrapped_outputs))
self.assertEqual(1, len(ui.wrapped_outputs))
self.assertEqual(1, len(ui.scroll_messages))
self.assertEqual(["ERROR: Invalid command prefix \"foo\""],
ui.unwrapped_outputs[0].lines)
# TODO(cais): Add explanation for the 35 extra lines.
self.assertEqual(["ERROR: Invalid command prefix \"foo\""],
ui.wrapped_outputs[0].lines[:1])
# A single line of output should not have caused scrolling.
self.assertEqual("-" * 80, ui.scroll_messages[0])
def testRunUIInvalidCommandSyntax(self):
"""Handle a command with invalid syntax."""
ui = MockCursesUI(
40,
80,
command_sequence=[string_to_codes("babble -z\n"), self._EXIT])
ui.register_command_handler("babble", self._babble, "")
ui.run_ui()
# Screen output/scrolling should have happened exactly once.
self.assertEqual(1, len(ui.unwrapped_outputs))
self.assertEqual(1, len(ui.wrapped_outputs))
self.assertEqual(1, len(ui.scroll_messages))
self.assertEqual(
["Syntax error for command: babble", "For help, do \"help babble\""],
ui.unwrapped_outputs[0].lines)
def testRunUIScrollTallOutputPageDownUp(self):
"""Scroll tall output with PageDown and PageUp."""
# Use PageDown and PageUp to scroll back and forth a little before exiting.
ui = MockCursesUI(
40,
80,
command_sequence=[string_to_codes("babble\n"), [curses.KEY_NPAGE] * 2 +
[curses.KEY_PPAGE] + self._EXIT])
ui.register_command_handler("babble", self._babble, "")
ui.run_ui()
    # Screen output/scrolling should have happened four times: the initial
    # display plus the three scrolling events.
self.assertEqual(4, len(ui.unwrapped_outputs))
self.assertEqual(4, len(ui.wrapped_outputs))
self.assertEqual(4, len(ui.scroll_messages))
# Before scrolling.
self.assertEqual(["bar"] * 60, ui.unwrapped_outputs[0].lines)
self.assertEqual(["bar"] * 60, ui.wrapped_outputs[0].lines[:60])
# Initial scroll: At the top.
self.assertIn("Scroll (PgDn): 0.00%", ui.scroll_messages[0])
# After 1st scrolling (PageDown).
# The screen output shouldn't have changed. Only the viewport should.
self.assertEqual(["bar"] * 60, ui.unwrapped_outputs[0].lines)
self.assertEqual(["bar"] * 60, ui.wrapped_outputs[0].lines[:60])
self.assertIn("Scroll (PgDn/PgUp): 1.69%", ui.scroll_messages[1])
# After 2nd scrolling (PageDown).
self.assertIn("Scroll (PgDn/PgUp): 3.39%", ui.scroll_messages[2])
# After 3rd scrolling (PageUp).
self.assertIn("Scroll (PgDn/PgUp): 1.69%", ui.scroll_messages[3])
def testCutOffTooManyOutputLines(self):
ui = MockCursesUI(
40,
80,
command_sequence=[string_to_codes("babble -n 20\n"), self._EXIT])
# Modify max_output_lines so that this test doesn't use too much time or
# memory.
ui.max_output_lines = 10
ui.register_command_handler("babble", self._babble, "")
ui.run_ui()
self.assertEqual(["bar"] * 10 + ["Output cut off at 10 lines!"],
ui.wrapped_outputs[0].lines[:11])
def testRunUIScrollTallOutputEndHome(self):
"""Scroll tall output with PageDown and PageUp."""
# Use End and Home to scroll a little before exiting to test scrolling.
ui = MockCursesUI(
40,
80,
command_sequence=[
string_to_codes("babble\n"),
[curses.KEY_END] * 2 + [curses.KEY_HOME] + self._EXIT
])
ui.register_command_handler("babble", self._babble, "")
ui.run_ui()
    # Screen output/scrolling should have happened four times: the initial
    # display plus the three scrolling events.
self.assertEqual(4, len(ui.unwrapped_outputs))
self.assertEqual(4, len(ui.wrapped_outputs))
self.assertEqual(4, len(ui.scroll_messages))
# Before scrolling.
self.assertEqual(["bar"] * 60, ui.unwrapped_outputs[0].lines)
self.assertEqual(["bar"] * 60, ui.wrapped_outputs[0].lines[:60])
# Initial scroll: At the top.
self.assertIn("Scroll (PgDn): 0.00%", ui.scroll_messages[0])
# After 1st scrolling (End).
self.assertIn("Scroll (PgUp): 100.00%", ui.scroll_messages[1])
# After 2nd scrolling (End).
self.assertIn("Scroll (PgUp): 100.00%", ui.scroll_messages[2])
    # After 3rd scrolling (Home).
self.assertIn("Scroll (PgDn): 0.00%", ui.scroll_messages[3])
def testRunUIWithInitCmd(self):
"""Run UI with an initial command specified."""
ui = MockCursesUI(40, 80, command_sequence=[self._EXIT])
ui.register_command_handler("babble", self._babble, "")
ui.run_ui(init_command="babble")
self.assertEqual(1, len(ui.unwrapped_outputs))
self.assertEqual(["bar"] * 60, ui.unwrapped_outputs[0].lines)
self.assertEqual(["bar"] * 60, ui.wrapped_outputs[0].lines[:60])
self.assertIn("Scroll (PgDn): 0.00%", ui.scroll_messages[0])
def testCompileHelpWithoutHelpIntro(self):
ui = MockCursesUI(
40,
80,
command_sequence=[string_to_codes("help\n"), self._EXIT])
ui.register_command_handler(
"babble", self._babble, "babble some", prefix_aliases=["b"])
ui.run_ui()
self.assertEqual(["babble", " Aliases: b", "", " babble some"],
ui.unwrapped_outputs[0].lines[:4])
def testCompileHelpWithHelpIntro(self):
ui = MockCursesUI(
40,
80,
command_sequence=[string_to_codes("help\n"), self._EXIT])
help_intro = ["This is a curses UI.", "All it can do is 'babble'.", ""]
ui.register_command_handler(
"babble", self._babble, "babble some", prefix_aliases=["b"])
ui.set_help_intro(help_intro)
ui.run_ui()
self.assertEqual(1, len(ui.unwrapped_outputs))
self.assertEqual(
help_intro + ["babble", " Aliases: b", "", " babble some"],
ui.unwrapped_outputs[0].lines[:7])
def testCommandHistoryNavBackwardOnce(self):
ui = MockCursesUI(
40,
80,
command_sequence=[string_to_codes("help\n"),
[curses.KEY_UP], # Hit Up and Enter.
string_to_codes("\n"),
self._EXIT])
ui.register_command_handler(
"babble", self._babble, "babble some", prefix_aliases=["b"])
ui.run_ui()
self.assertEqual(2, len(ui.unwrapped_outputs))
for i in [0, 1]:
self.assertEqual(["babble", " Aliases: b", "", " babble some"],
ui.unwrapped_outputs[i].lines[:4])
def testCommandHistoryNavBackwardTwice(self):
ui = MockCursesUI(
40,
80,
command_sequence=[string_to_codes("help\n"),
string_to_codes("babble\n"),
[curses.KEY_UP],
[curses.KEY_UP], # Hit Up twice and Enter.
string_to_codes("\n"),
self._EXIT])
ui.register_command_handler(
"babble", self._babble, "babble some", prefix_aliases=["b"])
ui.run_ui()
self.assertEqual(3, len(ui.unwrapped_outputs))
# The 1st and 3rd outputs are for command "help".
for i in [0, 2]:
self.assertEqual(["babble", " Aliases: b", "", " babble some"],
ui.unwrapped_outputs[i].lines[:4])
# The 2nd output is for command "babble".
self.assertEqual(["bar"] * 60, ui.unwrapped_outputs[1].lines)
def testCommandHistoryNavBackwardOverLimit(self):
ui = MockCursesUI(
40,
80,
command_sequence=[string_to_codes("help\n"),
string_to_codes("babble\n"),
[curses.KEY_UP],
[curses.KEY_UP],
[curses.KEY_UP], # Hit Up three times and Enter.
string_to_codes("\n"),
self._EXIT])
ui.register_command_handler(
"babble", self._babble, "babble some", prefix_aliases=["b"])
ui.run_ui()
self.assertEqual(3, len(ui.unwrapped_outputs))
# The 1st and 3rd outputs are for command "help".
for i in [0, 2]:
self.assertEqual(["babble", " Aliases: b", "", " babble some"],
ui.unwrapped_outputs[i].lines[:4])
# The 2nd output is for command "babble".
self.assertEqual(["bar"] * 60, ui.unwrapped_outputs[1].lines)
def testCommandHistoryNavBackwardThenForward(self):
ui = MockCursesUI(
40,
80,
command_sequence=[string_to_codes("help\n"),
string_to_codes("babble\n"),
[curses.KEY_UP],
[curses.KEY_UP],
[curses.KEY_DOWN], # Hit Up twice and Down once.
string_to_codes("\n"),
self._EXIT])
ui.register_command_handler(
"babble", self._babble, "babble some", prefix_aliases=["b"])
ui.run_ui()
self.assertEqual(3, len(ui.unwrapped_outputs))
# The 1st output is for command "help".
self.assertEqual(["babble", " Aliases: b", "", " babble some"],
ui.unwrapped_outputs[0].lines[:4])
# The 2nd and 3rd outputs are for command "babble".
for i in [1, 2]:
self.assertEqual(["bar"] * 60, ui.unwrapped_outputs[i].lines)
def testCommandHistoryPrefixNavBackwardOnce(self):
ui = MockCursesUI(
40,
80,
command_sequence=[
string_to_codes("babble -n 1\n"),
string_to_codes("babble -n 10\n"),
string_to_codes("help\n"),
string_to_codes("b") + [curses.KEY_UP], # Navigate with prefix.
string_to_codes("\n"),
self._EXIT
])
ui.register_command_handler(
"babble", self._babble, "babble some", prefix_aliases=["b"])
ui.run_ui()
self.assertEqual(["bar"], ui.unwrapped_outputs[0].lines)
self.assertEqual(["bar"] * 10, ui.unwrapped_outputs[1].lines)
self.assertEqual(["babble", " Aliases: b", "", " babble some"],
ui.unwrapped_outputs[2].lines[:4])
self.assertEqual(["bar"] * 10, ui.unwrapped_outputs[3].lines)
def testTerminalResize(self):
ui = MockCursesUI(
40,
80,
command_sequence=[string_to_codes("babble\n"),
[curses.KEY_RESIZE, 100, 85], # Resize to [100, 85]
self._EXIT])
ui.register_command_handler(
"babble", self._babble, "babble some", prefix_aliases=["b"])
ui.run_ui()
# The resize event should have caused a second screen output event.
self.assertEqual(2, len(ui.unwrapped_outputs))
self.assertEqual(2, len(ui.wrapped_outputs))
self.assertEqual(2, len(ui.scroll_messages))
# The 1st and 2nd screen outputs should be identical (unwrapped).
self.assertEqual(ui.unwrapped_outputs[0], ui.unwrapped_outputs[1])
# The 1st scroll info should contain scrolling, because the screen size
# is less than the number of lines in the output.
self.assertIn("Scroll (PgDn): 0.00%", ui.scroll_messages[0])
def testTabCompletionWithCommonPrefix(self):
# Type "b" and trigger tab completion.
ui = MockCursesUI(
40,
80,
command_sequence=[string_to_codes("b\t"), string_to_codes("\n"),
self._EXIT])
ui.register_command_handler(
"babble", self._babble, "babble some", prefix_aliases=["ba"])
ui.run_ui()
# The automatically registered exit commands "exit" and "quit" should not
# appear in the tab completion candidates because they don't start with
# "b".
self.assertEqual([["ba", "babble"]], ui.candidates_lists)
# "ba" is a common prefix of the two candidates. So the "ba" command should
# have been issued after the Enter.
self.assertEqual(1, len(ui.unwrapped_outputs))
self.assertEqual(1, len(ui.wrapped_outputs))
self.assertEqual(1, len(ui.scroll_messages))
self.assertEqual(["bar"] * 60, ui.unwrapped_outputs[0].lines)
self.assertEqual(["bar"] * 60, ui.wrapped_outputs[0].lines[:60])
def testTabCompletionEmptyTriggerWithoutCommonPrefix(self):
ui = MockCursesUI(
40,
80,
command_sequence=[string_to_codes("\t"), # Trigger tab completion.
string_to_codes("\n"),
self._EXIT])
ui.register_command_handler(
"babble", self._babble, "babble some", prefix_aliases=["a"])
# Use a different alias "a" instead.
ui.run_ui()
# The manually registered command, along with the automatically registered
# exit commands should appear in the candidates.
self.assertEqual([["a", "babble", "exit", "h", "help", "quit"]],
ui.candidates_lists)
# The two candidates have no common prefix. So no command should have been
# issued.
self.assertEqual(0, len(ui.unwrapped_outputs))
self.assertEqual(0, len(ui.wrapped_outputs))
self.assertEqual(0, len(ui.scroll_messages))
def testTabCompletionNonemptyTriggerSingleCandidate(self):
ui = MockCursesUI(
40,
80,
command_sequence=[string_to_codes("b\t"), # Trigger tab completion.
string_to_codes("\n"),
self._EXIT])
ui.register_command_handler(
"babble", self._babble, "babble some", prefix_aliases=["a"])
ui.run_ui()
# There is only one candidate, so no candidates should have been displayed.
# Instead, the completion should have been automatically keyed in, leading
# to the "babble" command being issue.
self.assertEqual([[]], ui.candidates_lists)
self.assertEqual(1, len(ui.unwrapped_outputs))
self.assertEqual(1, len(ui.wrapped_outputs))
self.assertEqual(1, len(ui.scroll_messages))
self.assertEqual(["bar"] * 60, ui.unwrapped_outputs[0].lines)
self.assertEqual(["bar"] * 60, ui.wrapped_outputs[0].lines[:60])
def testTabCompletionNoMatch(self):
ui = MockCursesUI(
40,
80,
command_sequence=[string_to_codes("c\t"), # Trigger tab completion.
string_to_codes("\n"),
self._EXIT])
ui.register_command_handler(
"babble", self._babble, "babble some", prefix_aliases=["a"])
ui.run_ui()
# Only the invalid command "c" should have been issued.
self.assertEqual(1, len(ui.unwrapped_outputs))
self.assertEqual(1, len(ui.wrapped_outputs))
self.assertEqual(1, len(ui.scroll_messages))
self.assertEqual(["ERROR: Invalid command prefix \"c\""],
ui.unwrapped_outputs[0].lines)
self.assertEqual(["ERROR: Invalid command prefix \"c\""],
ui.wrapped_outputs[0].lines[:1])
def testTabCompletionOneWordContext(self):
ui = MockCursesUI(
40,
80,
command_sequence=[
string_to_codes("babble -n 3\t"), # Trigger tab completion.
string_to_codes("\n"),
self._EXIT
])
ui.register_command_handler(
"babble", self._babble, "babble some", prefix_aliases=["b"])
ui.register_tab_comp_context(["babble", "b"], ["10", "20", "30", "300"])
ui.run_ui()
self.assertEqual([["30", "300"]], ui.candidates_lists)
self.assertEqual(1, len(ui.unwrapped_outputs))
self.assertEqual(1, len(ui.wrapped_outputs))
self.assertEqual(1, len(ui.scroll_messages))
self.assertEqual(["bar"] * 30, ui.unwrapped_outputs[0].lines)
self.assertEqual(["bar"] * 30, ui.wrapped_outputs[0].lines[:30])
def testTabCompletionTwice(self):
ui = MockCursesUI(
40,
80,
command_sequence=[
string_to_codes("babble -n 1\t"), # Trigger tab completion.
string_to_codes("2\t"), # With more prefix, tab again.
string_to_codes("3\n"),
self._EXIT
])
ui.register_command_handler(
"babble", self._babble, "babble some", prefix_aliases=["b"])
ui.register_tab_comp_context(["babble", "b"], ["10", "120", "123"])
ui.run_ui()
# There should have been two different lists of candidates.
self.assertEqual([["10", "120", "123"], ["120", "123"]],
ui.candidates_lists)
self.assertEqual(1, len(ui.unwrapped_outputs))
self.assertEqual(1, len(ui.wrapped_outputs))
self.assertEqual(1, len(ui.scroll_messages))
self.assertEqual(["bar"] * 123, ui.unwrapped_outputs[0].lines)
self.assertEqual(["bar"] * 123, ui.wrapped_outputs[0].lines[:123])
def testRegexSearch(self):
"""Test regex search."""
ui = MockCursesUI(
40,
80,
command_sequence=[
string_to_codes("babble -n 3\n"),
string_to_codes("/(b|r)\n"), # Regex search and highlight.
string_to_codes("/a\n"), # Regex search and highlight.
self._EXIT
])
ui.register_command_handler(
"babble", self._babble, "babble some", prefix_aliases=["b"])
ui.run_ui()
# The unwrapped (original) output should never have any highlighting.
self.assertEqual(3, len(ui.unwrapped_outputs))
for i in range(3):
self.assertEqual(["bar"] * 3, ui.unwrapped_outputs[i].lines)
self.assertEqual({}, ui.unwrapped_outputs[i].font_attr_segs)
# The wrapped outputs should show highlighting depending on the regex.
self.assertEqual(3, len(ui.wrapped_outputs))
# The first output should have no highlighting.
self.assertEqual(["bar"] * 3, ui.wrapped_outputs[0].lines[:3])
self.assertEqual({}, ui.wrapped_outputs[0].font_attr_segs)
# The second output should have highlighting for "b" and "r".
self.assertEqual(["bar"] * 3, ui.wrapped_outputs[1].lines[:3])
for i in range(3):
self.assertEqual([(0, 1, "black_on_white"), (2, 3, "black_on_white")],
ui.wrapped_outputs[1].font_attr_segs[i])
# The third output should have highlighting for "a" only.
self.assertEqual(["bar"] * 3, ui.wrapped_outputs[1].lines[:3])
for i in range(3):
self.assertEqual([(1, 2, "black_on_white")],
ui.wrapped_outputs[2].font_attr_segs[i])
def testRegexSearchContinuation(self):
"""Test continuing scrolling down to next regex match."""
ui = MockCursesUI(
40,
80,
command_sequence=[
string_to_codes("babble -n 3\n"),
string_to_codes("/(b|r)\n"), # Regex search and highlight.
string_to_codes("/\n"), # Continue scrolling down: 1st time.
string_to_codes("/\n"), # Continue scrolling down: 2nd time.
string_to_codes("/\n"), # Continue scrolling down: 3rd time.
string_to_codes("/\n"), # Continue scrolling down: 4th time.
self._EXIT
])
ui.register_command_handler(
"babble", self._babble, "babble some", prefix_aliases=["b"])
ui.run_ui()
# The 1st output is for the non-searched output. The other three are for
# the searched output. Even though continuation search "/" is performed
# four times, there should be only three searched outputs, because the
# last one has exceeded the end.
self.assertEqual(4, len(ui.unwrapped_outputs))
for i in range(4):
self.assertEqual(["bar"] * 3, ui.unwrapped_outputs[i].lines)
self.assertEqual({}, ui.unwrapped_outputs[i].font_attr_segs)
self.assertEqual(["bar"] * 3, ui.wrapped_outputs[0].lines[:3])
self.assertEqual({}, ui.wrapped_outputs[0].font_attr_segs)
for j in range(1, 4):
self.assertEqual(["bar"] * 3, ui.wrapped_outputs[j].lines[:3])
self.assertEqual({
0: [(0, 1, "black_on_white"), (2, 3, "black_on_white")],
1: [(0, 1, "black_on_white"), (2, 3, "black_on_white")],
2: [(0, 1, "black_on_white"), (2, 3, "black_on_white")]
}, ui.wrapped_outputs[j].font_attr_segs)
self.assertEqual([0, 0, 1, 2], ui.output_pad_rows)
def testRegexSearchUnderLineWrapping(self):
ui = MockCursesUI(
40,
5, # Use a narrow window to trigger line wrapping
command_sequence=[
string_to_codes("babble -n 3 -l foo-bar-baz-qux\n"),
string_to_codes("/foo\n"), # Regex search and highlight.
string_to_codes("/\n"), # Continue scrolling down: 1st time.
string_to_codes("/\n"), # Continue scrolling down: 2nd time.
string_to_codes("/\n"), # Continue scrolling down: 3rd time.
string_to_codes("/\n"), # Continue scrolling down: 4th time.
self._EXIT
])
ui.register_command_handler(
"babble", self._babble, "babble some")
ui.run_ui()
self.assertEqual(4, len(ui.wrapped_outputs))
for wrapped_output in ui.wrapped_outputs:
self.assertEqual(["foo-", "bar-", "baz-", "qux"] * 3,
wrapped_output.lines[0 : 12])
# The scroll location should reflect the line wrapping.
self.assertEqual([0, 0, 4, 8], ui.output_pad_rows)
def testRegexSearchNoMatchContinuation(self):
"""Test continuing scrolling when there is no regex match."""
ui = MockCursesUI(
40,
80,
command_sequence=[
string_to_codes("babble -n 3\n"),
string_to_codes("/foo\n"), # Regex search and highlight.
string_to_codes("/\n"), # Continue scrolling down.
self._EXIT
])
ui.register_command_handler(
"babble", self._babble, "babble some", prefix_aliases=["b"])
ui.run_ui()
# The regex search and continuation search in the 3rd command should not
# have produced any output.
self.assertEqual(1, len(ui.unwrapped_outputs))
self.assertEqual([0], ui.output_pad_rows)
def testRegexSearchContinuationWithoutSearch(self):
"""Test continuation scrolling when no regex search has been performed."""
ui = MockCursesUI(
40,
80,
command_sequence=[
string_to_codes("babble -n 3\n"),
string_to_codes("/\n"), # Continue scrolling without search first.
self._EXIT
])
ui.register_command_handler(
"babble", self._babble, "babble some", prefix_aliases=["b"])
ui.run_ui()
self.assertEqual(1, len(ui.unwrapped_outputs))
self.assertEqual([0], ui.output_pad_rows)
def testRegexSearchWithInvalidRegex(self):
"""Test using invalid regex to search."""
ui = MockCursesUI(
40,
80,
command_sequence=[
string_to_codes("babble -n 3\n"),
string_to_codes("/[\n"), # Continue scrolling without search first.
self._EXIT
])
ui.register_command_handler(
"babble", self._babble, "babble some", prefix_aliases=["b"])
ui.run_ui()
# Invalid regex should not have led to a new screen of output.
self.assertEqual(1, len(ui.unwrapped_outputs))
self.assertEqual([0], ui.output_pad_rows)
# Invalid regex should have led to a toast error message.
self.assertEqual(["ERROR: Invalid regular expression: \"[\""], ui.toasts)
def testRegexSearchFromCommandHistory(self):
"""Test regex search commands are recorded in command history."""
ui = MockCursesUI(
40,
80,
command_sequence=[
string_to_codes("babble -n 3\n"),
string_to_codes("/(b|r)\n"), # Regex search and highlight.
string_to_codes("babble -n 4\n"),
[curses.KEY_UP],
[curses.KEY_UP],
string_to_codes("\n"), # Hit Up twice and Enter.
self._EXIT
])
ui.register_command_handler(
"babble", self._babble, "babble some", prefix_aliases=["b"])
ui.run_ui()
self.assertEqual(4, len(ui.wrapped_outputs))
self.assertEqual(["bar"] * 3, ui.wrapped_outputs[0].lines[:3])
self.assertEqual({}, ui.wrapped_outputs[0].font_attr_segs)
self.assertEqual(["bar"] * 3, ui.wrapped_outputs[1].lines[:3])
for i in range(3):
self.assertEqual([(0, 1, "black_on_white"), (2, 3, "black_on_white")],
ui.wrapped_outputs[1].font_attr_segs[i])
self.assertEqual(["bar"] * 4, ui.wrapped_outputs[2].lines[:4])
self.assertEqual({}, ui.wrapped_outputs[2].font_attr_segs)
# The regex search command loaded from history should have worked on the
# new screen output.
self.assertEqual(["bar"] * 4, ui.wrapped_outputs[3].lines[:4])
for i in range(4):
self.assertEqual([(0, 1, "black_on_white"), (2, 3, "black_on_white")],
ui.wrapped_outputs[3].font_attr_segs[i])
def testDisplayTensorWithIndices(self):
"""Test displaying tensor with indices."""
ui = MockCursesUI(
8, # Use a small screen height to cause scrolling.
80,
command_sequence=[
string_to_codes("print_ones --size 5\n"),
[curses.KEY_NPAGE],
[curses.KEY_NPAGE],
[curses.KEY_NPAGE],
[curses.KEY_END],
[curses.KEY_NPAGE], # This PageDown goes over the bottom limit.
[curses.KEY_PPAGE],
[curses.KEY_PPAGE],
[curses.KEY_PPAGE],
[curses.KEY_HOME],
[curses.KEY_PPAGE], # This PageDown goes over the top limit.
self._EXIT
])
ui.register_command_handler("print_ones", self._print_ones,
"print an all-one matrix of specified size")
ui.run_ui()
self.assertEqual(11, len(ui.unwrapped_outputs))
self.assertEqual(11, len(ui.output_array_pointer_indices))
self.assertEqual(11, len(ui.scroll_messages))
for i in range(11):
self.assertEqual([
"Tensor \"m\":", "", "array([[ 1., 1., 1., 1., 1.],",
" [ 1., 1., 1., 1., 1.],",
" [ 1., 1., 1., 1., 1.],",
" [ 1., 1., 1., 1., 1.],",
" [ 1., 1., 1., 1., 1.]])"
], ui.unwrapped_outputs[i].lines)
self.assertEqual({
0: None,
-1: [1, 0]
}, ui.output_array_pointer_indices[0])
self.assertIn(" Scroll (PgDn): 0.00% -[1,0] ", ui.scroll_messages[0])
# Scrolled down one line.
self.assertEqual({
0: None,
-1: [2, 0]
}, ui.output_array_pointer_indices[1])
self.assertIn(" Scroll (PgDn/PgUp): 16.67% -[2,0] ", ui.scroll_messages[1])
# Scrolled down one line.
self.assertEqual({
0: [0, 0],
-1: [3, 0]
}, ui.output_array_pointer_indices[2])
self.assertIn(" Scroll (PgDn/PgUp): 33.33% [0,0]-[3,0] ",
ui.scroll_messages[2])
# Scrolled down one line.
self.assertEqual({
0: [1, 0],
-1: [4, 0]
}, ui.output_array_pointer_indices[3])
self.assertIn(" Scroll (PgDn/PgUp): 50.00% [1,0]-[4,0] ",
ui.scroll_messages[3])
# Scroll to the bottom.
self.assertEqual({
0: [4, 0],
-1: None
}, ui.output_array_pointer_indices[4])
self.assertIn(" Scroll (PgUp): 100.00% [4,0]- ", ui.scroll_messages[4])
    # Attempting to scroll beyond the bottom should lead to no change.
self.assertEqual({
0: [4, 0],
-1: None
}, ui.output_array_pointer_indices[5])
self.assertIn(" Scroll (PgUp): 100.00% [4,0]- ", ui.scroll_messages[5])
<|fim▁hole|> }, ui.output_array_pointer_indices[6])
self.assertIn(" Scroll (PgDn/PgUp): 83.33% [3,0]- ", ui.scroll_messages[6])
# Scrolled up one line.
self.assertEqual({
0: [2, 0],
-1: None
}, ui.output_array_pointer_indices[7])
self.assertIn(" Scroll (PgDn/PgUp): 66.67% [2,0]- ", ui.scroll_messages[7])
# Scrolled up one line.
self.assertEqual({
0: [1, 0],
-1: [4, 0]
}, ui.output_array_pointer_indices[8])
self.assertIn(" Scroll (PgDn/PgUp): 50.00% [1,0]-[4,0] ",
ui.scroll_messages[8])
# Scroll to the top.
self.assertEqual({
0: None,
-1: [1, 0]
}, ui.output_array_pointer_indices[9])
self.assertIn(" Scroll (PgDn): 0.00% -[1,0] ", ui.scroll_messages[9])
    # Attempting to scroll past the top limit should lead to no change.
self.assertEqual({
0: None,
-1: [1, 0]
}, ui.output_array_pointer_indices[10])
self.assertIn(" Scroll (PgDn): 0.00% -[1,0] ", ui.scroll_messages[10])
def testScrollTensorByValidIndices(self):
"""Test scrolling to specified (valid) indices in a tensor."""
ui = MockCursesUI(
8, # Use a small screen height to cause scrolling.
80,
command_sequence=[
string_to_codes("print_ones --size 5\n"),
string_to_codes("@[0, 0]\n"), # Scroll to element [0, 0].
string_to_codes("@1,0\n"), # Scroll to element [3, 0].
string_to_codes("@[0,2]\n"), # Scroll back to line 0.
self._EXIT
])
ui.register_command_handler("print_ones", self._print_ones,
"print an all-one matrix of specified size")
ui.run_ui()
self.assertEqual(4, len(ui.unwrapped_outputs))
self.assertEqual(4, len(ui.output_array_pointer_indices))
for i in range(4):
self.assertEqual([
"Tensor \"m\":", "", "array([[ 1., 1., 1., 1., 1.],",
" [ 1., 1., 1., 1., 1.],",
" [ 1., 1., 1., 1., 1.],",
" [ 1., 1., 1., 1., 1.],",
" [ 1., 1., 1., 1., 1.]])"
], ui.unwrapped_outputs[i].lines)
self.assertEqual({
0: None,
-1: [1, 0]
}, ui.output_array_pointer_indices[0])
self.assertEqual({
0: [0, 0],
-1: [3, 0]
}, ui.output_array_pointer_indices[1])
self.assertEqual({
0: [1, 0],
-1: [4, 0]
}, ui.output_array_pointer_indices[2])
self.assertEqual({
0: [0, 0],
-1: [3, 0]
}, ui.output_array_pointer_indices[3])
def testScrollTensorByInvalidIndices(self):
"""Test scrolling to specified invalid indices in a tensor."""
ui = MockCursesUI(
8, # Use a small screen height to cause scrolling.
80,
command_sequence=[
string_to_codes("print_ones --size 5\n"),
string_to_codes("@[10, 0]\n"), # Scroll to invalid indices.
string_to_codes("@[]\n"), # Scroll to invalid indices.
string_to_codes("@\n"), # Scroll to invalid indices.
self._EXIT
])
ui.register_command_handler("print_ones", self._print_ones,
"print an all-one matrix of specified size")
ui.run_ui()
# Because all scroll-by-indices commands are invalid, there should be only
# one output event.
self.assertEqual(1, len(ui.unwrapped_outputs))
self.assertEqual(1, len(ui.output_array_pointer_indices))
# Check error messages.
self.assertEqual("ERROR: Indices exceed tensor dimensions.", ui.toasts[1])
self.assertEqual("ERROR: invalid literal for int() with base 10: ''",
ui.toasts[2])
self.assertEqual("ERROR: Empty indices.", ui.toasts[3])
if __name__ == "__main__":
googletest.main()<|fim▁end|> | # Scrolled up one line.
self.assertEqual({
0: [3, 0],
-1: None |
<|file_name|>sort-by-letter.pipe.ts<|end_file_name|><|fim▁begin|>// Sort by the first letter (pinyin order) of the Chinese text
import {Pipe, PipeTransform} from '@angular/core';
@Pipe({
name: 'sortByLetter'
})
export class SortByLetterPipe implements PipeTransform {
transform(value: Array<any>, key?: any): any {
if (key) {
if (!value) { return value; }
const valueKeyArray = value.map(valueItem => valueItem[key]);
console.log(JSON.stringify(valueKeyArray));<|fim▁hole|> .sort((a, b) => a.localeCompare(b, 'zh-Hans-CN', {sensitivity: 'accent'}));
} else {
return value
.sort((a, b) => a.localeCompare(b, 'zh-Hans-CN', {sensitivity: 'accent'}));
}
}
}<|fim▁end|> | return valueKeyArray |
<|file_name|>ssh_session.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Eric S. Raymond
#
# Greatly modified by Nigel W. Moriarty
# April 2003
#
from pexpect import *
import os, sys, string
import getpass
import time
class ssh_session:
"Session with extra state including the password to be used."
def __init__(self, user, host, password=None, verbose=0):<|fim▁hole|> self.user = user
self.host = host
self.verbose = verbose
self.password = password
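# Patterns that expect() watches for: host-key confirmation, password
# prompt, man-in-the-middle warning banner, missing command, and EOF.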
self.keys = [
'authenticity',
'assword:',
'@@@@@@@@@@@@',
'Command not found.',
EOF,
]
self.f = open('ssh.out','w')
def __repr__(self):
outl = 'class :'+self.__class__.__name__
for attr in self.__dict__:
if attr == 'password':
outl += '\n\t'+attr+' : '+'*'*len(self.password)
else:
outl += '\n\t'+attr+' : '+str(getattr(self, attr))
return outl
def __exec(self, command):
"Execute a command on the remote host. Return the output."
child = spawn(command,
#timeout=10,
)
if self.verbose:
sys.stderr.write("-> " + command + "\n")
seen = child.expect(self.keys)
self.f.write(str(child.before) + str(child.after)+'\n')
if seen == 0:
child.sendline('yes')
seen = child.expect(self.keys)
if seen == 1:
if not self.password:
self.password = getpass.getpass('Remote password: ')
child.sendline(self.password)
child.readline()
time.sleep(5)
# Added to allow the background running of remote process
if not child.isalive():
seen = child.expect(self.keys)
if seen == 2:
lines = child.readlines()
self.f.write(lines)
if self.verbose:
sys.stderr.write("<- " + child.before + "|\n")
try:
self.f.write(str(child.before) + str(child.after)+'\n')
except:
pass
self.f.close()
return child.before
def ssh(self, command):
return self.__exec("ssh -l %s %s \"%s\"" \
% (self.user,self.host,command))
def scp(self, src, dst):
return self.__exec("scp %s %s@%s:%s" \
% (src, self.user, self.host, dst))
def exists(self, file):
"Retrieve file permissions of specified remote file."
seen = self.ssh("/bin/ls -ld %s" % file)
if string.find(seen, "No such file") > -1:
return None # File doesn't exist
else:
return seen.split()[0] # Return permission field of listing.<|fim▁end|> | |
<|file_name|>parse.rs<|end_file_name|><|fim▁begin|>use diagnostic::Pos;
use nom::{ErrorKind, IResult, Needed};
use std::cell::RefCell;
use std::vec::Drain;
use syntax::Expr::*;
use syntax::{Expr, Ty};
use typed_arena::Arena;
pub mod lex {
use super::*;
fn space(i: &str) -> IResult<&str, ()> {
let mut chars = i.chars();
match chars.next() {
Some(c) if c.is_whitespace() => IResult::Done(chars.as_str(), ()),
Some('#') => map!(i, take_until!("\n"), |_| ()),
Some(_) => IResult::Error(ErrorKind::Custom(0)),
None => IResult::Incomplete(Needed::Size(1)),
}
}
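// `token!` defines a parser that runs the given sub-parser with any
// surrounding whitespace or comments skipped on both sides.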
macro_rules! token (
(pub $name:ident<$i:ty, $o:ty>, $submac:ident!( $($args:tt)* )) => (
#[allow(unused_variables)]
pub fn $name( i: $i ) -> $crate::nom::IResult<$i, $o, u32> {
delimited!(i,
many0!(call!($crate::parse::lex::space)),
$submac!($($args)*),
many0!(call!($crate::parse::lex::space))
)
}
);
);
// FIXME: Use is_xid_start and is_xid_continue.
fn is_ident_continue(c: char) -> bool {
c.is_alphanumeric() || c == '_' || c == '%'
}
token!(pub ident<&str, String>, do_parse!(
not!(call!(keyw::any)) >>
name: take_while1!(is_ident_continue) >>
(name.to_string())
));
token!(pub str<&str, String>, do_parse!(
tag!("\"") >>
name: take_until!("\"") >>
tag!("\"") >>
(name.to_string())
));
pub mod keyw {
named!(pub any<&str, &str>, alt!(
call!(end) |
call!(false_) |
call!(forall) |
call!(fun) |
call!(in_) |
call!(let_) |
call!(true_) |
call!(val)
));
token!(pub end <&str, &str>, tag!("end"));
token!(pub false_<&str, &str>, tag!("false"));
token!(pub forall<&str, &str>, alt!(tag!("forall") | tag!("∀")));
token!(pub fun <&str, &str>, tag!("fun"));
token!(pub in_ <&str, &str>, tag!("in"));
token!(pub let_ <&str, &str>, tag!("let"));
token!(pub true_ <&str, &str>, tag!("true"));
token!(pub val <&str, &str>, tag!("val"));
}
pub mod punc {
token!(pub arrow <&str, &str>, tag!("->"));
token!(pub equals <&str, &str>, tag!("="));
token!(pub colon <&str, &str>, tag!(":"));
token!(pub comma <&str, &str>, tag!(","));
token!(pub left_paren <&str, &str>, tag!("("));
token!(pub right_paren<&str, &str>, tag!(")"));
}
}
pub mod expr {
use super::*;
type A<'e, 't> = (&'e Arena<Expr<'e, 't>>, &'t Arena<Ty<'t>>);
type I<'s> = &'s str;
type O<'e, 't> = &'e Expr<'e, 't>;
static POS: Pos = Pos(0);
pub fn level_1<'s, 'e, 't>(i: I<'s>, a: A<'e, 't>) -> IResult<I<'s>, O<'e, 't>> {
map!(i, fold_many1!(call!(level_2, a), None, |acc, arg| {
match acc {
Some(callee) => Some(&*a.0.alloc(App(POS, callee, arg))),
None => Some(arg),
}
}), Option::unwrap)
}
pub fn level_2<'s, 'e, 't>(i: I<'s>, a: A<'e, 't>) -> IResult<I<'s>, O<'e, 't>> {
alt!(i,
do_parse!(
call!(lex::punc::left_paren) >>
expr: call!(level_1, a) >>
call!(lex::punc::right_paren) >>
(expr)
) |
call!(bool, a) |
call!(str, a) |
call!(var, a) |
call!(abs, a) |
call!(let_, a)
)
}
pub fn bool<'s, 'e, 't>(i: I<'s>, a: A<'e, 't>) -> IResult<I<'s>, O<'e, 't>> {
alt!(i,
map!(call!(lex::keyw::false_), |_| &*a.0.alloc(Bool(POS, false))) |
map!(call!(lex::keyw::true_), |_| &*a.0.alloc(Bool(POS, true)))
)
}
pub fn str<'s, 'e, 't>(i: I<'s>, a: A<'e, 't>) -> IResult<I<'s>, O<'e, 't>> {
map!(i, call!(lex::str), |x| &*a.0.alloc(Str(POS, RefCell::new(x))))
}
pub fn var<'s, 'e, 't>(i: I<'s>, a: A<'e, 't>) -> IResult<I<'s>, O<'e, 't>> {
map!(i, call!(lex::ident), |x| &*a.0.alloc(Var(POS, x)))
}
pub fn abs<'s, 'e, 't>(i: I<'s>, a: A<'e, 't>) -> IResult<I<'s>, O<'e, 't>> {
do_parse!(i,
call!(lex::keyw::fun) >>
param: call!(lex::ident) >>
call!(lex::punc::arrow) >>
body: call!(level_1, a) >>
call!(lex::keyw::end) >>
(a.0.alloc(Abs(POS, param, body)))
)
}
pub fn let_<'s, 'e, 't>(i: I<'s>, a: A<'e, 't>) -> IResult<I<'s>, O<'e, 't>> {
let make_let = |acc, mut vals|
drain_all(&mut vals)
.rev()
.fold(acc, |acc, (name, ty, value)|
a.0.alloc(Let(POS, name, ty, value, acc)));
do_parse!(i,
call!(lex::keyw::let_) >>
vals: many0!(do_parse!(
call!(lex::keyw::val) >>
name: call!(lex::ident) >>
ty: opt!(do_parse!(
call!(lex::punc::colon) >>
ty: call!(ty::level_1, a.1) >>
(ty)
)) >>
call!(lex::punc::equals) >>
value: call!(level_1, a) >>
(name, ty, value)
)) >>
call!(lex::keyw::in_) >>
body: call!(level_1, a) >>
call!(lex::keyw::end) >>
(make_let(body, vals))
)
}
fn drain_all<T>(vec: &mut Vec<T>) -> Drain<T> {
let range = 0 .. vec.len();
vec.drain(range)
}
}
pub mod ty {
use super::*;
type A<'t> = &'t Arena<Ty<'t>>;
type I<'s> = &'s str;
type O<'t> = &'t Ty<'t>;
pub fn level_1<'s, 't>(i: I<'s>, a: A<'t>) -> IResult<I<'s>, O<'t>> {
do_parse!(i,
left: call!(level_2, a) >>
right: opt!(do_parse!(
call!(lex::punc::arrow) >>
right: call!(level_1, a) >>
(right)
)) >>
(right.iter().fold(left, |acc, ty| a.alloc(Ty::Func(acc, ty))))
)
}
pub fn level_2<'s, 't>(i: I<'s>, a: A<'t>) -> IResult<I<'s>, O<'t>> {<|fim▁hole|> do_parse!(
call!(lex::punc::left_paren) >>
ty: call!(level_1, a) >>
call!(lex::punc::right_paren) >>
(ty)
) |
call!(var, a) |
call!(forall, a)
)
}
pub fn var<'s, 't>(i: I<'s>, a: A<'t>) -> IResult<I<'s>, O<'t>> {
map!(i, call!(lex::ident), |x| &*a.alloc(Ty::Var(x)))
}
pub fn forall<'s, 't>(i: I<'s>, a: A<'t>) -> IResult<I<'s>, O<'t>> {
do_parse!(i,
call!(lex::keyw::forall) >>
var: call!(lex::ident) >>
call!(lex::punc::comma) >>
inner: call!(level_1, a) >>
(a.alloc(Ty::Forall(var, inner)))
)
}
}
#[cfg(test)]
mod test {
use super::*;
mod expr_test {
use super::*;
#[test]
fn test_bool() {
let ta = Arena::new();
let ea = Arena::new();
let r = expr::level_1("false", (&ea, &ta));
println!("{:?}", r);
}
#[test]
fn test_var() {
let ta = Arena::new();
let ea = Arena::new();
let r = expr::level_1("fantastic", (&ea, &ta));
println!("{:?}", r);
}
#[test]
fn test_abs() {
let ta = Arena::new();
let ea = Arena::new();
let r = expr::level_1("fun a -> a end", (&ea, &ta));
println!("{:?}", r);
}
#[test]
fn test_app() {
let ta = Arena::new();
let ea = Arena::new();
let r = expr::level_1("foo bar (baz qux)", (&ea, &ta));
println!("{:?}", r);
}
#[test]
fn test_let() {
let ta = Arena::new();
let ea = Arena::new();
{
let r = expr::level_1("let val x = y in z end", (&ea, &ta));
println!("{:?}", r);
}
{
let r = expr::level_1("let val v = w val x = y in z end", (&ea, &ta));
println!("{:?}", r);
}
}
}
}<|fim▁end|> | alt!(i, |
<|file_name|>monitor.py<|end_file_name|><|fim▁begin|>#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Watch a running build job and output changes to the screen.
"""
import fcntl
import os
import select
import socket
import sys
import tempfile
import termios
import time
import traceback
from rmake import errors
from rmake.build import buildjob, buildtrove
from rmake.cmdline import query
def _getUri(client):
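# Pick a URI for the event listener: a temporary unix-socket path when the
# client's own URI is a unix socket (or not a string), otherwise an http
# URI based on this host's name.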
if not isinstance(client.uri, str) or client.uri.startswith('unix://'):
fd, tmpPath = tempfile.mkstemp()
os.close(fd)
uri = 'unix://' + tmpPath
else:
host = socket.gethostname()
uri = 'http://%s' % host
tmpPath = None
return uri, tmpPath
def monitorJob(client, jobId, showTroveDetails=False, showBuildLogs=False,
exitOnFinish=None, uri=None, serve=True, out=None,
displayClass=None):
if not uri:
uri, tmpPath = _getUri(client)
else:
tmpPath = None
if not displayClass:
displayClass = JobLogDisplay
try:
display = displayClass(client, showBuildLogs=showBuildLogs, out=out,
exitOnFinish=exitOnFinish)
client = client.listenToEvents(uri, jobId, display,
showTroveDetails=showTroveDetails,
serve=serve)
return client
finally:
if serve and tmpPath:
os.remove(tmpPath)
def waitForJob(client, jobId, uri=None, serve=True):
if not uri:
uri, tmpPath = _getUri(client)
else:
tmpPath = None
try:
display = SilentDisplay(client)
display._primeOutput(jobId)
return client.listenToEvents(uri, jobId, display, serve=serve)
finally:
if tmpPath:
os.remove(tmpPath)
class _AbstractDisplay(object):#xmlrpc.BasicXMLRPCStatusSubscriber):
def __init__(self, client, showBuildLogs=True, out=None,
exitOnFinish=True):
self.client = client
self.finished = False
self.exitOnFinish = True # override exitOnFinish setting
self.showBuildLogs = showBuildLogs
if not out:
out = sys.stdout
self.out = out
def close(self):
pass
def _serveLoopHook(self):
pass
def _msg(self, msg, *args):
self.out.write('[%s] %s\n' % (time.strftime('%X'), msg))
self.out.flush()
def _jobStateUpdated(self, jobId, state, status):
isFinished = (state in (buildjob.JOB_STATE_FAILED,
buildjob.JOB_STATE_BUILT))
if isFinished:
self._setFinished()
def _setFinished(self):
self.finished = True
def _isFinished(self):
return self.finished
def _shouldExit(self):
return self._isFinished() and self.exitOnFinish
def _primeOutput(self, jobId):
job = self.client.getJob(jobId, withTroves=False)
if job.isFinished():
self._setFinished()
class SilentDisplay(_AbstractDisplay):
pass
class JobLogDisplay(_AbstractDisplay):
def __init__(self, client, showBuildLogs=True, out=None,
exitOnFinish=None):
_AbstractDisplay.__init__(self, client, out=out,
showBuildLogs=showBuildLogs,
exitOnFinish=exitOnFinish)
self.buildingTroves = {}
def _tailBuildLog(self, jobId, troveTuple):
mark = self.buildingTroves.get((jobId, troveTuple), [0])[0]
self.buildingTroves[jobId, troveTuple] = [mark, True]
self.out.write('Tailing %s build log:\n\n' % troveTuple[0])
def _stopTailing(self, jobId, troveTuple):
mark = self.buildingTroves.get((jobId, troveTuple), [0])[0]
self.buildingTroves[jobId, troveTuple] = [ mark, False ]
def _serveLoopHook(self):
if not self.buildingTroves:
return
for (jobId, troveTuple), (mark, tail) in self.buildingTroves.items():
if not tail:
continue
try:
moreData, data, mark = self.client.getTroveBuildLog(jobId,
troveTuple,
mark)
except:
moreData = True
data = ''
self.out.write(data)
if not moreData:
del self.buildingTroves[jobId, troveTuple]
else:
self.buildingTroves[jobId, troveTuple][0] = mark
def _jobTrovesSet(self, jobId, troveData):
self._msg('[%d] - job troves set' % jobId)
def _jobStateUpdated(self, jobId, state, status):
_AbstractDisplay._jobStateUpdated(self, jobId, state, status)
state = buildjob.stateNames[state]
if self._isFinished():
self._serveLoopHook()
self._msg('[%d] - State: %s' % (jobId, state))
if status:
self._msg('[%d] - %s' % (jobId, status))
def _jobLogUpdated(self, jobId, state, status):
self._msg('[%d] %s' % (jobId, status))
def _troveStateUpdated(self, (jobId, troveTuple), state, status):
isBuilding = (state in (buildtrove.TroveState.BUILDING,
buildtrove.TroveState.RESOLVING))
state = buildtrove.stateNames[state]
self._msg('[%d] - %s - State: %s' % (jobId, troveTuple[0], state))
if status:
self._msg('[%d] - %s - %s' % (jobId, troveTuple[0], status))
if isBuilding and self.showBuildLogs:
self._tailBuildLog(jobId, troveTuple)
else:
self._stopTailing(jobId, troveTuple)
def _troveLogUpdated(self, (jobId, troveTuple), state, status):
state = buildtrove.stateNames[state]
self._msg('[%d] - %s - %s' % (jobId, troveTuple[0], status))
def _trovePreparingChroot(self, (jobId, troveTuple), host, path):
if host == '_local_':
msg = 'Chroot at %s' % path
else:
msg = 'Chroot at Node %s:%s' % (host, path)
self._msg('[%d] - %s - %s' % (jobId, troveTuple[0], msg))
def _primeOutput(self, jobId):
logMark = 0
while True:
newLogs = self.client.getJobLogs(jobId, logMark)
if not newLogs:
break
logMark += len(newLogs)
for (timeStamp, message, args) in newLogs:
print '[%s] [%s] - %s' % (timeStamp, jobId, message)
BUILDING = buildtrove.TroveState.BUILDING
troveTups = self.client.listTrovesByState(jobId, BUILDING).get(BUILDING, [])
for troveTuple in troveTups:
self._tailBuildLog(jobId, troveTuple)
_AbstractDisplay._primeOutput(self, jobId)
def set_raw_mode():
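# Put stdin into non-canonical, no-echo, non-blocking mode so single
# keypresses can be read immediately; returns the previous settings so
# restore_terminal() can undo the change.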
fd = sys.stdin.fileno()
oldTerm = termios.tcgetattr(fd)
newattr = termios.tcgetattr(fd)
newattr[3] = newattr[3] & ~termios.ICANON & ~termios.ECHO
termios.tcsetattr(fd, termios.TCSANOW, newattr)
oldFlags = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, oldFlags | os.O_NONBLOCK)
return oldTerm, oldFlags
def restore_terminal(oldTerm, oldFlags):
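# Undo set_raw_mode(): restore the saved termios settings and fcntl flags.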
fd = sys.stdin.fileno()
if oldTerm:
termios.tcsetattr(fd, termios.TCSAFLUSH, oldTerm)
if oldFlags:
fcntl.fcntl(fd, fcntl.F_SETFL, oldFlags)
class _AbstractDisplay(object):#xmlrpc.BasicXMLRPCStatusSubscriber):
def __init__(self, client, showBuildLogs=True, out=None):
self.client = client
self.finished = False
self.showBuildLogs = showBuildLogs
self.troveStates = {}
self.troveIndex = None
self.troveDisplay = False
self.out = OutBuffer(out)
def close(self):
pass
def _msg(self, msg, *args):
self.out.write('\r[%s] %s\n' % (time.strftime('%X'), msg))
self.out.write('(h for help)>')
self.out.flush()
def _jobStateUpdated(self, jobId, state, status):
isFinished = (state in (buildjob.JOB_STATE_FAILED,
buildjob.JOB_STATE_BUILT))
if isFinished:
self._setFinished()
def _setFinished(self):
self.finished = True
def _isFinished(self):
return self.finished
def _shouldExit(self):
return self._isFinished() and self.exitOnFinish
def _primeOutput(self, jobId):
job = self.client.getJob(jobId, withTroves=False)
if job.isFinished():
self._setFinished()
def _dispatch(self, methodname, (callData, responseHandler, args)):
if methodname.startswith('_'):
raise NoSuchMethodError(methodname)
else:
responseHandler.sendResponse('')
getattr(self, methodname)(*args)
class SilentDisplay(_AbstractDisplay):
def _updateBuildLog(self):
pass
class JobLogDisplay(_AbstractDisplay):
def __init__(self, client, state, out=None):
_AbstractDisplay.__init__(self, client, out)
self.troveToWatch = None
self.watchTroves = False
self.buildingTroves = {}
self.state = state
self.lastLen = 0
self.promptFormat = '%(jobId)s %(name)s%(context)s - %(state)s - (%(tailing)s) ([h]elp)>'
self.updatePrompt()
def close(self):
self.out.write('\n')
self.out.flush()
def _msg(self, msg, *args):
self.erasePrompt()
self.out.write('[%s] %s\n' % (time.strftime('%X'), msg))
self.writePrompt()
def updatePrompt(self):
if self.troveToWatch:
if self.troveToWatch not in self.state.troves:
self.troveToWatch = self.state.troves[0]
state = self.state.getTroveState(*self.troveToWatch)
state = buildtrove.stateNames[state]
name = self.troveToWatch[1][0].split(':', 1)[0] # remove :source
context = self.troveToWatch[1][3]
d = dict(jobId=self.troveToWatch[0], name=name, state=state,
context=(context and '{%s}' % context or ''))
else:
d = dict(jobId='(None)', name='(None)', state='', context='')
if not self.state.jobActive():
tailing = 'Job %s' % self.state.getJobStateName()
elif self.watchTroves:
tailing = 'Details on'
else:
tailing = 'Details off'
d['tailing'] = tailing
self.prompt = self.promptFormat % d
self.erasePrompt()
self.writePrompt()
def erasePrompt(self):
self.out.write('\r%s\r' % (' '*self.lastLen))
def writePrompt(self):
self.out.write(self.prompt)
self.lastLen = len(self.prompt)
self.out.flush()
def setWatchTroves(self, watchTroves=True):
self.watchTroves = watchTroves
self.updatePrompt()
def getWatchTroves(self):
return self.watchTroves
def setTroveToWatch(self, jobId, troveTuple):
self.troveToWatch = jobId, troveTuple
self.updatePrompt()
def _watchTrove(self, jobId, troveTuple):
if not self.watchTroves:
return False
return self.troveToWatch == (jobId, troveTuple)
def displayTroveStates(self):
if not self.troveToWatch:
return
self.erasePrompt()
job = self.client.getJob(self.troveToWatch[0])
query.displayTrovesByState(job, out=self.out)
self.writePrompt()
def setPrompt(self, promptFormat):
self.promptFormat = promptFormat
self.updatePrompt()
def updateBuildLog(self, jobId, troveTuple):
if not self._watchTrove(jobId, troveTuple):
return
mark = self.getMark(jobId, troveTuple)
if mark is None:
return
try:
moreData, data, mark = self.client.getTroveBuildLog(jobId,
troveTuple,
mark)
except:
return
if data and data != '\n':
self.erasePrompt()
if data[0] == '\n':
# we've already got a \n because we've cleared
# the prompt.
data = data[1:]
self.out.write(data)
if data[-1] != '\n':
self.out.write('\n')
self.writePrompt()
if not moreData:
mark = None
self.setMark(jobId, troveTuple, mark)
def getMark(self, jobId, troveTuple):
if (jobId, troveTuple) not in self.buildingTroves:
# display max 80 lines of back log
self.buildingTroves[jobId, troveTuple] = -80
return self.buildingTroves[jobId, troveTuple]
def setMark(self, jobId, troveTuple, mark):
self.buildingTroves[jobId, troveTuple] = mark
def _jobTrovesSet(self, jobId, troveList):
self._msg('[%d] - job troves set' % jobId)
self.troveToWatch = jobId, troveList[0]
self.updatePrompt()
def _jobStateUpdated(self, jobId, state, status):
_AbstractDisplay._jobStateUpdated(self, jobId, state, status)
state = buildjob.stateNames[state]
if self._isFinished() and self.troveToWatch:
self.updateBuildLog(*self.troveToWatch)
self._msg('[%d] - State: %s' % (jobId, state))
if status:
self._msg('[%d] - %s' % (jobId, status))
self.updatePrompt()
def _jobLogUpdated(self, jobId, state, status):
self._msg('[%d] %s' % (jobId, status))
def _troveStateUpdated(self, (jobId, troveTuple), state, status):
isBuilding = (state == buildtrove.TroveState.BUILDING)
state = buildtrove.stateNames[state]
if troveTuple[3]:
name = '%s{%s}' % (troveTuple[0], troveTuple[3])
else:
name = troveTuple[0]
self._msg('[%d] - %s - State: %s' % (jobId, name, state))
if status and self._watchTrove(jobId, troveTuple):
self._msg('[%d] - %s - %s' % (jobId, name, status))
self.updatePrompt()
def _troveLogUpdated(self, (jobId, troveTuple), state, status):
if self._watchTrove(jobId, troveTuple):
state = buildtrove.stateNames[state]
self._msg('[%d] - %s - %s' % (jobId, troveTuple[0], status))
def _trovePreparingChroot(self, (jobId, troveTuple), host, path):
if not self._watchTrove(jobId, troveTuple):
return
if host == '_local_':
msg = 'Chroot at %s' % path
else:
msg = 'Chroot at Node %s:%s' % (host, path)
self._msg('[%d] - %s - %s' % (jobId, troveTuple[0], msg))
class OutBuffer(object):
def __init__(self, fd):
if fd is None:
fd = sys.stdout.fileno()
elif not isinstance(fd, int):
fd = fd.fileno()
self.fd = fd
self.data = []
def write(self, data):
self.data.append(data)
def fileno(self):
return self.fd
def flush(self):
while self.data:
self.check()
def check(self):
while self.data:
ready = select.select([], [self.fd], [], 0.1)[1]
if not ready:
return
rc = os.write(self.fd, self.data[0])
if rc < len(self.data[0]):
self.data[0] = self.data[0][rc:]
else:
self.data.pop(0)
class DisplayState(object):#xmlrpc.BasicXMLRPCStatusSubscriber):
def __init__(self, client):
self.troves = []
self.states = {}
self.buildingTroves = {}
self.jobId = None
self.client = client
self.jobState = None
def _primeOutput(self, jobId):
#assert(not self.jobId)
self.jobId = jobId
job = self.client.getJob(jobId, withTroves=False)
self.jobState = job.state
if job.isBuilding() or job.isFinished() or job.isFailed():
self.updateTrovesForJob(jobId)
def jobActive(self):
return self.jobState in (
buildjob.JOB_STATE_STARTED,
buildjob.JOB_STATE_LOADING,
buildjob.JOB_STATE_LOADED,
buildjob.JOB_STATE_BUILD,
)
def getJobStateName(self):
if self.jobState is None:
return 'None'
return buildjob.stateNames[self.jobState]
def isFailed(self, jobId, troveTuple):
return (self.getTroveState(jobId, troveTuple)
== buildtrove.TroveState.FAILED)
def isBuilding(self, jobId, troveTuple):
return self.getTroveState(jobId, troveTuple) in (
buildtrove.TroveState.BUILDING,
buildtrove.TroveState.PREPARING,
buildtrove.TroveState.RESOLVING)
def isFailed(self, jobId, troveTuple):
# don't iterate through unbuildable - they are failures due to
# secondary causes.
return self.getTroveState(jobId, troveTuple) in (
buildtrove.TroveState.FAILED,)
def findTroveByName(self, troveName):
startsWith = None
for jobId, troveTuple in sorted(self.states):
if troveTuple[0].split(':', 1)[0] == troveName:
# exact matches take priority
return (jobId, troveTuple)
elif troveTuple[0].startswith(troveName) and startsWith is None:
startsWith = (jobId, troveTuple)
return startsWith
def getTroveState(self, jobId, troveTuple):
return self.states[jobId, troveTuple]
def getBuildingTroves(self):
return [ x[0] for x in self.states.iteritems()
if x[1] in (buildtrove.TroveState.BUILDING,
buildtrove.TroveState.RESOLVING) ]
def updateTrovesForJob(self, jobId):
self.troves = []
self.states = {}
for state, troveTupleList in self.client.listTrovesByState(jobId).items():
for troveTuple in troveTupleList:
self.troves.append((jobId, troveTuple))
self.states[jobId, troveTuple] = state
self.troves.sort()
def _troveStateUpdated(self, (jobId, troveTuple), state, status):
if (jobId, troveTuple) not in self.states:<|fim▁hole|>
def _jobStateUpdated(self, jobId, state, status):
self.jobState = state
if self._isBuilding():
self.updateTrovesForJob(jobId)
def _jobTrovesSet(self, jobId, troveList):
self.updateTrovesForJob(jobId)
def _isBuilding(self):
return self.jobState in (buildjob.JOB_STATE_BUILD,
buildjob.JOB_STATE_STARTED)
def _isFinished(self):
return self.jobState in (
buildjob.JOB_STATE_FAILED, buildjob.JOB_STATE_BUILT)
class DisplayManager(object):#xmlrpc.BasicXMLRPCStatusSubscriber):
displayClass = JobLogDisplay
stateClass = DisplayState
def __init__(self, client, showBuildLogs, out=None, exitOnFinish=None):
self.termInfo = set_raw_mode()
if out is None:
out = open('/dev/tty', 'w')
self.state = self.stateClass(client)
self.display = self.displayClass(client, self.state, out)
self.client = client
self.troveToWatch = None
self.troveIndex = 0
self.showBuildLogs = showBuildLogs
if exitOnFinish is None:
exitOnFinish = False
self.exitOnFinish = exitOnFinish
def _receiveEvents(self, *args, **kw):
methodname = '_receiveEvents'
method = getattr(self.state, methodname, None)
if method:
try:
method(*args)
except errors.uncatchableExceptions:
raise
except Exception, err:
print 'Error in handler: %s\n%s' % (err,
traceback.format_exc())
method = getattr(self.display, methodname, None)
if method:
try:
method(*args)
except errors.uncatchableExceptions:
raise
except Exception, err:
print 'Error in handler: %s\n%s' % (err,
traceback.format_exc())
return ''
def getCurrentTrove(self):
if self.state.troves:
return self.state.troves[self.troveIndex]
else:
return None
def _primeOutput(self, jobId):
self.state._primeOutput(jobId)
self.display._msg('Watching job %s' % jobId)
if self.getCurrentTrove():
self.displayTrove(*self.getCurrentTrove())
def displayTrove(self, jobId, troveTuple):
self.display.setTroveToWatch(jobId, troveTuple)
state = self.state.getTroveState(jobId, troveTuple)
state = buildtrove.stateNames[state]
def _serveLoopHook(self):
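# Poll stdin for single-key commands; arrow keys arrive as 3-byte escape
# sequences, so read two extra bytes after an ESC.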
ready = select.select([sys.stdin], [], [], 0.1)[0]
if ready:
cmd = sys.stdin.read(1)
if cmd == '\x1b':
cmd += sys.stdin.read(2)
if cmd == ' ':
self.do_switch_log()
elif cmd == 'n' or cmd == '\x1b[C':
self.do_next()
elif cmd == 'p' or cmd == '\x1b[D':
self.do_prev()
elif cmd == 'q':
sys.exit(0)
elif cmd == 'h':
self.do_help()
elif cmd == 'b':
self.do_next_building()
elif cmd == 'f':
self.do_next_failed()
elif cmd == 'i':
self.do_info()
elif cmd == 'l':
self.do_log()
elif cmd == 's':
self.do_status()
elif cmd == 'g':
self.do_goto()
if self.showBuildLogs:
for jobId, troveTuple in self.state.getBuildingTroves():
self.display.updateBuildLog(jobId, troveTuple)
def do_next(self):
if not self.state.troves:
return
self.troveIndex = (self.troveIndex + 1) % len(self.state.troves)
if self.getCurrentTrove():
self.displayTrove(*self.getCurrentTrove())
def do_next_building(self):
if not self.state.troves:
return
startIndex = self.troveIndex
self.troveIndex = (self.troveIndex + 1) % len(self.state.troves)
while (not self.state.isBuilding(*self.getCurrentTrove())
and self.troveIndex != startIndex):
self.troveIndex = (self.troveIndex + 1) % len(self.state.troves)
if self.troveIndex != startIndex:
self.displayTrove(*self.getCurrentTrove())
def do_goto(self):
if not self.state.troves:
print 'No troves loaded yet'
return
self.display.erasePrompt()
restore_terminal(*self.termInfo)
try:
troveName = raw_input("\nName or part of name of trove: ")
troveInfo = self.state.findTroveByName(troveName)
if not troveInfo:
print 'No trove starting with "%s"' % troveName
self.display.writePrompt()
return
while not self.getCurrentTrove() == troveInfo:
self.troveIndex = (self.troveIndex + 1) % len(self.state.troves)
self.displayTrove(*self.getCurrentTrove())
finally:
self.termInfo = set_raw_mode()
def do_next_failed(self):
if not self.state.troves:
return
startIndex = self.troveIndex
self.troveIndex = (self.troveIndex + 1) % len(self.state.troves)
while (not self.state.isFailed(*self.getCurrentTrove())
and self.troveIndex != startIndex):
self.troveIndex = (self.troveIndex + 1) % len(self.state.troves)
if self.troveIndex != startIndex:
self.displayTrove(*self.getCurrentTrove())
def do_prev(self):
if not self.state.troves:
return
self.troveIndex = (self.troveIndex - 1) % len(self.state.troves)
if self.getCurrentTrove():
self.displayTrove(*self.getCurrentTrove())
def do_info(self):
if not self.getCurrentTrove():
return
jobId, troveTuple = self.getCurrentTrove()
job = self.client.getJob(jobId)
trove = job.getTrove(*troveTuple)
dcfg = query.DisplayConfig(self.client, showTracebacks=True)
self.display.setWatchTroves(False)
self.display.erasePrompt()
query.displayTroveDetail(dcfg, job, trove, out=self.display.out)
self.display.writePrompt()
def do_log(self):
if not self.getCurrentTrove():
return
jobId, troveTuple = self.getCurrentTrove()
job = self.client.getJob(jobId)
trove = job.getTrove(*troveTuple)
moreData, data, mark = self.client.getTroveBuildLog(jobId,
troveTuple, 0)
if not data:
self.display._msg('No log yet.')
return
fd, path = tempfile.mkstemp()
os.fdopen(fd, 'w').write(data)
try:
os.system('less %s' % path)
finally:
os.remove(path)
def do_help(self):
print
print "<space>: Turn on/off tailing of log"
print "<left>/<right>: move to next/prev trove in list"
print "b: move to next building trove"
print "f: move to next failed trove"
print "g: go to a particular trove"
print "h: print help"
print "i: display info for this trove"
print "l: display log for this trove in less"
print "q: quit"
print "s: display status on all troves"
def do_status(self):
self.display.setWatchTroves(False)
self.display.displayTroveStates()
def do_switch_log(self):
self.display.setWatchTroves(not self.display.getWatchTroves())
def _isFinished(self):
return self.display._isFinished()
def _shouldExit(self):
return self._isFinished() and self.exitOnFinish
def close(self):
self.display.close()
restore_terminal(*self.termInfo)<|fim▁end|> | self.updateTrovesForJob(jobId)
else:
self.states[jobId, troveTuple] = state |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Érudit.org documentation build configuration file, created by
# sphinx-quickstart on Mon Dec 14 17:16:39 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('../eruditorg'))
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'base.settings.base')
import django
django.setup()
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Érudit.org'
copyright = '2016 Érudit'
author = 'David Cormier'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'fr'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
<|fim▁hole|>
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'ruditorgdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'ruditorg.tex', 'Érudit.org Documentation',
'Érudit', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'ruditorg', 'Érudit.org Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'ruditorg', 'Érudit.org Documentation',
author, 'ruditorg', 'One line description of project.',
'Miscellaneous'),
]
intersphinx_mapping = {
'python': ('http://python.readthedocs.org/en/stable/', None),
'django': ('http://docs.djangoproject.com/en/1.8/', 'https://docs.djangoproject.com/en/1.8/_objects/'),
}
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False<|fim▁end|> | # If false, no index is generated.
#html_use_index = True |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// The MIT License (MIT)
//
// Copyright (c) 2014 Jeremy Letang
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
#![crate_name = "verdigris"]
#![desc = "Multi plateform opengl windowing for Rust"]
#![license = "MIT"]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![allow(dead_code, non_camel_case_types, missing_doc)]
#![feature(struct_variant)]
#![feature(globs)]
#![unstable]
extern crate libc;
pub use self::window::Window;
pub use self::video_mode::VideoMode;
pub use self::window_builder::WindowBuilder;
pub use self::context_settings::ContextSettings;
#[cfg(target_os = "macos")]
#[path = "mac_os/mod.rs"]
mod imp;<|fim▁hole|>
#[cfg(target_os = "linux")]
#[path = "linux/mod.rs"]
mod imp;
mod native;
mod window;
mod video_mode;
mod window_builder;
pub mod context_settings;
pub mod window_style;
pub mod event;
pub mod inputs;
pub mod gl;<|fim▁end|> |
#[cfg(target_os = "wind32")]
#[path = "windows/mod.rs"]
mod imp; |
<|file_name|>reflection.js<|end_file_name|><|fim▁begin|>/* Copyright (c) 2004-2005 The Dojo Foundation, Licensed under the Academic Free License version 2.1 or above */dojo.provide("dojo.reflect");
/*****************************************************************
reflect.js
v.1.5.0
(c) 2003-2004 Thomas R. Trenka, Ph.D.
Derived from the reflection functions of f(m).
http://dojotoolkit.org
http://fm.dept-z.com
There is a dependency on the variable dj_global, which
should always refer to the global object.
******************************************************************/
if(!dj_global){ var dj_global = this; }
dojo.reflect = {} ;
dojo.reflect.$unknownType = function(){ } ;
dojo.reflect.ParameterInfo = function(name, type){
this.name = name ;
this.type = (type) ? type : dojo.reflect.$unknownType ;
} ;
dojo.reflect.PropertyInfo = function(name, type) {
this.name = name ;
this.type = (type) ? type : dojo.reflect.$unknownType ;
} ;
dojo.reflect.MethodInfo = function(name, fn){
var parse = function(f) {
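// Crude source parse: pull the parameter names and body text out of the
// function's toString() representation.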
var o = {} ;
var s = f.toString() ;
var param = ((s.substring(s.indexOf('(')+1, s.indexOf(')'))).replace(/\s+/g, "")).split(",") ;
o.parameters = [] ;
for (var i = 0; i < param.length; i++) {
o.parameters.push(new dojo.reflect.ParameterInfo(param[i])) ;
}
o.body = (s.substring(s.indexOf('{')+1, s.lastIndexOf('}'))).replace(/(^\s*)|(\s*$)/g, "") ;
return o ;
} ;
var tmp = parse(fn) ;
var p = tmp.parameters ;
var body = tmp.body ;
this.name = (name) ? name : "anonymous" ;
this.getParameters = function(){ return p ; } ;
this.getNullArgumentsObject = function() {
var a = [] ;
for (var i = 0; i < p.length; i++){
a.push(null);
}
return a ;
} ;
this.getBody = function() { return body ; } ;
this.type = Function ;
this.invoke = function(src, args){ return fn.apply(src, args) ; } ;
} ;
// Static object that can activate instances of the passed type.
dojo.reflect.Activator = new (function(){
this.createInstance = function(type, args) {
switch (typeof(type)) {
case "function" : {
var o = {} ;
type.apply(o, args) ;
return o ;
} ;
case "string" : {
var o = {} ;
(dojo.reflect.Reflector.getTypeFromString(type)).apply(o, args) ;
return o ;
} ;
}
throw new Error("dojo.reflect.Activator.createInstance(): no such type exists.");
}
})() ;
dojo.reflect.Reflector = new (function(){
this.getTypeFromString = function(s) {
var parts = s.split("."), i = 0, obj = dj_global ;
do { obj = obj[parts[i++]] ; } while (i < parts.length && obj) ;
return (obj != dj_global) ? obj : null ;
};
this.typeExists = function(s) {
var parts = s.split("."), i = 0, obj = dj_global ;
do { obj = obj[parts[i++]] ; } while (i < parts.length && obj) ;
return (obj && obj != dj_global) ;
};
this.getFieldsFromType = function(s) {
var type = s ;
if (typeof(s) == "string") {
type = this.getTypeFromString(s) ;
}
var nullArgs = (new dojo.reflect.MethodInfo(type)).getNullArgumentsObject() ;
return this.getFields(dojo.reflect.Activator.createInstance(s, nullArgs)) ;
};
this.getPropertiesFromType = function(s) {
var type = s ;
if (typeof(s) == "string") {
type = this.getTypeFromString(s);
}
var nullArgs = (new dojo.reflect.MethodInfo(type)).getNullArgumentsObject() ;
return this.getProperties(dojo.reflect.Activator.createInstance(s, nullArgs)) ;
};
this.getMethodsFromType = function(s) {
var type = s ;
if (typeof(s) == "string") {
type = this.getTypeFromString(s) ;
}
var nullArgs = (new dojo.reflect.MethodInfo(type)).getNullArgumentsObject() ;
return this.getMethods(dojo.reflect.Activator.createInstance(s, nullArgs)) ;
};
this.getType = function(o) { return o.constructor ; } ;
this.getFields = function(obj) {
var arr = [] ;
for (var p in obj) {
if(this.getType(obj[p]) != Function){
arr.push(new dojo.reflect.PropertyInfo(p, this.getType(obj[p]))) ;
}else{
arr.push(new dojo.reflect.MethodInfo(p, obj[p]));
}
}
return arr ;
};
this.getProperties = function(obj) {
var arr = [] ;
var fi = this.getFields(obj) ;
for (var i = 0; i < fi.length; i++){
if (this.isInstanceOf(fi[i], dojo.reflect.PropertyInfo)){
arr.push(fi[i]) ;
}
}
return arr ;
};
this.getMethods = function(obj) {
var arr = [] ;
var fi = this.getFields(obj) ;
for (var i = 0; i < fi.length; i++){
if (this.isInstanceOf(fi[i], dojo.reflect.MethodInfo)){
arr.push(fi[i]) ;
}
}
return arr ;
};
/*
this.implements = function(o, type) {
if (this.isSubTypeOf(o, type)) return false ;
var f = this.getFieldsFromType(type) ;
for (var i = 0; i < f.length; i++) {
if (typeof(o[(f[i].name)]) == "undefined"){
return false;
}
}
return true ;
};
*/
this.getBaseClass = function(o) {
if (o.getType().prototype.prototype.constructor){
return (o.getType()).prototype.prototype.constructor ;
}
return Object ;
} ;
this.isInstanceOf = function(o, type) {
return (this.getType(o) == type) ;
};
this.isSubTypeOf = function(o, type) {
return (o instanceof type) ;
};
<|fim▁hole|>})();
// back-compat
dojo.provide("dojo.reflect.reflection");<|fim▁end|> | this.isBaseTypeOf = function(o, type) {
return (type instanceof o);
};
|
<|file_name|>system.rs<|end_file_name|><|fim▁begin|>use super::world::World;
use super::event::{EventQueue, EventEmitter};
<|fim▁hole|> /// This method is called each frame, giving the `System` access to the `World`, `EventQueue`,
/// and `EventEmitter`. `dt` is the time in milliseconds since the last update.
fn update(&mut self, world: &mut World, queue: &EventQueue, emitter: &mut EventEmitter, dt: f32);
}<|fim▁end|> | /// Trait that must be implemented by all systems in the `Simulation`.
pub trait System { |
<|file_name|>performance.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DomRefCell;
use dom::bindings::codegen::Bindings::PerformanceBinding;
use dom::bindings::codegen::Bindings::PerformanceBinding::{DOMHighResTimeStamp, PerformanceMethods};
use dom::bindings::codegen::Bindings::PerformanceBinding::PerformanceEntryList as DOMPerformanceEntryList;
use dom::bindings::error::{Error, Fallible};
use dom::bindings::inheritance::Castable;
use dom::bindings::num::Finite;
use dom::bindings::reflector::{DomObject, Reflector, reflect_dom_object};
use dom::bindings::root::{Dom, DomRoot};
use dom::bindings::str::DOMString;
use dom::globalscope::GlobalScope;
use dom::performanceentry::PerformanceEntry;
use dom::performancemark::PerformanceMark;
use dom::performancemeasure::PerformanceMeasure;
use dom::performanceobserver::PerformanceObserver as DOMPerformanceObserver;
use dom::performancetiming::PerformanceTiming;
use dom::window::Window;
use dom_struct::dom_struct;
use metrics::ToMs;
use std::cell::Cell;
use std::cmp::Ordering;
use time;
const INVALID_ENTRY_NAMES: &'static [&'static str] = &[
"navigationStart",
"unloadEventStart",
"unloadEventEnd",
"redirectStart",
"redirectEnd",
"fetchStart",
"domainLookupStart",
"domainLookupEnd",
"connectStart",
"connectEnd",
"secureConnectionStart",
"requestStart",
"responseStart",
"responseEnd",
"domLoading",
"domInteractive",
"domContentLoadedEventStart",
"domContentLoadedEventEnd",
"domComplete",
"loadEventStart",
"loadEventEnd",
];
/// Implementation of a list of PerformanceEntry items shared by the
/// Performance and PerformanceObserverEntryList interfaces implementations.
#[derive(JSTraceable, MallocSizeOf)]
pub struct PerformanceEntryList {
entries: DOMPerformanceEntryList,
}
impl PerformanceEntryList {
pub fn new(entries: DOMPerformanceEntryList) -> Self {
PerformanceEntryList { entries }
}
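/// Returns the buffered entries, optionally filtered by name and/or entry
/// type, sorted by start time.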
pub fn get_entries_by_name_and_type(
&self,
name: Option<DOMString>,
entry_type: Option<DOMString>,
) -> Vec<DomRoot<PerformanceEntry>> {
let mut res = self
.entries
.iter()
.filter(|e| {
name.as_ref().map_or(true, |name_| *e.name() == *name_) && entry_type
.as_ref()
.map_or(true, |type_| *e.entry_type() == *type_)
}).map(|e| e.clone())
.collect::<Vec<DomRoot<PerformanceEntry>>>();<|fim▁hole|> .unwrap_or(Ordering::Equal)
});
res
}
pub fn clear_entries_by_name_and_type(
&mut self,
name: Option<DOMString>,
entry_type: Option<DOMString>,
) {
self.entries.retain(|e| {
name.as_ref().map_or(true, |name_| *e.name() == *name_) && entry_type
.as_ref()
.map_or(true, |type_| *e.entry_type() == *type_)
});
}
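/// Returns the start time of the most recently queued entry with the given
/// name and entry type, or 0. if no such entry exists.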
fn get_last_entry_start_time_with_name_and_type(
&self,
name: DOMString,
entry_type: DOMString,
) -> f64 {
match self
.entries
.iter()
.rev()
.find(|e| *e.entry_type() == *entry_type && *e.name() == *name)
{
Some(entry) => entry.start_time(),
None => 0.,
}
}
}
impl IntoIterator for PerformanceEntryList {
type Item = DomRoot<PerformanceEntry>;
type IntoIter = ::std::vec::IntoIter<DomRoot<PerformanceEntry>>;
fn into_iter(self) -> Self::IntoIter {
self.entries.into_iter()
}
}
#[derive(JSTraceable, MallocSizeOf)]
struct PerformanceObserver {
observer: DomRoot<DOMPerformanceObserver>,
entry_types: Vec<DOMString>,
}
#[dom_struct]
pub struct Performance {
reflector_: Reflector,
timing: Option<Dom<PerformanceTiming>>,
entries: DomRefCell<PerformanceEntryList>,
observers: DomRefCell<Vec<PerformanceObserver>>,
pending_notification_observers_task: Cell<bool>,
navigation_start_precise: u64,
}
impl Performance {
fn new_inherited(
global: &GlobalScope,
navigation_start: u64,
navigation_start_precise: u64,
) -> Performance {
Performance {
reflector_: Reflector::new(),
timing: if global.is::<Window>() {
Some(Dom::from_ref(&*PerformanceTiming::new(
global.as_window(),
navigation_start,
navigation_start_precise,
)))
} else {
None
},
entries: DomRefCell::new(PerformanceEntryList::new(Vec::new())),
observers: DomRefCell::new(Vec::new()),
pending_notification_observers_task: Cell::new(false),
navigation_start_precise,
}
}
pub fn new(
global: &GlobalScope,
navigation_start: u64,
navigation_start_precise: u64,
) -> DomRoot<Performance> {
reflect_dom_object(
Box::new(Performance::new_inherited(
global,
navigation_start,
navigation_start_precise,
)),
global,
PerformanceBinding::Wrap,
)
}
/// Add a PerformanceObserver to the list of observers with a set of
/// observed entry types.
pub fn add_observer(
&self,
observer: &DOMPerformanceObserver,
entry_types: Vec<DOMString>,
buffered: bool,
) {
if buffered {
let entries = self.entries.borrow();
let mut new_entries = entry_types
.iter()
.flat_map(|e| entries.get_entries_by_name_and_type(None, Some(e.clone())))
.collect::<DOMPerformanceEntryList>();
let mut obs_entries = observer.entries();
obs_entries.append(&mut new_entries);
observer.set_entries(obs_entries);
}
let mut observers = self.observers.borrow_mut();
match observers.iter().position(|o| *o.observer == *observer) {
// If the observer is already in the list, we only update the observed
// entry types.
Some(p) => observers[p].entry_types = entry_types,
// Otherwise, we create and insert the new PerformanceObserver.
None => observers.push(PerformanceObserver {
observer: DomRoot::from_ref(observer),
entry_types,
}),
};
}
/// Remove a PerformanceObserver from the list of observers.
pub fn remove_observer(&self, observer: &DOMPerformanceObserver) {
let mut observers = self.observers.borrow_mut();
let index = match observers.iter().position(|o| &(*o.observer) == observer) {
Some(p) => p,
None => return,
};
observers.remove(index);
}
/// Queue a notification for each performance observer interested in
/// this type of performance entry and queue a low priority task to
/// notify the observers if no other notification task is already queued.
///
/// Algorithm spec:
/// <https://w3c.github.io/performance-timeline/#queue-a-performanceentry>
pub fn queue_entry(&self, entry: &PerformanceEntry, add_to_performance_entries_buffer: bool) {
// Steps 1-3.
// Add the performance entry to the list of performance entries that have not
// been notified to each performance observer owner, filtering the ones it's
// interested in.
for o in self
.observers
.borrow()
.iter()
.filter(|o| o.entry_types.contains(entry.entry_type()))
{
o.observer.queue_entry(entry);
}
// Step 4.
// If the "add to performance entry buffer flag" is set, add the
// new entry to the buffer.
if add_to_performance_entries_buffer {
self.entries
.borrow_mut()
.entries
.push(DomRoot::from_ref(entry));
}
// Step 5.
// If there is already a queued notification task, we just bail out.
if self.pending_notification_observers_task.get() {
return;
}
// Step 6.
// Queue a new notification task.
self.pending_notification_observers_task.set(true);
let task_source = self.global().performance_timeline_task_source();
task_source.queue_notification(&self.global());
}
/// Observers notifications task.
///
/// Algorithm spec (step 7):
/// <https://w3c.github.io/performance-timeline/#queue-a-performanceentry>
pub fn notify_observers(&self) {
// Step 7.1.
self.pending_notification_observers_task.set(false);
// Step 7.2.
// We have to operate over a copy of the performance observers to avoid
// the risk of an observer's callback modifying the list of registered
// observers.
let observers: Vec<DomRoot<DOMPerformanceObserver>> = self
.observers
.borrow()
.iter()
.map(|o| {
DOMPerformanceObserver::new(
&self.global(),
o.observer.callback(),
o.observer.entries(),
)
}).collect();
// Step 7.3.
for o in observers.iter() {
o.notify();
}
}
fn now(&self) -> f64 {
let nav_start = match self.timing {
Some(ref timing) => timing.navigation_start_precise(),
None => self.navigation_start_precise,
};
(time::precise_time_ns() - nav_start).to_ms()
}
}
impl PerformanceMethods for Performance {
// https://dvcs.w3.org/hg/webperf/raw-file/tip/specs/NavigationTiming/Overview.html#performance-timing-attribute
fn Timing(&self) -> DomRoot<PerformanceTiming> {
match self.timing {
Some(ref timing) => DomRoot::from_ref(&*timing),
None => unreachable!("Are we trying to expose Performance.timing in workers?"),
}
}
// https://dvcs.w3.org/hg/webperf/raw-file/tip/specs/HighResolutionTime/Overview.html#dom-performance-now
fn Now(&self) -> DOMHighResTimeStamp {
Finite::wrap(self.now())
}
// https://www.w3.org/TR/performance-timeline-2/#dom-performance-getentries
fn GetEntries(&self) -> Vec<DomRoot<PerformanceEntry>> {
self.entries
.borrow()
.get_entries_by_name_and_type(None, None)
}
// https://www.w3.org/TR/performance-timeline-2/#dom-performance-getentriesbytype
fn GetEntriesByType(&self, entry_type: DOMString) -> Vec<DomRoot<PerformanceEntry>> {
self.entries
.borrow()
.get_entries_by_name_and_type(None, Some(entry_type))
}
// https://www.w3.org/TR/performance-timeline-2/#dom-performance-getentriesbyname
fn GetEntriesByName(
&self,
name: DOMString,
entry_type: Option<DOMString>,
) -> Vec<DomRoot<PerformanceEntry>> {
self.entries
.borrow()
.get_entries_by_name_and_type(Some(name), entry_type)
}
// https://w3c.github.io/user-timing/#dom-performance-mark
fn Mark(&self, mark_name: DOMString) -> Fallible<()> {
let global = self.global();
// Step 1.
if global.is::<Window>() && INVALID_ENTRY_NAMES.contains(&mark_name.as_ref()) {
return Err(Error::Syntax);
}
// Steps 2 to 6.
let entry = PerformanceMark::new(&global, mark_name, self.now(), 0.);
// Steps 7 and 8.
self.queue_entry(
&entry.upcast::<PerformanceEntry>(),
true, /* buffer performance entry */
);
// Step 9.
Ok(())
}
// https://w3c.github.io/user-timing/#dom-performance-clearmarks
fn ClearMarks(&self, mark_name: Option<DOMString>) {
self.entries
.borrow_mut()
.clear_entries_by_name_and_type(mark_name, Some(DOMString::from("mark")));
}
// https://w3c.github.io/user-timing/#dom-performance-measure
fn Measure(
&self,
measure_name: DOMString,
start_mark: Option<DOMString>,
end_mark: Option<DOMString>,
) -> Fallible<()> {
// Steps 1 and 2.
let end_time = match end_mark {
Some(name) => self
.entries
.borrow()
.get_last_entry_start_time_with_name_and_type(DOMString::from("mark"), name),
None => self.now(),
};
// Step 3.
let start_time = match start_mark {
Some(name) => self
.entries
.borrow()
.get_last_entry_start_time_with_name_and_type(DOMString::from("mark"), name),
None => 0.,
};
// Steps 4 to 8.
let entry = PerformanceMeasure::new(
&self.global(),
measure_name,
start_time,
end_time - start_time,
);
// Steps 9 and 10.
self.queue_entry(
&entry.upcast::<PerformanceEntry>(),
true, /* buffer performance entry */
);
// Step 11.
Ok(())
}
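// Illustrative note (not part of Servo): from page script this method backs the
// User Timing API, e.g.
//
//   performance.mark("fetch-start");
//   /* ... work ... */
//   performance.mark("fetch-end");
//   performance.measure("fetch", "fetch-start", "fetch-end");
//
// start_time comes from the most recent "fetch-start" mark and end_time from the most
// recent "fetch-end" mark; omitting end_mark uses now(), and omitting start_mark
// measures from 0 (navigation start), exactly as implemented above.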
// https://w3c.github.io/user-timing/#dom-performance-clearmeasures
fn ClearMeasures(&self, measure_name: Option<DOMString>) {
self.entries
.borrow_mut()
.clear_entries_by_name_and_type(measure_name, Some(DOMString::from("measure")));
}
}<|fim▁end|> | res.sort_by(|a, b| {
a.start_time()
.partial_cmp(&b.start_time()) |
<|file_name|>issue-73827-bounds-check-index-in-subexpr.rs<|end_file_name|><|fim▁begin|>// This test checks that bounds checks are elided when
// index is part of a (x | y) < C style condition
// compile-flags: -O
#![crate_type = "lib"]
// CHECK-LABEL: @get
#[no_mangle]
pub fn get(array: &[u8; 8], x: usize, y: usize) -> u8 {
if x > 7 || y > 7 {
0
} else {
// CHECK-NOT: panic_bounds_check
array[y]
}<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>csearch.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Searching for information from the cstore
use ast_map;
use metadata::common::*;
use metadata::cstore;
use metadata::decoder;
use middle::lang_items;
use middle::ty;
use rbml;
use rbml::reader;
use std::rc::Rc;
use syntax::ast;
use syntax::attr;
use syntax::attr::AttrMetaMethods;
use syntax::diagnostic::expect;
use std::collections::hash_map::HashMap;
#[derive(Copy, Clone)]
pub struct MethodInfo {
pub name: ast::Name,
pub def_id: ast::DefId,
pub vis: ast::Visibility,
}
pub fn get_symbol(cstore: &cstore::CStore, def: ast::DefId) -> String {
let cdata = cstore.get_crate_data(def.krate);
decoder::get_symbol(cdata.data(), def.node)
}
/// Iterates over all the language items in the given crate.
pub fn each_lang_item<F>(cstore: &cstore::CStore,
cnum: ast::CrateNum,
f: F)
-> bool where
F: FnMut(ast::NodeId, usize) -> bool,
{
let crate_data = cstore.get_crate_data(cnum);
decoder::each_lang_item(&*crate_data, f)
}
/// Iterates over each child of the given item.
pub fn each_child_of_item<F>(cstore: &cstore::CStore,
def_id: ast::DefId,
callback: F) where
F: FnMut(decoder::DefLike, ast::Name, ast::Visibility),
{
let crate_data = cstore.get_crate_data(def_id.krate);
let get_crate_data = |cnum| {
cstore.get_crate_data(cnum)
};
decoder::each_child_of_item(cstore.intr.clone(),
&*crate_data,
def_id.node,
get_crate_data,
callback)
}
/// Iterates over each top-level crate item.
pub fn each_top_level_item_of_crate<F>(cstore: &cstore::CStore,
cnum: ast::CrateNum,
callback: F) where
F: FnMut(decoder::DefLike, ast::Name, ast::Visibility),
{
let crate_data = cstore.get_crate_data(cnum);
let get_crate_data = |cnum| {
cstore.get_crate_data(cnum)
};
decoder::each_top_level_item_of_crate(cstore.intr.clone(),
&*crate_data,
get_crate_data,
callback)
}
pub fn get_item_path(tcx: &ty::ctxt, def: ast::DefId) -> Vec<ast_map::PathElem> {
let cstore = &tcx.sess.cstore;
let cdata = cstore.get_crate_data(def.krate);
let path = decoder::get_item_path(&*cdata, def.node);
cdata.with_local_path(|cpath| {
let mut r = Vec::with_capacity(cpath.len() + path.len());
r.push_all(cpath);
r.push_all(&path);
r
})
}
pub enum FoundAst<'ast> {
Found(&'ast ast::InlinedItem),
FoundParent(ast::DefId, &'ast ast::InlinedItem),
NotFound,
}
// Finds the AST for this item in the crate metadata, if any. If the item was
// not marked for inlining, then the AST will not be present and hence none
// will be returned.
pub fn maybe_get_item_ast<'tcx>(tcx: &ty::ctxt<'tcx>, def: ast::DefId,
decode_inlined_item: decoder::DecodeInlinedItem)
-> FoundAst<'tcx> {
let cstore = &tcx.sess.cstore;
let cdata = cstore.get_crate_data(def.krate);
decoder::maybe_get_item_ast(&*cdata, tcx, def.node, decode_inlined_item)
}
pub fn get_enum_variants<'tcx>(tcx: &ty::ctxt<'tcx>, def: ast::DefId)
-> Vec<Rc<ty::VariantInfo<'tcx>>> {
let cstore = &tcx.sess.cstore;
let cdata = cstore.get_crate_data(def.krate);
decoder::get_enum_variants(cstore.intr.clone(), &*cdata, def.node, tcx)
}
/// Returns information about the given implementation.
pub fn get_impl_items(cstore: &cstore::CStore, impl_def_id: ast::DefId)
-> Vec<ty::ImplOrTraitItemId> {
let cdata = cstore.get_crate_data(impl_def_id.krate);
decoder::get_impl_items(&*cdata, impl_def_id.node)
}
pub fn get_impl_or_trait_item<'tcx>(tcx: &ty::ctxt<'tcx>, def: ast::DefId)
-> ty::ImplOrTraitItem<'tcx> {
let cdata = tcx.sess.cstore.get_crate_data(def.krate);
decoder::get_impl_or_trait_item(tcx.sess.cstore.intr.clone(),
&*cdata,
def.node,
tcx)
}
pub fn get_trait_name(cstore: &cstore::CStore, def: ast::DefId) -> ast::Name {
let cdata = cstore.get_crate_data(def.krate);
decoder::get_trait_name(cstore.intr.clone(),
&*cdata,
def.node)
}
pub fn is_static_method(cstore: &cstore::CStore, def: ast::DefId) -> bool {
let cdata = cstore.get_crate_data(def.krate);
decoder::is_static_method(&*cdata, def.node)
}
pub fn get_trait_item_def_ids(cstore: &cstore::CStore, def: ast::DefId)
-> Vec<ty::ImplOrTraitItemId> {
let cdata = cstore.get_crate_data(def.krate);
decoder::get_trait_item_def_ids(&*cdata, def.node)
}
pub fn get_item_variances(cstore: &cstore::CStore,
def: ast::DefId) -> ty::ItemVariances {
let cdata = cstore.get_crate_data(def.krate);
decoder::get_item_variances(&*cdata, def.node)
}
pub fn get_provided_trait_methods<'tcx>(tcx: &ty::ctxt<'tcx>,
def: ast::DefId)
-> Vec<Rc<ty::Method<'tcx>>> {
let cstore = &tcx.sess.cstore;
let cdata = cstore.get_crate_data(def.krate);
decoder::get_provided_trait_methods(cstore.intr.clone(), &*cdata, def.node, tcx)
}
pub fn get_associated_consts<'tcx>(tcx: &ty::ctxt<'tcx>, def: ast::DefId)
-> Vec<Rc<ty::AssociatedConst<'tcx>>> {
let cstore = &tcx.sess.cstore;
let cdata = cstore.get_crate_data(def.krate);
decoder::get_associated_consts(cstore.intr.clone(), &*cdata, def.node, tcx)
}
pub fn get_type_name_if_impl(cstore: &cstore::CStore, def: ast::DefId)
-> Option<ast::Name> {
let cdata = cstore.get_crate_data(def.krate);
decoder::get_type_name_if_impl(&*cdata, def.node)
}
pub fn get_methods_if_impl(cstore: &cstore::CStore,
def: ast::DefId)
-> Option<Vec<MethodInfo> > {
let cdata = cstore.get_crate_data(def.krate);
decoder::get_methods_if_impl(cstore.intr.clone(), &*cdata, def.node)
}
pub fn get_item_attrs(cstore: &cstore::CStore,
def_id: ast::DefId)
-> Vec<ast::Attribute> {
let cdata = cstore.get_crate_data(def_id.krate);
decoder::get_item_attrs(&*cdata, def_id.node)
}
pub fn get_struct_fields(cstore: &cstore::CStore,
def: ast::DefId)
-> Vec<ty::FieldTy> {
let cdata = cstore.get_crate_data(def.krate);
decoder::get_struct_fields(cstore.intr.clone(), &*cdata, def.node)
}
pub fn get_struct_field_attrs(cstore: &cstore::CStore, def: ast::DefId) -> HashMap<ast::NodeId,
Vec<ast::Attribute>> {
let cdata = cstore.get_crate_data(def.krate);
decoder::get_struct_field_attrs(&*cdata)
}
pub fn get_type<'tcx>(tcx: &ty::ctxt<'tcx>,
def: ast::DefId)
-> ty::TypeScheme<'tcx> {
let cstore = &tcx.sess.cstore;
let cdata = cstore.get_crate_data(def.krate);
decoder::get_type(&*cdata, def.node, tcx)
}
pub fn get_trait_def<'tcx>(tcx: &ty::ctxt<'tcx>, def: ast::DefId) -> ty::TraitDef<'tcx> {
let cstore = &tcx.sess.cstore;
let cdata = cstore.get_crate_data(def.krate);
decoder::get_trait_def(&*cdata, def.node, tcx)
}
pub fn get_predicates<'tcx>(tcx: &ty::ctxt<'tcx>, def: ast::DefId)
-> ty::GenericPredicates<'tcx>
{
let cstore = &tcx.sess.cstore;
let cdata = cstore.get_crate_data(def.krate);
decoder::get_predicates(&*cdata, def.node, tcx)
}
pub fn get_super_predicates<'tcx>(tcx: &ty::ctxt<'tcx>, def: ast::DefId)
-> ty::GenericPredicates<'tcx>
{
let cstore = &tcx.sess.cstore;
let cdata = cstore.get_crate_data(def.krate);
decoder::get_super_predicates(&*cdata, def.node, tcx)
}
pub fn get_field_type<'tcx>(tcx: &ty::ctxt<'tcx>, class_id: ast::DefId,
def: ast::DefId) -> ty::TypeScheme<'tcx> {
let cstore = &tcx.sess.cstore;
let cdata = cstore.get_crate_data(class_id.krate);
let all_items = reader::get_doc(rbml::Doc::new(cdata.data()), tag_items);
let class_doc = expect(tcx.sess.diagnostic(),
decoder::maybe_find_item(class_id.node, all_items),
|| {
(format!("get_field_type: class ID {:?} not found",
class_id)).to_string()
});
let the_field = expect(tcx.sess.diagnostic(),
decoder::maybe_find_item(def.node, class_doc),
|| {
(format!("get_field_type: in class {:?}, field ID {:?} not found",
class_id,
def)).to_string()
});
let ty = decoder::item_type(def, the_field, tcx, &*cdata);
ty::TypeScheme {
generics: ty::Generics::empty(),
ty: ty,
}
}
pub fn get_impl_polarity<'tcx>(tcx: &ty::ctxt<'tcx>,
def: ast::DefId)
-> Option<ast::ImplPolarity>
{
let cstore = &tcx.sess.cstore;
let cdata = cstore.get_crate_data(def.krate);
decoder::get_impl_polarity(&*cdata, def.node)
}
pub fn get_custom_coerce_unsized_kind<'tcx>(tcx: &ty::ctxt<'tcx>,
def: ast::DefId)
-> Option<ty::CustomCoerceUnsized> {
let cstore = &tcx.sess.cstore;
let cdata = cstore.get_crate_data(def.krate);
decoder::get_custom_coerce_unsized_kind(&*cdata, def.node)
}
// Given a def_id for an impl, return the trait it implements,
// if there is one.
pub fn get_impl_trait<'tcx>(tcx: &ty::ctxt<'tcx>,
def: ast::DefId)
-> Option<ty::TraitRef<'tcx>> {
let cstore = &tcx.sess.cstore;
let cdata = cstore.get_crate_data(def.krate);
decoder::get_impl_trait(&*cdata, def.node, tcx)
}
pub fn get_native_libraries(cstore: &cstore::CStore, crate_num: ast::CrateNum)
-> Vec<(cstore::NativeLibraryKind, String)> {
let cdata = cstore.get_crate_data(crate_num);
decoder::get_native_libraries(&*cdata)
}
pub fn each_inherent_implementation_for_type<F>(cstore: &cstore::CStore,
def_id: ast::DefId,
callback: F) where
F: FnMut(ast::DefId),
{
let cdata = cstore.get_crate_data(def_id.krate);
decoder::each_inherent_implementation_for_type(&*cdata, def_id.node, callback)
}
pub fn each_implementation_for_trait<F>(cstore: &cstore::CStore,
def_id: ast::DefId,
mut callback: F) where
F: FnMut(ast::DefId),
{
cstore.iter_crate_data(|_, cdata| {
decoder::each_implementation_for_trait(cdata, def_id, &mut callback)
})
}
/// If the given def ID describes an item belonging to a trait (either a
/// default method or an implementation of a trait method), returns the ID of
/// the trait that the method belongs to. Otherwise, returns `None`.
pub fn get_trait_of_item(cstore: &cstore::CStore,
def_id: ast::DefId,
tcx: &ty::ctxt)
-> Option<ast::DefId> {
let cdata = cstore.get_crate_data(def_id.krate);
decoder::get_trait_of_item(&*cdata, def_id.node, tcx)
}
pub fn get_tuple_struct_definition_if_ctor(cstore: &cstore::CStore,
def_id: ast::DefId)
-> Option<ast::DefId>
{
let cdata = cstore.get_crate_data(def_id.krate);
decoder::get_tuple_struct_definition_if_ctor(&*cdata, def_id.node)
}
pub fn get_dylib_dependency_formats(cstore: &cstore::CStore,
cnum: ast::CrateNum)
-> Vec<(ast::CrateNum, cstore::LinkagePreference)>
{
let cdata = cstore.get_crate_data(cnum);
decoder::get_dylib_dependency_formats(&*cdata)
}
pub fn get_missing_lang_items(cstore: &cstore::CStore, cnum: ast::CrateNum)
-> Vec<lang_items::LangItem>
{
let cdata = cstore.get_crate_data(cnum);
decoder::get_missing_lang_items(&*cdata)
}
pub fn get_method_arg_names(cstore: &cstore::CStore, did: ast::DefId)
-> Vec<String>
{
let cdata = cstore.get_crate_data(did.krate);
decoder::get_method_arg_names(&*cdata, did.node)
}
pub fn get_reachable_extern_fns(cstore: &cstore::CStore, cnum: ast::CrateNum)
-> Vec<ast::DefId>
{
let cdata = cstore.get_crate_data(cnum);
decoder::get_reachable_extern_fns(&*cdata)
}
pub fn is_typedef(cstore: &cstore::CStore, did: ast::DefId) -> bool {
let cdata = cstore.get_crate_data(did.krate);
decoder::is_typedef(&*cdata, did.node)
}
pub fn is_const_fn(cstore: &cstore::CStore, did: ast::DefId) -> bool {
let cdata = cstore.get_crate_data(did.krate);
decoder::is_const_fn(&*cdata, did.node)
}
pub fn is_impl(cstore: &cstore::CStore, did: ast::DefId) -> bool {
let cdata = cstore.get_crate_data(did.krate);
decoder::is_impl(&*cdata, did.node)
}
pub fn get_stability(cstore: &cstore::CStore,
def: ast::DefId)
-> Option<attr::Stability> {
let cdata = cstore.get_crate_data(def.krate);
decoder::get_stability(&*cdata, def.node)
}
pub fn is_staged_api(cstore: &cstore::CStore, krate: ast::CrateNum) -> bool {
let cdata = cstore.get_crate_data(krate);
let attrs = decoder::get_crate_attributes(cdata.data());
for attr in &attrs {
if &attr.name()[..] == "staged_api" {
match attr.node.value.node { ast::MetaWord(_) => return true, _ => (/*pass*/) }
}
}
return false;
}
<|fim▁hole|> let cdata = cstore.get_crate_data(def.krate);
decoder::get_repr_attrs(&*cdata, def.node)
}
pub fn is_defaulted_trait(cstore: &cstore::CStore, trait_def_id: ast::DefId) -> bool {
let cdata = cstore.get_crate_data(trait_def_id.krate);
decoder::is_defaulted_trait(&*cdata, trait_def_id.node)
}
pub fn is_default_impl(cstore: &cstore::CStore, impl_did: ast::DefId) -> bool {
let cdata = cstore.get_crate_data(impl_did.krate);
decoder::is_default_impl(&*cdata, impl_did.node)
}<|fim▁end|> | pub fn get_repr_attrs(cstore: &cstore::CStore, def: ast::DefId)
-> Vec<attr::ReprAttr> { |
<|file_name|>message_dialogs.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# message_dialogs.py
# misc Gtk.MessageDialogs
#
# Copyright (C) 2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Vojtech Trefny <[email protected]>
#
#------------------------------------------------------------------------------#
import os
import gettext
from gi.repository import Gtk, GdkPixbuf
#------------------------------------------------------------------------------#
_ = lambda x: gettext.ldgettext("blivet-gui", x)
#------------------------------------------------------------------------------#
def locate_ui_file(filename):
""" Locate Glade ui files
"""
path = [os.path.split(os.path.abspath(__file__))[0] + '/../../data/ui/',
'/usr/share/blivet-gui/ui/']
for folder in path:
filepath = folder + filename
if os.access(filepath, os.R_OK):
return filepath
raise RuntimeError("Unable to find glade file %s" % filename)
#------------------------------------------------------------------------------#
class WarningDialog(object):
""" Basic warning dialog
"""
def __init__(self, parent_window, msg):
builder = Gtk.Builder()
builder.add_from_file(locate_ui_file('warning_dialog.ui'))
dialog = builder.get_object("warning_dialog")
dialog.set_transient_for(parent_window)
dialog.format_secondary_text(msg)
dialog.show_all()
dialog.run()
dialog.destroy()
class ErrorDialog(object):
""" Basic error dialog
"""
def __init__(self, parent_window, msg):
builder = Gtk.Builder()
builder.add_from_file(locate_ui_file('error_dialog.ui'))
dialog = builder.get_object("error_dialog")
dialog.set_transient_for(parent_window)
dialog.format_secondary_text(msg)
dialog.show_all()
dialog.run()
dialog.destroy()
class InfoDialog(object):
""" Basic error dialog
"""
def __init__(self, parent_window, msg):
builder = Gtk.Builder()
builder.add_from_file(locate_ui_file('info_dialog.ui'))
dialog = builder.get_object("info_dialog")
dialog.set_transient_for(parent_window)
dialog.format_secondary_text(msg)
dialog.show_all()
dialog.run()
dialog.destroy()
class ExceptionDialog(object):
""" Error dialog with traceback
"""
def __init__(self, parent_window, msg, traceback):
builder = Gtk.Builder()
builder.add_from_file(locate_ui_file('exception_dialog.ui'))
dialog = builder.get_object("exception_dialog")
dialog.set_transient_for(parent_window)
dialog.format_secondary_text(msg)
exception_label = builder.get_object("exception_label")
exception_label.set_text(traceback)
dialog.show_all()
dialog.run()
dialog.destroy()
class ConfirmDialog(object):
""" General confirmation dialog
"""
def __init__(self, parent_window, title, msg):
builder = Gtk.Builder()
builder.add_from_file(locate_ui_file('confirm_dialog.ui'))
self.dialog = builder.get_object("confirm_dialog")
self.dialog.set_transient_for(parent_window)
self.dialog.set_markup("<b>" + title + "</b>")
self.dialog.format_secondary_text(msg)
self.dialog.show_all()
def run(self):
""" Run the dialog
"""
response = self.dialog.run()
self.dialog.destroy()
return response == Gtk.ResponseType.OK
<|fim▁hole|> def __init__(self, parent_window, title, msg, actions):
self.actions = actions
builder = Gtk.Builder()
builder.add_from_file(locate_ui_file('confirm_actions_dialog.ui'))
self.dialog = builder.get_object("confirm_actions_dialog")
self.dialog.set_transient_for(parent_window)
self.dialog.set_markup("<b>" + title + "</b>")
self.dialog.format_secondary_text(msg)
scrolledwindow = builder.get_object("scrolledwindow")
self.treeview, self.selection_signal = self.show_actions(scrolledwindow)
self.dialog.show_all()
width = self.treeview.size_request().width
height = self.treeview.size_request().height
win_width = int(parent_window.get_allocated_width()*0.60)
win_height = int(parent_window.get_allocated_height()*0.60)
if width < win_width and height < win_height:
scrolledwindow.set_policy(Gtk.PolicyType.NEVER,
Gtk.PolicyType.NEVER)
elif width < win_width and height >= win_height:
scrolledwindow.set_size_request(width, win_height)
scrolledwindow.set_policy(Gtk.PolicyType.NEVER,
Gtk.PolicyType.AUTOMATIC)
elif width >= win_width and height < win_height:
scrolledwindow.set_policy(Gtk.PolicyType.AUTOMATIC,
Gtk.PolicyType.NEVER)
else:
scrolledwindow.set_size_request(win_width, win_height)
scrolledwindow.set_policy(Gtk.PolicyType.AUTOMATIC,
Gtk.PolicyType.AUTOMATIC)
def show_actions(self, scrolledwindow):
""" Show list of pending actions
"""
icon_theme = Gtk.IconTheme.get_default()
icon_add = Gtk.IconTheme.load_icon(icon_theme, "list-add", 16, 0)
icon_delete = Gtk.IconTheme.load_icon(icon_theme, "edit-delete", 16, 0)
icon_edit = Gtk.IconTheme.load_icon(icon_theme, "edit-select-all", 16, 0)
actions_list = Gtk.ListStore(GdkPixbuf.Pixbuf, str)
for action in self.actions:
if action.isDestroy or action.isRemove:
actions_list.append([icon_delete, str(action)])
elif action.isAdd or action.isCreate:
actions_list.append([icon_add, str(action)])
else:
actions_list.append([icon_edit, str(action)])
treeview = Gtk.TreeView(model=actions_list)
treeview.set_headers_visible(False)
treeview.set_vexpand(True)
treeview.set_hexpand(True)
selection = treeview.get_selection()
selection_signal = selection.connect("changed", self.on_action_clicked)
renderer_pixbuf = Gtk.CellRendererPixbuf()
column_pixbuf = Gtk.TreeViewColumn(None, renderer_pixbuf, pixbuf=0)
treeview.append_column(column_pixbuf)
renderer_text = Gtk.CellRendererText()
column_text = Gtk.TreeViewColumn(None, renderer_text, text=1)
treeview.append_column(column_text)
scrolledwindow.add(treeview)
return treeview, selection_signal
def on_action_clicked(self, selection):
""" Onclick action for treeview
"""
model, treeiter = selection.get_selected()
if treeiter and model:
selection.handler_block(self.selection_signal)
selection.unselect_iter(treeiter)
selection.handler_unblock(self.selection_signal)
def run(self):
""" Run the dialog
"""
response = self.dialog.run()
self.dialog.destroy()
return response == Gtk.ResponseType.OK<|fim▁end|> | class ConfirmActionsDialog(object):
""" Confirm execute actions
"""
|
<|file_name|>0011_auto_20170831_1618.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-08-31 14:18
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('checkout', '0010_auto_20170614_1434'),
]
operations = [
migrations.DeleteModel(
name='Payment',
),<|fim▁hole|> ),
]<|fim▁end|> | migrations.AlterField(
model_name='orderitem',
name='product',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='order_items', to='products.Product'), |
<|file_name|>ui.scroll_view.js<|end_file_name|><|fim▁begin|>/**
* DevExtreme (ui/scroll_view/ui.scroll_view.js)
* Version: 16.2.6
* Build date: Tue Mar 28 2017
*
* Copyright (c) 2012 - 2017 Developer Express Inc. ALL RIGHTS RESERVED
* EULA: https://www.devexpress.com/Support/EULAs/DevExtreme.xml
*/
"use strict";
var $ = require("jquery"),
devices = require("../../core/devices"),
messageLocalization = require("../../localization/message"),
registerComponent = require("../../core/component_registrator"),
PullDownStrategy = require("./ui.scroll_view.native.pull_down"),
SwipeDownStrategy = require("./ui.scroll_view.native.swipe_down"),
SlideDownStrategy = require("./ui.scroll_view.native.slide_down"),
SimulatedStrategy = require("./ui.scroll_view.simulated"),
Scrollable = require("./ui.scrollable"),
LoadIndicator = require("../load_indicator"),
config = require("../../core/config"),
LoadPanel = require("../load_panel");
var SCROLLVIEW_CLASS = "dx-scrollview",
SCROLLVIEW_CONTENT_CLASS = SCROLLVIEW_CLASS + "-content",
SCROLLVIEW_TOP_POCKET_CLASS = SCROLLVIEW_CLASS + "-top-pocket",
SCROLLVIEW_BOTTOM_POCKET_CLASS = SCROLLVIEW_CLASS + "-bottom-pocket",
SCROLLVIEW_PULLDOWN_CLASS = SCROLLVIEW_CLASS + "-pull-down",
SCROLLVIEW_REACHBOTTOM_CLASS = SCROLLVIEW_CLASS + "-scrollbottom",
SCROLLVIEW_REACHBOTTOM_INDICATOR_CLASS = SCROLLVIEW_REACHBOTTOM_CLASS + "-indicator",
SCROLLVIEW_REACHBOTTOM_TEXT_CLASS = SCROLLVIEW_REACHBOTTOM_CLASS + "-text",
SCROLLVIEW_LOADPANEL = SCROLLVIEW_CLASS + "-loadpanel";
var refreshStrategies = {
pullDown: PullDownStrategy,
swipeDown: SwipeDownStrategy,
slideDown: SlideDownStrategy,
simulated: SimulatedStrategy
};
var ScrollView = Scrollable.inherit({
_getDefaultOptions: function() {
return $.extend(this.callBase(), {
pullingDownText: messageLocalization.format("dxScrollView-pullingDownText"),
pulledDownText: messageLocalization.format("dxScrollView-pulledDownText"),
refreshingText: messageLocalization.format("dxScrollView-refreshingText"),
reachBottomText: messageLocalization.format("dxScrollView-reachBottomText"),
onPullDown: null,
onReachBottom: null,
refreshStrategy: "pullDown"
})
},
_defaultOptionsRules: function() {
return this.callBase().concat([{
device: function() {
var realDevice = devices.real();
return "android" === realDevice.platform
},
options: {
refreshStrategy: "swipeDown"
}
}, {
device: function() {
return "win" === devices.real().platform
},
options: {
refreshStrategy: "slideDown"
}
}])
},
_init: function() {
this.callBase();
this._loadingIndicatorEnabled = true
},
_initMarkup: function() {
this.callBase();
this.element().addClass(SCROLLVIEW_CLASS);
this._initContent();
this._initTopPocket();
this._initBottomPocket();
this._initLoadPanel()
},
_initContent: function() {
var $content = $("<div>").addClass(SCROLLVIEW_CONTENT_CLASS);
this._$content.wrapInner($content)
},
_initTopPocket: function() {
var $topPocket = this._$topPocket = $("<div>").addClass(SCROLLVIEW_TOP_POCKET_CLASS),
$pullDown = this._$pullDown = $("<div>").addClass(SCROLLVIEW_PULLDOWN_CLASS);
$topPocket.append($pullDown);
this._$content.prepend($topPocket)
},
_initBottomPocket: function() {
var $bottomPocket = this._$bottomPocket = $("<div>").addClass(SCROLLVIEW_BOTTOM_POCKET_CLASS),
$reachBottom = this._$reachBottom = $("<div>").addClass(SCROLLVIEW_REACHBOTTOM_CLASS),
$loadContainer = $("<div>").addClass(SCROLLVIEW_REACHBOTTOM_INDICATOR_CLASS),
$loadIndicator = new LoadIndicator($("<div>")).element(),
$text = this._$reachBottomText = $("<div>").addClass(SCROLLVIEW_REACHBOTTOM_TEXT_CLASS);
this._updateReachBottomText();
$reachBottom.append($loadContainer.append($loadIndicator)).append($text);
$bottomPocket.append($reachBottom);
this._$content.append($bottomPocket)
},
_initLoadPanel: function() {
this._loadPanel = this._createComponent($("<div>").addClass(SCROLLVIEW_LOADPANEL).appendTo(this.element()), LoadPanel, {
shading: false,
delay: 400,
message: this.option("refreshingText"),
position: { of: this.element()
}
})
},
_updateReachBottomText: function() {
this._$reachBottomText.text(this.option("reachBottomText"))
},
_createStrategy: function() {
var strategyName = this.option("useNative") ? this.option("refreshStrategy") : "simulated";
var strategyClass = refreshStrategies[strategyName];
if (!strategyClass) {
throw Error("E1030", this.option("refreshStrategy"))
}
this._strategy = new strategyClass(this);
this._strategy.pullDownCallbacks.add($.proxy(this._pullDownHandler, this));
this._strategy.releaseCallbacks.add($.proxy(this._releaseHandler, this));
this._strategy.reachBottomCallbacks.add($.proxy(this._reachBottomHandler, this))
},
_createActions: function() {
this.callBase();
this._pullDownAction = this._createActionByOption("onPullDown");
this._reachBottomAction = this._createActionByOption("onReachBottom");
this._refreshPocketState()
},
_refreshPocketState: function() {
this._pullDownEnable(this.hasActionSubscription("onPullDown") && !config().designMode);
this._reachBottomEnable(this.hasActionSubscription("onReachBottom") && !config().designMode)
},
on: function(eventName) {
var result = this.callBase.apply(this, arguments);
if ("pullDown" === eventName || "reachBottom" === eventName) {
this._refreshPocketState()
}
return result
},
_pullDownEnable: function(enabled) {
if (0 === arguments.length) {
return this._pullDownEnabled
}
this._$pullDown.toggle(enabled);
this._strategy.pullDownEnable(enabled);
this._pullDownEnabled = enabled
},
_reachBottomEnable: function(enabled) {
if (0 === arguments.length) {
return this._reachBottomEnabled
}
this._$reachBottom.toggle(enabled);
this._strategy.reachBottomEnable(enabled);
this._reachBottomEnabled = enabled
},
_pullDownHandler: function() {
this._loadingIndicator(false);
this._pullDownLoading()
},
_loadingIndicator: function(value) {
if (arguments.length < 1) {
return this._loadingIndicatorEnabled
}
this._loadingIndicatorEnabled = value
},
_pullDownLoading: function() {
this.startLoading();
this._pullDownAction()
},
_reachBottomHandler: function() {
this._loadingIndicator(false);
this._reachBottomLoading()
},
_reachBottomLoading: function() {
this.startLoading();
this._reachBottomAction()
},
_releaseHandler: function() {
this.finishLoading();
this._loadingIndicator(true)
},
_optionChanged: function(args) {
switch (args.name) {
case "onPullDown":
case "onReachBottom":
this._createActions();
break;
case "pullingDownText":
case "pulledDownText":
case "refreshingText":
<|fim▁hole|> case "reachBottomText":
this._updateReachBottomText();
break;
default:
this.callBase(args)
}
},
isEmpty: function() {
return !this.content().children().length
},
content: function() {
return this._$content.children().eq(1)
},
release: function(preventReachBottom) {
if (void 0 !== preventReachBottom) {
this.toggleLoading(!preventReachBottom)
}
return this._strategy.release()
},
toggleLoading: function(showOrHide) {
this._reachBottomEnable(showOrHide)
},
isFull: function() {
return this.content().height() > this._$container.height()
},
refresh: function() {
if (!this.hasActionSubscription("onPullDown")) {
return
}
this._strategy.pendingRelease();
this._pullDownLoading()
},
startLoading: function() {
if (this._loadingIndicator() && this.element().is(":visible")) {
this._loadPanel.show()
}
this._lock()
},
finishLoading: function() {
this._loadPanel.hide();
this._unlock()
},
_dispose: function() {
this._strategy.dispose();
this.callBase();
if (this._loadPanel) {
this._loadPanel.element().remove()
}
}
});
registerComponent("dxScrollView", ScrollView);
module.exports = ScrollView;<|fim▁end|> | case "refreshStrategy":
this._invalidate();
break;
|
<|file_name|>routerless.module.d.ts<|end_file_name|><|fim▁begin|>import { ModuleWithProviders } from '@angular/core';<|fim▁hole|>import { Angulartics2Settings } from 'angulartics2';
export declare class Angulartics2RouterlessModule {
static forRoot(settings?: Partial<Angulartics2Settings>): ModuleWithProviders;
}<|fim▁end|> | |
<|file_name|>addressbook_upgrade_from_1_to_2.py<|end_file_name|><|fim▁begin|># -*- test-case-name: txdav.common.datastore.upgrade.sql.test -*-
# #
# Copyright (c) 2011-2015 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# #
from twext.enterprise.dal.syntax import Update
from twisted.internet.defer import inlineCallbacks
from txdav.base.propertystore.base import PropertyName
from txdav.common.datastore.sql_tables import _ABO_KIND_GROUP, schema
from txdav.common.datastore.upgrade.sql.upgrades.util import updateAddressBookDataVersion, \
doToEachHomeNotAtVersion, removeProperty, cleanPropertyStore, \
logUpgradeStatus
from txdav.xml import element
"""
AddressBook Data upgrade from database version 1 to 2
"""
UPGRADE_TO_VERSION = 2
@inlineCallbacks
def doUpgrade(sqlStore):
"""
fill in members tables and increment data version
"""
yield populateMemberTables(sqlStore)
yield removeResourceType(sqlStore)
# bump data version
yield updateAddressBookDataVersion(sqlStore, UPGRADE_TO_VERSION)
@inlineCallbacks
def populateMemberTables(sqlStore):
"""
Set the group kind and and members tables
"""
@inlineCallbacks
def doIt(txn, homeResourceID):<|fim▁hole|> """
KIND is set to person by schema upgrade.
To upgrade MEMBERS and FOREIGN_MEMBERS:
1. Set group KIND (avoids assert)
2. Write groups. Write logic will fill in MEMBERS and FOREIGN_MEMBERS
(Remember that all members resource IDs must already be in the address book).
"""
home = yield txn.addressbookHomeWithResourceID(homeResourceID)
abObjectResources = yield home.addressbook().objectResources()
for abObject in abObjectResources:
component = yield abObject.component()
lcResourceKind = component.resourceKind().lower() if component.resourceKind() else component.resourceKind()
if lcResourceKind == "group":
# update kind
abo = schema.ADDRESSBOOK_OBJECT
yield Update(
{abo.KIND: _ABO_KIND_GROUP},
Where=abo.RESOURCE_ID == abObject._resourceID,
).on(txn)
abObject._kind = _ABO_KIND_GROUP
# update rest
yield abObject.setComponent(component)
logUpgradeStatus("Starting Addressbook Populate Members")
# Do this to each calendar home not already at version 2
yield doToEachHomeNotAtVersion(sqlStore, schema.ADDRESSBOOK_HOME, UPGRADE_TO_VERSION, doIt, "Populate Members")
@inlineCallbacks
def removeResourceType(sqlStore):
logUpgradeStatus("Starting Addressbook Remove Resource Type")
sqlTxn = sqlStore.newTransaction(label="addressbook_upgrade_from_1_to_2.removeResourceType")
yield removeProperty(sqlTxn, PropertyName.fromElement(element.ResourceType))
yield sqlTxn.commit()
yield cleanPropertyStore()
logUpgradeStatus("End Addressbook Remove Resource Type")<|fim▁end|> | |
<|file_name|>Observable.java<|end_file_name|><|fim▁begin|>package com.ebrightmoon.doraemonkit.ui.widget.tableview.listener;
import java.util.ArrayList;
import java.util.List;
public abstract class Observable<T> {
public final ArrayList<T> observables = new ArrayList<>();
/** AttachObserver (register an observer instance)
**/
public void register(T observer){
if(observer==null) throw new NullPointerException();
synchronized(observables){
if(!observables.contains(observer)){
observables.add(observer);
}
}
}
/** UnattachObserver (unregister an observer)
**/
public void unRegister(T observer){
if(observer==null) throw new NullPointerException();
if(observables.contains(observer)){
observables.remove(observer);
}
}
public void unRegisterAll(){
synchronized(observables){<|fim▁hole|> }
}
/** Return the number of registered observers */
public int countObservers(){
synchronized(observables){
return observables.size();
}
}
/**
* notify all observers (to be implemented by subclasses)
*/
public abstract void notifyObservers(List<T> observers);
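// Illustrative sketch (not part of the original file): a concrete subclass only has to
// decide how an update is delivered; registration bookkeeping is inherited. The names
// TableDataObservable and OnDataChangedListener below are invented for this example.
//
// class TableDataObservable extends Observable<OnDataChangedListener> {
//     @Override
//     public void notifyObservers(List<OnDataChangedListener> observers) {
//         for (OnDataChangedListener listener : observers) {
//             listener.onDataChanged();
//         }
//     }
//     public void fireDataChanged() {
//         // pass a snapshot so a listener may unRegister() itself from the callback
//         notifyObservers(new ArrayList<>(observables));
//     }
// }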
}<|fim▁end|> | observables.clear(); |
<|file_name|>spinners-routing.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import { SpinnersComponent } from './spinners.component';
const routes: Routes = [
{
path: '',
component: SpinnersComponent,
data: {
title: 'Spinners'
}
}
];
<|fim▁hole|> imports: [RouterModule.forChild(routes)],
exports: [RouterModule]
})
export class SpinnersRoutingModule {}<|fim▁end|> | @NgModule({ |
<|file_name|>JobStatusDisplay.py<|end_file_name|><|fim▁begin|># Copyright 1999-2013 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from __future__ import unicode_literals
import formatter<|fim▁hole|>
import portage
from portage import os
from portage import _encodings
from portage import _unicode_encode
from portage.output import xtermTitle
from _emerge.getloadavg import getloadavg
if sys.hexversion >= 0x3000000:
basestring = str
class JobStatusDisplay(object):
_bound_properties = ("curval", "failed", "running")
# Don't update the display unless at least this much
# time has passed, in units of seconds.
_min_display_latency = 2
_default_term_codes = {
'cr' : '\r',
'el' : '\x1b[K',
'nel' : '\n',
}
_termcap_name_map = {
'carriage_return' : 'cr',
'clr_eol' : 'el',
'newline' : 'nel',
}
def __init__(self, quiet=False, xterm_titles=True):
object.__setattr__(self, "quiet", quiet)
object.__setattr__(self, "xterm_titles", xterm_titles)
object.__setattr__(self, "maxval", 0)
object.__setattr__(self, "merges", 0)
object.__setattr__(self, "_changed", False)
object.__setattr__(self, "_displayed", False)
object.__setattr__(self, "_last_display_time", 0)
self.reset()
isatty = os.environ.get('TERM') != 'dumb' and \
hasattr(self.out, 'isatty') and \
self.out.isatty()
object.__setattr__(self, "_isatty", isatty)
if not isatty or not self._init_term():
term_codes = {}
for k, capname in self._termcap_name_map.items():
term_codes[k] = self._default_term_codes[capname]
object.__setattr__(self, "_term_codes", term_codes)
encoding = sys.getdefaultencoding()
for k, v in self._term_codes.items():
if not isinstance(v, basestring):
self._term_codes[k] = v.decode(encoding, 'replace')
if self._isatty:
width = portage.output.get_term_size()[1]
else:
width = 80
self._set_width(width)
def _set_width(self, width):
if width == getattr(self, 'width', None):
return
if width <= 0 or width > 80:
width = 80
object.__setattr__(self, "width", width)
object.__setattr__(self, "_jobs_column_width", width - 32)
@property
def out(self):
"""Use a lazy reference to sys.stdout, in case the API consumer has
temporarily overridden stdout."""
return sys.stdout
def _write(self, s):
# avoid potential UnicodeEncodeError
s = _unicode_encode(s,
encoding=_encodings['stdio'], errors='backslashreplace')
out = self.out
if sys.hexversion >= 0x3000000:
out = out.buffer
out.write(s)
out.flush()
def _init_term(self):
"""
Initialize term control codes.
@rtype: bool
@return: True if term codes were successfully initialized,
False otherwise.
"""
term_type = os.environ.get("TERM", "").strip()
if not term_type:
return False
tigetstr = None
try:
import curses
try:
curses.setupterm(term_type, self.out.fileno())
tigetstr = curses.tigetstr
except curses.error:
pass
except ImportError:
pass
if tigetstr is None:
return False
term_codes = {}
for k, capname in self._termcap_name_map.items():
# Use _native_string for PyPy compat (bug #470258).
code = tigetstr(portage._native_string(capname))
if code is None:
code = self._default_term_codes[capname]
term_codes[k] = code
object.__setattr__(self, "_term_codes", term_codes)
return True
def _format_msg(self, msg):
return ">>> %s" % msg
def _erase(self):
self._write(
self._term_codes['carriage_return'] + \
self._term_codes['clr_eol'])
self._displayed = False
def _display(self, line):
self._write(line)
self._displayed = True
def _update(self, msg):
if not self._isatty:
self._write(self._format_msg(msg) + self._term_codes['newline'])
self._displayed = True
return
if self._displayed:
self._erase()
self._display(self._format_msg(msg))
def displayMessage(self, msg):
was_displayed = self._displayed
if self._isatty and self._displayed:
self._erase()
self._write(self._format_msg(msg) + self._term_codes['newline'])
self._displayed = False
if was_displayed:
self._changed = True
self.display()
def reset(self):
self.maxval = 0
self.merges = 0
for name in self._bound_properties:
object.__setattr__(self, name, 0)
if self._displayed:
self._write(self._term_codes['newline'])
self._displayed = False
def __setattr__(self, name, value):
old_value = getattr(self, name)
if value == old_value:
return
object.__setattr__(self, name, value)
if name in self._bound_properties:
self._property_change(name, old_value, value)
def _property_change(self, name, old_value, new_value):
self._changed = True
self.display()
def _load_avg_str(self):
try:
avg = getloadavg()
except OSError:
return 'unknown'
max_avg = max(avg)
if max_avg < 10:
digits = 2
elif max_avg < 100:
digits = 1
else:
digits = 0
return ", ".join(("%%.%df" % digits ) % x for x in avg)
def display(self):
"""
Display status on stdout, but only if something has
changed since the last call. This always returns True,
for continuous scheduling via timeout_add.
"""
if self.quiet:
return True
current_time = time.time()
time_delta = current_time - self._last_display_time
if self._displayed and \
not self._changed:
if not self._isatty:
return True
if time_delta < self._min_display_latency:
return True
self._last_display_time = current_time
self._changed = False
self._display_status()
return True
def _display_status(self):
# Don't use len(self._completed_tasks) here since that also
# can include uninstall tasks.
curval_str = "%s" % (self.curval,)
maxval_str = "%s" % (self.maxval,)
running_str = "%s" % (self.running,)
failed_str = "%s" % (self.failed,)
load_avg_str = self._load_avg_str()
color_output = io.StringIO()
plain_output = io.StringIO()
style_file = portage.output.ConsoleStyleFile(color_output)
style_file.write_listener = plain_output
style_writer = portage.output.StyleWriter(file=style_file, maxcol=9999)
style_writer.style_listener = style_file.new_styles
f = formatter.AbstractFormatter(style_writer)
number_style = "INFORM"
f.add_literal_data("Jobs: ")
f.push_style(number_style)
f.add_literal_data(curval_str)
f.pop_style()
f.add_literal_data(" of ")
f.push_style(number_style)
f.add_literal_data(maxval_str)
f.pop_style()
f.add_literal_data(" complete")
if self.running:
f.add_literal_data(", ")
f.push_style(number_style)
f.add_literal_data(running_str)
f.pop_style()
f.add_literal_data(" running")
if self.failed:
f.add_literal_data(", ")
f.push_style(number_style)
f.add_literal_data(failed_str)
f.pop_style()
f.add_literal_data(" failed")
padding = self._jobs_column_width - len(plain_output.getvalue())
if padding > 0:
f.add_literal_data(padding * " ")
f.add_literal_data("Load avg: ")
f.add_literal_data(load_avg_str)
# Truncate to fit width, to avoid making the terminal scroll if the
# line overflows (happens when the load average is large).
plain_output = plain_output.getvalue()
if self._isatty and len(plain_output) > self.width:
# Use plain_output here since it's easier to truncate
# properly than the color output which contains console
# color codes.
self._update(plain_output[:self.width])
else:
self._update(color_output.getvalue())
if self.xterm_titles:
# If the HOSTNAME variable is exported, include it
# in the xterm title, just like emergelog() does.
# See bug #390699.
title_str = " ".join(plain_output.split())
hostname = os.environ.get("HOSTNAME")
if hostname is not None:
title_str = "%s: %s" % (hostname, title_str)
xtermTitle(title_str)<|fim▁end|> | import io
import sys
import time |
<|file_name|>Tag.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <aws/states/model/Tag.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
namespace Aws
{
namespace SFN
{
namespace Model
{
Tag::Tag() :
m_keyHasBeenSet(false),
m_valueHasBeenSet(false)
{
}
Tag::Tag(JsonView jsonValue) :
m_keyHasBeenSet(false),
m_valueHasBeenSet(false)
{
*this = jsonValue;
}
Tag& Tag::operator =(JsonView jsonValue)
{
if(jsonValue.ValueExists("key"))
{
m_key = jsonValue.GetString("key");
m_keyHasBeenSet = true;
}
if(jsonValue.ValueExists("value"))
{
m_value = jsonValue.GetString("value");
m_valueHasBeenSet = true;
}
return *this;
}
JsonValue Tag::Jsonize() const
{
JsonValue payload;
if(m_keyHasBeenSet)
{
payload.WithString("key", m_key);
}
if(m_valueHasBeenSet)
{
payload.WithString("value", m_value);
}
<|fim▁hole|>} // namespace Model
} // namespace SFN
} // namespace Aws<|fim▁end|> | return payload;
}
|
<|file_name|>WikiSearch.java<|end_file_name|><|fim▁begin|>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.power.text.Run;
import static com.power.text.dialogs.WebSearch.squerry;
import java.awt.Desktop;
import java.awt.Toolkit;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.StringSelection;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import javax.swing.JOptionPane;
import static com.power.text.Main.searchbox;
/**
*
* @author thecarisma
*/
public class WikiSearch {
<|fim▁hole|> searchqueryw = searchqueryw.replace(' ', '-');
String squeryw = squerry.getText();
squeryw = squeryw.replace(' ', '-');
if ("".equals(searchqueryw)){
searchqueryw = squeryw ;
} else {}
String url = "https://www.wikipedia.org/wiki/" + searchqueryw ;
try
{
URI uri = new URL(url).toURI();
Desktop desktop = Desktop.isDesktopSupported() ? Desktop.getDesktop() : null;
if (desktop != null && desktop.isSupported(Desktop.Action.BROWSE))
desktop.browse(uri);
}
catch (URISyntaxException | IOException e)
{
/*
* I know this is bad practice
* but we don't want to do anything clever for a specific error
*/
JOptionPane.showMessageDialog(null, e.getMessage());
// Copy URL to the clipboard so the user can paste it into their browser
StringSelection stringSelection = new StringSelection(url);
Clipboard clpbrd = Toolkit.getDefaultToolkit().getSystemClipboard();
clpbrd.setContents(stringSelection, null);
// Notify the user of the failure
System.out.println("This program just tried to open a webpage." + "\n"
+ "The URL has been copied to your clipboard, simply paste into your browser to accessWebpage: " + url);
}
}
}<|fim▁end|> |
public static void wikisearch(){
String searchqueryw = searchbox.getText();
|
<|file_name|>test_ordering.py<|end_file_name|><|fim▁begin|># pylint: disable=missing-docstring
from django.contrib.auth import get_user_model
from django.contrib.auth.models import AnonymousUser<|fim▁hole|>
from guardian.shortcuts import assign_perm
from rest_framework.test import APITestCase
from resolwe.flow.models import Process
class ProcessOrderingTest(APITestCase):
def setUp(self):
super().setUp()
user_model = get_user_model()
user = user_model.objects.create(username='user')
self.proc_1 = Process.objects.create(name='My process', contributor=user, version=1)
self.proc_2 = Process.objects.create(name='My process', contributor=user, version=2)
assign_perm('view_process', AnonymousUser(), self.proc_1)
assign_perm('view_process', AnonymousUser(), self.proc_2)
self.url = reverse('resolwe-api:process-list')
def test_ordering_version(self):
# pylint: disable=no-member
response = self.client.get(self.url, {'ordering': 'version'}, format='json')
self.assertEqual(response.data[0]['id'], self.proc_1.id)
self.assertEqual(response.data[1]['id'], self.proc_2.id)
response = self.client.get(self.url, {'ordering': '-version'}, format='json')
self.assertEqual(response.data[0]['id'], self.proc_2.id)
self.assertEqual(response.data[1]['id'], self.proc_1.id)<|fim▁end|> | from django.core.urlresolvers import reverse |
<|file_name|>compile_info.py<|end_file_name|><|fim▁begin|>from telecommand import Telecommand
class GetCompileInfoTelecommand(Telecommand):
def __init__(self):
Telecommand.__init__(self)
def apid(self):
return 0x27
def payload(self):
<|fim▁hole|><|fim▁end|> | return [] |
<|file_name|>BootStrapJmxServlet.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package net.gcolin.jmx.console.embedded;
import net.gcolin.jmx.console.JmxHtml;
import net.gcolin.jmx.console.JmxProcessException;
import net.gcolin.jmx.console.JmxResult;
import net.gcolin.jmx.console.JmxTool;
import java.io.IOException;
import java.io.Writer;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* A Jmx servlet with bootstrap style.
*
* @author Gael COLIN
*
*/
public class BootStrapJmxServlet extends HttpServlet {
private static final long serialVersionUID = 7998004606230901933L;
protected transient JmxTool tool;
protected transient JmxHtml html;
@Override
public void init() throws ServletException {
tool = new JmxTool();
html = new JmxHtml() {
protected String getButtonCss() {
return "btn btn-primary";
}
protected String getInputTextClass() {
return "form-control";
}
protected String getFormClass() {
return "form-inline";
}
protected String getSelectClass() {
return "form-control";
}
protected String getMenuUlClass() {
return "menu";
}
@Override
protected String getTableClass() {
return "table table-bordered";
}
protected void writeCss(Writer writer) throws IOException {
writer.write("<link href=\"/css/bootstrap.min.css\" rel=\"stylesheet\" />");
writer.write("<link href=\"/css/bootstrap-theme.min.css\" rel=\"stylesheet\" />");
writer.write("<style type='text/css'>.menu li.active>a{font-weight:bold}"
+ ".col{display:table-cell}.space{padding-left:16px;}</style>");
}
};
}
@SuppressWarnings("unchecked")
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
Map<String, String> parameters = new HashMap<String, String>();
for (Object elt : req.getParameterMap().entrySet()) {
Entry<String, String[]> entry = (Entry<String, String[]>) elt;
parameters.put(entry.getKey(), entry.getValue()[0]);
}
JmxResult result;
try {
result = tool.build(parameters);
} catch (JmxProcessException ex) {
throw new ServletException(ex);
}
result.setRequestUri(req.getRequestURI());
result.setQueryParams(req.getQueryString());
resp.setContentType("text/html");
html.write(result, parameters, resp.getWriter());
}
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp)
<|fim▁hole|> }
}<|fim▁end|> | throws ServletException, IOException {
doGet(req, resp);
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# Ingenieria ADHOC - ADHOC SA
# https://launchpad.net/~ingenieria-adhoc
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import waybill
import wizard
import travel
import vehicle
import requirement
import res_partner
import waybill_expense
import account_invoice
<|fim▁hole|># vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|> | |
<|file_name|>Ares.cpp<|end_file_name|><|fim▁begin|>/*
_<|fim▁hole|>*/
#include "Ares.h"
/*
Description:
Function that is called when the library is loaded, use this as an entry point.
*/
int __attribute__((constructor)) Ares() {
SDL2::SetupSwapWindow();
return 0;
}<|fim▁end|> | /_\ _ _ ___ ___
/ _ \| '_/ -_|_-<
/_/ \_\_| \___/__/
|
<|file_name|>access_manager.py<|end_file_name|><|fim▁begin|>import uuid
import time
from hashlib import md5
from ..json_store import LoginTokenStore
__author__ = 'bromix'
class AccessManager(object):
def __init__(self, context):
self._settings = context.get_settings()
self._jstore = LoginTokenStore()
self._json = self._jstore.get_data()
self._user = self._json['access_manager'].get('current_user', '0')
self._last_origin = self._json['access_manager'].get('last_origin', 'plugin.video.youtube')
def get_current_user_id(self):
"""
:return: uuid of the current user
"""
self._json = self._jstore.get_data()
return self._json['access_manager']['users'][self.get_user()]['id']
def get_new_user(self, user_name='', addon_id=''):
"""
:param user_name: string, the user's name
:param addon_id: string, addon id
:return: a new user dict
"""
uuids = list()
new_uuid = uuid.uuid4().hex
for k in list(self._json['access_manager']['users'].keys()):
user_uuid = self._json['access_manager']['users'][k].get('id')
if user_uuid:
uuids.append(user_uuid)
while new_uuid in uuids:
new_uuid = uuid.uuid4().hex
return {'access_token': '', 'refresh_token': '', 'token_expires': -1, 'last_key_hash': '',
'name': user_name, 'id': new_uuid, 'watch_later': ' WL', 'watch_history': 'HL'}
def get_users(self):
"""
Returns users
:return: users
"""
return self._json['access_manager'].get('users', {})
def set_users(self, users):
"""
Updates the users
:param users: dict, users
:return:
"""
self._json = self._jstore.get_data()
self._json['access_manager']['users'] = users
self._jstore.save(self._json)
def set_user(self, user, switch_to=False):
"""
Updates the user
:param user: string, username
:param switch_to: boolean, change current user
:return:
"""
self._user = user
if switch_to:
self._json = self._jstore.get_data()
self._json['access_manager']['current_user'] = user
self._jstore.save(self._json)
def get_user(self):
"""
Returns the current user
:return: user
"""
return self._user
def get_watch_later_id(self):
"""
Returns the current user's watch later playlist id
:return: the current user's watch later playlist id
"""
self._json = self._jstore.get_data()
current_playlist_id = self._json['access_manager']['users'].get(self._user, {}).get('watch_later', ' WL')
settings_playlist_id = self._settings.get_string('youtube.folder.watch_later.playlist', '').strip()
if settings_playlist_id and (current_playlist_id != settings_playlist_id):
self._json['access_manager']['users'][self._user]['watch_later'] = settings_playlist_id
self._jstore.save(self._json)
self._settings.set_string('youtube.folder.watch_later.playlist', '')
return self._json['access_manager']['users'].get(self._user, {}).get('watch_later', ' WL')
def set_watch_later_id(self, playlist_id):
"""
Sets the current user's watch later playlist id
:param playlist_id: string, watch later playlist id
:return:
"""
self._json = self._jstore.get_data()
self._json['access_manager']['users'][self._user]['watch_later'] = playlist_id
self._settings.set_string('youtube.folder.watch_later.playlist', '')
self._jstore.save(self._json)
def get_watch_history_id(self):
"""
Returns the current users watch history playlist id<|fim▁hole|> :return: the current users watch history playlist id
"""
self._json = self._jstore.get_data()
current_playlist_id = self._json['access_manager']['users'].get(self._user, {}).get('watch_history', 'HL')
settings_playlist_id = self._settings.get_string('youtube.folder.history.playlist', '').strip()
if settings_playlist_id and (current_playlist_id != settings_playlist_id):
self._json['access_manager']['users'][self._user]['watch_history'] = settings_playlist_id
self._jstore.save(self._json)
self._settings.set_string('youtube.folder.history.playlist', '')
return self._json['access_manager']['users'].get(self._user, {}).get('watch_history', 'HL')
def set_watch_history_id(self, playlist_id):
"""
Sets the current user's watch history playlist id
:param playlist_id: string, watch history playlist id
:return:
"""
self._json = self._jstore.get_data()
self._json['access_manager']['users'][self._user]['watch_history'] = playlist_id
self._settings.set_string('youtube.folder.history.playlist', '')
self._jstore.save(self._json)
def set_last_origin(self, origin):
"""
Updates the last origin
:param origin: string, origin
:return:
"""
self._last_origin = origin
self._json = self._jstore.get_data()
self._json['access_manager']['last_origin'] = origin
self._jstore.save(self._json)
def get_last_origin(self):
"""
Returns the last origin
:return:
"""
return self._last_origin
def get_access_token(self):
"""
Returns the access token for the current user
:return: access_token
"""
self._json = self._jstore.get_data()
return self._json['access_manager']['users'].get(self._user, {}).get('access_token', '')
def get_refresh_token(self):
"""
Returns the refresh token
:return: refresh token
"""
self._json = self._jstore.get_data()
return self._json['access_manager']['users'].get(self._user, {}).get('refresh_token', '')
def has_refresh_token(self):
return self.get_refresh_token() != ''
def is_access_token_expired(self):
"""
Returns True if the access_token is expired, otherwise False.
If no expiration date was provided and an access_token exists,
this method will always return False
:return:
"""
self._json = self._jstore.get_data()
access_token = self._json['access_manager']['users'].get(self._user, {}).get('access_token', '')
expires = int(self._json['access_manager']['users'].get(self._user, {}).get('token_expires', -1))
# with no access_token it must be expired
if not access_token:
return True
# in this case no expiration date was set
if expires == -1:
return False
now = int(time.time())
return expires <= now
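# Illustrative sketch (not part of the add-on code): how the expiry check above
# behaves for a hypothetical token issued with a 3600 second lifetime.
# issued_at = int(time.time())
# access_manager.update_access_token('<access token>', unix_timestamp=issued_at + 3600)
# access_manager.is_access_token_expired() # False until issued_at + 3600 <= time.time()
# Here `access_manager` stands for an instance of this class; the token value is a placeholder.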
def update_access_token(self, access_token, unix_timestamp=None, refresh_token=None):
"""
Updates the old access token with the new one.
:param access_token:
:param unix_timestamp:
:param refresh_token:
:return:
"""
self._json = self._jstore.get_data()
self._json['access_manager']['users'][self._user]['access_token'] = access_token
if unix_timestamp is not None:
self._json['access_manager']['users'][self._user]['token_expires'] = int(unix_timestamp)
if refresh_token is not None:
self._json['access_manager']['users'][self._user]['refresh_token'] = refresh_token
self._jstore.save(self._json)
def get_new_developer(self, addon_id):
"""
:param addon_id: string, addon id
:return: a new developer dict
"""
return {'access_token': '', 'refresh_token': '', 'token_expires': -1, 'last_key_hash': ''}
def get_developers(self):
"""
Returns developers
:return: dict, developers
"""
return self._json['access_manager'].get('developers', {})
def set_developers(self, developers):
"""
Updates the developers
:param developers: dict, developers
:return:
"""
self._json = self._jstore.get_data()
self._json['access_manager']['developers'] = developers
self._jstore.save(self._json)
def get_dev_access_token(self, addon_id):
"""
Returns the access token for the given developer addon
:param addon_id: addon id
:return: access_token
"""
self._json = self._jstore.get_data()
return self._json['access_manager']['developers'].get(addon_id, {}).get('access_token', '')
def get_dev_refresh_token(self, addon_id):
"""
Returns the refresh token
:return: refresh token
"""
self._json = self._jstore.get_data()
return self._json['access_manager']['developers'].get(addon_id, {}).get('refresh_token', '')
def developer_has_refresh_token(self, addon_id):
return self.get_dev_refresh_token(addon_id) != ''
def is_dev_access_token_expired(self, addon_id):
"""
Returns True if the access_token is expired, otherwise False.
If no expiration date was provided and an access_token exists,
this method will always return False
:return:
"""
self._json = self._jstore.get_data()
access_token = self._json['access_manager']['developers'].get(addon_id, {}).get('access_token', '')
expires = int(self._json['access_manager']['developers'].get(addon_id, {}).get('token_expires', -1))
# with no access_token it must be expired
if not access_token:
return True
# in this case no expiration date was set
if expires == -1:
return False
now = int(time.time())
return expires <= now
def update_dev_access_token(self, addon_id, access_token, unix_timestamp=None, refresh_token=None):
"""
Updates the old access token with the new one.
:param addon_id:
:param access_token:
:param unix_timestamp:
:param refresh_token:
:return:
"""
self._json = self._jstore.get_data()
self._json['access_manager']['developers'][addon_id]['access_token'] = access_token
if unix_timestamp is not None:
self._json['access_manager']['developers'][addon_id]['token_expires'] = int(unix_timestamp)
if refresh_token is not None:
self._json['access_manager']['developers'][addon_id]['refresh_token'] = refresh_token
self._jstore.save(self._json)
def get_dev_last_key_hash(self, addon_id):
self._json = self._jstore.get_data()
return self._json['access_manager']['developers'][addon_id]['last_key_hash']
def set_dev_last_key_hash(self, addon_id, key_hash):
self._json = self._jstore.get_data()
self._json['access_manager']['developers'][addon_id]['last_key_hash'] = key_hash
self._jstore.save(self._json)
def dev_keys_changed(self, addon_id, api_key, client_id, client_secret):
self._json = self._jstore.get_data()
last_hash = self._json['access_manager']['developers'][addon_id]['last_key_hash']
current_hash = self.__calc_key_hash(api_key, client_id, client_secret)
if not last_hash and current_hash:
self.set_dev_last_key_hash(addon_id, current_hash)
return False
if last_hash != current_hash:
self.set_dev_last_key_hash(addon_id, current_hash)
return True
else:
return False
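# Illustrative sketch (hypothetical addon id, keys passed in from elsewhere): the
# hash comparison above lets a developer add-on notice that its API credentials
# changed between runs and react, e.g. by clearing its cached tokens.
# if access_manager.dev_keys_changed('plugin.example.dev', api_key, client_id, client_secret):
#     access_manager.update_dev_access_token('plugin.example.dev', '')
# Here `access_manager` stands for an instance of this class.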
@staticmethod
def __calc_key_hash(api_key, client_id, client_secret):
m = md5()
try:
m.update(api_key.encode('utf-8'))
m.update(client_id.encode('utf-8'))
m.update(client_secret.encode('utf-8'))
except:
m.update(api_key)
m.update(client_id)
m.update(client_secret)
return m.hexdigest()<|fim▁end|> | |
<|file_name|>test_thread.py<|end_file_name|><|fim▁begin|>"""TestCases for multi-threaded access to a DB.
"""
import os
import sys
import time
import errno
from random import random
DASH = '-'
try:
WindowsError
except NameError:
class WindowsError(Exception):
pass
import unittest
from test_all import db, dbutils, test_support, verbose, have_threads, \
get_new_environment_path, get_new_database_path
if have_threads :
from threading import Thread
import sys
if sys.version_info[0] < 3 :
from threading import currentThread
else :
from threading import current_thread as currentThread
#----------------------------------------------------------------------
class BaseThreadedTestCase(unittest.TestCase):
dbtype = db.DB_UNKNOWN # must be set in derived class
dbopenflags = 0
dbsetflags = 0
envflags = 0
import sys
if sys.version_info[:3] < (2, 4, 0):
def assertTrue(self, expr, msg=None):
self.failUnless(expr,msg=msg)
def setUp(self):
if verbose:
dbutils._deadlock_VerboseFile = sys.stdout
self.homeDir = get_new_environment_path()
self.env = db.DBEnv()
self.setEnvOpts()
self.env.open(self.homeDir, self.envflags | db.DB_CREATE)
self.filename = self.__class__.__name__ + '.db'
self.d = db.DB(self.env)
if self.dbsetflags:
self.d.set_flags(self.dbsetflags)
self.d.open(self.filename, self.dbtype, self.dbopenflags|db.DB_CREATE)
def tearDown(self):
self.d.close()
self.env.close()
test_support.rmtree(self.homeDir)
def setEnvOpts(self):
pass
def makeData(self, key):
return DASH.join([key] * 5)
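# For example, makeData('0001') returns '0001-0001-0001-0001-0001'; the reader
# threads below rebuild this value from the key to verify each record.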
#----------------------------------------------------------------------
class ConcurrentDataStoreBase(BaseThreadedTestCase):
dbopenflags = db.DB_THREAD
envflags = db.DB_THREAD | db.DB_INIT_CDB | db.DB_INIT_MPOOL
readers = 0 # derived class should set
writers = 0
records = 1000
def test01_1WriterMultiReaders(self):
if verbose:
print '\n', '-=' * 30
print "Running %s.test01_1WriterMultiReaders..." % \
self.__class__.__name__
keys=range(self.records)
import random
random.shuffle(keys)
records_per_writer=self.records//self.writers
readers_per_writer=self.readers//self.writers
self.assertEqual(self.records,self.writers*records_per_writer)
self.assertEqual(self.readers,self.writers*readers_per_writer)
self.assertTrue((records_per_writer%readers_per_writer)==0)
readers = []
for x in xrange(self.readers):
rt = Thread(target = self.readerThread,
args = (self.d, x),
name = 'reader %d' % x,
)#verbose = verbose)
import sys
if sys.version_info[0] < 3 :
rt.setDaemon(True)
else :
rt.daemon = True
readers.append(rt)
writers=[]
for x in xrange(self.writers):
a=keys[records_per_writer*x:records_per_writer*(x+1)]
a.sort() # Generate conflicts
b=readers[readers_per_writer*x:readers_per_writer*(x+1)]
wt = Thread(target = self.writerThread,
args = (self.d, a, b),
name = 'writer %d' % x,
)#verbose = verbose)
writers.append(wt)
for t in writers:
import sys
if sys.version_info[0] < 3 :
t.setDaemon(True)
else :
t.daemon = True
t.start()
for t in writers:
t.join()
for t in readers:
t.join()
def writerThread(self, d, keys, readers):
import sys
if sys.version_info[0] < 3 :
name = currentThread().getName()
else :
name = currentThread().name
if verbose:
print "%s: creating records %d - %d" % (name, start, stop)
count=len(keys)//len(readers)
count2=count
for x in keys :
key = '%04d' % x
dbutils.DeadlockWrap(d.put, key, self.makeData(key),
max_retries=12)
if verbose and x % 100 == 0:
print "%s: records %d - %d finished" % (name, start, x)
count2-=1
if not count2 :
readers.pop().start()
count2=count
if verbose:
print "%s: finished creating records" % name
if verbose:
print "%s: thread finished" % name
def readerThread(self, d, readerNum):
import sys
<|fim▁hole|> name = currentThread().getName()
else :
name = currentThread().name
for i in xrange(5) :
c = d.cursor()
count = 0
rec = c.first()
while rec:
count += 1
key, data = rec
self.assertEqual(self.makeData(key), data)
rec = c.next()
if verbose:
print "%s: found %d records" % (name, count)
c.close()
if verbose:
print "%s: thread finished" % name
class BTreeConcurrentDataStore(ConcurrentDataStoreBase):
dbtype = db.DB_BTREE
writers = 2
readers = 10
records = 1000
class HashConcurrentDataStore(ConcurrentDataStoreBase):
dbtype = db.DB_HASH
writers = 2
readers = 10
records = 1000
#----------------------------------------------------------------------
class SimpleThreadedBase(BaseThreadedTestCase):
dbopenflags = db.DB_THREAD
envflags = db.DB_THREAD | db.DB_INIT_MPOOL | db.DB_INIT_LOCK
readers = 10
writers = 2
records = 1000
def setEnvOpts(self):
self.env.set_lk_detect(db.DB_LOCK_DEFAULT)
def test02_SimpleLocks(self):
if verbose:
print '\n', '-=' * 30
print "Running %s.test02_SimpleLocks..." % self.__class__.__name__
keys=range(self.records)
import random
random.shuffle(keys)
records_per_writer=self.records//self.writers
readers_per_writer=self.readers//self.writers
self.assertEqual(self.records,self.writers*records_per_writer)
self.assertEqual(self.readers,self.writers*readers_per_writer)
self.assertTrue((records_per_writer%readers_per_writer)==0)
readers = []
for x in xrange(self.readers):
rt = Thread(target = self.readerThread,
args = (self.d, x),
name = 'reader %d' % x,
)#verbose = verbose)
import sys
if sys.version_info[0] < 3 :
rt.setDaemon(True)
else :
rt.daemon = True
readers.append(rt)
writers = []
for x in xrange(self.writers):
a=keys[records_per_writer*x:records_per_writer*(x+1)]
a.sort() # Generate conflicts
b=readers[readers_per_writer*x:readers_per_writer*(x+1)]
wt = Thread(target = self.writerThread,
args = (self.d, a, b),
name = 'writer %d' % x,
)#verbose = verbose)
writers.append(wt)
for t in writers:
import sys
if sys.version_info[0] < 3 :
t.setDaemon(True)
else :
t.daemon = True
t.start()
for t in writers:
t.join()
for t in readers:
t.join()
def writerThread(self, d, keys, readers):
import sys
if sys.version_info[0] < 3 :
name = currentThread().getName()
else :
name = currentThread().name
if verbose:
print "%s: creating records %d - %d" % (name, start, stop)
count=len(keys)//len(readers)
count2=count
for x in keys :
key = '%04d' % x
dbutils.DeadlockWrap(d.put, key, self.makeData(key),
max_retries=12)
if verbose and x % 100 == 0:
print "%s: records %d - %d finished" % (name, start, x)
count2-=1
if not count2 :
readers.pop().start()
count2=count
if verbose:
print "%s: thread finished" % name
def readerThread(self, d, readerNum):
import sys
if sys.version_info[0] < 3 :
name = currentThread().getName()
else :
name = currentThread().name
c = d.cursor()
count = 0
rec = dbutils.DeadlockWrap(c.first, max_retries=10)
while rec:
count += 1
key, data = rec
self.assertEqual(self.makeData(key), data)
rec = dbutils.DeadlockWrap(c.next, max_retries=10)
if verbose:
print "%s: found %d records" % (name, count)
c.close()
if verbose:
print "%s: thread finished" % name
class BTreeSimpleThreaded(SimpleThreadedBase):
dbtype = db.DB_BTREE
class HashSimpleThreaded(SimpleThreadedBase):
dbtype = db.DB_HASH
#----------------------------------------------------------------------
class ThreadedTransactionsBase(BaseThreadedTestCase):
dbopenflags = db.DB_THREAD | db.DB_AUTO_COMMIT
envflags = (db.DB_THREAD |
db.DB_INIT_MPOOL |
db.DB_INIT_LOCK |
db.DB_INIT_LOG |
db.DB_INIT_TXN
)
readers = 0
writers = 0
records = 2000
txnFlag = 0
def setEnvOpts(self):
#self.env.set_lk_detect(db.DB_LOCK_DEFAULT)
pass
def test03_ThreadedTransactions(self):
if verbose:
print '\n', '-=' * 30
print "Running %s.test03_ThreadedTransactions..." % \
self.__class__.__name__
keys=range(self.records)
import random
random.shuffle(keys)
records_per_writer=self.records//self.writers
readers_per_writer=self.readers//self.writers
self.assertEqual(self.records,self.writers*records_per_writer)
self.assertEqual(self.readers,self.writers*readers_per_writer)
self.assertTrue((records_per_writer%readers_per_writer)==0)
readers=[]
for x in xrange(self.readers):
rt = Thread(target = self.readerThread,
args = (self.d, x),
name = 'reader %d' % x,
)#verbose = verbose)
import sys
if sys.version_info[0] < 3 :
rt.setDaemon(True)
else :
rt.daemon = True
readers.append(rt)
writers = []
for x in xrange(self.writers):
a=keys[records_per_writer*x:records_per_writer*(x+1)]
b=readers[readers_per_writer*x:readers_per_writer*(x+1)]
wt = Thread(target = self.writerThread,
args = (self.d, a, b),
name = 'writer %d' % x,
)#verbose = verbose)
writers.append(wt)
dt = Thread(target = self.deadlockThread)
import sys
if sys.version_info[0] < 3 :
dt.setDaemon(True)
else :
dt.daemon = True
dt.start()
for t in writers:
import sys
if sys.version_info[0] < 3 :
t.setDaemon(True)
else :
t.daemon = True
t.start()
for t in writers:
t.join()
for t in readers:
t.join()
self.doLockDetect = False
dt.join()
def writerThread(self, d, keys, readers):
import sys
if sys.version_info[0] < 3 :
name = currentThread().getName()
else :
name = currentThread().name
count=len(keys)//len(readers)
while len(keys):
try:
txn = self.env.txn_begin(None, self.txnFlag)
keys2=keys[:count]
for x in keys2 :
key = '%04d' % x
d.put(key, self.makeData(key), txn)
if verbose and x % 100 == 0:
print "%s: records %d - %d finished" % (name, start, x)
txn.commit()
keys=keys[count:]
readers.pop().start()
except (db.DBLockDeadlockError, db.DBLockNotGrantedError), val:
if verbose:
print "%s: Aborting transaction (%s)" % (name, val.args[1])
txn.abort()
if verbose:
print "%s: thread finished" % name
def readerThread(self, d, readerNum):
import sys
if sys.version_info[0] < 3 :
name = currentThread().getName()
else :
name = currentThread().name
finished = False
while not finished:
try:
txn = self.env.txn_begin(None, self.txnFlag)
c = d.cursor(txn)
count = 0
rec = c.first()
while rec:
count += 1
key, data = rec
self.assertEqual(self.makeData(key), data)
rec = c.next()
if verbose: print "%s: found %d records" % (name, count)
c.close()
txn.commit()
finished = True
except (db.DBLockDeadlockError, db.DBLockNotGrantedError), val:
if verbose:
print "%s: Aborting transaction (%s)" % (name, val.args[1])
c.close()
txn.abort()
if verbose:
print "%s: thread finished" % name
def deadlockThread(self):
self.doLockDetect = True
while self.doLockDetect:
time.sleep(0.05)
try:
aborted = self.env.lock_detect(
db.DB_LOCK_RANDOM, db.DB_LOCK_CONFLICT)
if verbose and aborted:
print "deadlock: Aborted %d deadlocked transaction(s)" \
% aborted
except db.DBError:
pass
class BTreeThreadedTransactions(ThreadedTransactionsBase):
dbtype = db.DB_BTREE
writers = 2
readers = 10
records = 1000
class HashThreadedTransactions(ThreadedTransactionsBase):
dbtype = db.DB_HASH
writers = 2
readers = 10
records = 1000
class BTreeThreadedNoWaitTransactions(ThreadedTransactionsBase):
dbtype = db.DB_BTREE
writers = 2
readers = 10
records = 1000
txnFlag = db.DB_TXN_NOWAIT
class HashThreadedNoWaitTransactions(ThreadedTransactionsBase):
dbtype = db.DB_HASH
writers = 2
readers = 10
records = 1000
txnFlag = db.DB_TXN_NOWAIT
#----------------------------------------------------------------------
def test_suite():
suite = unittest.TestSuite()
if have_threads:
suite.addTest(unittest.makeSuite(BTreeConcurrentDataStore))
suite.addTest(unittest.makeSuite(HashConcurrentDataStore))
suite.addTest(unittest.makeSuite(BTreeSimpleThreaded))
suite.addTest(unittest.makeSuite(HashSimpleThreaded))
suite.addTest(unittest.makeSuite(BTreeThreadedTransactions))
suite.addTest(unittest.makeSuite(HashThreadedTransactions))
suite.addTest(unittest.makeSuite(BTreeThreadedNoWaitTransactions))
suite.addTest(unittest.makeSuite(HashThreadedNoWaitTransactions))
else:
print "Threads not available, skipping thread tests."
return suite
if __name__ == '__main__':
unittest.main(defaultTest='test_suite')<|fim▁end|> | if sys.version_info[0] < 3 :
|
<|file_name|>Opcode8030Handler.cpp<|end_file_name|><|fim▁begin|>#include "../../VM/Handler/Opcode8030Handler.h"
#include "../../VM/Script.h"
namespace Falltergeist
{
namespace VM
{
namespace Handler
{
Opcode8030::Opcode8030(VM::Script *script, std::shared_ptr<ILogger> logger) : OpcodeHandler(script)
{
this->logger = std::move(logger);
}
void Opcode8030::_run()
{
logger->debug() << "[8030] [*] op_while(address, condition)" << std::endl;
auto condition = _script->dataStack()->popLogical();
if (!condition) {<|fim▁hole|> }
}<|fim▁end|> | _script->setProgramCounter(_script->dataStack()->popInteger());
}
}
} |
<|file_name|>links.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
__author__ = 'Patrick Michl'
__email__ = '[email protected]'
__license__ = 'GPLv3'
import nemoa
import numpy
class Links:
"""Class to unify common ann link attributes."""
params = {}
def __init__(self): pass
@staticmethod
def energy(dSrc, dTgt, src, tgt, links, calc = 'mean'):
"""Return link energy as numpy array."""
if src['class'] == 'gauss':
M = - links['A'] * links['W'] \
/ numpy.sqrt(numpy.exp(src['lvar'])).T<|fim▁hole|> else: raise ValueError('unsupported unit class')
return numpy.einsum('ij,ik,jk->ijk', dSrc, dTgt, M)
@staticmethod
def get_updates(data, model):
"""Return weight updates of a link layer."""
D = numpy.dot(data[0].T, data[1]) / float(data[1].size)
M = numpy.dot(model[0].T, model[1]) / float(data[1].size)
return { 'W': D - M }
@staticmethod
def get_updates_delta(data, delta):
return { 'W': -numpy.dot(data.T, delta) / float(data.size) }<|fim▁end|> | elif src['class'] == 'sigmoid':
M = - links['A'] * links['W'] |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>MODULE_DESCRIPTION = "Web pages"<|fim▁end|> | |
<|file_name|>timeline_demo.rs<|end_file_name|><|fim▁begin|>use nannou::prelude::*;
use nannou::ui::prelude::*;
use nannou_timeline as timeline;
use pitch_calc as pitch;
use std::iter::once;
use time_calc as time;
use timeline::track::automation::{BangValue as Bang, Envelope, Point, ToggleValue as Toggle};
use timeline::track::piano_roll;
use timeline::{bars, track};
const BPM: time::calc::Bpm = 140.0;
const ONE_SECOND_MS: time::calc::Ms = 1_000.0;
const PPQN: time::Ppqn = 9600;
const WIDTH: u32 = 800;
const HEIGHT: u32 = 600;
fn main() {
nannou::app(model).update(update).run();
}
struct Model {
_window: window::Id,
ui: Ui,
ids: Ids,
timeline_data: TimelineData,
playing: bool,
}
// Implement the Serialize and Deserialize traits only if the serde feature is enabled.
#[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))]
struct TimelineData {
playhead_ticks: time::Ticks,
bars: Vec<time::TimeSig>,
notes: Vec<piano_roll::Note>,
tempo_envelope: track::automation::numeric::Envelope<f32>,
octave_envelope: track::automation::numeric::Envelope<i32>,
toggle_envelope: track::automation::toggle::Envelope,
bang_envelope: track::automation::bang::Envelope,
}
// Create all of our unique `WidgetId`s with the `widget_ids!` macro.
widget_ids! {
struct Ids {
window,
ruler,
timeline,
}
}
fn model(app: &App) -> Model {
let _window = app
.new_window()
.key_pressed(key_pressed)
.size(WIDTH, HEIGHT)
.title("Timeline Demo")
.view(view)
.build()
.unwrap();
// Create the UI.
let mut ui = app.new_ui().build().unwrap();
let ids = Ids::new(ui.widget_id_generator());
// Start the playhead at the beginning.
let playhead_ticks = time::Ticks::from(0);
// A sequence of bars with varying time signatures.
let bars = vec![
time::TimeSig { top: 4, bottom: 4 },
time::TimeSig { top: 4, bottom: 4 },
time::TimeSig { top: 6, bottom: 8 },
time::TimeSig { top: 6, bottom: 8 },
time::TimeSig { top: 4, bottom: 4 },
time::TimeSig { top: 4, bottom: 4 },
time::TimeSig { top: 7, bottom: 8 },
time::TimeSig { top: 7, bottom: 8 },
];
let notes = bars::WithStarts::new(bars.iter().cloned(), PPQN)
.enumerate()
.map(|(i, (time_sig, start))| {
let end = start + time_sig.ticks_per_bar(PPQN);
let period = timeline::Period { start, end };
let pitch = pitch::Step((24 + (i * 5) % 12) as f32).to_letter_octave();
piano_roll::Note { period, pitch }
})
.collect();
let tempo_envelope = {
let start = Point {
ticks: time::Ticks(0),
value: 20.0,
};
let points = bars::Periods::new(bars.iter().cloned(), PPQN)
.enumerate()
.map(|(i, period)| Point {
ticks: period.end,
value: 20.0 + (i + 1) as f32 * 60.0 % 220.0,
});
Envelope::from_points(once(start).chain(points), 20.0, 240.0)
};
let octave_envelope = {
let start = Point {
ticks: time::Ticks(0),
value: 0,
};
let points = bars::WithStarts::new(bars.iter().cloned(), PPQN)
.enumerate()
.flat_map(|(i, (ts, mut start))| {
let bar_end = start + ts.ticks_per_bar(PPQN);
let mut j = 0;
std::iter::from_fn(move || {
if start >= bar_end {
return None;
}
let end = start + time::Ticks(PPQN as _);
let end = if end > bar_end { bar_end } else { end };
let point = Point {
ticks: end,
value: 1 + ((i as i32 + j as i32) * 3) % 12,
};
start = end;
j += 1;
Some(point)
})
});
Envelope::from_points(once(start).chain(points), 0, 12)
};
let toggle_envelope = {
let start = Point {
ticks: time::Ticks(0),
value: Toggle(random()),
};
let points = bars::Periods::new(bars.iter().cloned(), PPQN).map(|period| Point {
ticks: period.end,
value: Toggle(random()),
});
Envelope::from_points(once(start).chain(points), Toggle(false), Toggle(true))
};
let bang_envelope = {
let points = bars::Periods::new(bars.iter().cloned(), PPQN).map(|period| Point {
ticks: period.start,
value: Bang,
});
Envelope::from_points(points, Bang, Bang)
};
let timeline_data = TimelineData {
playhead_ticks,
bars,
notes,
tempo_envelope,
octave_envelope,
toggle_envelope,
bang_envelope,
};
Model {
_window,
ui,
ids,
timeline_data,
playing: false,
}
}
fn update(_app: &App, model: &mut Model, update: Update) {
let Model {
ids,
ui,
timeline_data,
playing,
..
} = model;
// Update the user interface.
set_widgets(&mut ui.set_widgets(), ids, timeline_data);
// Get the current bpm from the tempo_envelope automation track.
use timeline::track::automation::EnvelopeTrait; // needed to use the .y(Ticks) method on the envelope
let tempo_value = timeline_data.tempo_envelope.y(timeline_data.playhead_ticks);
let current_bpm = tempo_value.unwrap_or(BPM as f32) as f64;
// Update the playhead.
let delta_secs = if *playing {
update.since_last.secs()
} else {
0.0
};
let delta_ticks = time::Ms(delta_secs * ONE_SECOND_MS).to_ticks(current_bpm, PPQN);
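// Worked example with illustrative numbers: at 120 BPM one beat lasts 500 ms,
// so 0.25 s since the last update is half a beat, i.e. 0.5 * PPQN = 4800 ticks
// with the PPQN of 9600 used in this demo.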
let total_duration_ticks =
timeline::bars_duration_ticks(timeline_data.bars.iter().cloned(), PPQN);
let previous_playhead_ticks = timeline_data.playhead_ticks.clone();
timeline_data.playhead_ticks =
(timeline_data.playhead_ticks + delta_ticks) % total_duration_ticks;
// Check if a bang in the bang_envelope has banged.
for bang_point in timeline_data.bang_envelope.points() {
if bang_point.ticks > previous_playhead_ticks
&& bang_point.ticks <= timeline_data.playhead_ticks
{
println!("BANG!");
}
}
// Check if a note is playing
for note in &timeline_data.notes {
if timeline_data.playhead_ticks >= note.period.start
&& timeline_data.playhead_ticks < note.period.end
{
println!("Note playing: {:?}", note.pitch);
}
}
}
fn view(app: &App, model: &Model, frame: Frame) {
model.ui.draw_to_frame(app, &frame).unwrap();
}
// Update / draw the Ui.
fn set_widgets(ui: &mut UiCell, ids: &Ids, data: &mut TimelineData) {
use timeline::Timeline;
// Main window canvas.
widget::Canvas::new()
.border(0.0)
.color(ui::color::DARK_CHARCOAL.alpha(0.5))
.set(ids.window, ui);
let TimelineData {
playhead_ticks,
bars,
notes,
tempo_envelope,
octave_envelope,
toggle_envelope,
bang_envelope,
} = data;
let ticks = playhead_ticks.clone();
let color = ui::color::LIGHT_BLUE;
////////////////////
///// TIMELINE /////
////////////////////
//
// Set the `Timeline` widget.
//
// This returns a context on which we can begin setting our tracks, playhead and scrollbar.
//
// The context is used in three stages:
//
// 1. `PinnedTracks` for setting tracks that should be pinned to the top of the timeline.
// 2. `Tracks` for setting regular tracks.
// 3. `Final` for setting the `Playhead` and `Scrollbar` widgets after all tracks are set.
let context = Timeline::new(bars.iter().cloned(), PPQN)
.playhead(ticks)
.color(color)
.wh_of(ids.window)
.middle_of(ids.window)
.border(1.0)
.border_color(ui::color::CHARCOAL)
.set(ids.timeline, ui);
/////////////////////////
///// PINNED TRACKS /////
/////////////////////////
//
// Pin the ruler track to the top of the timeline.
//
// All pinned tracks must be `set` prior to non-pinned tracks.
{
let ruler = track::Ruler::new(context.ruler, &context.bars, PPQN).color(color);
let track = context.set_next_pinned_track(ruler, ui);
for triggered in track.event {
*playhead_ticks = triggered.ticks;
}
}
//////////////////
///// TRACKS /////
//////////////////
// Now that we've finished setting the pinned tracks, move on to the `Tracks` context.
let context = context.start_tracks(ui);
{<|fim▁hole|> for event in track.event {
use timeline::track::piano_roll::Event;
match event {
Event::NoteOn(_note_idx) => (),
Event::NoteOff(_note_idx) => (),
Event::NotePlayed(_note_idx) => (),
}
}
// A macro for common logic between tempo and octave "numeric" envelopes.
macro_rules! numeric_automation {
($envelope:expr) => {
let track = {
let automation =
track::automation::Numeric::new(&context.bars, PPQN, $envelope)
.color(color);
context.set_next_track(automation, ui)
};
for event in track.event {
use timeline::track::automation::numeric::Event;
match event {
Event::Interpolate(number) => println!("{}", number),
Event::Mutate(mutate) => mutate.apply($envelope),
}
}
};
}
// Tempo automation.
numeric_automation!(tempo_envelope);
// Octave automation.
numeric_automation!(octave_envelope);
// Toggle automation.
let track = {
let automation =
track::automation::Toggle::new(&context.bars, PPQN, toggle_envelope).color(color);
context.set_next_track(automation, ui)
};
for event in track.event {
use timeline::track::automation::toggle::Event;
match event {
Event::Interpolate(_toggle) => (),
Event::SwitchTo(_toggle) => (),
Event::Mutate(mutate) => mutate.apply(toggle_envelope),
}
}
// Bang automation.
let track = {
let automation =
track::automation::Bang::new(&context.bars, PPQN, bang_envelope).color(color);
context.set_next_track(automation, ui)
};
for event in track.event {
use timeline::track::automation::bang::Event;
match event {
Event::Mutate(mutate) => mutate.apply(bang_envelope),
_ => (),
}
}
}
////////////////////////////////
///// PLAYHEAD & SCROLLBAR /////
////////////////////////////////
// Now that all tracks have been set, finish up and set the `Playhead` and `Scrollbar`.
let context = context.end_tracks();
// Set the playhead after all tracks have been set.
for event in context.set_playhead(ui) {
use timeline::playhead::Event;
match event {
Event::Pressed => println!("Playhead pressed!"),
Event::DraggedTo(ticks) => *playhead_ticks = ticks,
Event::Released => println!("Playhead released!"),
}
}
// Set the scrollbar if it is visible.
context.set_scrollbar(ui);
}
fn key_pressed(_app: &App, model: &mut Model, key: Key) {
match key {
// Toggle play when space is pressed.
Key::Space => {
model.playing = !model.playing;
}
Key::R => {
let bars = model.timeline_data.bars.clone();
model.timeline_data.notes = bars::WithStarts::new(bars.iter().cloned(), PPQN)
.enumerate()
.map(|(i, (time_sig, start))| {
let end = start + time_sig.ticks_per_bar(PPQN);
let period = timeline::Period { start, end };
let pitch = pitch::Step((24 + (i * (random::<usize>() % 11)) % 12) as f32)
.to_letter_octave();
piano_roll::Note { period, pitch }
})
.collect();
}
Key::S => {
// Save model.timeline_data to a JSON file.
// This part is only included if you compile with the serde feature enabled.
#[cfg(feature = "serde1")]
{
nannou::io::save_to_json("./saved_timeline_data.json", &model.timeline_data)
.expect("Error saving file");
}
}
Key::L => {
// Load the model.timeline_data from a JSON file.
// This part is only included if you compile with the serde feature enabled.
#[cfg(feature = "serde1")]
{
if let Ok(new_data) = nannou::io::load_from_json("./saved_timeline_data.json") {
model.timeline_data = new_data;
}
}
}
_ => {}
}
}<|fim▁end|> | // Piano roll.
let piano_roll = track::PianoRoll::new(&context.bars, PPQN, ¬es[..]).color(color);
let track = context.set_next_track(piano_roll, ui); |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2008 German Aerospace Center (DLR)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|># distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from webdav.acp.Acl import ACL
from webdav.acp.Ace import ACE
from webdav.acp.GrantDeny import GrantDeny
from webdav.acp.Privilege import Privilege
from webdav.acp.Principal import Principal
__version__ = "$LastChangedRevision: 2 $"<|fim▁end|> | #
# Unless required by applicable law or agreed to in writing, software |
<|file_name|>ASTContext.cpp<|end_file_name|><|fim▁begin|>//===- ASTContext.cpp - Context to hold long-lived AST nodes --------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the ASTContext interface.
//
//===----------------------------------------------------------------------===//
#include "clang/AST/ASTContext.h"
#include "CXXABI.h"
#include "Interp/Context.h"
#include "clang/AST/APValue.h"
#include "clang/AST/ASTConcept.h"
#include "clang/AST/ASTMutationListener.h"
#include "clang/AST/ASTTypeTraits.h"
#include "clang/AST/Attr.h"
#include "clang/AST/AttrIterator.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/Comment.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclBase.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/DeclContextInternals.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/DeclOpenMP.h"
#include "clang/AST/DeclTemplate.h"
#include "clang/AST/DeclarationName.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/ExternalASTSource.h"
#include "clang/AST/Mangle.h"
#include "clang/AST/MangleNumberingContext.h"
#include "clang/AST/NestedNameSpecifier.h"
#include "clang/AST/RawCommentList.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/RecursiveASTVisitor.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/TemplateBase.h"
#include "clang/AST/TemplateName.h"
#include "clang/AST/Type.h"
#include "clang/AST/TypeLoc.h"
#include "clang/AST/UnresolvedSet.h"
#include "clang/AST/VTableBuilder.h"
#include "clang/Basic/AddressSpaces.h"
#include "clang/Basic/Builtins.h"
#include "clang/Basic/CommentOptions.h"
#include "clang/Basic/ExceptionSpecificationType.h"
#include "clang/Basic/FixedPoint.h"
#include "clang/Basic/IdentifierTable.h"
#include "clang/Basic/LLVM.h"
#include "clang/Basic/LangOptions.h"
#include "clang/Basic/Linkage.h"
#include "clang/Basic/ObjCRuntime.h"
#include "clang/Basic/SanitizerBlacklist.h"
#include "clang/Basic/SourceLocation.h"
#include "clang/Basic/SourceManager.h"
#include "clang/Basic/Specifiers.h"
#include "clang/Basic/TargetCXXABI.h"
#include "clang/Basic/TargetInfo.h"
#include "clang/Basic/XRayLists.h"
#include "llvm/ADT/APInt.h"
#include "llvm/ADT/APSInt.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/FoldingSet.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/PointerUnion.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/Triple.h"
#include "llvm/Support/Capacity.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <map>
#include <memory>
#include <string>
#include <tuple>
#include <utility>
using namespace clang;
enum FloatingRank {
Float16Rank, HalfRank, FloatRank, DoubleRank, LongDoubleRank, Float128Rank
};
const Expr *ASTContext::traverseIgnored(const Expr *E) const {
return traverseIgnored(const_cast<Expr *>(E));
}
Expr *ASTContext::traverseIgnored(Expr *E) const {
if (!E)
return nullptr;
switch (Traversal) {
case ast_type_traits::TK_AsIs:
return E;
case ast_type_traits::TK_IgnoreImplicitCastsAndParentheses:
return E->IgnoreParenImpCasts();
case ast_type_traits::TK_IgnoreUnlessSpelledInSource:
return E->IgnoreUnlessSpelledInSource();
}
llvm_unreachable("Invalid Traversal type!");
}
ast_type_traits::DynTypedNode
ASTContext::traverseIgnored(const ast_type_traits::DynTypedNode &N) const {
if (const auto *E = N.get<Expr>()) {
return ast_type_traits::DynTypedNode::create(*traverseIgnored(E));
}
return N;
}
/// \returns location that is relevant when searching for Doc comments related
/// to \p D.
static SourceLocation getDeclLocForCommentSearch(const Decl *D,
SourceManager &SourceMgr) {
assert(D);
// User can not attach documentation to implicit declarations.
if (D->isImplicit())
return {};
// User can not attach documentation to implicit instantiations.
if (const auto *FD = dyn_cast<FunctionDecl>(D)) {
if (FD->getTemplateSpecializationKind() == TSK_ImplicitInstantiation)
return {};
}
if (const auto *VD = dyn_cast<VarDecl>(D)) {
if (VD->isStaticDataMember() &&
VD->getTemplateSpecializationKind() == TSK_ImplicitInstantiation)
return {};
}
if (const auto *CRD = dyn_cast<CXXRecordDecl>(D)) {
if (CRD->getTemplateSpecializationKind() == TSK_ImplicitInstantiation)
return {};
}
if (const auto *CTSD = dyn_cast<ClassTemplateSpecializationDecl>(D)) {
TemplateSpecializationKind TSK = CTSD->getSpecializationKind();
if (TSK == TSK_ImplicitInstantiation ||
TSK == TSK_Undeclared)
return {};
}
if (const auto *ED = dyn_cast<EnumDecl>(D)) {
if (ED->getTemplateSpecializationKind() == TSK_ImplicitInstantiation)
return {};
}
if (const auto *TD = dyn_cast<TagDecl>(D)) {
// When a tag declaration (but not its definition!) is part of the
// decl-specifier-seq of some other declaration, it doesn't get a comment
if (TD->isEmbeddedInDeclarator() && !TD->isCompleteDefinition())
return {};
}
// TODO: handle comments for function parameters properly.
if (isa<ParmVarDecl>(D))
return {};
// TODO: we could look up template parameter documentation in the template
// documentation.
if (isa<TemplateTypeParmDecl>(D) ||
isa<NonTypeTemplateParmDecl>(D) ||
isa<TemplateTemplateParmDecl>(D))
return {};
// Find declaration location.
// For Objective-C declarations we generally don't expect to have multiple
// declarators, thus use declaration starting location as the "declaration
// location".
// For all other declarations multiple declarators are used quite frequently,
// so we use the location of the identifier as the "declaration location".
if (isa<ObjCMethodDecl>(D) || isa<ObjCContainerDecl>(D) ||
isa<ObjCPropertyDecl>(D) ||
isa<RedeclarableTemplateDecl>(D) ||
isa<ClassTemplateSpecializationDecl>(D) ||
// Allow association with Y across {} in `typedef struct X {} Y`.
isa<TypedefDecl>(D))
return D->getBeginLoc();
else {
const SourceLocation DeclLoc = D->getLocation();
if (DeclLoc.isMacroID()) {
if (isa<TypedefDecl>(D)) {
// If location of the typedef name is in a macro, it is because being
// declared via a macro. Try using declaration's starting location as
// the "declaration location".
return D->getBeginLoc();
} else if (const auto *TD = dyn_cast<TagDecl>(D)) {
// If location of the tag decl is inside a macro, but the spelling of
// the tag name comes from a macro argument, it looks like a special
// macro like NS_ENUM is being used to define the tag decl. In that
// case, adjust the source location to the expansion loc so that we can
// attach the comment to the tag decl.
if (SourceMgr.isMacroArgExpansion(DeclLoc) &&
TD->isCompleteDefinition())
return SourceMgr.getExpansionLoc(DeclLoc);
}
}
return DeclLoc;
}
return {};
}
RawComment *ASTContext::getRawCommentForDeclNoCacheImpl(
const Decl *D, const SourceLocation RepresentativeLocForDecl,
const std::map<unsigned, RawComment *> &CommentsInTheFile) const {
// If the declaration doesn't map directly to a location in a file, we
// can't find the comment.
if (RepresentativeLocForDecl.isInvalid() ||
!RepresentativeLocForDecl.isFileID())
return nullptr;
// If there are no comments anywhere, we won't find anything.
if (CommentsInTheFile.empty())
return nullptr;
// Decompose the location for the declaration and find the beginning of the
// file buffer.
const std::pair<FileID, unsigned> DeclLocDecomp =
SourceMgr.getDecomposedLoc(RepresentativeLocForDecl);
// Slow path.
auto OffsetCommentBehindDecl =
CommentsInTheFile.lower_bound(DeclLocDecomp.second);
// First check whether we have a trailing comment.
if (OffsetCommentBehindDecl != CommentsInTheFile.end()) {
RawComment *CommentBehindDecl = OffsetCommentBehindDecl->second;
if ((CommentBehindDecl->isDocumentation() ||
LangOpts.CommentOpts.ParseAllComments) &&
CommentBehindDecl->isTrailingComment() &&
(isa<FieldDecl>(D) || isa<EnumConstantDecl>(D) || isa<VarDecl>(D) ||
isa<ObjCMethodDecl>(D) || isa<ObjCPropertyDecl>(D))) {
// Check that Doxygen trailing comment comes after the declaration, starts
// on the same line and in the same file as the declaration.
if (SourceMgr.getLineNumber(DeclLocDecomp.first, DeclLocDecomp.second) ==
Comments.getCommentBeginLine(CommentBehindDecl, DeclLocDecomp.first,
OffsetCommentBehindDecl->first)) {
return CommentBehindDecl;
}
}
}
// The comment just after the declaration was not a trailing comment.
// Let's look at the previous comment.
if (OffsetCommentBehindDecl == CommentsInTheFile.begin())
return nullptr;
auto OffsetCommentBeforeDecl = --OffsetCommentBehindDecl;
RawComment *CommentBeforeDecl = OffsetCommentBeforeDecl->second;
// Check that we actually have a non-member Doxygen comment.
if (!(CommentBeforeDecl->isDocumentation() ||
LangOpts.CommentOpts.ParseAllComments) ||
CommentBeforeDecl->isTrailingComment())
return nullptr;
// Decompose the end of the comment.
const unsigned CommentEndOffset =
Comments.getCommentEndOffset(CommentBeforeDecl);
// Get the corresponding buffer.
bool Invalid = false;
const char *Buffer = SourceMgr.getBufferData(DeclLocDecomp.first,
&Invalid).data();
if (Invalid)
return nullptr;
// Extract text between the comment and declaration.
StringRef Text(Buffer + CommentEndOffset,
DeclLocDecomp.second - CommentEndOffset);
// There should be no other declarations or preprocessor directives between
// comment and declaration.
if (Text.find_first_of(";{}#@") != StringRef::npos)
return nullptr;
return CommentBeforeDecl;
}
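// Illustrative example of the association rules above (not taken from the LLVM
// sources themselves):
//   /// Attached to X via the "comment before declaration" path.
//   int X;
//   int Y;  ///< Attached to Y as a trailing comment on the same line.
// Any of the characters ";{}#@" in the text between a preceding comment and the
// declaration (for example another declaration or a preprocessor directive)
// prevents that comment from being attached.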
RawComment *ASTContext::getRawCommentForDeclNoCache(const Decl *D) const {
const SourceLocation DeclLoc = getDeclLocForCommentSearch(D, SourceMgr);
// If the declaration doesn't map directly to a location in a file, we
// can't find the comment.
if (DeclLoc.isInvalid() || !DeclLoc.isFileID())
return nullptr;
if (ExternalSource && !CommentsLoaded) {
ExternalSource->ReadComments();
CommentsLoaded = true;
}
if (Comments.empty())
return nullptr;
const FileID File = SourceMgr.getDecomposedLoc(DeclLoc).first;
const auto CommentsInThisFile = Comments.getCommentsInFile(File);
if (!CommentsInThisFile || CommentsInThisFile->empty())
return nullptr;
return getRawCommentForDeclNoCacheImpl(D, DeclLoc, *CommentsInThisFile);
}
/// If we have a 'templated' declaration for a template, adjust 'D' to
/// refer to the actual template.
/// If we have an implicit instantiation, adjust 'D' to refer to template.
static const Decl &adjustDeclToTemplate(const Decl &D) {
if (const auto *FD = dyn_cast<FunctionDecl>(&D)) {
// Is this function declaration part of a function template?
if (const FunctionTemplateDecl *FTD = FD->getDescribedFunctionTemplate())
return *FTD;
// Nothing to do if function is not an implicit instantiation.
if (FD->getTemplateSpecializationKind() != TSK_ImplicitInstantiation)
return D;
// Function is an implicit instantiation of a function template?
if (const FunctionTemplateDecl *FTD = FD->getPrimaryTemplate())
return *FTD;
// Function is instantiated from a member definition of a class template?
if (const FunctionDecl *MemberDecl =
FD->getInstantiatedFromMemberFunction())
return *MemberDecl;
return D;
}
if (const auto *VD = dyn_cast<VarDecl>(&D)) {
// Static data member is instantiated from a member definition of a class
// template?
if (VD->isStaticDataMember())
if (const VarDecl *MemberDecl = VD->getInstantiatedFromStaticDataMember())
return *MemberDecl;
return D;
}
if (const auto *CRD = dyn_cast<CXXRecordDecl>(&D)) {
// Is this class declaration part of a class template?
if (const ClassTemplateDecl *CTD = CRD->getDescribedClassTemplate())
return *CTD;
// Class is an implicit instantiation of a class template or partial
// specialization?
if (const auto *CTSD = dyn_cast<ClassTemplateSpecializationDecl>(CRD)) {
if (CTSD->getSpecializationKind() != TSK_ImplicitInstantiation)
return D;
llvm::PointerUnion<ClassTemplateDecl *,
ClassTemplatePartialSpecializationDecl *>
PU = CTSD->getSpecializedTemplateOrPartial();
return PU.is<ClassTemplateDecl *>()
? *static_cast<const Decl *>(PU.get<ClassTemplateDecl *>())
: *static_cast<const Decl *>(
PU.get<ClassTemplatePartialSpecializationDecl *>());
}
// Class is instantiated from a member definition of a class template?
if (const MemberSpecializationInfo *Info =
CRD->getMemberSpecializationInfo())
return *Info->getInstantiatedFrom();
return D;
}
if (const auto *ED = dyn_cast<EnumDecl>(&D)) {
// Enum is instantiated from a member definition of a class template?
if (const EnumDecl *MemberDecl = ED->getInstantiatedFromMemberEnum())
return *MemberDecl;
return D;
}
// FIXME: Adjust alias templates?
return D;
}
const RawComment *ASTContext::getRawCommentForAnyRedecl(
const Decl *D,
const Decl **OriginalDecl) const {
if (!D) {
if (OriginalDecl)
*OriginalDecl = nullptr;
return nullptr;
}
D = &adjustDeclToTemplate(*D);
// Any comment directly attached to D?
{
auto DeclComment = DeclRawComments.find(D);
if (DeclComment != DeclRawComments.end()) {
if (OriginalDecl)
*OriginalDecl = D;
return DeclComment->second;
}
}
// Any comment attached to any redeclaration of D?
const Decl *CanonicalD = D->getCanonicalDecl();
if (!CanonicalD)
return nullptr;
{
auto RedeclComment = RedeclChainComments.find(CanonicalD);
if (RedeclComment != RedeclChainComments.end()) {
if (OriginalDecl)
*OriginalDecl = RedeclComment->second;
auto CommentAtRedecl = DeclRawComments.find(RedeclComment->second);
assert(CommentAtRedecl != DeclRawComments.end() &&
"This decl is supposed to have comment attached.");
return CommentAtRedecl->second;
}
}
// Any redeclarations of D that we haven't checked for comments yet?
// We can't use DenseMap::iterator directly since it'd get invalid.
auto LastCheckedRedecl = [this, CanonicalD]() -> const Decl * {
auto LookupRes = CommentlessRedeclChains.find(CanonicalD);
if (LookupRes != CommentlessRedeclChains.end())
return LookupRes->second;
return nullptr;
}();
for (const auto Redecl : D->redecls()) {
assert(Redecl);
// Skip all redeclarations that have been checked previously.
if (LastCheckedRedecl) {
if (LastCheckedRedecl == Redecl) {
LastCheckedRedecl = nullptr;
}
continue;
}
const RawComment *RedeclComment = getRawCommentForDeclNoCache(Redecl);
if (RedeclComment) {
cacheRawCommentForDecl(*Redecl, *RedeclComment);
if (OriginalDecl)
*OriginalDecl = Redecl;
return RedeclComment;
}
CommentlessRedeclChains[CanonicalD] = Redecl;
}
if (OriginalDecl)
*OriginalDecl = nullptr;
return nullptr;
}
void ASTContext::cacheRawCommentForDecl(const Decl &OriginalD,
const RawComment &Comment) const {
assert(Comment.isDocumentation() || LangOpts.CommentOpts.ParseAllComments);
DeclRawComments.try_emplace(&OriginalD, &Comment);
const Decl *const CanonicalDecl = OriginalD.getCanonicalDecl();
RedeclChainComments.try_emplace(CanonicalDecl, &OriginalD);
CommentlessRedeclChains.erase(CanonicalDecl);
}
static void addRedeclaredMethods(const ObjCMethodDecl *ObjCMethod,
SmallVectorImpl<const NamedDecl *> &Redeclared) {
const DeclContext *DC = ObjCMethod->getDeclContext();
if (const auto *IMD = dyn_cast<ObjCImplDecl>(DC)) {
const ObjCInterfaceDecl *ID = IMD->getClassInterface();
if (!ID)
return;
// Add redeclared method here.
for (const auto *Ext : ID->known_extensions()) {
if (ObjCMethodDecl *RedeclaredMethod =
Ext->getMethod(ObjCMethod->getSelector(),
ObjCMethod->isInstanceMethod()))
Redeclared.push_back(RedeclaredMethod);
}
}
}
void ASTContext::attachCommentsToJustParsedDecls(ArrayRef<Decl *> Decls,
const Preprocessor *PP) {
if (Comments.empty() || Decls.empty())
return;
// See if there are any new comments that are not attached to a decl.
// The location doesn't have to be precise - we care only about the file.
const FileID File =
SourceMgr.getDecomposedLoc((*Decls.begin())->getLocation()).first;
auto CommentsInThisFile = Comments.getCommentsInFile(File);
if (!CommentsInThisFile || CommentsInThisFile->empty() ||
CommentsInThisFile->rbegin()->second->isAttached())
return;
// There is at least one comment not attached to a decl.
// Maybe it should be attached to one of Decls?
//
// Note that this way we pick up not only comments that precede the
// declaration, but also comments that *follow* the declaration -- thanks to
// the lookahead in the lexer: we've consumed the semicolon and looked
// ahead through comments.
for (const Decl *D : Decls) {
assert(D);
if (D->isInvalidDecl())
continue;
D = &adjustDeclToTemplate(*D);
const SourceLocation DeclLoc = getDeclLocForCommentSearch(D, SourceMgr);
if (DeclLoc.isInvalid() || !DeclLoc.isFileID())
continue;
if (DeclRawComments.count(D) > 0)
continue;
if (RawComment *const DocComment =
getRawCommentForDeclNoCacheImpl(D, DeclLoc, *CommentsInThisFile)) {
cacheRawCommentForDecl(*D, *DocComment);
comments::FullComment *FC = DocComment->parse(*this, PP, D);
ParsedComments[D->getCanonicalDecl()] = FC;
}
}
}
comments::FullComment *ASTContext::cloneFullComment(comments::FullComment *FC,
const Decl *D) const {
auto *ThisDeclInfo = new (*this) comments::DeclInfo;
ThisDeclInfo->CommentDecl = D;
ThisDeclInfo->IsFilled = false;
ThisDeclInfo->fill();
ThisDeclInfo->CommentDecl = FC->getDecl();
if (!ThisDeclInfo->TemplateParameters)
ThisDeclInfo->TemplateParameters = FC->getDeclInfo()->TemplateParameters;
comments::FullComment *CFC =
new (*this) comments::FullComment(FC->getBlocks(),
ThisDeclInfo);
return CFC;
}
comments::FullComment *ASTContext::getLocalCommentForDeclUncached(const Decl *D) const {
const RawComment *RC = getRawCommentForDeclNoCache(D);
return RC ? RC->parse(*this, nullptr, D) : nullptr;
}
comments::FullComment *ASTContext::getCommentForDecl(
const Decl *D,
const Preprocessor *PP) const {
if (!D || D->isInvalidDecl())
return nullptr;
D = &adjustDeclToTemplate(*D);
const Decl *Canonical = D->getCanonicalDecl();
llvm::DenseMap<const Decl *, comments::FullComment *>::iterator Pos =
ParsedComments.find(Canonical);
if (Pos != ParsedComments.end()) {
if (Canonical != D) {
comments::FullComment *FC = Pos->second;
comments::FullComment *CFC = cloneFullComment(FC, D);
return CFC;
}
return Pos->second;
}
const Decl *OriginalDecl = nullptr;
const RawComment *RC = getRawCommentForAnyRedecl(D, &OriginalDecl);
if (!RC) {
if (isa<ObjCMethodDecl>(D) || isa<FunctionDecl>(D)) {
SmallVector<const NamedDecl*, 8> Overridden;
const auto *OMD = dyn_cast<ObjCMethodDecl>(D);
if (OMD && OMD->isPropertyAccessor())
if (const ObjCPropertyDecl *PDecl = OMD->findPropertyDecl())
if (comments::FullComment *FC = getCommentForDecl(PDecl, PP))
return cloneFullComment(FC, D);
if (OMD)
addRedeclaredMethods(OMD, Overridden);
getOverriddenMethods(dyn_cast<NamedDecl>(D), Overridden);
for (unsigned i = 0, e = Overridden.size(); i < e; i++)
if (comments::FullComment *FC = getCommentForDecl(Overridden[i], PP))
return cloneFullComment(FC, D);
}
else if (const auto *TD = dyn_cast<TypedefNameDecl>(D)) {
// Attach any tag type's documentation to its typedef if latter
// does not have one of its own.
QualType QT = TD->getUnderlyingType();
if (const auto *TT = QT->getAs<TagType>())
if (const Decl *TD = TT->getDecl())
if (comments::FullComment *FC = getCommentForDecl(TD, PP))
return cloneFullComment(FC, D);
}
else if (const auto *IC = dyn_cast<ObjCInterfaceDecl>(D)) {
while (IC->getSuperClass()) {
IC = IC->getSuperClass();
if (comments::FullComment *FC = getCommentForDecl(IC, PP))
return cloneFullComment(FC, D);
}
}
else if (const auto *CD = dyn_cast<ObjCCategoryDecl>(D)) {
if (const ObjCInterfaceDecl *IC = CD->getClassInterface())
if (comments::FullComment *FC = getCommentForDecl(IC, PP))
return cloneFullComment(FC, D);
}
else if (const auto *RD = dyn_cast<CXXRecordDecl>(D)) {
if (!(RD = RD->getDefinition()))
return nullptr;
// Check non-virtual bases.
for (const auto &I : RD->bases()) {
if (I.isVirtual() || (I.getAccessSpecifier() != AS_public))
continue;
QualType Ty = I.getType();
if (Ty.isNull())
continue;
if (const CXXRecordDecl *NonVirtualBase = Ty->getAsCXXRecordDecl()) {
if (!(NonVirtualBase= NonVirtualBase->getDefinition()))
continue;
if (comments::FullComment *FC = getCommentForDecl((NonVirtualBase), PP))
return cloneFullComment(FC, D);
}
}
// Check virtual bases.
for (const auto &I : RD->vbases()) {
if (I.getAccessSpecifier() != AS_public)
continue;
QualType Ty = I.getType();
if (Ty.isNull())
continue;
if (const CXXRecordDecl *VirtualBase = Ty->getAsCXXRecordDecl()) {
if (!(VirtualBase= VirtualBase->getDefinition()))
continue;
if (comments::FullComment *FC = getCommentForDecl((VirtualBase), PP))
return cloneFullComment(FC, D);
}
}
}
return nullptr;
}
// If the RawComment was attached to other redeclaration of this Decl, we
// should parse the comment in context of that other Decl. This is important
// because comments can contain references to parameter names which can be
// different across redeclarations.
if (D != OriginalDecl && OriginalDecl)
return getCommentForDecl(OriginalDecl, PP);
comments::FullComment *FC = RC->parse(*this, PP, D);
ParsedComments[Canonical] = FC;
return FC;
}
void
ASTContext::CanonicalTemplateTemplateParm::Profile(llvm::FoldingSetNodeID &ID,
const ASTContext &C,
TemplateTemplateParmDecl *Parm) {
ID.AddInteger(Parm->getDepth());
ID.AddInteger(Parm->getPosition());
ID.AddBoolean(Parm->isParameterPack());
TemplateParameterList *Params = Parm->getTemplateParameters();
ID.AddInteger(Params->size());
for (TemplateParameterList::const_iterator P = Params->begin(),
PEnd = Params->end();
P != PEnd; ++P) {
if (const auto *TTP = dyn_cast<TemplateTypeParmDecl>(*P)) {
ID.AddInteger(0);
ID.AddBoolean(TTP->isParameterPack());
const TypeConstraint *TC = TTP->getTypeConstraint();
ID.AddBoolean(TC != nullptr);
if (TC)
TC->getImmediatelyDeclaredConstraint()->Profile(ID, C,
/*Canonical=*/true);
if (TTP->isExpandedParameterPack()) {
ID.AddBoolean(true);
ID.AddInteger(TTP->getNumExpansionParameters());
} else
ID.AddBoolean(false);
continue;
}
if (const auto *NTTP = dyn_cast<NonTypeTemplateParmDecl>(*P)) {
ID.AddInteger(1);
ID.AddBoolean(NTTP->isParameterPack());
ID.AddPointer(NTTP->getType().getCanonicalType().getAsOpaquePtr());
if (NTTP->isExpandedParameterPack()) {
ID.AddBoolean(true);
ID.AddInteger(NTTP->getNumExpansionTypes());
for (unsigned I = 0, N = NTTP->getNumExpansionTypes(); I != N; ++I) {
QualType T = NTTP->getExpansionType(I);
ID.AddPointer(T.getCanonicalType().getAsOpaquePtr());
}
} else
ID.AddBoolean(false);
continue;
}
auto *TTP = cast<TemplateTemplateParmDecl>(*P);
ID.AddInteger(2);
Profile(ID, C, TTP);
}
Expr *RequiresClause = Parm->getTemplateParameters()->getRequiresClause();
ID.AddBoolean(RequiresClause != nullptr);
if (RequiresClause)
RequiresClause->Profile(ID, C, /*Canonical=*/true);
}
static Expr *
canonicalizeImmediatelyDeclaredConstraint(const ASTContext &C, Expr *IDC,
QualType ConstrainedType) {
// This is a bit ugly - we need to form a new immediately-declared
// constraint that references the new parameter; this would ideally
// require semantic analysis (e.g. template<C T> struct S {}; - the
// converted arguments of C<T> could be an argument pack if C is
// declared as template<typename... T> concept C = ...).
// We don't have semantic analysis here so we dig deep into the
// ready-made constraint expr and change the thing manually.
ConceptSpecializationExpr *CSE;
if (const auto *Fold = dyn_cast<CXXFoldExpr>(IDC))
CSE = cast<ConceptSpecializationExpr>(Fold->getLHS());
else
CSE = cast<ConceptSpecializationExpr>(IDC);
ArrayRef<TemplateArgument> OldConverted = CSE->getTemplateArguments();
SmallVector<TemplateArgument, 3> NewConverted;
NewConverted.reserve(OldConverted.size());
if (OldConverted.front().getKind() == TemplateArgument::Pack) {
// The case:
// template<typename... T> concept C = true;
// template<C<int> T> struct S; -> constraint is C<{T, int}>
NewConverted.push_back(ConstrainedType);
for (auto &Arg : OldConverted.front().pack_elements().drop_front(1))
NewConverted.push_back(Arg);
TemplateArgument NewPack(NewConverted);
NewConverted.clear();
NewConverted.push_back(NewPack);
assert(OldConverted.size() == 1 &&
"Template parameter pack should be the last parameter");
} else {
assert(OldConverted.front().getKind() == TemplateArgument::Type &&
"Unexpected first argument kind for immediately-declared "
"constraint");
NewConverted.push_back(ConstrainedType);
for (auto &Arg : OldConverted.drop_front(1))
NewConverted.push_back(Arg);
}
Expr *NewIDC = ConceptSpecializationExpr::Create(
C, CSE->getNamedConcept(), NewConverted, nullptr,
CSE->isInstantiationDependent(), CSE->containsUnexpandedParameterPack());
if (auto *OrigFold = dyn_cast<CXXFoldExpr>(IDC))
NewIDC = new (C) CXXFoldExpr(OrigFold->getType(), SourceLocation(), NewIDC,
BinaryOperatorKind::BO_LAnd,
SourceLocation(), /*RHS=*/nullptr,
SourceLocation(), /*NumExpansions=*/None);
return NewIDC;
}
TemplateTemplateParmDecl *
ASTContext::getCanonicalTemplateTemplateParmDecl(
TemplateTemplateParmDecl *TTP) const {
// Check if we already have a canonical template template parameter.
llvm::FoldingSetNodeID ID;
CanonicalTemplateTemplateParm::Profile(ID, *this, TTP);
void *InsertPos = nullptr;
CanonicalTemplateTemplateParm *Canonical
= CanonTemplateTemplateParms.FindNodeOrInsertPos(ID, InsertPos);
if (Canonical)
return Canonical->getParam();
// Build a canonical template parameter list.
TemplateParameterList *Params = TTP->getTemplateParameters();
SmallVector<NamedDecl *, 4> CanonParams;
CanonParams.reserve(Params->size());
for (TemplateParameterList::const_iterator P = Params->begin(),
PEnd = Params->end();
P != PEnd; ++P) {
if (const auto *TTP = dyn_cast<TemplateTypeParmDecl>(*P)) {
TemplateTypeParmDecl *NewTTP = TemplateTypeParmDecl::Create(*this,
getTranslationUnitDecl(), SourceLocation(), SourceLocation(),
TTP->getDepth(), TTP->getIndex(), nullptr, false,
TTP->isParameterPack(), TTP->hasTypeConstraint(),
TTP->isExpandedParameterPack() ?
llvm::Optional<unsigned>(TTP->getNumExpansionParameters()) : None);
if (const auto *TC = TTP->getTypeConstraint()) {
QualType ParamAsArgument(NewTTP->getTypeForDecl(), 0);
Expr *NewIDC = canonicalizeImmediatelyDeclaredConstraint(
*this, TC->getImmediatelyDeclaredConstraint(),
ParamAsArgument);
TemplateArgumentListInfo CanonArgsAsWritten;
if (auto *Args = TC->getTemplateArgsAsWritten())
for (const auto &ArgLoc : Args->arguments())
CanonArgsAsWritten.addArgument(
TemplateArgumentLoc(ArgLoc.getArgument(),
TemplateArgumentLocInfo()));
NewTTP->setTypeConstraint(
NestedNameSpecifierLoc(),
DeclarationNameInfo(TC->getNamedConcept()->getDeclName(),
SourceLocation()), /*FoundDecl=*/nullptr,
// Actually canonicalizing a TemplateArgumentLoc is difficult so we
// simply omit the ArgsAsWritten
TC->getNamedConcept(), /*ArgsAsWritten=*/nullptr, NewIDC);
}
CanonParams.push_back(NewTTP);
} else if (const auto *NTTP = dyn_cast<NonTypeTemplateParmDecl>(*P)) {
QualType T = getCanonicalType(NTTP->getType());
TypeSourceInfo *TInfo = getTrivialTypeSourceInfo(T);
NonTypeTemplateParmDecl *Param;
if (NTTP->isExpandedParameterPack()) {
SmallVector<QualType, 2> ExpandedTypes;
SmallVector<TypeSourceInfo *, 2> ExpandedTInfos;
for (unsigned I = 0, N = NTTP->getNumExpansionTypes(); I != N; ++I) {
ExpandedTypes.push_back(getCanonicalType(NTTP->getExpansionType(I)));
ExpandedTInfos.push_back(
getTrivialTypeSourceInfo(ExpandedTypes.back()));
}
Param = NonTypeTemplateParmDecl::Create(*this, getTranslationUnitDecl(),
SourceLocation(),
SourceLocation(),
NTTP->getDepth(),
NTTP->getPosition(), nullptr,
T,
TInfo,
ExpandedTypes,
ExpandedTInfos);
} else {
Param = NonTypeTemplateParmDecl::Create(*this, getTranslationUnitDecl(),
SourceLocation(),
SourceLocation(),
NTTP->getDepth(),
NTTP->getPosition(), nullptr,
T,
NTTP->isParameterPack(),
TInfo);
}
if (AutoType *AT = T->getContainedAutoType()) {
if (AT->isConstrained()) {
Param->setPlaceholderTypeConstraint(
canonicalizeImmediatelyDeclaredConstraint(
*this, NTTP->getPlaceholderTypeConstraint(), T));
}
}
CanonParams.push_back(Param);
} else
CanonParams.push_back(getCanonicalTemplateTemplateParmDecl(
cast<TemplateTemplateParmDecl>(*P)));
}
Expr *CanonRequiresClause = nullptr;
if (Expr *RequiresClause = TTP->getTemplateParameters()->getRequiresClause())
CanonRequiresClause = RequiresClause;
TemplateTemplateParmDecl *CanonTTP
= TemplateTemplateParmDecl::Create(*this, getTranslationUnitDecl(),
SourceLocation(), TTP->getDepth(),
TTP->getPosition(),
TTP->isParameterPack(),
nullptr,
TemplateParameterList::Create(*this, SourceLocation(),
SourceLocation(),
CanonParams,
SourceLocation(),
CanonRequiresClause));
// Get the new insert position for the node we care about.
Canonical = CanonTemplateTemplateParms.FindNodeOrInsertPos(ID, InsertPos);
assert(!Canonical && "Shouldn't be in the map!");
(void)Canonical;
// Create the canonical template template parameter entry.
Canonical = new (*this) CanonicalTemplateTemplateParm(CanonTTP);
CanonTemplateTemplateParms.InsertNode(Canonical, InsertPos);
return CanonTTP;
}
CXXABI *ASTContext::createCXXABI(const TargetInfo &T) {
if (!LangOpts.CPlusPlus) return nullptr;
switch (T.getCXXABI().getKind()) {
case TargetCXXABI::Fuchsia:
case TargetCXXABI::GenericARM: // Same as Itanium at this level
case TargetCXXABI::iOS:
case TargetCXXABI::iOS64:
case TargetCXXABI::WatchOS:
case TargetCXXABI::GenericAArch64:
case TargetCXXABI::GenericMIPS:
case TargetCXXABI::GenericItanium:
case TargetCXXABI::WebAssembly:
return CreateItaniumCXXABI(*this);
case TargetCXXABI::Microsoft:
return CreateMicrosoftCXXABI(*this);
}
llvm_unreachable("Invalid CXXABI type!");
}
interp::Context &ASTContext::getInterpContext() {
if (!InterpContext) {
InterpContext.reset(new interp::Context(*this));
}
return *InterpContext.get();
}
static const LangASMap *getAddressSpaceMap(const TargetInfo &T,
const LangOptions &LOpts) {
if (LOpts.FakeAddressSpaceMap) {
// The fake address space map must have a distinct entry for each
// language-specific address space.
static const unsigned FakeAddrSpaceMap[] = {
0, // Default
1, // opencl_global
3, // opencl_local
2, // opencl_constant
0, // opencl_private
4, // opencl_generic
5, // cuda_device
6, // cuda_constant
7, // cuda_shared
8, // ptr32_sptr
9, // ptr32_uptr
10 // ptr64
};
return &FakeAddrSpaceMap;
} else {
return &T.getAddressSpaceMap();
}
}
static bool isAddrSpaceMapManglingEnabled(const TargetInfo &TI,
const LangOptions &LangOpts) {
switch (LangOpts.getAddressSpaceMapMangling()) {
case LangOptions::ASMM_Target:
return TI.useAddressSpaceMapMangling();
case LangOptions::ASMM_On:
return true;
case LangOptions::ASMM_Off:
return false;
}
llvm_unreachable("getAddressSpaceMapMangling() doesn't cover anything.");
}
ASTContext::ASTContext(LangOptions &LOpts, SourceManager &SM,
IdentifierTable &idents, SelectorTable &sels,
Builtin::Context &builtins)
: ConstantArrayTypes(this_()), FunctionProtoTypes(this_()),
TemplateSpecializationTypes(this_()),
DependentTemplateSpecializationTypes(this_()), AutoTypes(this_()),
SubstTemplateTemplateParmPacks(this_()),
CanonTemplateTemplateParms(this_()), SourceMgr(SM), LangOpts(LOpts),
SanitizerBL(new SanitizerBlacklist(LangOpts.SanitizerBlacklistFiles, SM)),
XRayFilter(new XRayFunctionFilter(LangOpts.XRayAlwaysInstrumentFiles,
LangOpts.XRayNeverInstrumentFiles,
LangOpts.XRayAttrListFiles, SM)),
PrintingPolicy(LOpts), Idents(idents), Selectors(sels),
BuiltinInfo(builtins), DeclarationNames(*this), Comments(SM),
CommentCommandTraits(BumpAlloc, LOpts.CommentOpts),
CompCategories(this_()), LastSDM(nullptr, 0) {
TUDecl = TranslationUnitDecl::Create(*this);
TraversalScope = {TUDecl};
}
ASTContext::~ASTContext() {
// Release the DenseMaps associated with DeclContext objects.
// FIXME: Is this the ideal solution?
ReleaseDeclContextMaps();
// Call all of the deallocation functions on all of their targets.
for (auto &Pair : Deallocations)
(Pair.first)(Pair.second);
// ASTRecordLayout objects in ASTRecordLayouts must always be destroyed
// because they can contain DenseMaps.
for (llvm::DenseMap<const ObjCContainerDecl*,
const ASTRecordLayout*>::iterator
I = ObjCLayouts.begin(), E = ObjCLayouts.end(); I != E; )
// Increment in loop to prevent using deallocated memory.
if (auto *R = const_cast<ASTRecordLayout *>((I++)->second))
R->Destroy(*this);
for (llvm::DenseMap<const RecordDecl*, const ASTRecordLayout*>::iterator
I = ASTRecordLayouts.begin(), E = ASTRecordLayouts.end(); I != E; ) {
// Increment in loop to prevent using deallocated memory.
if (auto *R = const_cast<ASTRecordLayout *>((I++)->second))
R->Destroy(*this);
}
for (llvm::DenseMap<const Decl*, AttrVec*>::iterator A = DeclAttrs.begin(),
AEnd = DeclAttrs.end();
A != AEnd; ++A)
A->second->~AttrVec();
for (const auto &Value : ModuleInitializers)
Value.second->~PerModuleInitializers();
for (APValue *Value : APValueCleanups)
Value->~APValue();
}
class ASTContext::ParentMap {
/// Contains parents of a node.
using ParentVector = llvm::SmallVector<ast_type_traits::DynTypedNode, 2>;
/// Maps from a node to its parents. This is used for nodes that have
/// pointer identity only, which are more common and we can save space by
/// only storing a unique pointer to them.
using ParentMapPointers = llvm::DenseMap<
const void *,
llvm::PointerUnion<const Decl *, const Stmt *,
ast_type_traits::DynTypedNode *, ParentVector *>>;
/// Parent map for nodes without pointer identity. We store a full
/// DynTypedNode for all keys.
using ParentMapOtherNodes = llvm::DenseMap<
ast_type_traits::DynTypedNode,
llvm::PointerUnion<const Decl *, const Stmt *,
ast_type_traits::DynTypedNode *, ParentVector *>>;
ParentMapPointers PointerParents;
ParentMapOtherNodes OtherParents;
class ASTVisitor;
static ast_type_traits::DynTypedNode
getSingleDynTypedNodeFromParentMap(ParentMapPointers::mapped_type U) {
if (const auto *D = U.dyn_cast<const Decl *>())
return ast_type_traits::DynTypedNode::create(*D);
if (const auto *S = U.dyn_cast<const Stmt *>())
return ast_type_traits::DynTypedNode::create(*S);
return *U.get<ast_type_traits::DynTypedNode *>();
}
template <typename NodeTy, typename MapTy>
static ASTContext::DynTypedNodeList getDynNodeFromMap(const NodeTy &Node,
const MapTy &Map) {
auto I = Map.find(Node);
if (I == Map.end()) {
return llvm::ArrayRef<ast_type_traits::DynTypedNode>();
}
if (const auto *V = I->second.template dyn_cast<ParentVector *>()) {
return llvm::makeArrayRef(*V);
}
return getSingleDynTypedNodeFromParentMap(I->second);
}
public:
ParentMap(ASTContext &Ctx);
~ParentMap() {
for (const auto &Entry : PointerParents) {
if (Entry.second.is<ast_type_traits::DynTypedNode *>()) {
delete Entry.second.get<ast_type_traits::DynTypedNode *>();
} else if (Entry.second.is<ParentVector *>()) {
delete Entry.second.get<ParentVector *>();
}
}
for (const auto &Entry : OtherParents) {
if (Entry.second.is<ast_type_traits::DynTypedNode *>()) {
delete Entry.second.get<ast_type_traits::DynTypedNode *>();
} else if (Entry.second.is<ParentVector *>()) {
delete Entry.second.get<ParentVector *>();
}
}
}
DynTypedNodeList getParents(const ast_type_traits::DynTypedNode &Node) {
if (Node.getNodeKind().hasPointerIdentity())
return getDynNodeFromMap(Node.getMemoizationData(), PointerParents);
return getDynNodeFromMap(Node, OtherParents);
}
};
void ASTContext::setTraversalScope(const std::vector<Decl *> &TopLevelDecls) {
TraversalScope = TopLevelDecls;
Parents.clear();
}
void ASTContext::AddDeallocation(void (*Callback)(void *), void *Data) const {
Deallocations.push_back({Callback, Data});
}
void
ASTContext::setExternalSource(IntrusiveRefCntPtr<ExternalASTSource> Source) {
ExternalSource = std::move(Source);
}
void ASTContext::PrintStats() const {
llvm::errs() << "\n*** AST Context Stats:\n";
llvm::errs() << " " << Types.size() << " types total.\n";
unsigned counts[] = {
#define TYPE(Name, Parent) 0,
#define ABSTRACT_TYPE(Name, Parent)
#include "clang/AST/TypeNodes.inc"
0 // Extra
};
for (unsigned i = 0, e = Types.size(); i != e; ++i) {
Type *T = Types[i];
counts[(unsigned)T->getTypeClass()]++;
}
unsigned Idx = 0;
unsigned TotalBytes = 0;
#define TYPE(Name, Parent) \
if (counts[Idx]) \
llvm::errs() << " " << counts[Idx] << " " << #Name \
<< " types, " << sizeof(Name##Type) << " each " \
<< "(" << counts[Idx] * sizeof(Name##Type) \
<< " bytes)\n"; \
TotalBytes += counts[Idx] * sizeof(Name##Type); \
++Idx;
#define ABSTRACT_TYPE(Name, Parent)
#include "clang/AST/TypeNodes.inc"
llvm::errs() << "Total bytes = " << TotalBytes << "\n";
// Implicit special member functions.
llvm::errs() << NumImplicitDefaultConstructorsDeclared << "/"
<< NumImplicitDefaultConstructors
<< " implicit default constructors created\n";
llvm::errs() << NumImplicitCopyConstructorsDeclared << "/"
<< NumImplicitCopyConstructors
<< " implicit copy constructors created\n";
if (getLangOpts().CPlusPlus)
llvm::errs() << NumImplicitMoveConstructorsDeclared << "/"
<< NumImplicitMoveConstructors
<< " implicit move constructors created\n";
llvm::errs() << NumImplicitCopyAssignmentOperatorsDeclared << "/"
<< NumImplicitCopyAssignmentOperators
<< " implicit copy assignment operators created\n";
if (getLangOpts().CPlusPlus)
llvm::errs() << NumImplicitMoveAssignmentOperatorsDeclared << "/"
<< NumImplicitMoveAssignmentOperators
<< " implicit move assignment operators created\n";
llvm::errs() << NumImplicitDestructorsDeclared << "/"
<< NumImplicitDestructors
<< " implicit destructors created\n";
if (ExternalSource) {
llvm::errs() << "\n";
ExternalSource->PrintStats();
}
BumpAlloc.PrintStats();
}
void ASTContext::mergeDefinitionIntoModule(NamedDecl *ND, Module *M,
bool NotifyListeners) {
if (NotifyListeners)
if (auto *Listener = getASTMutationListener())
Listener->RedefinedHiddenDefinition(ND, M);
MergedDefModules[cast<NamedDecl>(ND->getCanonicalDecl())].push_back(M);
}
void ASTContext::deduplicateMergedDefinitonsFor(NamedDecl *ND) {
auto It = MergedDefModules.find(cast<NamedDecl>(ND->getCanonicalDecl()));
if (It == MergedDefModules.end())
return;
auto &Merged = It->second;
llvm::DenseSet<Module*> Found;
for (Module *&M : Merged)
if (!Found.insert(M).second)
M = nullptr;
Merged.erase(std::remove(Merged.begin(), Merged.end(), nullptr), Merged.end());
}
void ASTContext::PerModuleInitializers::resolve(ASTContext &Ctx) {
if (LazyInitializers.empty())
return;
auto *Source = Ctx.getExternalSource();
assert(Source && "lazy initializers but no external source");
auto LazyInits = std::move(LazyInitializers);
LazyInitializers.clear();
for (auto ID : LazyInits)
Initializers.push_back(Source->GetExternalDecl(ID));
assert(LazyInitializers.empty() &&
"GetExternalDecl for lazy module initializer added more inits");
}
void ASTContext::addModuleInitializer(Module *M, Decl *D) {
// One special case: if we add a module initializer that imports another
// module, and that module's only initializer is an ImportDecl, simplify.
if (const auto *ID = dyn_cast<ImportDecl>(D)) {
auto It = ModuleInitializers.find(ID->getImportedModule());
// Maybe the ImportDecl does nothing at all. (Common case.)
if (It == ModuleInitializers.end())
return;
// Maybe the ImportDecl only imports another ImportDecl.
auto &Imported = *It->second;
if (Imported.Initializers.size() + Imported.LazyInitializers.size() == 1) {
Imported.resolve(*this);
auto *OnlyDecl = Imported.Initializers.front();
if (isa<ImportDecl>(OnlyDecl))
D = OnlyDecl;
}
}
auto *&Inits = ModuleInitializers[M];
if (!Inits)
Inits = new (*this) PerModuleInitializers;
Inits->Initializers.push_back(D);
}
void ASTContext::addLazyModuleInitializers(Module *M, ArrayRef<uint32_t> IDs) {
auto *&Inits = ModuleInitializers[M];
if (!Inits)
Inits = new (*this) PerModuleInitializers;
Inits->LazyInitializers.insert(Inits->LazyInitializers.end(),
IDs.begin(), IDs.end());
}
ArrayRef<Decl *> ASTContext::getModuleInitializers(Module *M) {
auto It = ModuleInitializers.find(M);
if (It == ModuleInitializers.end())
return None;
auto *Inits = It->second;
Inits->resolve(*this);
return Inits->Initializers;
}
ExternCContextDecl *ASTContext::getExternCContextDecl() const {
if (!ExternCContext)
ExternCContext = ExternCContextDecl::Create(*this, getTranslationUnitDecl());
return ExternCContext;
}
BuiltinTemplateDecl *
ASTContext::buildBuiltinTemplateDecl(BuiltinTemplateKind BTK,
const IdentifierInfo *II) const {
auto *BuiltinTemplate = BuiltinTemplateDecl::Create(*this, TUDecl, II, BTK);
BuiltinTemplate->setImplicit();
TUDecl->addDecl(BuiltinTemplate);
return BuiltinTemplate;
}
BuiltinTemplateDecl *
ASTContext::getMakeIntegerSeqDecl() const {
if (!MakeIntegerSeqDecl)
MakeIntegerSeqDecl = buildBuiltinTemplateDecl(BTK__make_integer_seq,
getMakeIntegerSeqName());
return MakeIntegerSeqDecl;
}
BuiltinTemplateDecl *
ASTContext::getTypePackElementDecl() const {
if (!TypePackElementDecl)
TypePackElementDecl = buildBuiltinTemplateDecl(BTK__type_pack_element,
getTypePackElementName());
return TypePackElementDecl;
}
RecordDecl *ASTContext::buildImplicitRecord(StringRef Name,
RecordDecl::TagKind TK) const {
SourceLocation Loc;
RecordDecl *NewDecl;
if (getLangOpts().CPlusPlus)
NewDecl = CXXRecordDecl::Create(*this, TK, getTranslationUnitDecl(), Loc,
Loc, &Idents.get(Name));
else
NewDecl = RecordDecl::Create(*this, TK, getTranslationUnitDecl(), Loc, Loc,
&Idents.get(Name));
NewDecl->setImplicit();
NewDecl->addAttr(TypeVisibilityAttr::CreateImplicit(
const_cast<ASTContext &>(*this), TypeVisibilityAttr::Default));
return NewDecl;
}
TypedefDecl *ASTContext::buildImplicitTypedef(QualType T,
StringRef Name) const {
TypeSourceInfo *TInfo = getTrivialTypeSourceInfo(T);
TypedefDecl *NewDecl = TypedefDecl::Create(
const_cast<ASTContext &>(*this), getTranslationUnitDecl(),
SourceLocation(), SourceLocation(), &Idents.get(Name), TInfo);
NewDecl->setImplicit();
return NewDecl;
}
TypedefDecl *ASTContext::getInt128Decl() const {
if (!Int128Decl)
Int128Decl = buildImplicitTypedef(Int128Ty, "__int128_t");
return Int128Decl;
}
TypedefDecl *ASTContext::getUInt128Decl() const {
if (!UInt128Decl)
UInt128Decl = buildImplicitTypedef(UnsignedInt128Ty, "__uint128_t");
return UInt128Decl;
}
void ASTContext::InitBuiltinType(CanQualType &R, BuiltinType::Kind K) {
auto *Ty = new (*this, TypeAlignment) BuiltinType(K);
R = CanQualType::CreateUnsafe(QualType(Ty, 0));
Types.push_back(Ty);
}
void ASTContext::InitBuiltinTypes(const TargetInfo &Target,
const TargetInfo *AuxTarget) {
assert((!this->Target || this->Target == &Target) &&
"Incorrect target reinitialization");
assert(VoidTy.isNull() && "Context reinitialized?");
this->Target = &Target;
this->AuxTarget = AuxTarget;
ABI.reset(createCXXABI(Target));
AddrSpaceMap = getAddressSpaceMap(Target, LangOpts);
AddrSpaceMapMangling = isAddrSpaceMapManglingEnabled(Target, LangOpts);
// C99 6.2.5p19.
InitBuiltinType(VoidTy, BuiltinType::Void);
// C99 6.2.5p2.
InitBuiltinType(BoolTy, BuiltinType::Bool);
// C99 6.2.5p3.
if (LangOpts.CharIsSigned)
InitBuiltinType(CharTy, BuiltinType::Char_S);
else
InitBuiltinType(CharTy, BuiltinType::Char_U);
// C99 6.2.5p4.
InitBuiltinType(SignedCharTy, BuiltinType::SChar);
InitBuiltinType(ShortTy, BuiltinType::Short);
InitBuiltinType(IntTy, BuiltinType::Int);
InitBuiltinType(LongTy, BuiltinType::Long);
InitBuiltinType(LongLongTy, BuiltinType::LongLong);
// C99 6.2.5p6.
InitBuiltinType(UnsignedCharTy, BuiltinType::UChar);
InitBuiltinType(UnsignedShortTy, BuiltinType::UShort);
InitBuiltinType(UnsignedIntTy, BuiltinType::UInt);
InitBuiltinType(UnsignedLongTy, BuiltinType::ULong);
InitBuiltinType(UnsignedLongLongTy, BuiltinType::ULongLong);
// C99 6.2.5p10.
InitBuiltinType(FloatTy, BuiltinType::Float);
InitBuiltinType(DoubleTy, BuiltinType::Double);
InitBuiltinType(LongDoubleTy, BuiltinType::LongDouble);
// GNU extension, __float128 for IEEE quadruple precision
InitBuiltinType(Float128Ty, BuiltinType::Float128);
// C11 extension ISO/IEC TS 18661-3
InitBuiltinType(Float16Ty, BuiltinType::Float16);
// ISO/IEC JTC1 SC22 WG14 N1169 Extension
InitBuiltinType(ShortAccumTy, BuiltinType::ShortAccum);
InitBuiltinType(AccumTy, BuiltinType::Accum);
InitBuiltinType(LongAccumTy, BuiltinType::LongAccum);
InitBuiltinType(UnsignedShortAccumTy, BuiltinType::UShortAccum);
InitBuiltinType(UnsignedAccumTy, BuiltinType::UAccum);
InitBuiltinType(UnsignedLongAccumTy, BuiltinType::ULongAccum);
InitBuiltinType(ShortFractTy, BuiltinType::ShortFract);
InitBuiltinType(FractTy, BuiltinType::Fract);
InitBuiltinType(LongFractTy, BuiltinType::LongFract);
InitBuiltinType(UnsignedShortFractTy, BuiltinType::UShortFract);
InitBuiltinType(UnsignedFractTy, BuiltinType::UFract);
InitBuiltinType(UnsignedLongFractTy, BuiltinType::ULongFract);
InitBuiltinType(SatShortAccumTy, BuiltinType::SatShortAccum);
InitBuiltinType(SatAccumTy, BuiltinType::SatAccum);
InitBuiltinType(SatLongAccumTy, BuiltinType::SatLongAccum);
InitBuiltinType(SatUnsignedShortAccumTy, BuiltinType::SatUShortAccum);
InitBuiltinType(SatUnsignedAccumTy, BuiltinType::SatUAccum);
InitBuiltinType(SatUnsignedLongAccumTy, BuiltinType::SatULongAccum);
InitBuiltinType(SatShortFractTy, BuiltinType::SatShortFract);
InitBuiltinType(SatFractTy, BuiltinType::SatFract);
InitBuiltinType(SatLongFractTy, BuiltinType::SatLongFract);
InitBuiltinType(SatUnsignedShortFractTy, BuiltinType::SatUShortFract);
InitBuiltinType(SatUnsignedFractTy, BuiltinType::SatUFract);
InitBuiltinType(SatUnsignedLongFractTy, BuiltinType::SatULongFract);
// GNU extension, 128-bit integers.
InitBuiltinType(Int128Ty, BuiltinType::Int128);
InitBuiltinType(UnsignedInt128Ty, BuiltinType::UInt128);
// C++ 3.9.1p5
if (TargetInfo::isTypeSigned(Target.getWCharType()))
InitBuiltinType(WCharTy, BuiltinType::WChar_S);
else // -fshort-wchar makes wchar_t be unsigned.
InitBuiltinType(WCharTy, BuiltinType::WChar_U);
if (LangOpts.CPlusPlus && LangOpts.WChar)
WideCharTy = WCharTy;
else {
// C99 (or C++ using -fno-wchar).
WideCharTy = getFromTargetType(Target.getWCharType());
}
WIntTy = getFromTargetType(Target.getWIntType());
// C++20 (proposed)
InitBuiltinType(Char8Ty, BuiltinType::Char8);
if (LangOpts.CPlusPlus) // C++0x 3.9.1p5, extension for C++
InitBuiltinType(Char16Ty, BuiltinType::Char16);
else // C99
Char16Ty = getFromTargetType(Target.getChar16Type());
if (LangOpts.CPlusPlus) // C++0x 3.9.1p5, extension for C++
InitBuiltinType(Char32Ty, BuiltinType::Char32);
else // C99
Char32Ty = getFromTargetType(Target.getChar32Type());
// Placeholder type for type-dependent expressions whose type is
// completely unknown. No code should ever check a type against
// DependentTy and users should never see it; however, it is here to
// help diagnose failures to properly check for type-dependent
// expressions.
InitBuiltinType(DependentTy, BuiltinType::Dependent);
// Placeholder type for functions.
InitBuiltinType(OverloadTy, BuiltinType::Overload);
// Placeholder type for bound members.
InitBuiltinType(BoundMemberTy, BuiltinType::BoundMember);
// Placeholder type for pseudo-objects.
InitBuiltinType(PseudoObjectTy, BuiltinType::PseudoObject);
// "any" type; useful for debugger-like clients.
InitBuiltinType(UnknownAnyTy, BuiltinType::UnknownAny);
// Placeholder type for unbridged ARC casts.
InitBuiltinType(ARCUnbridgedCastTy, BuiltinType::ARCUnbridgedCast);
// Placeholder type for builtin functions.
InitBuiltinType(BuiltinFnTy, BuiltinType::BuiltinFn);
// Placeholder type for OMP array sections.
if (LangOpts.OpenMP)
InitBuiltinType(OMPArraySectionTy, BuiltinType::OMPArraySection);
// C99 6.2.5p11.
FloatComplexTy = getComplexType(FloatTy);
DoubleComplexTy = getComplexType(DoubleTy);
LongDoubleComplexTy = getComplexType(LongDoubleTy);
Float128ComplexTy = getComplexType(Float128Ty);
// Builtin types for 'id', 'Class', and 'SEL'.
InitBuiltinType(ObjCBuiltinIdTy, BuiltinType::ObjCId);
InitBuiltinType(ObjCBuiltinClassTy, BuiltinType::ObjCClass);
InitBuiltinType(ObjCBuiltinSelTy, BuiltinType::ObjCSel);
if (LangOpts.OpenCL) {
#define IMAGE_TYPE(ImgType, Id, SingletonId, Access, Suffix) \
InitBuiltinType(SingletonId, BuiltinType::Id);
#include "clang/Basic/OpenCLImageTypes.def"
InitBuiltinType(OCLSamplerTy, BuiltinType::OCLSampler);
InitBuiltinType(OCLEventTy, BuiltinType::OCLEvent);
InitBuiltinType(OCLClkEventTy, BuiltinType::OCLClkEvent);
InitBuiltinType(OCLQueueTy, BuiltinType::OCLQueue);
InitBuiltinType(OCLReserveIDTy, BuiltinType::OCLReserveID);
#define EXT_OPAQUE_TYPE(ExtType, Id, Ext) \
InitBuiltinType(Id##Ty, BuiltinType::Id);
#include "clang/Basic/OpenCLExtensionTypes.def"
}
if (Target.hasAArch64SVETypes()) {
#define SVE_TYPE(Name, Id, SingletonId) \
InitBuiltinType(SingletonId, BuiltinType::Id);
#include "clang/Basic/AArch64SVEACLETypes.def"
}
// Builtin type for __objc_yes and __objc_no
ObjCBuiltinBoolTy = (Target.useSignedCharForObjCBool() ?
SignedCharTy : BoolTy);
ObjCConstantStringType = QualType();
ObjCSuperType = QualType();
// void * type
if (LangOpts.OpenCLVersion >= 200) {
auto Q = VoidTy.getQualifiers();
Q.setAddressSpace(LangAS::opencl_generic);
VoidPtrTy = getPointerType(getCanonicalType(
getQualifiedType(VoidTy.getUnqualifiedType(), Q)));
} else {
VoidPtrTy = getPointerType(VoidTy);
}
// nullptr type (C++0x 2.14.7)
InitBuiltinType(NullPtrTy, BuiltinType::NullPtr);
// half type (OpenCL 6.1.1.1) / ARM NEON __fp16
InitBuiltinType(HalfTy, BuiltinType::Half);
// Scaffold: cbit and qbit types added here for initialization
InitBuiltinType(AbitTy, BuiltinType::Abit);
InitBuiltinType(CbitTy, BuiltinType::Cbit);
InitBuiltinType(QbitTy, BuiltinType::Qbit);
// RKQC: qint added for initialization
InitBuiltinType(QintTy, BuiltinType::Qint);
InitBuiltinType(zzBitTy, BuiltinType::zzBit);
InitBuiltinType(zgBitTy, BuiltinType::zgBit);
InitBuiltinType(ooBitTy, BuiltinType::ooBit);
InitBuiltinType(ogBitTy, BuiltinType::ogBit);
// Builtin type used to help define __builtin_va_list.
VaListTagDecl = nullptr;
}
DiagnosticsEngine &ASTContext::getDiagnostics() const {
return SourceMgr.getDiagnostics();
}
AttrVec& ASTContext::getDeclAttrs(const Decl *D) {
AttrVec *&Result = DeclAttrs[D];
if (!Result) {
void *Mem = Allocate(sizeof(AttrVec));
Result = new (Mem) AttrVec;
}
return *Result;
}
/// Erase the attributes corresponding to the given declaration.
void ASTContext::eraseDeclAttrs(const Decl *D) {
llvm::DenseMap<const Decl*, AttrVec*>::iterator Pos = DeclAttrs.find(D);
if (Pos != DeclAttrs.end()) {
Pos->second->~AttrVec();
DeclAttrs.erase(Pos);
}
}
// FIXME: Remove ?
MemberSpecializationInfo *
ASTContext::getInstantiatedFromStaticDataMember(const VarDecl *Var) {
assert(Var->isStaticDataMember() && "Not a static data member");
return getTemplateOrSpecializationInfo(Var)
.dyn_cast<MemberSpecializationInfo *>();
}
ASTContext::TemplateOrSpecializationInfo
ASTContext::getTemplateOrSpecializationInfo(const VarDecl *Var) {
llvm::DenseMap<const VarDecl *, TemplateOrSpecializationInfo>::iterator Pos =
TemplateOrInstantiation.find(Var);
if (Pos == TemplateOrInstantiation.end())
return {};
return Pos->second;
}
void
ASTContext::setInstantiatedFromStaticDataMember(VarDecl *Inst, VarDecl *Tmpl,
TemplateSpecializationKind TSK,
SourceLocation PointOfInstantiation) {
assert(Inst->isStaticDataMember() && "Not a static data member");
assert(Tmpl->isStaticDataMember() && "Not a static data member");
setTemplateOrSpecializationInfo(Inst, new (*this) MemberSpecializationInfo(
Tmpl, TSK, PointOfInstantiation));
}
void
ASTContext::setTemplateOrSpecializationInfo(VarDecl *Inst,
TemplateOrSpecializationInfo TSI) {
assert(!TemplateOrInstantiation[Inst] &&
"Already noted what the variable was instantiated from");
TemplateOrInstantiation[Inst] = TSI;
}
NamedDecl *
ASTContext::getInstantiatedFromUsingDecl(NamedDecl *UUD) {
auto Pos = InstantiatedFromUsingDecl.find(UUD);
if (Pos == InstantiatedFromUsingDecl.end())
return nullptr;
return Pos->second;
}
void
ASTContext::setInstantiatedFromUsingDecl(NamedDecl *Inst, NamedDecl *Pattern) {
assert((isa<UsingDecl>(Pattern) ||
isa<UnresolvedUsingValueDecl>(Pattern) ||
isa<UnresolvedUsingTypenameDecl>(Pattern)) &&
"pattern decl is not a using decl");
assert((isa<UsingDecl>(Inst) ||
isa<UnresolvedUsingValueDecl>(Inst) ||
isa<UnresolvedUsingTypenameDecl>(Inst)) &&
"instantiation did not produce a using decl");
assert(!InstantiatedFromUsingDecl[Inst] && "pattern already exists");
InstantiatedFromUsingDecl[Inst] = Pattern;
}
UsingShadowDecl *
ASTContext::getInstantiatedFromUsingShadowDecl(UsingShadowDecl *Inst) {
llvm::DenseMap<UsingShadowDecl*, UsingShadowDecl*>::const_iterator Pos
= InstantiatedFromUsingShadowDecl.find(Inst);
if (Pos == InstantiatedFromUsingShadowDecl.end())
return nullptr;
return Pos->second;
}
void
ASTContext::setInstantiatedFromUsingShadowDecl(UsingShadowDecl *Inst,
UsingShadowDecl *Pattern) {
assert(!InstantiatedFromUsingShadowDecl[Inst] && "pattern already exists");
InstantiatedFromUsingShadowDecl[Inst] = Pattern;
}
FieldDecl *ASTContext::getInstantiatedFromUnnamedFieldDecl(FieldDecl *Field) {
llvm::DenseMap<FieldDecl *, FieldDecl *>::iterator Pos
= InstantiatedFromUnnamedFieldDecl.find(Field);
if (Pos == InstantiatedFromUnnamedFieldDecl.end())
return nullptr;
return Pos->second;
}
void ASTContext::setInstantiatedFromUnnamedFieldDecl(FieldDecl *Inst,
FieldDecl *Tmpl) {
assert(!Inst->getDeclName() && "Instantiated field decl is not unnamed");
assert(!Tmpl->getDeclName() && "Template field decl is not unnamed");
assert(!InstantiatedFromUnnamedFieldDecl[Inst] &&
"Already noted what unnamed field was instantiated from");
InstantiatedFromUnnamedFieldDecl[Inst] = Tmpl;
}
ASTContext::overridden_cxx_method_iterator
ASTContext::overridden_methods_begin(const CXXMethodDecl *Method) const {
return overridden_methods(Method).begin();
}
ASTContext::overridden_cxx_method_iterator
ASTContext::overridden_methods_end(const CXXMethodDecl *Method) const {
return overridden_methods(Method).end();
}
unsigned
ASTContext::overridden_methods_size(const CXXMethodDecl *Method) const {
auto Range = overridden_methods(Method);
return Range.end() - Range.begin();
}
ASTContext::overridden_method_range
ASTContext::overridden_methods(const CXXMethodDecl *Method) const {
llvm::DenseMap<const CXXMethodDecl *, CXXMethodVector>::const_iterator Pos =
OverriddenMethods.find(Method->getCanonicalDecl());
if (Pos == OverriddenMethods.end())
return overridden_method_range(nullptr, nullptr);
return overridden_method_range(Pos->second.begin(), Pos->second.end());
}
void ASTContext::addOverriddenMethod(const CXXMethodDecl *Method,
const CXXMethodDecl *Overridden) {
assert(Method->isCanonicalDecl() && Overridden->isCanonicalDecl());
OverriddenMethods[Method].push_back(Overridden);
}
void ASTContext::getOverriddenMethods(
const NamedDecl *D,
SmallVectorImpl<const NamedDecl *> &Overridden) const {
assert(D);
if (const auto *CXXMethod = dyn_cast<CXXMethodDecl>(D)) {
Overridden.append(overridden_methods_begin(CXXMethod),
overridden_methods_end(CXXMethod));
return;
}
const auto *Method = dyn_cast<ObjCMethodDecl>(D);
if (!Method)
return;
SmallVector<const ObjCMethodDecl *, 8> OverDecls;
Method->getOverriddenMethods(OverDecls);
Overridden.append(OverDecls.begin(), OverDecls.end());
}
void ASTContext::addedLocalImportDecl(ImportDecl *Import) {
assert(!Import->NextLocalImport && "Import declaration already in the chain");
assert(!Import->isFromASTFile() && "Non-local import declaration");
if (!FirstLocalImport) {
FirstLocalImport = Import;
LastLocalImport = Import;
return;
}
LastLocalImport->NextLocalImport = Import;
LastLocalImport = Import;
}
//===----------------------------------------------------------------------===//
// Type Sizing and Analysis
//===----------------------------------------------------------------------===//
/// getFloatTypeSemantics - Return the APFloat 'semantics' for the specified
/// scalar floating point type.
const llvm::fltSemantics &ASTContext::getFloatTypeSemantics(QualType T) const {
switch (T->castAs<BuiltinType>()->getKind()) {
default:
llvm_unreachable("Not a floating point type!");
case BuiltinType::Float16:
case BuiltinType::Half:
return Target->getHalfFormat();
case BuiltinType::Float: return Target->getFloatFormat();
case BuiltinType::Double: return Target->getDoubleFormat();
case BuiltinType::LongDouble:
if (getLangOpts().OpenMP && getLangOpts().OpenMPIsDevice)
return AuxTarget->getLongDoubleFormat();
return Target->getLongDoubleFormat();
case BuiltinType::Float128:
if (getLangOpts().OpenMP && getLangOpts().OpenMPIsDevice)
return AuxTarget->getFloat128Format();
return Target->getFloat128Format();
}
}
CharUnits ASTContext::getDeclAlign(const Decl *D, bool ForAlignof) const {
unsigned Align = Target->getCharWidth();
bool UseAlignAttrOnly = false;
if (unsigned AlignFromAttr = D->getMaxAlignment()) {
Align = AlignFromAttr;
// __attribute__((aligned)) can increase or decrease alignment
// *except* on a struct or struct member, where it only increases
// alignment unless 'packed' is also specified.
//
// It is an error for alignas to decrease alignment, so we can
// ignore that possibility; Sema should diagnose it.
if (isa<FieldDecl>(D)) {
UseAlignAttrOnly = D->hasAttr<PackedAttr>() ||
cast<FieldDecl>(D)->getParent()->hasAttr<PackedAttr>();
} else {
UseAlignAttrOnly = true;
}
}
else if (isa<FieldDecl>(D))
UseAlignAttrOnly =
D->hasAttr<PackedAttr>() ||
cast<FieldDecl>(D)->getParent()->hasAttr<PackedAttr>();
// If we're using the align attribute only, just ignore everything
// else about the declaration and its type.
if (UseAlignAttrOnly) {
// do nothing
} else if (const auto *VD = dyn_cast<ValueDecl>(D)) {
QualType T = VD->getType();
if (const auto *RT = T->getAs<ReferenceType>()) {
if (ForAlignof)
T = RT->getPointeeType();
else
T = getPointerType(RT->getPointeeType());
}
QualType BaseT = getBaseElementType(T);
if (T->isFunctionType())
Align = getTypeInfoImpl(T.getTypePtr()).Align;
else if (!BaseT->isIncompleteType()) {
// Adjust alignments of declarations with array type by the
// large-array alignment on the target.
if (const ArrayType *arrayType = getAsArrayType(T)) {
unsigned MinWidth = Target->getLargeArrayMinWidth();
if (!ForAlignof && MinWidth) {
if (isa<VariableArrayType>(arrayType))
Align = std::max(Align, Target->getLargeArrayAlign());
else if (isa<ConstantArrayType>(arrayType) &&
MinWidth <= getTypeSize(cast<ConstantArrayType>(arrayType)))
Align = std::max(Align, Target->getLargeArrayAlign());
}
}
Align = std::max(Align, getPreferredTypeAlign(T.getTypePtr()));
if (BaseT.getQualifiers().hasUnaligned())
Align = Target->getCharWidth();
if (const auto *VD = dyn_cast<VarDecl>(D)) {
if (VD->hasGlobalStorage() && !ForAlignof) {
uint64_t TypeSize = getTypeSize(T.getTypePtr());
Align = std::max(Align, getTargetInfo().getMinGlobalAlign(TypeSize));
}
}
}
// Fields can be subject to extra alignment constraints, like if
    // the field is packed, the struct is packed, or the struct has
    // a max-field-alignment constraint (#pragma pack). So calculate
// the actual alignment of the field within the struct, and then
// (as we're expected to) constrain that by the alignment of the type.
if (const auto *Field = dyn_cast<FieldDecl>(VD)) {
const RecordDecl *Parent = Field->getParent();
// We can only produce a sensible answer if the record is valid.
if (!Parent->isInvalidDecl()) {
const ASTRecordLayout &Layout = getASTRecordLayout(Parent);
// Start with the record's overall alignment.
unsigned FieldAlign = toBits(Layout.getAlignment());
// Use the GCD of that and the offset within the record.
uint64_t Offset = Layout.getFieldOffset(Field->getFieldIndex());
if (Offset > 0) {
// Alignment is always a power of 2, so the GCD will be a power of 2,
// which means we get to do this crazy thing instead of Euclid's.
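          // For example, a field at bit offset 40 (0b101000) has a lowest set
          // bit of 8, so the most we can guarantee for it is 8-bit alignment,
          // whatever the record's overall alignment is.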
uint64_t LowBitOfOffset = Offset & (~Offset + 1);
if (LowBitOfOffset < FieldAlign)
FieldAlign = static_cast<unsigned>(LowBitOfOffset);
}
Align = std::min(Align, FieldAlign);
}
}
}
return toCharUnitsFromBits(Align);
}
// getTypeInfoDataSizeInChars - Return the size of a type, in
// chars. If the type is a record, its data size is returned. This is
// the size of the memcpy that's performed when assigning this type
// using a trivial copy/move assignment operator.
std::pair<CharUnits, CharUnits>
ASTContext::getTypeInfoDataSizeInChars(QualType T) const {
std::pair<CharUnits, CharUnits> sizeAndAlign = getTypeInfoInChars(T);
// In C++, objects can sometimes be allocated into the tail padding
// of a base-class subobject. We decide whether that's possible
// during class layout, so here we can just trust the layout results.
if (getLangOpts().CPlusPlus) {
if (const auto *RT = T->getAs<RecordType>()) {
const ASTRecordLayout &layout = getASTRecordLayout(RT->getDecl());
sizeAndAlign.first = layout.getDataSize();
}
}
return sizeAndAlign;
}
/// getConstantArrayInfoInChars - Performing the computation in CharUnits
/// instead of in bits prevents overflowing the uint64_t for some large arrays.
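/// For example, an array of 2^61 chars fits in a 64-bit byte count, but its
/// size in bits (2^64) would not.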
std::pair<CharUnits, CharUnits>
static getConstantArrayInfoInChars(const ASTContext &Context,
const ConstantArrayType *CAT) {
std::pair<CharUnits, CharUnits> EltInfo =
Context.getTypeInfoInChars(CAT->getElementType());
uint64_t Size = CAT->getSize().getZExtValue();
assert((Size == 0 || static_cast<uint64_t>(EltInfo.first.getQuantity()) <=
(uint64_t)(-1)/Size) &&
"Overflow in array type char size evaluation");
uint64_t Width = EltInfo.first.getQuantity() * Size;
unsigned Align = EltInfo.second.getQuantity();
if (!Context.getTargetInfo().getCXXABI().isMicrosoft() ||
Context.getTargetInfo().getPointerWidth(0) == 64)
Width = llvm::alignTo(Width, Align);
return std::make_pair(CharUnits::fromQuantity(Width),
CharUnits::fromQuantity(Align));
}
std::pair<CharUnits, CharUnits>
ASTContext::getTypeInfoInChars(const Type *T) const {
if (const auto *CAT = dyn_cast<ConstantArrayType>(T))
return getConstantArrayInfoInChars(*this, CAT);
TypeInfo Info = getTypeInfo(T);
return std::make_pair(toCharUnitsFromBits(Info.Width),
toCharUnitsFromBits(Info.Align));
}
std::pair<CharUnits, CharUnits>
ASTContext::getTypeInfoInChars(QualType T) const {
return getTypeInfoInChars(T.getTypePtr());
}
bool ASTContext::isAlignmentRequired(const Type *T) const {
return getTypeInfo(T).AlignIsRequired;
}
bool ASTContext::isAlignmentRequired(QualType T) const {
return isAlignmentRequired(T.getTypePtr());
}
unsigned ASTContext::getTypeAlignIfKnown(QualType T) const {
// An alignment on a typedef overrides anything else.
if (const auto *TT = T->getAs<TypedefType>())
if (unsigned Align = TT->getDecl()->getMaxAlignment())
return Align;
// If we have an (array of) complete type, we're done.
T = getBaseElementType(T);
if (!T->isIncompleteType())
return getTypeAlign(T);
// If we had an array type, its element type might be a typedef
// type with an alignment attribute.
if (const auto *TT = T->getAs<TypedefType>())
if (unsigned Align = TT->getDecl()->getMaxAlignment())
return Align;
// Otherwise, see if the declaration of the type had an attribute.
if (const auto *TT = T->getAs<TagType>())
return TT->getDecl()->getMaxAlignment();
return 0;
}
TypeInfo ASTContext::getTypeInfo(const Type *T) const {
TypeInfoMap::iterator I = MemoizedTypeInfo.find(T);
if (I != MemoizedTypeInfo.end())
return I->second;
// This call can invalidate MemoizedTypeInfo[T], so we need a second lookup.
TypeInfo TI = getTypeInfoImpl(T);
MemoizedTypeInfo[T] = TI;
return TI;
}
/// getTypeInfoImpl - Return the size of the specified type, in bits. This
/// method does not work on incomplete types.
///
/// FIXME: Pointers into different addr spaces could have different sizes and
/// alignment requirements: getPointerInfo should take an AddrSpace, this
/// should take a QualType, &c.
TypeInfo ASTContext::getTypeInfoImpl(const Type *T) const {
uint64_t Width = 0;
unsigned Align = 8;
bool AlignIsRequired = false;
unsigned AS = 0;
switch (T->getTypeClass()) {
#define TYPE(Class, Base)
#define ABSTRACT_TYPE(Class, Base)
#define NON_CANONICAL_TYPE(Class, Base)
#define DEPENDENT_TYPE(Class, Base) case Type::Class:
#define NON_CANONICAL_UNLESS_DEPENDENT_TYPE(Class, Base) \
case Type::Class: \
assert(!T->isDependentType() && "should not see dependent types here"); \
return getTypeInfo(cast<Class##Type>(T)->desugar().getTypePtr());
#include "clang/AST/TypeNodes.inc"
llvm_unreachable("Should not see dependent types");
case Type::FunctionNoProto:
case Type::FunctionProto:
// GCC extension: alignof(function) = 32 bits
Width = 0;
Align = 32;
break;
case Type::IncompleteArray:
case Type::VariableArray:
Width = 0;
Align = getTypeAlign(cast<ArrayType>(T)->getElementType());
break;
case Type::ConstantArray: {
const auto *CAT = cast<ConstantArrayType>(T);
TypeInfo EltInfo = getTypeInfo(CAT->getElementType());
uint64_t Size = CAT->getSize().getZExtValue();
assert((Size == 0 || EltInfo.Width <= (uint64_t)(-1) / Size) &&
"Overflow in array type bit size evaluation");
Width = EltInfo.Width * Size;
Align = EltInfo.Align;
if (!getTargetInfo().getCXXABI().isMicrosoft() ||
getTargetInfo().getPointerWidth(0) == 64)
Width = llvm::alignTo(Width, Align);
break;
}
case Type::ExtVector:
case Type::Vector: {
const auto *VT = cast<VectorType>(T);
TypeInfo EltInfo = getTypeInfo(VT->getElementType());
Width = EltInfo.Width * VT->getNumElements();
Align = Width;
// If the alignment is not a power of 2, round up to the next power of 2.
// This happens for non-power-of-2 length vectors.
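    // For example, a vector of three 32-bit floats is 96 bits wide; its
    // alignment is rounded up to 128 bits and its width padded to match.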
if (Align & (Align-1)) {
Align = llvm::NextPowerOf2(Align);
Width = llvm::alignTo(Width, Align);
}
// Adjust the alignment based on the target max.
uint64_t TargetVectorAlign = Target->getMaxVectorAlign();
if (TargetVectorAlign && TargetVectorAlign < Align)
Align = TargetVectorAlign;
break;
}
case Type::Builtin:
switch (cast<BuiltinType>(T)->getKind()) {
default: llvm_unreachable("Unknown builtin type!");
case BuiltinType::Void:
// GCC extension: alignof(void) = 8 bits.
Width = 0;
Align = 8;
break;
case BuiltinType::Bool:
Width = Target->getBoolWidth();
Align = Target->getBoolAlign();
break;
case BuiltinType::Char_S:
case BuiltinType::Char_U:
case BuiltinType::UChar:
case BuiltinType::SChar:
case BuiltinType::Char8:
Width = Target->getCharWidth();
Align = Target->getCharAlign();
break;
case BuiltinType::WChar_S:
case BuiltinType::WChar_U:
Width = Target->getWCharWidth();
Align = Target->getWCharAlign();
break;
case BuiltinType::Char16:
Width = Target->getChar16Width();
Align = Target->getChar16Align();
break;
case BuiltinType::Char32:
Width = Target->getChar32Width();
Align = Target->getChar32Align();
break;
case BuiltinType::UShort:
case BuiltinType::Short:
Width = Target->getShortWidth();
Align = Target->getShortAlign();
break;
case BuiltinType::UInt:
case BuiltinType::Int:
Width = Target->getIntWidth();
Align = Target->getIntAlign();
break;
case BuiltinType::ULong:
case BuiltinType::Long:
Width = Target->getLongWidth();
Align = Target->getLongAlign();
break;
case BuiltinType::ULongLong:
case BuiltinType::LongLong:
Width = Target->getLongLongWidth();
Align = Target->getLongLongAlign();
break;
case BuiltinType::Int128:
case BuiltinType::UInt128:
Width = 128;
Align = 128; // int128_t is 128-bit aligned on all targets.
break;
case BuiltinType::ShortAccum:
case BuiltinType::UShortAccum:
case BuiltinType::SatShortAccum:
case BuiltinType::SatUShortAccum:
Width = Target->getShortAccumWidth();
Align = Target->getShortAccumAlign();
break;
case BuiltinType::Accum:
case BuiltinType::UAccum:
case BuiltinType::SatAccum:
case BuiltinType::SatUAccum:
Width = Target->getAccumWidth();
Align = Target->getAccumAlign();
break;
case BuiltinType::LongAccum:
case BuiltinType::ULongAccum:
case BuiltinType::SatLongAccum:
case BuiltinType::SatULongAccum:
Width = Target->getLongAccumWidth();
Align = Target->getLongAccumAlign();
break;
case BuiltinType::ShortFract:
case BuiltinType::UShortFract:
case BuiltinType::SatShortFract:
case BuiltinType::SatUShortFract:
Width = Target->getShortFractWidth();
Align = Target->getShortFractAlign();
break;
case BuiltinType::Fract:
case BuiltinType::UFract:
case BuiltinType::SatFract:
case BuiltinType::SatUFract:
Width = Target->getFractWidth();
Align = Target->getFractAlign();
break;
case BuiltinType::LongFract:
case BuiltinType::ULongFract:
case BuiltinType::SatLongFract:
case BuiltinType::SatULongFract:
Width = Target->getLongFractWidth();
Align = Target->getLongFractAlign();
break;
case BuiltinType::Float16:
case BuiltinType::Half:
if (Target->hasFloat16Type() || !getLangOpts().OpenMP ||
!getLangOpts().OpenMPIsDevice) {
Width = Target->getHalfWidth();
Align = Target->getHalfAlign();
} else {
assert(getLangOpts().OpenMP && getLangOpts().OpenMPIsDevice &&
"Expected OpenMP device compilation.");
Width = AuxTarget->getHalfWidth();
Align = AuxTarget->getHalfAlign();
}
break;
case BuiltinType::Float:
Width = Target->getFloatWidth();
Align = Target->getFloatAlign();
break;
case BuiltinType::Double:
Width = Target->getDoubleWidth();
Align = Target->getDoubleAlign();
break;
case BuiltinType::LongDouble:
if (getLangOpts().OpenMP && getLangOpts().OpenMPIsDevice &&
(Target->getLongDoubleWidth() != AuxTarget->getLongDoubleWidth() ||
Target->getLongDoubleAlign() != AuxTarget->getLongDoubleAlign())) {
Width = AuxTarget->getLongDoubleWidth();
Align = AuxTarget->getLongDoubleAlign();
} else {
Width = Target->getLongDoubleWidth();
Align = Target->getLongDoubleAlign();
}
break;
case BuiltinType::Float128:
if (Target->hasFloat128Type() || !getLangOpts().OpenMP ||
!getLangOpts().OpenMPIsDevice) {
Width = Target->getFloat128Width();
Align = Target->getFloat128Align();
} else {
assert(getLangOpts().OpenMP && getLangOpts().OpenMPIsDevice &&
"Expected OpenMP device compilation.");
Width = AuxTarget->getFloat128Width();
Align = AuxTarget->getFloat128Align();
}
break;
case BuiltinType::NullPtr:
Width = Target->getPointerWidth(0); // C++ 3.9.1p11: sizeof(nullptr_t)
Align = Target->getPointerAlign(0); // == sizeof(void*)
break;
case BuiltinType::ObjCId:
case BuiltinType::ObjCClass:
case BuiltinType::ObjCSel:
Width = Target->getPointerWidth(0);
Align = Target->getPointerAlign(0);
break;
// Scaffold types
case BuiltinType::Abit:
Width = Target->getAbitWidth();
Align = Target->getAbitAlign();
break;
case BuiltinType::Cbit:
Width = Target->getCbitWidth();
Align = Target->getCbitAlign();
break;
case BuiltinType::Qbit:
Width = Target->getQbitWidth();
Align = Target->getQbitAlign();
break;
case BuiltinType::Qint:
Width = Target->getQintWidth();
Align = Target->getQintAlign();
break;
case BuiltinType::OCLSampler:
case BuiltinType::OCLEvent:
case BuiltinType::OCLClkEvent:
case BuiltinType::OCLQueue:
case BuiltinType::OCLReserveID:
#define IMAGE_TYPE(ImgType, Id, SingletonId, Access, Suffix) \
case BuiltinType::Id:
#include "clang/Basic/OpenCLImageTypes.def"
#define EXT_OPAQUE_TYPE(ExtType, Id, Ext) \
case BuiltinType::Id:
#include "clang/Basic/OpenCLExtensionTypes.def"
AS = getTargetAddressSpace(
Target->getOpenCLTypeAddrSpace(getOpenCLTypeKind(T)));
Width = Target->getPointerWidth(AS);
Align = Target->getPointerAlign(AS);
break;
// The SVE types are effectively target-specific. The length of an
// SVE_VECTOR_TYPE is only known at runtime, but it is always a multiple
// of 128 bits. There is one predicate bit for each vector byte, so the
// length of an SVE_PREDICATE_TYPE is always a multiple of 16 bits.
//
// Because the length is only known at runtime, we use a dummy value
// of 0 for the static length. The alignment values are those defined
// by the Procedure Call Standard for the Arm Architecture.
#define SVE_VECTOR_TYPE(Name, Id, SingletonId, ElKind, ElBits, IsSigned, IsFP)\
case BuiltinType::Id: \
Width = 0; \
Align = 128; \
break;
#define SVE_PREDICATE_TYPE(Name, Id, SingletonId, ElKind) \
case BuiltinType::Id: \
Width = 0; \
Align = 16; \
break;
#include "clang/Basic/AArch64SVEACLETypes.def"
}
break;
case Type::ObjCObjectPointer:
Width = Target->getPointerWidth(0);
Align = Target->getPointerAlign(0);
break;
case Type::BlockPointer:
AS = getTargetAddressSpace(cast<BlockPointerType>(T)->getPointeeType());
Width = Target->getPointerWidth(AS);
Align = Target->getPointerAlign(AS);
break;
case Type::LValueReference:
case Type::RValueReference:
    // alignof and sizeof should never enter this code path here, so we take
    // the pointer route.
AS = getTargetAddressSpace(cast<ReferenceType>(T)->getPointeeType());
Width = Target->getPointerWidth(AS);
Align = Target->getPointerAlign(AS);
break;
case Type::Pointer:
AS = getTargetAddressSpace(cast<PointerType>(T)->getPointeeType());
Width = Target->getPointerWidth(AS);
Align = Target->getPointerAlign(AS);
break;
case Type::MemberPointer: {
const auto *MPT = cast<MemberPointerType>(T);
CXXABI::MemberPointerInfo MPI = ABI->getMemberPointerInfo(MPT);
Width = MPI.Width;
Align = MPI.Align;
break;
}
case Type::Complex: {
// Complex types have the same alignment as their elements, but twice the
// size.
TypeInfo EltInfo = getTypeInfo(cast<ComplexType>(T)->getElementType());
Width = EltInfo.Width * 2;
Align = EltInfo.Align;
break;
}
case Type::ObjCObject:
return getTypeInfo(cast<ObjCObjectType>(T)->getBaseType().getTypePtr());
case Type::Adjusted:
case Type::Decayed:
return getTypeInfo(cast<AdjustedType>(T)->getAdjustedType().getTypePtr());
case Type::ObjCInterface: {
const auto *ObjCI = cast<ObjCInterfaceType>(T);
const ASTRecordLayout &Layout = getASTObjCInterfaceLayout(ObjCI->getDecl());
Width = toBits(Layout.getSize());
Align = toBits(Layout.getAlignment());
break;
}
case Type::Record:
case Type::Enum: {
const auto *TT = cast<TagType>(T);
if (TT->getDecl()->isInvalidDecl()) {
Width = 8;
Align = 8;
break;
}
if (const auto *ET = dyn_cast<EnumType>(TT)) {
const EnumDecl *ED = ET->getDecl();
TypeInfo Info =
getTypeInfo(ED->getIntegerType()->getUnqualifiedDesugaredType());
if (unsigned AttrAlign = ED->getMaxAlignment()) {
Info.Align = AttrAlign;
Info.AlignIsRequired = true;
}
return Info;
}
const auto *RT = cast<RecordType>(TT);
const RecordDecl *RD = RT->getDecl();
const ASTRecordLayout &Layout = getASTRecordLayout(RD);
Width = toBits(Layout.getSize());
Align = toBits(Layout.getAlignment());
AlignIsRequired = RD->hasAttr<AlignedAttr>();
break;
}
case Type::SubstTemplateTypeParm:
return getTypeInfo(cast<SubstTemplateTypeParmType>(T)->
getReplacementType().getTypePtr());
case Type::Auto:
case Type::DeducedTemplateSpecialization: {
const auto *A = cast<DeducedType>(T);
assert(!A->getDeducedType().isNull() &&
"cannot request the size of an undeduced or dependent auto type");
return getTypeInfo(A->getDeducedType().getTypePtr());
}
case Type::Paren:
return getTypeInfo(cast<ParenType>(T)->getInnerType().getTypePtr());
case Type::MacroQualified:
return getTypeInfo(
cast<MacroQualifiedType>(T)->getUnderlyingType().getTypePtr());
case Type::ObjCTypeParam:
return getTypeInfo(cast<ObjCTypeParamType>(T)->desugar().getTypePtr());
case Type::Typedef: {
const TypedefNameDecl *Typedef = cast<TypedefType>(T)->getDecl();
TypeInfo Info = getTypeInfo(Typedef->getUnderlyingType().getTypePtr());
// If the typedef has an aligned attribute on it, it overrides any computed
// alignment we have. This violates the GCC documentation (which says that
// attribute(aligned) can only round up) but matches its implementation.
if (unsigned AttrAlign = Typedef->getMaxAlignment()) {
Align = AttrAlign;
AlignIsRequired = true;
} else {
Align = Info.Align;
AlignIsRequired = Info.AlignIsRequired;
}
Width = Info.Width;
break;
}
case Type::Elaborated:
return getTypeInfo(cast<ElaboratedType>(T)->getNamedType().getTypePtr());
case Type::Attributed:
return getTypeInfo(
cast<AttributedType>(T)->getEquivalentType().getTypePtr());
case Type::Atomic: {
// Start with the base type information.
TypeInfo Info = getTypeInfo(cast<AtomicType>(T)->getValueType());
Width = Info.Width;
Align = Info.Align;
if (!Width) {
// An otherwise zero-sized type should still generate an
// atomic operation.
Width = Target->getCharWidth();
assert(Align);
} else if (Width <= Target->getMaxAtomicPromoteWidth()) {
// If the size of the type doesn't exceed the platform's max
// atomic promotion width, make the size and alignment more
// favorable to atomic operations:
// Round the size up to a power of 2.
if (!llvm::isPowerOf2_64(Width))
Width = llvm::NextPowerOf2(Width);
// Set the alignment equal to the size.
Align = static_cast<unsigned>(Width);
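      // For example, a 24-bit (3-byte) type is widened to 32 bits with 32-bit
      // alignment so it can be handled with a native 4-byte atomic operation.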
}
}
break;
case Type::Pipe:
Width = Target->getPointerWidth(getTargetAddressSpace(LangAS::opencl_global));
Align = Target->getPointerAlign(getTargetAddressSpace(LangAS::opencl_global));
break;
}
assert(llvm::isPowerOf2_32(Align) && "Alignment must be power of 2");
return TypeInfo(Width, Align, AlignIsRequired);
}
unsigned ASTContext::getTypeUnadjustedAlign(const Type *T) const {
UnadjustedAlignMap::iterator I = MemoizedUnadjustedAlign.find(T);
if (I != MemoizedUnadjustedAlign.end())
return I->second;
unsigned UnadjustedAlign;
if (const auto *RT = T->getAs<RecordType>()) {
const RecordDecl *RD = RT->getDecl();
const ASTRecordLayout &Layout = getASTRecordLayout(RD);
UnadjustedAlign = toBits(Layout.getUnadjustedAlignment());
} else if (const auto *ObjCI = T->getAs<ObjCInterfaceType>()) {
const ASTRecordLayout &Layout = getASTObjCInterfaceLayout(ObjCI->getDecl());
UnadjustedAlign = toBits(Layout.getUnadjustedAlignment());
} else {
UnadjustedAlign = getTypeAlign(T->getUnqualifiedDesugaredType());
}
MemoizedUnadjustedAlign[T] = UnadjustedAlign;
return UnadjustedAlign;
}
unsigned ASTContext::getOpenMPDefaultSimdAlign(QualType T) const {
unsigned SimdAlign = getTargetInfo().getSimdDefaultAlign();
// Target ppc64 with QPX: simd default alignment for pointer to double is 32.
if ((getTargetInfo().getTriple().getArch() == llvm::Triple::ppc64 ||
getTargetInfo().getTriple().getArch() == llvm::Triple::ppc64le) &&
getTargetInfo().getABI() == "elfv1-qpx" &&
T->isSpecificBuiltinType(BuiltinType::Double))
SimdAlign = 256;
return SimdAlign;
}
/// toCharUnitsFromBits - Convert a size in bits to a size in characters.
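/// Note that this uses integer division, so a bit size that is not a multiple
/// of the character width is rounded down.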
CharUnits ASTContext::toCharUnitsFromBits(int64_t BitSize) const {
return CharUnits::fromQuantity(BitSize / getCharWidth());
}
/// toBits - Convert a size in characters to a size in bits.
int64_t ASTContext::toBits(CharUnits CharSize) const {
return CharSize.getQuantity() * getCharWidth();
}
/// getTypeSizeInChars - Return the size of the specified type, in characters.
/// This method does not work on incomplete types.
CharUnits ASTContext::getTypeSizeInChars(QualType T) const {
return getTypeInfoInChars(T).first;
}
CharUnits ASTContext::getTypeSizeInChars(const Type *T) const {
return getTypeInfoInChars(T).first;
}
/// getTypeAlignInChars - Return the ABI-specified alignment of a type, in
/// characters. This method does not work on incomplete types.
CharUnits ASTContext::getTypeAlignInChars(QualType T) const {
return toCharUnitsFromBits(getTypeAlign(T));
}
CharUnits ASTContext::getTypeAlignInChars(const Type *T) const {
return toCharUnitsFromBits(getTypeAlign(T));
}
/// getTypeUnadjustedAlignInChars - Return the ABI-specified alignment of a
/// type, in characters, before alignment adjustments. This method does
/// not work on incomplete types.
CharUnits ASTContext::getTypeUnadjustedAlignInChars(QualType T) const {
return toCharUnitsFromBits(getTypeUnadjustedAlign(T));
}
CharUnits ASTContext::getTypeUnadjustedAlignInChars(const Type *T) const {
return toCharUnitsFromBits(getTypeUnadjustedAlign(T));
}
/// getPreferredTypeAlign - Return the "preferred" alignment of the specified
/// type for the current target in bits. This can be different than the ABI
/// alignment in cases where it is beneficial for performance to overalign
/// a data type.
unsigned ASTContext::getPreferredTypeAlign(const Type *T) const {
TypeInfo TI = getTypeInfo(T);
unsigned ABIAlign = TI.Align;
T = T->getBaseElementTypeUnsafe();
// The preferred alignment of member pointers is that of a pointer.
if (T->isMemberPointerType())
return getPreferredTypeAlign(getPointerDiffType().getTypePtr());
if (!Target->allowsLargerPreferedTypeAlignment())
return ABIAlign;
// Double and long long should be naturally aligned if possible.
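  // For example, on 32-bit x86 (System V), the ABI alignment of double is 32
  // bits but the preferred alignment is 64 bits.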
if (const auto *CT = T->getAs<ComplexType>())
T = CT->getElementType().getTypePtr();
if (const auto *ET = T->getAs<EnumType>())
T = ET->getDecl()->getIntegerType().getTypePtr();
if (T->isSpecificBuiltinType(BuiltinType::Double) ||
T->isSpecificBuiltinType(BuiltinType::LongLong) ||
T->isSpecificBuiltinType(BuiltinType::ULongLong))
// Don't increase the alignment if an alignment attribute was specified on a
// typedef declaration.
if (!TI.AlignIsRequired)
return std::max(ABIAlign, (unsigned)getTypeSize(T));
return ABIAlign;
}
/// getTargetDefaultAlignForAttributeAligned - Return the default alignment
/// for __attribute__((aligned)) on this target, to be used if no alignment
/// value is specified.
unsigned ASTContext::getTargetDefaultAlignForAttributeAligned() const {
return getTargetInfo().getDefaultAlignForAttributeAligned();
}
/// getAlignOfGlobalVar - Return the alignment in bits that should be given
/// to a global variable of the specified type.
unsigned ASTContext::getAlignOfGlobalVar(QualType T) const {
uint64_t TypeSize = getTypeSize(T.getTypePtr());
return std::max(getTypeAlign(T), getTargetInfo().getMinGlobalAlign(TypeSize));
}
/// getAlignOfGlobalVarInChars - Return the alignment in characters that
/// should be given to a global variable of the specified type.
CharUnits ASTContext::getAlignOfGlobalVarInChars(QualType T) const {
return toCharUnitsFromBits(getAlignOfGlobalVar(T));
}
CharUnits ASTContext::getOffsetOfBaseWithVBPtr(const CXXRecordDecl *RD) const {
CharUnits Offset = CharUnits::Zero();
const ASTRecordLayout *Layout = &getASTRecordLayout(RD);
while (const CXXRecordDecl *Base = Layout->getBaseSharingVBPtr()) {
Offset += Layout->getBaseClassOffset(Base);
Layout = &getASTRecordLayout(Base);
}
return Offset;
}
/// DeepCollectObjCIvars -
/// This routine first collects all declared, but not synthesized, ivars in
/// the superclass and then collects all ivars, including those synthesized,
/// for the current class. It is used when implementing the current class,
/// where all ivars, declared and synthesized, are known.
void ASTContext::DeepCollectObjCIvars(const ObjCInterfaceDecl *OI,
bool leafClass,
SmallVectorImpl<const ObjCIvarDecl*> &Ivars) const {
if (const ObjCInterfaceDecl *SuperClass = OI->getSuperClass())
DeepCollectObjCIvars(SuperClass, false, Ivars);
if (!leafClass) {
for (const auto *I : OI->ivars())
Ivars.push_back(I);
} else {
auto *IDecl = const_cast<ObjCInterfaceDecl *>(OI);
for (const ObjCIvarDecl *Iv = IDecl->all_declared_ivar_begin(); Iv;
Iv= Iv->getNextIvar())
Ivars.push_back(Iv);
}
}
/// CollectInheritedProtocols - Collect all protocols in current class and
/// those inherited by it.
void ASTContext::CollectInheritedProtocols(const Decl *CDecl,
llvm::SmallPtrSet<ObjCProtocolDecl*, 8> &Protocols) {
if (const auto *OI = dyn_cast<ObjCInterfaceDecl>(CDecl)) {
// We can use protocol_iterator here instead of
// all_referenced_protocol_iterator since we are walking all categories.
for (auto *Proto : OI->all_referenced_protocols()) {
CollectInheritedProtocols(Proto, Protocols);
}
// Categories of this Interface.
for (const auto *Cat : OI->visible_categories())
CollectInheritedProtocols(Cat, Protocols);
if (ObjCInterfaceDecl *SD = OI->getSuperClass())
while (SD) {
CollectInheritedProtocols(SD, Protocols);
SD = SD->getSuperClass();
}
} else if (const auto *OC = dyn_cast<ObjCCategoryDecl>(CDecl)) {
for (auto *Proto : OC->protocols()) {
CollectInheritedProtocols(Proto, Protocols);
}
} else if (const auto *OP = dyn_cast<ObjCProtocolDecl>(CDecl)) {
// Insert the protocol.
if (!Protocols.insert(
const_cast<ObjCProtocolDecl *>(OP->getCanonicalDecl())).second)
return;
for (auto *Proto : OP->protocols())
CollectInheritedProtocols(Proto, Protocols);
}
}
static bool unionHasUniqueObjectRepresentations(const ASTContext &Context,
const RecordDecl *RD) {
assert(RD->isUnion() && "Must be union type");
CharUnits UnionSize = Context.getTypeSizeInChars(RD->getTypeForDecl());
for (const auto *Field : RD->fields()) {
if (!Context.hasUniqueObjectRepresentations(Field->getType()))
return false;
CharUnits FieldSize = Context.getTypeSizeInChars(Field->getType());
if (FieldSize != UnionSize)
return false;
}
return !RD->field_empty();
}
static bool isStructEmpty(QualType Ty) {
const RecordDecl *RD = Ty->castAs<RecordType>()->getDecl();
if (!RD->field_empty())
return false;
if (const auto *ClassDecl = dyn_cast<CXXRecordDecl>(RD))
return ClassDecl->isEmpty();
return true;
}
static llvm::Optional<int64_t>
structHasUniqueObjectRepresentations(const ASTContext &Context,
const RecordDecl *RD) {
assert(!RD->isUnion() && "Must be struct/class type");
const auto &Layout = Context.getASTRecordLayout(RD);
int64_t CurOffsetInBits = 0;
if (const auto *ClassDecl = dyn_cast<CXXRecordDecl>(RD)) {
if (ClassDecl->isDynamicClass())
return llvm::None;
SmallVector<std::pair<QualType, int64_t>, 4> Bases;
for (const auto &Base : ClassDecl->bases()) {
// Empty types can be inherited from, and non-empty types can potentially
// have tail padding, so just make sure there isn't an error.
if (!isStructEmpty(Base.getType())) {
llvm::Optional<int64_t> Size = structHasUniqueObjectRepresentations(
Context, Base.getType()->castAs<RecordType>()->getDecl());
if (!Size)
return llvm::None;
Bases.emplace_back(Base.getType(), Size.getValue());
}
}
llvm::sort(Bases, [&](const std::pair<QualType, int64_t> &L,
const std::pair<QualType, int64_t> &R) {
return Layout.getBaseClassOffset(L.first->getAsCXXRecordDecl()) <
Layout.getBaseClassOffset(R.first->getAsCXXRecordDecl());
});
for (const auto &Base : Bases) {
int64_t BaseOffset = Context.toBits(
Layout.getBaseClassOffset(Base.first->getAsCXXRecordDecl()));
int64_t BaseSize = Base.second;
if (BaseOffset != CurOffsetInBits)
return llvm::None;
CurOffsetInBits = BaseOffset + BaseSize;
}
}
for (const auto *Field : RD->fields()) {
if (!Field->getType()->isReferenceType() &&
!Context.hasUniqueObjectRepresentations(Field->getType()))
return llvm::None;
int64_t FieldSizeInBits =
Context.toBits(Context.getTypeSizeInChars(Field->getType()));
if (Field->isBitField()) {
int64_t BitfieldSize = Field->getBitWidthValue(Context);
if (BitfieldSize > FieldSizeInBits)
return llvm::None;
FieldSizeInBits = BitfieldSize;
}
int64_t FieldOffsetInBits = Context.getFieldOffset(Field);
if (FieldOffsetInBits != CurOffsetInBits)
return llvm::None;
CurOffsetInBits = FieldSizeInBits + FieldOffsetInBits;
}
return CurOffsetInBits;
}
bool ASTContext::hasUniqueObjectRepresentations(QualType Ty) const {
// C++17 [meta.unary.prop]:
// The predicate condition for a template specialization
// has_unique_object_representations<T> shall be
// satisfied if and only if:
// (9.1) - T is trivially copyable, and
// (9.2) - any two objects of type T with the same value have the same
// object representation, where two objects
// of array or non-union class type are considered to have the same value
// if their respective sequences of
// direct subobjects have the same values, and two objects of union type
// are considered to have the same
// value if they have the same active member and the corresponding members
// have the same value.
// The set of scalar types for which this condition holds is
// implementation-defined. [ Note: If a type has padding
// bits, the condition does not hold; otherwise, the condition holds true
// for unsigned integral types. -- end note ]
assert(!Ty.isNull() && "Null QualType sent to unique object rep check");
// Arrays are unique only if their element type is unique.
if (Ty->isArrayType())
return hasUniqueObjectRepresentations(getBaseElementType(Ty));
// (9.1) - T is trivially copyable...
if (!Ty.isTriviallyCopyableType(*this))
return false;
// All integrals and enums are unique.
if (Ty->isIntegralOrEnumerationType())
return true;
// All other pointers are unique.
if (Ty->isPointerType())
return true;
if (Ty->isMemberPointerType()) {
const auto *MPT = Ty->getAs<MemberPointerType>();
return !ABI->getMemberPointerInfo(MPT).HasPadding;
}
if (Ty->isRecordType()) {
const RecordDecl *Record = Ty->castAs<RecordType>()->getDecl();
if (Record->isInvalidDecl())
return false;
if (Record->isUnion())
return unionHasUniqueObjectRepresentations(*this, Record);
Optional<int64_t> StructSize =
structHasUniqueObjectRepresentations(*this, Record);
return StructSize &&
StructSize.getValue() == static_cast<int64_t>(getTypeSize(Ty));
}
// FIXME: More cases to handle here (list by rsmith):
// vectors (careful about, eg, vector of 3 foo)
// _Complex int and friends
// _Atomic T
// Obj-C block pointers
// Obj-C object pointers
// and perhaps OpenCL's various builtin types (pipe, sampler_t, event_t,
// clk_event_t, queue_t, reserve_id_t)
// There're also Obj-C class types and the Obj-C selector type, but I think it
// makes sense for those to return false here.
return false;
}
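// Illustrative sketch (assumes an ASTContext `Ctx` and C++17; the types below
// are hypothetical and padding depends on the target):
//
//   struct Padded { char C; int I; };   // padding bytes follow `C`
//   struct Dense  { int A; int B; };    // typically no padding
//
//   Ctx.hasUniqueObjectRepresentations(PaddedTy);  // false: padding bits vary
//   Ctx.hasUniqueObjectRepresentations(DenseTy);   // usually true
//
// where `PaddedTy` and `DenseTy` are the corresponding RecordType QualTypes.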
unsigned ASTContext::CountNonClassIvars(const ObjCInterfaceDecl *OI) const {
unsigned count = 0;
// Count ivars declared in class extension.
for (const auto *Ext : OI->known_extensions())
count += Ext->ivar_size();
  // Count ivars defined in this class's implementation. This
  // includes synthesized ivars.
if (ObjCImplementationDecl *ImplDecl = OI->getImplementation())
count += ImplDecl->ivar_size();
return count;
}
bool ASTContext::isSentinelNullExpr(const Expr *E) {
if (!E)
return false;
// nullptr_t is always treated as null.
if (E->getType()->isNullPtrType()) return true;
if (E->getType()->isAnyPointerType() &&
E->IgnoreParenCasts()->isNullPointerConstant(*this,
Expr::NPC_ValueDependentIsNull))
return true;
// Unfortunately, __null has type 'int'.
if (isa<GNUNullExpr>(E)) return true;
return false;
}
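// Accepted sentinels, for reference: a `nullptr` literal (nullptr_t), any
// expression of pointer type that is a null pointer constant such as
// `(void *)0`, and GNU `__null`, which has type `int` and therefore needs the
// dedicated GNUNullExpr check above.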
/// Get the implementation of ObjCInterfaceDecl, or nullptr if none
/// exists.
ObjCImplementationDecl *ASTContext::getObjCImplementation(ObjCInterfaceDecl *D) {
llvm::DenseMap<ObjCContainerDecl*, ObjCImplDecl*>::iterator
I = ObjCImpls.find(D);
if (I != ObjCImpls.end())
return cast<ObjCImplementationDecl>(I->second);
return nullptr;
}
/// Get the implementation of ObjCCategoryDecl, or nullptr if none
/// exists.
ObjCCategoryImplDecl *ASTContext::getObjCImplementation(ObjCCategoryDecl *D) {
llvm::DenseMap<ObjCContainerDecl*, ObjCImplDecl*>::iterator
I = ObjCImpls.find(D);
if (I != ObjCImpls.end())
return cast<ObjCCategoryImplDecl>(I->second);
return nullptr;
}
/// Set the implementation of ObjCInterfaceDecl.
void ASTContext::setObjCImplementation(ObjCInterfaceDecl *IFaceD,
ObjCImplementationDecl *ImplD) {
assert(IFaceD && ImplD && "Passed null params");
ObjCImpls[IFaceD] = ImplD;
}
/// Set the implementation of ObjCCategoryDecl.
void ASTContext::setObjCImplementation(ObjCCategoryDecl *CatD,
ObjCCategoryImplDecl *ImplD) {
assert(CatD && ImplD && "Passed null params");
ObjCImpls[CatD] = ImplD;
}
const ObjCMethodDecl *
ASTContext::getObjCMethodRedeclaration(const ObjCMethodDecl *MD) const {
return ObjCMethodRedecls.lookup(MD);
}
void ASTContext::setObjCMethodRedeclaration(const ObjCMethodDecl *MD,
const ObjCMethodDecl *Redecl) {
assert(!getObjCMethodRedeclaration(MD) && "MD already has a redeclaration");
ObjCMethodRedecls[MD] = Redecl;
}
const ObjCInterfaceDecl *ASTContext::getObjContainingInterface(
const NamedDecl *ND) const {
if (const auto *ID = dyn_cast<ObjCInterfaceDecl>(ND->getDeclContext()))
return ID;
if (const auto *CD = dyn_cast<ObjCCategoryDecl>(ND->getDeclContext()))
return CD->getClassInterface();
if (const auto *IMD = dyn_cast<ObjCImplDecl>(ND->getDeclContext()))
return IMD->getClassInterface();
return nullptr;
}
/// Get the copy initialization expression of VarDecl, or nullptr if
/// none exists.
BlockVarCopyInit ASTContext::getBlockVarCopyInit(const VarDecl *VD) const {
assert(VD && "Passed null params");
assert(VD->hasAttr<BlocksAttr>() &&
"getBlockVarCopyInits - not __block var");
auto I = BlockVarCopyInits.find(VD);
if (I != BlockVarCopyInits.end())
return I->second;
return {nullptr, false};
}
/// Set the copy initialization expression of a block var decl.
void ASTContext::setBlockVarCopyInit(const VarDecl*VD, Expr *CopyExpr,
bool CanThrow) {
assert(VD && CopyExpr && "Passed null params");
assert(VD->hasAttr<BlocksAttr>() &&
"setBlockVarCopyInits - not __block var");
BlockVarCopyInits[VD].setExprAndFlag(CopyExpr, CanThrow);
}
TypeSourceInfo *ASTContext::CreateTypeSourceInfo(QualType T,
unsigned DataSize) const {
if (!DataSize)
DataSize = TypeLoc::getFullDataSizeForType(T);
else
assert(DataSize == TypeLoc::getFullDataSizeForType(T) &&
"incorrect data size provided to CreateTypeSourceInfo!");
auto *TInfo =
(TypeSourceInfo*)BumpAlloc.Allocate(sizeof(TypeSourceInfo) + DataSize, 8);
new (TInfo) TypeSourceInfo(T);
return TInfo;
}
TypeSourceInfo *ASTContext::getTrivialTypeSourceInfo(QualType T,
SourceLocation L) const {
TypeSourceInfo *DI = CreateTypeSourceInfo(T);
DI->getTypeLoc().initialize(const_cast<ASTContext &>(*this), L);
return DI;
}
const ASTRecordLayout &
ASTContext::getASTObjCInterfaceLayout(const ObjCInterfaceDecl *D) const {
return getObjCLayout(D, nullptr);
}
const ASTRecordLayout &
ASTContext::getASTObjCImplementationLayout(
const ObjCImplementationDecl *D) const {
return getObjCLayout(D->getClassInterface(), D);
}
//===----------------------------------------------------------------------===//
// Type creation/memoization methods
//===----------------------------------------------------------------------===//
QualType
ASTContext::getExtQualType(const Type *baseType, Qualifiers quals) const {
unsigned fastQuals = quals.getFastQualifiers();
quals.removeFastQualifiers();
// Check if we've already instantiated this type.
llvm::FoldingSetNodeID ID;
ExtQuals::Profile(ID, baseType, quals);
void *insertPos = nullptr;
if (ExtQuals *eq = ExtQualNodes.FindNodeOrInsertPos(ID, insertPos)) {
assert(eq->getQualifiers() == quals);
return QualType(eq, fastQuals);
}
// If the base type is not canonical, make the appropriate canonical type.
QualType canon;
if (!baseType->isCanonicalUnqualified()) {
SplitQualType canonSplit = baseType->getCanonicalTypeInternal().split();
canonSplit.Quals.addConsistentQualifiers(quals);
canon = getExtQualType(canonSplit.Ty, canonSplit.Quals);
// Re-find the insert position.
(void) ExtQualNodes.FindNodeOrInsertPos(ID, insertPos);
}
auto *eq = new (*this, TypeAlignment) ExtQuals(baseType, canon, quals);
ExtQualNodes.InsertNode(eq, insertPos);
return QualType(eq, fastQuals);
}
QualType ASTContext::getAddrSpaceQualType(QualType T,
LangAS AddressSpace) const {
QualType CanT = getCanonicalType(T);
if (CanT.getAddressSpace() == AddressSpace)
return T;
// If we are composing extended qualifiers together, merge together
// into one ExtQuals node.
QualifierCollector Quals;
const Type *TypeNode = Quals.strip(T);
// If this type already has an address space specified, it cannot get
// another one.
assert(!Quals.hasAddressSpace() &&
"Type cannot be in multiple addr spaces!");
Quals.addAddressSpace(AddressSpace);
return getExtQualType(TypeNode, Quals);
}
QualType ASTContext::removeAddrSpaceQualType(QualType T) const {
// If we are composing extended qualifiers together, merge together
// into one ExtQuals node.
QualifierCollector Quals;
const Type *TypeNode = Quals.strip(T);
// If the qualifier doesn't have an address space just return it.
if (!Quals.hasAddressSpace())
return T;
Quals.removeAddressSpace();
// Removal of the address space can mean there are no longer any
// non-fast qualifiers, so creating an ExtQualType isn't possible (asserts)
// or required.
if (Quals.hasNonFastQualifiers())
return getExtQualType(TypeNode, Quals);
else
return QualType(TypeNode, Quals.getFastQualifiers());
}
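// Round-trip sketch (assumes an ASTContext `Ctx`; OpenCL address spaces are
// just one possible LangAS value):
//
//   QualType Global = Ctx.getAddrSpaceQualType(Ctx.IntTy,
//                                              LangAS::opencl_global);
//   QualType Plain  = Ctx.removeAddrSpaceQualType(Global);
//   // Ctx.hasSameType(Plain, Ctx.IntTy) holds; any other qualifiers on the
//   // original type survive the removal.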
QualType ASTContext::getObjCGCQualType(QualType T,
Qualifiers::GC GCAttr) const {
QualType CanT = getCanonicalType(T);
if (CanT.getObjCGCAttr() == GCAttr)
return T;
if (const auto *ptr = T->getAs<PointerType>()) {
QualType Pointee = ptr->getPointeeType();
if (Pointee->isAnyPointerType()) {
QualType ResultType = getObjCGCQualType(Pointee, GCAttr);
return getPointerType(ResultType);
}
}
// If we are composing extended qualifiers together, merge together
// into one ExtQuals node.
QualifierCollector Quals;
const Type *TypeNode = Quals.strip(T);
// If this type already has an ObjCGC specified, it cannot get
// another one.
assert(!Quals.hasObjCGCAttr() &&
"Type cannot have multiple ObjCGCs!");
Quals.addObjCGCAttr(GCAttr);
return getExtQualType(TypeNode, Quals);
}
QualType ASTContext::removePtrSizeAddrSpace(QualType T) const {
if (const PointerType *Ptr = T->getAs<PointerType>()) {
QualType Pointee = Ptr->getPointeeType();
if (isPtrSizeAddressSpace(Pointee.getAddressSpace())) {
return getPointerType(removeAddrSpaceQualType(Pointee));
}
}
return T;
}
const FunctionType *ASTContext::adjustFunctionType(const FunctionType *T,
FunctionType::ExtInfo Info) {
if (T->getExtInfo() == Info)
return T;
QualType Result;
if (const auto *FNPT = dyn_cast<FunctionNoProtoType>(T)) {
Result = getFunctionNoProtoType(FNPT->getReturnType(), Info);
} else {
const auto *FPT = cast<FunctionProtoType>(T);
FunctionProtoType::ExtProtoInfo EPI = FPT->getExtProtoInfo();
EPI.ExtInfo = Info;
Result = getFunctionType(FPT->getReturnType(), FPT->getParamTypes(), EPI);
}
return cast<FunctionType>(Result.getTypePtr());
}
void ASTContext::adjustDeducedFunctionResultType(FunctionDecl *FD,
QualType ResultType) {
FD = FD->getMostRecentDecl();
while (true) {
const auto *FPT = FD->getType()->castAs<FunctionProtoType>();
FunctionProtoType::ExtProtoInfo EPI = FPT->getExtProtoInfo();
FD->setType(getFunctionType(ResultType, FPT->getParamTypes(), EPI));
if (FunctionDecl *Next = FD->getPreviousDecl())
FD = Next;
else
break;
}
if (ASTMutationListener *L = getASTMutationListener())
L->DeducedReturnType(FD, ResultType);
}
/// Get a function type and produce the equivalent function type with the
/// specified exception specification. Type sugar that can be present on a
/// declaration of a function with an exception specification is permitted
/// and preserved. Other type sugar (for instance, typedefs) is not.
QualType ASTContext::getFunctionTypeWithExceptionSpec(
QualType Orig, const FunctionProtoType::ExceptionSpecInfo &ESI) {
// Might have some parens.
if (const auto *PT = dyn_cast<ParenType>(Orig))
return getParenType(
getFunctionTypeWithExceptionSpec(PT->getInnerType(), ESI));
// Might be wrapped in a macro qualified type.
if (const auto *MQT = dyn_cast<MacroQualifiedType>(Orig))
return getMacroQualifiedType(
getFunctionTypeWithExceptionSpec(MQT->getUnderlyingType(), ESI),
MQT->getMacroIdentifier());
// Might have a calling-convention attribute.
if (const auto *AT = dyn_cast<AttributedType>(Orig))
return getAttributedType(
AT->getAttrKind(),
getFunctionTypeWithExceptionSpec(AT->getModifiedType(), ESI),
getFunctionTypeWithExceptionSpec(AT->getEquivalentType(), ESI));
// Anything else must be a function type. Rebuild it with the new exception
// specification.
const auto *Proto = Orig->castAs<FunctionProtoType>();
return getFunctionType(
Proto->getReturnType(), Proto->getParamTypes(),
Proto->getExtProtoInfo().withExceptionSpec(ESI));
}
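// Sketch (assumes an ASTContext `Ctx` and an existing function type `FnTy`,
// e.g. `void () throw()`; not part of this file):
//
//   FunctionProtoType::ExceptionSpecInfo ESI(EST_BasicNoexcept);
//   QualType Noexcept = Ctx.getFunctionTypeWithExceptionSpec(FnTy, ESI);
//
// Parens, macro-qualified sugar and calling-convention attributes wrapped
// around `FnTy` are preserved; typedef sugar is not.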
bool ASTContext::hasSameFunctionTypeIgnoringExceptionSpec(QualType T,
QualType U) {
return hasSameType(T, U) ||
(getLangOpts().CPlusPlus17 &&
hasSameType(getFunctionTypeWithExceptionSpec(T, EST_None),
getFunctionTypeWithExceptionSpec(U, EST_None)));
}
QualType ASTContext::getFunctionTypeWithoutPtrSizes(QualType T) {
if (const auto *Proto = T->getAs<FunctionProtoType>()) {
QualType RetTy = removePtrSizeAddrSpace(Proto->getReturnType());
SmallVector<QualType, 16> Args(Proto->param_types());
for (unsigned i = 0, n = Args.size(); i != n; ++i)
Args[i] = removePtrSizeAddrSpace(Args[i]);
return getFunctionType(RetTy, Args, Proto->getExtProtoInfo());
}
if (const FunctionNoProtoType *Proto = T->getAs<FunctionNoProtoType>()) {
QualType RetTy = removePtrSizeAddrSpace(Proto->getReturnType());
return getFunctionNoProtoType(RetTy, Proto->getExtInfo());
}
return T;
}
bool ASTContext::hasSameFunctionTypeIgnoringPtrSizes(QualType T, QualType U) {
return hasSameType(T, U) ||
hasSameType(getFunctionTypeWithoutPtrSizes(T),
getFunctionTypeWithoutPtrSizes(U));
}
void ASTContext::adjustExceptionSpec(
FunctionDecl *FD, const FunctionProtoType::ExceptionSpecInfo &ESI,
bool AsWritten) {
// Update the type.
QualType Updated =
getFunctionTypeWithExceptionSpec(FD->getType(), ESI);
FD->setType(Updated);
if (!AsWritten)
return;
// Update the type in the type source information too.
if (TypeSourceInfo *TSInfo = FD->getTypeSourceInfo()) {
// If the type and the type-as-written differ, we may need to update
// the type-as-written too.
if (TSInfo->getType() != FD->getType())
Updated = getFunctionTypeWithExceptionSpec(TSInfo->getType(), ESI);
// FIXME: When we get proper type location information for exceptions,
// we'll also have to rebuild the TypeSourceInfo. For now, we just patch
    // up the TypeSourceInfo.
assert(TypeLoc::getFullDataSizeForType(Updated) ==
TypeLoc::getFullDataSizeForType(TSInfo->getType()) &&
"TypeLoc size mismatch from updating exception specification");
TSInfo->overrideType(Updated);
}
}
/// getComplexType - Return the uniqued reference to the type for a complex
/// number with the specified element type.
QualType ASTContext::getComplexType(QualType T) const {
// Unique pointers, to guarantee there is only one pointer of a particular
// structure.
llvm::FoldingSetNodeID ID;
ComplexType::Profile(ID, T);
void *InsertPos = nullptr;
if (ComplexType *CT = ComplexTypes.FindNodeOrInsertPos(ID, InsertPos))
return QualType(CT, 0);
  // If the element type isn't canonical, this won't be a canonical type either,
  // so fill in the canonical type field.
QualType Canonical;
if (!T.isCanonical()) {
Canonical = getComplexType(getCanonicalType(T));
// Get the new insert position for the node we care about.
ComplexType *NewIP = ComplexTypes.FindNodeOrInsertPos(ID, InsertPos);
assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP;
}
auto *New = new (*this, TypeAlignment) ComplexType(T, Canonical);
Types.push_back(New);
ComplexTypes.InsertNode(New, InsertPos);
return QualType(New, 0);
}
/// getPointerType - Return the uniqued reference to the type for a pointer to
/// the specified type.
QualType ASTContext::getPointerType(QualType T) const {
// Unique pointers, to guarantee there is only one pointer of a particular
// structure.
llvm::FoldingSetNodeID ID;
PointerType::Profile(ID, T);
void *InsertPos = nullptr;
if (PointerType *PT = PointerTypes.FindNodeOrInsertPos(ID, InsertPos))
return QualType(PT, 0);
// If the pointee type isn't canonical, this won't be a canonical type either,
// so fill in the canonical type field.
QualType Canonical;
if (!T.isCanonical()) {
Canonical = getPointerType(getCanonicalType(T));
// Get the new insert position for the node we care about.
PointerType *NewIP = PointerTypes.FindNodeOrInsertPos(ID, InsertPos);
assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP;
}
auto *New = new (*this, TypeAlignment) PointerType(T, Canonical);
Types.push_back(New);
PointerTypes.InsertNode(New, InsertPos);
return QualType(New, 0);
}
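// Because pointer types are uniqued through the PointerTypes folding set,
// repeated requests for the same pointee return the same node, so canonical
// types can be compared by pointer identity. Sketch (assumes an ASTContext
// `Ctx`):
//
//   QualType P1 = Ctx.getPointerType(Ctx.IntTy);
//   QualType P2 = Ctx.getPointerType(Ctx.IntTy);
//   assert(P1.getTypePtr() == P2.getTypePtr());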
QualType ASTContext::getAdjustedType(QualType Orig, QualType New) const {
llvm::FoldingSetNodeID ID;
AdjustedType::Profile(ID, Orig, New);
void *InsertPos = nullptr;
AdjustedType *AT = AdjustedTypes.FindNodeOrInsertPos(ID, InsertPos);
if (AT)
return QualType(AT, 0);
QualType Canonical = getCanonicalType(New);
// Get the new insert position for the node we care about.
AT = AdjustedTypes.FindNodeOrInsertPos(ID, InsertPos);
assert(!AT && "Shouldn't be in the map!");
AT = new (*this, TypeAlignment)
AdjustedType(Type::Adjusted, Orig, New, Canonical);
Types.push_back(AT);
AdjustedTypes.InsertNode(AT, InsertPos);
return QualType(AT, 0);
}
QualType ASTContext::getDecayedType(QualType T) const {
assert((T->isArrayType() || T->isFunctionType()) && "T does not decay");
QualType Decayed;
// C99 6.7.5.3p7:
// A declaration of a parameter as "array of type" shall be
// adjusted to "qualified pointer to type", where the type
// qualifiers (if any) are those specified within the [ and ] of
// the array type derivation.
if (T->isArrayType())
Decayed = getArrayDecayedType(T);
// C99 6.7.5.3p8:
// A declaration of a parameter as "function returning type"
// shall be adjusted to "pointer to function returning type", as
// in 6.3.2.1.
if (T->isFunctionType())
Decayed = getPointerType(T);
llvm::FoldingSetNodeID ID;
AdjustedType::Profile(ID, T, Decayed);
void *InsertPos = nullptr;
AdjustedType *AT = AdjustedTypes.FindNodeOrInsertPos(ID, InsertPos);
if (AT)
return QualType(AT, 0);
QualType Canonical = getCanonicalType(Decayed);
// Get the new insert position for the node we care about.
AT = AdjustedTypes.FindNodeOrInsertPos(ID, InsertPos);
assert(!AT && "Shouldn't be in the map!");
AT = new (*this, TypeAlignment) DecayedType(T, Decayed, Canonical);
Types.push_back(AT);
AdjustedTypes.InsertNode(AT, InsertPos);
return QualType(AT, 0);
}
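// Decay sketch (assumes an ASTContext `Ctx`; the bound of 10 is arbitrary):
//
//   QualType Arr = Ctx.getConstantArrayType(Ctx.IntTy, llvm::APInt(32, 10),
//                                           /*SizeExpr=*/nullptr,
//                                           ArrayType::Normal, /*Quals=*/0);
//   QualType Dec = Ctx.getDecayedType(Arr);
//   // Dec is DecayedType sugar whose canonical type is `int *`.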
/// getBlockPointerType - Return the uniqued reference to the type for
/// a pointer to the specified block.
QualType ASTContext::getBlockPointerType(QualType T) const {
assert(T->isFunctionType() && "block of function types only");
// Unique pointers, to guarantee there is only one block of a particular
// structure.
llvm::FoldingSetNodeID ID;
BlockPointerType::Profile(ID, T);
void *InsertPos = nullptr;
if (BlockPointerType *PT =
BlockPointerTypes.FindNodeOrInsertPos(ID, InsertPos))
return QualType(PT, 0);
// If the block pointee type isn't canonical, this won't be a canonical
// type either so fill in the canonical type field.
QualType Canonical;
if (!T.isCanonical()) {
Canonical = getBlockPointerType(getCanonicalType(T));
// Get the new insert position for the node we care about.
BlockPointerType *NewIP =
BlockPointerTypes.FindNodeOrInsertPos(ID, InsertPos);
assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP;
}
auto *New = new (*this, TypeAlignment) BlockPointerType(T, Canonical);
Types.push_back(New);
BlockPointerTypes.InsertNode(New, InsertPos);
return QualType(New, 0);
}
/// getLValueReferenceType - Return the uniqued reference to the type for an
/// lvalue reference to the specified type.
QualType
ASTContext::getLValueReferenceType(QualType T, bool SpelledAsLValue) const {
assert(getCanonicalType(T) != OverloadTy &&
"Unresolved overloaded function type");
// Unique pointers, to guarantee there is only one pointer of a particular
// structure.
llvm::FoldingSetNodeID ID;
ReferenceType::Profile(ID, T, SpelledAsLValue);
void *InsertPos = nullptr;
if (LValueReferenceType *RT =
LValueReferenceTypes.FindNodeOrInsertPos(ID, InsertPos))
return QualType(RT, 0);
const auto *InnerRef = T->getAs<ReferenceType>();
// If the referencee type isn't canonical, this won't be a canonical type
// either, so fill in the canonical type field.
QualType Canonical;
if (!SpelledAsLValue || InnerRef || !T.isCanonical()) {
QualType PointeeType = (InnerRef ? InnerRef->getPointeeType() : T);
Canonical = getLValueReferenceType(getCanonicalType(PointeeType));
// Get the new insert position for the node we care about.
LValueReferenceType *NewIP =
LValueReferenceTypes.FindNodeOrInsertPos(ID, InsertPos);
assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP;
}
auto *New = new (*this, TypeAlignment) LValueReferenceType(T, Canonical,
SpelledAsLValue);
Types.push_back(New);
LValueReferenceTypes.InsertNode(New, InsertPos);
return QualType(New, 0);
}
/// getRValueReferenceType - Return the uniqued reference to the type for an
/// rvalue reference to the specified type.
QualType ASTContext::getRValueReferenceType(QualType T) const {
// Unique pointers, to guarantee there is only one pointer of a particular
// structure.
llvm::FoldingSetNodeID ID;
ReferenceType::Profile(ID, T, false);
void *InsertPos = nullptr;
if (RValueReferenceType *RT =
RValueReferenceTypes.FindNodeOrInsertPos(ID, InsertPos))
return QualType(RT, 0);
const auto *InnerRef = T->getAs<ReferenceType>();
// If the referencee type isn't canonical, this won't be a canonical type
// either, so fill in the canonical type field.
QualType Canonical;
if (InnerRef || !T.isCanonical()) {
QualType PointeeType = (InnerRef ? InnerRef->getPointeeType() : T);
Canonical = getRValueReferenceType(getCanonicalType(PointeeType));
// Get the new insert position for the node we care about.
RValueReferenceType *NewIP =
RValueReferenceTypes.FindNodeOrInsertPos(ID, InsertPos);
assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP;
}
auto *New = new (*this, TypeAlignment) RValueReferenceType(T, Canonical);
Types.push_back(New);
RValueReferenceTypes.InsertNode(New, InsertPos);
return QualType(New, 0);
}
/// getMemberPointerType - Return the uniqued reference to the type for a
/// member pointer to the specified type, in the specified class.
QualType ASTContext::getMemberPointerType(QualType T, const Type *Cls) const {
// Unique pointers, to guarantee there is only one pointer of a particular
// structure.
llvm::FoldingSetNodeID ID;
MemberPointerType::Profile(ID, T, Cls);
void *InsertPos = nullptr;
if (MemberPointerType *PT =
MemberPointerTypes.FindNodeOrInsertPos(ID, InsertPos))
return QualType(PT, 0);
// If the pointee or class type isn't canonical, this won't be a canonical
// type either, so fill in the canonical type field.
QualType Canonical;
if (!T.isCanonical() || !Cls->isCanonicalUnqualified()) {
Canonical = getMemberPointerType(getCanonicalType(T),getCanonicalType(Cls));
// Get the new insert position for the node we care about.
MemberPointerType *NewIP =
MemberPointerTypes.FindNodeOrInsertPos(ID, InsertPos);
assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP;
}
auto *New = new (*this, TypeAlignment) MemberPointerType(T, Cls, Canonical);
Types.push_back(New);
MemberPointerTypes.InsertNode(New, InsertPos);
return QualType(New, 0);
}
/// getConstantArrayType - Return the unique reference to the type for an
/// array of the specified element type.
QualType ASTContext::getConstantArrayType(QualType EltTy,
const llvm::APInt &ArySizeIn,
const Expr *SizeExpr,
ArrayType::ArraySizeModifier ASM,
unsigned IndexTypeQuals) const {
assert((EltTy->isDependentType() ||
EltTy->isIncompleteType() || EltTy->isConstantSizeType()) &&
"Constant array of VLAs is illegal!");
// We only need the size as part of the type if it's instantiation-dependent.
if (SizeExpr && !SizeExpr->isInstantiationDependent())
SizeExpr = nullptr;
// Convert the array size into a canonical width matching the pointer size for
// the target.
llvm::APInt ArySize(ArySizeIn);
ArySize = ArySize.zextOrTrunc(Target->getMaxPointerWidth());
llvm::FoldingSetNodeID ID;
ConstantArrayType::Profile(ID, *this, EltTy, ArySize, SizeExpr, ASM,
IndexTypeQuals);
void *InsertPos = nullptr;
if (ConstantArrayType *ATP =
ConstantArrayTypes.FindNodeOrInsertPos(ID, InsertPos))
return QualType(ATP, 0);
// If the element type isn't canonical or has qualifiers, or the array bound
// is instantiation-dependent, this won't be a canonical type either, so fill
// in the canonical type field.
QualType Canon;
if (!EltTy.isCanonical() || EltTy.hasLocalQualifiers() || SizeExpr) {
SplitQualType canonSplit = getCanonicalType(EltTy).split();
Canon = getConstantArrayType(QualType(canonSplit.Ty, 0), ArySize, nullptr,
ASM, IndexTypeQuals);
Canon = getQualifiedType(Canon, canonSplit.Quals);
// Get the new insert position for the node we care about.
ConstantArrayType *NewIP =
ConstantArrayTypes.FindNodeOrInsertPos(ID, InsertPos);
assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP;
}
void *Mem = Allocate(
ConstantArrayType::totalSizeToAlloc<const Expr *>(SizeExpr ? 1 : 0),
TypeAlignment);
auto *New = new (Mem)
ConstantArrayType(EltTy, Canon, ArySize, SizeExpr, ASM, IndexTypeQuals);
ConstantArrayTypes.InsertNode(New, InsertPos);
Types.push_back(New);
return QualType(New, 0);
}
/// getVariableArrayDecayedType - Turns the given type, which may be
/// variably-modified, into the corresponding type with all the known
/// sizes replaced with [*].
QualType ASTContext::getVariableArrayDecayedType(QualType type) const {
// Vastly most common case.
if (!type->isVariablyModifiedType()) return type;
QualType result;
SplitQualType split = type.getSplitDesugaredType();
const Type *ty = split.Ty;
switch (ty->getTypeClass()) {
#define TYPE(Class, Base)
#define ABSTRACT_TYPE(Class, Base)
#define NON_CANONICAL_TYPE(Class, Base) case Type::Class:
#include "clang/AST/TypeNodes.inc"
llvm_unreachable("didn't desugar past all non-canonical types?");
// These types should never be variably-modified.
case Type::Builtin:
case Type::Complex:
case Type::Vector:
case Type::DependentVector:
case Type::ExtVector:
case Type::DependentSizedExtVector:
case Type::DependentAddressSpace:
case Type::ObjCObject:
case Type::ObjCInterface:
case Type::ObjCObjectPointer:
case Type::Record:
case Type::Enum:
case Type::UnresolvedUsing:
case Type::TypeOfExpr:
case Type::TypeOf:
case Type::Decltype:
case Type::UnaryTransform:
case Type::DependentName:
case Type::InjectedClassName:
case Type::TemplateSpecialization:
case Type::DependentTemplateSpecialization:
case Type::TemplateTypeParm:
case Type::SubstTemplateTypeParmPack:
case Type::Auto:
case Type::DeducedTemplateSpecialization:
case Type::PackExpansion:
llvm_unreachable("type should never be variably-modified");
// These types can be variably-modified but should never need to
// further decay.
case Type::FunctionNoProto:
case Type::FunctionProto:
case Type::BlockPointer:
case Type::MemberPointer:
case Type::Pipe:
return type;
// These types can be variably-modified. All these modifications
// preserve structure except as noted by comments.
// TODO: if we ever care about optimizing VLAs, there are no-op
// optimizations available here.
case Type::Pointer:
result = getPointerType(getVariableArrayDecayedType(
cast<PointerType>(ty)->getPointeeType()));
break;
case Type::LValueReference: {
const auto *lv = cast<LValueReferenceType>(ty);
result = getLValueReferenceType(
getVariableArrayDecayedType(lv->getPointeeType()),
lv->isSpelledAsLValue());
break;
}
case Type::RValueReference: {
const auto *lv = cast<RValueReferenceType>(ty);
result = getRValueReferenceType(
getVariableArrayDecayedType(lv->getPointeeType()));
break;
}
case Type::Atomic: {
const auto *at = cast<AtomicType>(ty);
result = getAtomicType(getVariableArrayDecayedType(at->getValueType()));
break;
}
case Type::ConstantArray: {
const auto *cat = cast<ConstantArrayType>(ty);
result = getConstantArrayType(
getVariableArrayDecayedType(cat->getElementType()),
cat->getSize(),
cat->getSizeExpr(),
cat->getSizeModifier(),
cat->getIndexTypeCVRQualifiers());
break;
}
case Type::DependentSizedArray: {
const auto *dat = cast<DependentSizedArrayType>(ty);
result = getDependentSizedArrayType(
getVariableArrayDecayedType(dat->getElementType()),
dat->getSizeExpr(),
dat->getSizeModifier(),
dat->getIndexTypeCVRQualifiers(),
dat->getBracketsRange());
break;
}
// Turn incomplete types into [*] types.
case Type::IncompleteArray: {
const auto *iat = cast<IncompleteArrayType>(ty);
result = getVariableArrayType(
getVariableArrayDecayedType(iat->getElementType()),
/*size*/ nullptr,
ArrayType::Normal,
iat->getIndexTypeCVRQualifiers(),
SourceRange());
break;
}
// Turn VLA types into [*] types.
case Type::VariableArray: {
const auto *vat = cast<VariableArrayType>(ty);
result = getVariableArrayType(
getVariableArrayDecayedType(vat->getElementType()),
/*size*/ nullptr,
ArrayType::Star,
vat->getIndexTypeCVRQualifiers(),
vat->getBracketsRange());
break;
}
}
// Apply the top-level qualifiers from the original.
return getQualifiedType(result, split.Quals);
}
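// For example, given the type `int (*)[n]` (pointer to a VLA), the decayed
// form is `int (*)[*]`: the surrounding pointer structure is preserved while
// every known VLA bound is replaced with a star-sized bound. Sketch, with
// `VlaPtrTy` standing in for such a type:
//
//   QualType Decayed = Ctx.getVariableArrayDecayedType(VlaPtrTy);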
/// getVariableArrayType - Returns a non-unique reference to the type for a
/// variable array of the specified element type.
QualType ASTContext::getVariableArrayType(QualType EltTy,
Expr *NumElts,
ArrayType::ArraySizeModifier ASM,
unsigned IndexTypeQuals,
SourceRange Brackets) const {
// Since we don't unique expressions, it isn't possible to unique VLA's
// that have an expression provided for their size.
QualType Canon;
// Be sure to pull qualifiers off the element type.
if (!EltTy.isCanonical() || EltTy.hasLocalQualifiers()) {
SplitQualType canonSplit = getCanonicalType(EltTy).split();
Canon = getVariableArrayType(QualType(canonSplit.Ty, 0), NumElts, ASM,
IndexTypeQuals, Brackets);
Canon = getQualifiedType(Canon, canonSplit.Quals);
}
auto *New = new (*this, TypeAlignment)
VariableArrayType(EltTy, Canon, NumElts, ASM, IndexTypeQuals, Brackets);
VariableArrayTypes.push_back(New);
Types.push_back(New);
return QualType(New, 0);
}
/// getDependentSizedArrayType - Returns a non-unique reference to
/// the type for a dependently-sized array of the specified element
/// type.
QualType ASTContext::getDependentSizedArrayType(QualType elementType,
Expr *numElements,
ArrayType::ArraySizeModifier ASM,
unsigned elementTypeQuals,
SourceRange brackets) const {
assert((!numElements || numElements->isTypeDependent() ||
numElements->isValueDependent()) &&
"Size must be type- or value-dependent!");
// Dependently-sized array types that do not have a specified number
// of elements will have their sizes deduced from a dependent
// initializer. We do no canonicalization here at all, which is okay
// because they can't be used in most locations.
if (!numElements) {
auto *newType
= new (*this, TypeAlignment)
DependentSizedArrayType(*this, elementType, QualType(),
numElements, ASM, elementTypeQuals,
brackets);
Types.push_back(newType);
return QualType(newType, 0);
}
// Otherwise, we actually build a new type every time, but we
// also build a canonical type.
SplitQualType canonElementType = getCanonicalType(elementType).split();
void *insertPos = nullptr;
llvm::FoldingSetNodeID ID;
DependentSizedArrayType::Profile(ID, *this,
QualType(canonElementType.Ty, 0),
ASM, elementTypeQuals, numElements);
// Look for an existing type with these properties.
DependentSizedArrayType *canonTy =
DependentSizedArrayTypes.FindNodeOrInsertPos(ID, insertPos);
// If we don't have one, build one.
if (!canonTy) {
canonTy = new (*this, TypeAlignment)
DependentSizedArrayType(*this, QualType(canonElementType.Ty, 0),
QualType(), numElements, ASM, elementTypeQuals,
brackets);
DependentSizedArrayTypes.InsertNode(canonTy, insertPos);
Types.push_back(canonTy);
}
// Apply qualifiers from the element type to the array.
QualType canon = getQualifiedType(QualType(canonTy,0),
canonElementType.Quals);
// If we didn't need extra canonicalization for the element type or the size
// expression, then just use that as our result.
if (QualType(canonElementType.Ty, 0) == elementType &&
canonTy->getSizeExpr() == numElements)
return canon;
// Otherwise, we need to build a type which follows the spelling
// of the element type.
auto *sugaredType
= new (*this, TypeAlignment)
DependentSizedArrayType(*this, elementType, canon, numElements,
ASM, elementTypeQuals, brackets);
Types.push_back(sugaredType);
return QualType(sugaredType, 0);
}
QualType ASTContext::getIncompleteArrayType(QualType elementType,
ArrayType::ArraySizeModifier ASM,
unsigned elementTypeQuals) const {
llvm::FoldingSetNodeID ID;
IncompleteArrayType::Profile(ID, elementType, ASM, elementTypeQuals);
void *insertPos = nullptr;
if (IncompleteArrayType *iat =
IncompleteArrayTypes.FindNodeOrInsertPos(ID, insertPos))
return QualType(iat, 0);
// If the element type isn't canonical, this won't be a canonical type
// either, so fill in the canonical type field. We also have to pull
// qualifiers off the element type.
QualType canon;
if (!elementType.isCanonical() || elementType.hasLocalQualifiers()) {
SplitQualType canonSplit = getCanonicalType(elementType).split();
canon = getIncompleteArrayType(QualType(canonSplit.Ty, 0),
ASM, elementTypeQuals);
canon = getQualifiedType(canon, canonSplit.Quals);
// Get the new insert position for the node we care about.
IncompleteArrayType *existing =
IncompleteArrayTypes.FindNodeOrInsertPos(ID, insertPos);
assert(!existing && "Shouldn't be in the map!"); (void) existing;
}
auto *newType = new (*this, TypeAlignment)
IncompleteArrayType(elementType, canon, ASM, elementTypeQuals);
IncompleteArrayTypes.InsertNode(newType, insertPos);
Types.push_back(newType);
return QualType(newType, 0);
}
/// getVectorType - Return the unique reference to a vector type of
/// the specified element type and size. The element type must be a built-in
/// type.
QualType ASTContext::getVectorType(QualType vecType, unsigned NumElts,
VectorType::VectorKind VecKind) const {
assert(vecType->isBuiltinType());
// Check if we've already instantiated a vector of this type.
llvm::FoldingSetNodeID ID;
VectorType::Profile(ID, vecType, NumElts, Type::Vector, VecKind);
void *InsertPos = nullptr;
if (VectorType *VTP = VectorTypes.FindNodeOrInsertPos(ID, InsertPos))
return QualType(VTP, 0);
// If the element type isn't canonical, this won't be a canonical type either,
// so fill in the canonical type field.
QualType Canonical;
if (!vecType.isCanonical()) {
Canonical = getVectorType(getCanonicalType(vecType), NumElts, VecKind);
// Get the new insert position for the node we care about.
VectorType *NewIP = VectorTypes.FindNodeOrInsertPos(ID, InsertPos);
assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP;
}
auto *New = new (*this, TypeAlignment)
VectorType(vecType, NumElts, Canonical, VecKind);
VectorTypes.InsertNode(New, InsertPos);
Types.push_back(New);
return QualType(New, 0);
}
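// Sketch (assumes an ASTContext `Ctx`): a GCC-style `vector_size` vector of
// four ints could be materialized as
//
//   QualType V4i = Ctx.getVectorType(Ctx.IntTy, /*NumElts=*/4,
//                                    VectorType::GenericVector);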
QualType
ASTContext::getDependentVectorType(QualType VecType, Expr *SizeExpr,
SourceLocation AttrLoc,
VectorType::VectorKind VecKind) const {
llvm::FoldingSetNodeID ID;
DependentVectorType::Profile(ID, *this, getCanonicalType(VecType), SizeExpr,
VecKind);
void *InsertPos = nullptr;
DependentVectorType *Canon =
DependentVectorTypes.FindNodeOrInsertPos(ID, InsertPos);
DependentVectorType *New;
if (Canon) {
New = new (*this, TypeAlignment) DependentVectorType(
*this, VecType, QualType(Canon, 0), SizeExpr, AttrLoc, VecKind);
} else {
QualType CanonVecTy = getCanonicalType(VecType);
if (CanonVecTy == VecType) {
New = new (*this, TypeAlignment) DependentVectorType(
*this, VecType, QualType(), SizeExpr, AttrLoc, VecKind);
DependentVectorType *CanonCheck =
DependentVectorTypes.FindNodeOrInsertPos(ID, InsertPos);
assert(!CanonCheck &&
"Dependent-sized vector_size canonical type broken");
(void)CanonCheck;
DependentVectorTypes.InsertNode(New, InsertPos);
} else {
QualType CanonExtTy = getDependentSizedExtVectorType(CanonVecTy, SizeExpr,
SourceLocation());
New = new (*this, TypeAlignment) DependentVectorType(
*this, VecType, CanonExtTy, SizeExpr, AttrLoc, VecKind);
}
}
Types.push_back(New);
return QualType(New, 0);
}
/// getExtVectorType - Return the unique reference to an extended vector type
/// of the specified element type and size. The element type must be a
/// built-in or dependent type.
QualType
ASTContext::getExtVectorType(QualType vecType, unsigned NumElts) const {
assert(vecType->isBuiltinType() || vecType->isDependentType());
// Check if we've already instantiated a vector of this type.
llvm::FoldingSetNodeID ID;
VectorType::Profile(ID, vecType, NumElts, Type::ExtVector,
VectorType::GenericVector);
void *InsertPos = nullptr;
if (VectorType *VTP = VectorTypes.FindNodeOrInsertPos(ID, InsertPos))
return QualType(VTP, 0);
// If the element type isn't canonical, this won't be a canonical type either,
// so fill in the canonical type field.
QualType Canonical;
if (!vecType.isCanonical()) {
Canonical = getExtVectorType(getCanonicalType(vecType), NumElts);
// Get the new insert position for the node we care about.
VectorType *NewIP = VectorTypes.FindNodeOrInsertPos(ID, InsertPos);
assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP;
}
auto *New = new (*this, TypeAlignment)
ExtVectorType(vecType, NumElts, Canonical);
VectorTypes.InsertNode(New, InsertPos);
Types.push_back(New);
return QualType(New, 0);
}
QualType
ASTContext::getDependentSizedExtVectorType(QualType vecType,
Expr *SizeExpr,
SourceLocation AttrLoc) const {
llvm::FoldingSetNodeID ID;
DependentSizedExtVectorType::Profile(ID, *this, getCanonicalType(vecType),
SizeExpr);
void *InsertPos = nullptr;
DependentSizedExtVectorType *Canon
= DependentSizedExtVectorTypes.FindNodeOrInsertPos(ID, InsertPos);
DependentSizedExtVectorType *New;
if (Canon) {
// We already have a canonical version of this array type; use it as
// the canonical type for a newly-built type.
New = new (*this, TypeAlignment)
DependentSizedExtVectorType(*this, vecType, QualType(Canon, 0),
SizeExpr, AttrLoc);
} else {
QualType CanonVecTy = getCanonicalType(vecType);
if (CanonVecTy == vecType) {
New = new (*this, TypeAlignment)
DependentSizedExtVectorType(*this, vecType, QualType(), SizeExpr,
AttrLoc);
DependentSizedExtVectorType *CanonCheck
= DependentSizedExtVectorTypes.FindNodeOrInsertPos(ID, InsertPos);
assert(!CanonCheck && "Dependent-sized ext_vector canonical type broken");
(void)CanonCheck;
DependentSizedExtVectorTypes.InsertNode(New, InsertPos);
} else {
QualType CanonExtTy = getDependentSizedExtVectorType(CanonVecTy, SizeExpr,
SourceLocation());
New = new (*this, TypeAlignment) DependentSizedExtVectorType(
*this, vecType, CanonExtTy, SizeExpr, AttrLoc);
}
}
Types.push_back(New);
return QualType(New, 0);
}
QualType ASTContext::getDependentAddressSpaceType(QualType PointeeType,
Expr *AddrSpaceExpr,
SourceLocation AttrLoc) const {
assert(AddrSpaceExpr->isInstantiationDependent());
QualType canonPointeeType = getCanonicalType(PointeeType);
void *insertPos = nullptr;
llvm::FoldingSetNodeID ID;
DependentAddressSpaceType::Profile(ID, *this, canonPointeeType,
AddrSpaceExpr);
DependentAddressSpaceType *canonTy =
DependentAddressSpaceTypes.FindNodeOrInsertPos(ID, insertPos);
if (!canonTy) {
canonTy = new (*this, TypeAlignment)
DependentAddressSpaceType(*this, canonPointeeType,
QualType(), AddrSpaceExpr, AttrLoc);
DependentAddressSpaceTypes.InsertNode(canonTy, insertPos);
Types.push_back(canonTy);
}
if (canonPointeeType == PointeeType &&
canonTy->getAddrSpaceExpr() == AddrSpaceExpr)
return QualType(canonTy, 0);
auto *sugaredType
= new (*this, TypeAlignment)
DependentAddressSpaceType(*this, PointeeType, QualType(canonTy, 0),
AddrSpaceExpr, AttrLoc);
Types.push_back(sugaredType);
return QualType(sugaredType, 0);
}
/// Determine whether \p T is canonical as the result type of a function.
static bool isCanonicalResultType(QualType T) {
return T.isCanonical() &&
(T.getObjCLifetime() == Qualifiers::OCL_None ||
T.getObjCLifetime() == Qualifiers::OCL_ExplicitNone);
}
/// getFunctionNoProtoType - Return a K&R style C function type like 'int()'.
QualType
ASTContext::getFunctionNoProtoType(QualType ResultTy,
const FunctionType::ExtInfo &Info) const {
// Unique functions, to guarantee there is only one function of a particular
// structure.
llvm::FoldingSetNodeID ID;
FunctionNoProtoType::Profile(ID, ResultTy, Info);
void *InsertPos = nullptr;
if (FunctionNoProtoType *FT =
FunctionNoProtoTypes.FindNodeOrInsertPos(ID, InsertPos))
return QualType(FT, 0);
QualType Canonical;
if (!isCanonicalResultType(ResultTy)) {
Canonical =
getFunctionNoProtoType(getCanonicalFunctionResultType(ResultTy), Info);
// Get the new insert position for the node we care about.
FunctionNoProtoType *NewIP =
FunctionNoProtoTypes.FindNodeOrInsertPos(ID, InsertPos);
assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP;
}
auto *New = new (*this, TypeAlignment)
FunctionNoProtoType(ResultTy, Canonical, Info);
Types.push_back(New);
FunctionNoProtoTypes.InsertNode(New, InsertPos);
return QualType(New, 0);
}
CanQualType
ASTContext::getCanonicalFunctionResultType(QualType ResultType) const {
CanQualType CanResultType = getCanonicalType(ResultType);
// Canonical result types do not have ARC lifetime qualifiers.
if (CanResultType.getQualifiers().hasObjCLifetime()) {
Qualifiers Qs = CanResultType.getQualifiers();
Qs.removeObjCLifetime();
return CanQualType::CreateUnsafe(
getQualifiedType(CanResultType.getUnqualifiedType(), Qs));
}
return CanResultType;
}
static bool isCanonicalExceptionSpecification(
const FunctionProtoType::ExceptionSpecInfo &ESI, bool NoexceptInType) {
if (ESI.Type == EST_None)
return true;
if (!NoexceptInType)
return false;
// C++17 onwards: exception specification is part of the type, as a simple
// boolean "can this function type throw".
if (ESI.Type == EST_BasicNoexcept)
return true;
// A noexcept(expr) specification is (possibly) canonical if expr is
// value-dependent.
if (ESI.Type == EST_DependentNoexcept)
return true;
// A dynamic exception specification is canonical if it only contains pack
// expansions (so we can't tell whether it's non-throwing) and all its
// contained types are canonical.
if (ESI.Type == EST_Dynamic) {
bool AnyPackExpansions = false;
for (QualType ET : ESI.Exceptions) {
if (!ET.isCanonical())
return false;
if (ET->getAs<PackExpansionType>())
AnyPackExpansions = true;
}
return AnyPackExpansions;
}
return false;
}
QualType ASTContext::getFunctionTypeInternal(
QualType ResultTy, ArrayRef<QualType> ArgArray,
const FunctionProtoType::ExtProtoInfo &EPI, bool OnlyWantCanonical) const {
size_t NumArgs = ArgArray.size();
// Unique functions, to guarantee there is only one function of a particular
// structure.
llvm::FoldingSetNodeID ID;
FunctionProtoType::Profile(ID, ResultTy, ArgArray.begin(), NumArgs, EPI,
*this, true);
QualType Canonical;
bool Unique = false;
void *InsertPos = nullptr;
if (FunctionProtoType *FPT =
FunctionProtoTypes.FindNodeOrInsertPos(ID, InsertPos)) {
QualType Existing = QualType(FPT, 0);
// If we find a pre-existing equivalent FunctionProtoType, we can just reuse
// it so long as our exception specification doesn't contain a dependent
// noexcept expression, or we're just looking for a canonical type.
// Otherwise, we're going to need to create a type
// sugar node to hold the concrete expression.
if (OnlyWantCanonical || !isComputedNoexcept(EPI.ExceptionSpec.Type) ||
EPI.ExceptionSpec.NoexceptExpr == FPT->getNoexceptExpr())
return Existing;
// We need a new type sugar node for this one, to hold the new noexcept
// expression. We do no canonicalization here, but that's OK since we don't
// expect to see the same noexcept expression much more than once.
Canonical = getCanonicalType(Existing);
Unique = true;
}
bool NoexceptInType = getLangOpts().CPlusPlus17;
bool IsCanonicalExceptionSpec =
isCanonicalExceptionSpecification(EPI.ExceptionSpec, NoexceptInType);
// Determine whether the type being created is already canonical or not.
bool isCanonical = !Unique && IsCanonicalExceptionSpec &&
isCanonicalResultType(ResultTy) && !EPI.HasTrailingReturn;
for (unsigned i = 0; i != NumArgs && isCanonical; ++i)
if (!ArgArray[i].isCanonicalAsParam())
isCanonical = false;
if (OnlyWantCanonical)
assert(isCanonical &&
"given non-canonical parameters constructing canonical type");
// If this type isn't canonical, get the canonical version of it if we don't
// already have it. The exception spec is only partially part of the
// canonical type, and only in C++17 onwards.
if (!isCanonical && Canonical.isNull()) {
SmallVector<QualType, 16> CanonicalArgs;
CanonicalArgs.reserve(NumArgs);
for (unsigned i = 0; i != NumArgs; ++i)
CanonicalArgs.push_back(getCanonicalParamType(ArgArray[i]));
llvm::SmallVector<QualType, 8> ExceptionTypeStorage;
FunctionProtoType::ExtProtoInfo CanonicalEPI = EPI;
CanonicalEPI.HasTrailingReturn = false;
if (IsCanonicalExceptionSpec) {
// Exception spec is already OK.
} else if (NoexceptInType) {
switch (EPI.ExceptionSpec.Type) {
case EST_Unparsed: case EST_Unevaluated: case EST_Uninstantiated:
// We don't know yet. It shouldn't matter what we pick here; no-one
// should ever look at this.
LLVM_FALLTHROUGH;
case EST_None: case EST_MSAny: case EST_NoexceptFalse:
CanonicalEPI.ExceptionSpec.Type = EST_None;
break;
// A dynamic exception specification is almost always "not noexcept",
// with the exception that a pack expansion might expand to no types.
case EST_Dynamic: {
bool AnyPacks = false;
for (QualType ET : EPI.ExceptionSpec.Exceptions) {
if (ET->getAs<PackExpansionType>())
AnyPacks = true;
ExceptionTypeStorage.push_back(getCanonicalType(ET));
}
if (!AnyPacks)
CanonicalEPI.ExceptionSpec.Type = EST_None;
else {
CanonicalEPI.ExceptionSpec.Type = EST_Dynamic;
CanonicalEPI.ExceptionSpec.Exceptions = ExceptionTypeStorage;
}
break;
}
case EST_DynamicNone:
case EST_BasicNoexcept:
case EST_NoexceptTrue:
case EST_NoThrow:
CanonicalEPI.ExceptionSpec.Type = EST_BasicNoexcept;
break;
case EST_DependentNoexcept:
llvm_unreachable("dependent noexcept is already canonical");
}
} else {
CanonicalEPI.ExceptionSpec = FunctionProtoType::ExceptionSpecInfo();
}
// Adjust the canonical function result type.
CanQualType CanResultTy = getCanonicalFunctionResultType(ResultTy);
Canonical =
getFunctionTypeInternal(CanResultTy, CanonicalArgs, CanonicalEPI, true);
// Get the new insert position for the node we care about.
FunctionProtoType *NewIP =
FunctionProtoTypes.FindNodeOrInsertPos(ID, InsertPos);
assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP;
}
// Compute the needed size to hold this FunctionProtoType and the
// various trailing objects.
auto ESH = FunctionProtoType::getExceptionSpecSize(
EPI.ExceptionSpec.Type, EPI.ExceptionSpec.Exceptions.size());
size_t Size = FunctionProtoType::totalSizeToAlloc<
QualType, SourceLocation, FunctionType::FunctionTypeExtraBitfields,
FunctionType::ExceptionType, Expr *, FunctionDecl *,
FunctionProtoType::ExtParameterInfo, Qualifiers>(
NumArgs, EPI.Variadic,
FunctionProtoType::hasExtraBitfields(EPI.ExceptionSpec.Type),
ESH.NumExceptionType, ESH.NumExprPtr, ESH.NumFunctionDeclPtr,
EPI.ExtParameterInfos ? NumArgs : 0,
EPI.TypeQuals.hasNonFastQualifiers() ? 1 : 0);
auto *FTP = (FunctionProtoType *)Allocate(Size, TypeAlignment);
FunctionProtoType::ExtProtoInfo newEPI = EPI;
new (FTP) FunctionProtoType(ResultTy, ArgArray, Canonical, newEPI);
Types.push_back(FTP);
if (!Unique)
FunctionProtoTypes.InsertNode(FTP, InsertPos);
return QualType(FTP, 0);
}
QualType ASTContext::getPipeType(QualType T, bool ReadOnly) const {
llvm::FoldingSetNodeID ID;
PipeType::Profile(ID, T, ReadOnly);
void *InsertPos = nullptr;
if (PipeType *PT = PipeTypes.FindNodeOrInsertPos(ID, InsertPos))
    return QualType(PT, 0);
  // If the pipe element type isn't canonical, this won't be a canonical type
  // either, so fill in the canonical type field.
QualType Canonical;
if (!T.isCanonical()) {
Canonical = getPipeType(getCanonicalType(T), ReadOnly);
// Get the new insert position for the node we care about.
PipeType *NewIP = PipeTypes.FindNodeOrInsertPos(ID, InsertPos);
assert(!NewIP && "Shouldn't be in the map!");
(void)NewIP;
}
auto *New = new (*this, TypeAlignment) PipeType(T, Canonical, ReadOnly);
Types.push_back(New);
PipeTypes.InsertNode(New, InsertPos);
return QualType(New, 0);
}
QualType ASTContext::adjustStringLiteralBaseType(QualType Ty) const {
// OpenCL v1.1 s6.5.3: a string literal is in the constant address space.
return LangOpts.OpenCL ? getAddrSpaceQualType(Ty, LangAS::opencl_constant)
: Ty;
}
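// Sketch: under OpenCL the element type of a string literal picks up the
// constant address space; in all other language modes the type is returned
// unchanged (assumes an ASTContext `Ctx`):
//
//   QualType EltTy = Ctx.adjustStringLiteralBaseType(Ctx.CharTy.withConst());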
QualType ASTContext::getReadPipeType(QualType T) const {
return getPipeType(T, true);
}
QualType ASTContext::getWritePipeType(QualType T) const {
return getPipeType(T, false);
}
#ifndef NDEBUG
static bool NeedsInjectedClassNameType(const RecordDecl *D) {
if (!isa<CXXRecordDecl>(D)) return false;
const auto *RD = cast<CXXRecordDecl>(D);
if (isa<ClassTemplatePartialSpecializationDecl>(RD))
return true;
if (RD->getDescribedClassTemplate() &&
!isa<ClassTemplateSpecializationDecl>(RD))
return true;
return false;
}
#endif
/// getInjectedClassNameType - Return the unique reference to the
/// injected class name type for the specified templated declaration.
QualType ASTContext::getInjectedClassNameType(CXXRecordDecl *Decl,
QualType TST) const {
assert(NeedsInjectedClassNameType(Decl));
if (Decl->TypeForDecl) {
assert(isa<InjectedClassNameType>(Decl->TypeForDecl));
} else if (CXXRecordDecl *PrevDecl = Decl->getPreviousDecl()) {
assert(PrevDecl->TypeForDecl && "previous declaration has no type");
Decl->TypeForDecl = PrevDecl->TypeForDecl;
assert(isa<InjectedClassNameType>(Decl->TypeForDecl));
} else {
Type *newType =
new (*this, TypeAlignment) InjectedClassNameType(Decl, TST);
Decl->TypeForDecl = newType;
Types.push_back(newType);
}
return QualType(Decl->TypeForDecl, 0);
}
/// getTypeDeclType - Return the unique reference to the type for the
/// specified type declaration.
QualType ASTContext::getTypeDeclTypeSlow(const TypeDecl *Decl) const {
assert(Decl && "Passed null for Decl param");
assert(!Decl->TypeForDecl && "TypeForDecl present in slow case");
if (const auto *Typedef = dyn_cast<TypedefNameDecl>(Decl))
return getTypedefType(Typedef);
assert(!isa<TemplateTypeParmDecl>(Decl) &&
"Template type parameter types are always available.");
if (const auto *Record = dyn_cast<RecordDecl>(Decl)) {
assert(Record->isFirstDecl() && "struct/union has previous declaration");
assert(!NeedsInjectedClassNameType(Record));
return getRecordType(Record);
} else if (const auto *Enum = dyn_cast<EnumDecl>(Decl)) {
assert(Enum->isFirstDecl() && "enum has previous declaration");
return getEnumType(Enum);
} else if (const auto *Using = dyn_cast<UnresolvedUsingTypenameDecl>(Decl)) {
Type *newType = new (*this, TypeAlignment) UnresolvedUsingType(Using);
Decl->TypeForDecl = newType;
Types.push_back(newType);
} else
llvm_unreachable("TypeDecl without a type?");
return QualType(Decl->TypeForDecl, 0);
}
/// getTypedefType - Return the unique reference to the type for the
/// specified typedef name decl.
QualType
ASTContext::getTypedefType(const TypedefNameDecl *Decl,
QualType Canonical) const {
if (Decl->TypeForDecl) return QualType(Decl->TypeForDecl, 0);
if (Canonical.isNull())
Canonical = getCanonicalType(Decl->getUnderlyingType());
auto *newType = new (*this, TypeAlignment)
TypedefType(Type::Typedef, Decl, Canonical);
Decl->TypeForDecl = newType;
Types.push_back(newType);
return QualType(newType, 0);
}
QualType ASTContext::getRecordType(const RecordDecl *Decl) const {
if (Decl->TypeForDecl) return QualType(Decl->TypeForDecl, 0);
if (const RecordDecl *PrevDecl = Decl->getPreviousDecl())
if (PrevDecl->TypeForDecl)
return QualType(Decl->TypeForDecl = PrevDecl->TypeForDecl, 0);
auto *newType = new (*this, TypeAlignment) RecordType(Decl);
Decl->TypeForDecl = newType;
Types.push_back(newType);
return QualType(newType, 0);
}
QualType ASTContext::getEnumType(const EnumDecl *Decl) const {
if (Decl->TypeForDecl) return QualType(Decl->TypeForDecl, 0);
if (const EnumDecl *PrevDecl = Decl->getPreviousDecl())
if (PrevDecl->TypeForDecl)
return QualType(Decl->TypeForDecl = PrevDecl->TypeForDecl, 0);
auto *newType = new (*this, TypeAlignment) EnumType(Decl);
Decl->TypeForDecl = newType;
Types.push_back(newType);
return QualType(newType, 0);
}
QualType ASTContext::getAttributedType(attr::Kind attrKind,
QualType modifiedType,
QualType equivalentType) {
llvm::FoldingSetNodeID id;
AttributedType::Profile(id, attrKind, modifiedType, equivalentType);
void *insertPos = nullptr;
AttributedType *type = AttributedTypes.FindNodeOrInsertPos(id, insertPos);
if (type) return QualType(type, 0);
QualType canon = getCanonicalType(equivalentType);
type = new (*this, TypeAlignment)
AttributedType(canon, attrKind, modifiedType, equivalentType);
Types.push_back(type);
AttributedTypes.InsertNode(type, insertPos);
return QualType(type, 0);
}
/// Retrieve a substitution-result type.
QualType
ASTContext::getSubstTemplateTypeParmType(const TemplateTypeParmType *Parm,
QualType Replacement) const {
assert(Replacement.isCanonical()
&& "replacement types must always be canonical");
llvm::FoldingSetNodeID ID;
SubstTemplateTypeParmType::Profile(ID, Parm, Replacement);
void *InsertPos = nullptr;
SubstTemplateTypeParmType *SubstParm
= SubstTemplateTypeParmTypes.FindNodeOrInsertPos(ID, InsertPos);
if (!SubstParm) {
SubstParm = new (*this, TypeAlignment)
SubstTemplateTypeParmType(Parm, Replacement);
Types.push_back(SubstParm);
SubstTemplateTypeParmTypes.InsertNode(SubstParm, InsertPos);
}
return QualType(SubstParm, 0);
}
/// Retrieve a substitution-result type for a template type parameter pack
/// that has been replaced by the given template argument pack.
QualType ASTContext::getSubstTemplateTypeParmPackType(
const TemplateTypeParmType *Parm,
const TemplateArgument &ArgPack) {
#ifndef NDEBUG
for (const auto &P : ArgPack.pack_elements()) {
assert(P.getKind() == TemplateArgument::Type && "Pack contains a non-type");
assert(P.getAsType().isCanonical() && "Pack contains non-canonical type");
}
#endif
llvm::FoldingSetNodeID ID;
SubstTemplateTypeParmPackType::Profile(ID, Parm, ArgPack);
void *InsertPos = nullptr;
if (SubstTemplateTypeParmPackType *SubstParm
= SubstTemplateTypeParmPackTypes.FindNodeOrInsertPos(ID, InsertPos))
return QualType(SubstParm, 0);
QualType Canon;
if (!Parm->isCanonicalUnqualified()) {
Canon = getCanonicalType(QualType(Parm, 0));
Canon = getSubstTemplateTypeParmPackType(cast<TemplateTypeParmType>(Canon),
ArgPack);
SubstTemplateTypeParmPackTypes.FindNodeOrInsertPos(ID, InsertPos);
}
auto *SubstParm
= new (*this, TypeAlignment) SubstTemplateTypeParmPackType(Parm, Canon,
ArgPack);
Types.push_back(SubstParm);
SubstTemplateTypeParmPackTypes.InsertNode(SubstParm, InsertPos);
return QualType(SubstParm, 0);
}
/// Retrieve the template type parameter type for a template
/// parameter or parameter pack with the given depth, index, and (optionally)
/// name.
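/// For example (illustrative): in
///   template<typename T> struct A { template<typename U> void f(); };
/// T is the parameter at depth 0, index 0 and U is at depth 1, index 0; two
/// requests with the same depth, index, and pack flag yield the same uniqued
/// type node.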
QualType ASTContext::getTemplateTypeParmType(unsigned Depth, unsigned Index,
bool ParameterPack,
TemplateTypeParmDecl *TTPDecl) const {
llvm::FoldingSetNodeID ID;
TemplateTypeParmType::Profile(ID, Depth, Index, ParameterPack, TTPDecl);
void *InsertPos = nullptr;
TemplateTypeParmType *TypeParm
= TemplateTypeParmTypes.FindNodeOrInsertPos(ID, InsertPos);
if (TypeParm)
return QualType(TypeParm, 0);
if (TTPDecl) {
QualType Canon = getTemplateTypeParmType(Depth, Index, ParameterPack);
TypeParm = new (*this, TypeAlignment) TemplateTypeParmType(TTPDecl, Canon);
TemplateTypeParmType *TypeCheck
= TemplateTypeParmTypes.FindNodeOrInsertPos(ID, InsertPos);
assert(!TypeCheck && "Template type parameter canonical type broken");
(void)TypeCheck;
} else
TypeParm = new (*this, TypeAlignment)
TemplateTypeParmType(Depth, Index, ParameterPack);
Types.push_back(TypeParm);
TemplateTypeParmTypes.InsertNode(TypeParm, InsertPos);
return QualType(TypeParm, 0);
}
TypeSourceInfo *
ASTContext::getTemplateSpecializationTypeInfo(TemplateName Name,
SourceLocation NameLoc,
const TemplateArgumentListInfo &Args,
QualType Underlying) const {
assert(!Name.getAsDependentTemplateName() &&
"No dependent template names here!");
QualType TST = getTemplateSpecializationType(Name, Args, Underlying);
TypeSourceInfo *DI = CreateTypeSourceInfo(TST);
TemplateSpecializationTypeLoc TL =
DI->getTypeLoc().castAs<TemplateSpecializationTypeLoc>();
TL.setTemplateKeywordLoc(SourceLocation());
TL.setTemplateNameLoc(NameLoc);
TL.setLAngleLoc(Args.getLAngleLoc());
TL.setRAngleLoc(Args.getRAngleLoc());
for (unsigned i = 0, e = TL.getNumArgs(); i != e; ++i)
TL.setArgLocInfo(i, Args[i].getLocInfo());
return DI;
}
QualType
ASTContext::getTemplateSpecializationType(TemplateName Template,
const TemplateArgumentListInfo &Args,
QualType Underlying) const {
assert(!Template.getAsDependentTemplateName() &&
"No dependent template names here!");
SmallVector<TemplateArgument, 4> ArgVec;
ArgVec.reserve(Args.size());
for (const TemplateArgumentLoc &Arg : Args.arguments())
ArgVec.push_back(Arg.getArgument());
return getTemplateSpecializationType(Template, ArgVec, Underlying);
}
#ifndef NDEBUG
static bool hasAnyPackExpansions(ArrayRef<TemplateArgument> Args) {
for (const TemplateArgument &Arg : Args)
if (Arg.isPackExpansion())
return true;
return false;
}
#endif
QualType
ASTContext::getTemplateSpecializationType(TemplateName Template,
ArrayRef<TemplateArgument> Args,
QualType Underlying) const {
assert(!Template.getAsDependentTemplateName() &&
"No dependent template names here!");
// Look through qualified template names.
if (QualifiedTemplateName *QTN = Template.getAsQualifiedTemplateName())
Template = TemplateName(QTN->getTemplateDecl());
bool IsTypeAlias =
Template.getAsTemplateDecl() &&
isa<TypeAliasTemplateDecl>(Template.getAsTemplateDecl());
QualType CanonType;
if (!Underlying.isNull())
CanonType = getCanonicalType(Underlying);
else {
// We can get here with an alias template when the specialization contains
// a pack expansion that does not match up with a parameter pack.
assert((!IsTypeAlias || hasAnyPackExpansions(Args)) &&
"Caller must compute aliased type");
IsTypeAlias = false;
CanonType = getCanonicalTemplateSpecializationType(Template, Args);
}
// Allocate the (non-canonical) template specialization type, but don't
// try to unique it: these types typically have location information that
// we don't unique and don't want to lose.
void *Mem = Allocate(sizeof(TemplateSpecializationType) +
sizeof(TemplateArgument) * Args.size() +
(IsTypeAlias? sizeof(QualType) : 0),
TypeAlignment);
auto *Spec
= new (Mem) TemplateSpecializationType(Template, Args, CanonType,
IsTypeAlias ? Underlying : QualType());
Types.push_back(Spec);
return QualType(Spec, 0);
}
QualType ASTContext::getCanonicalTemplateSpecializationType(
TemplateName Template, ArrayRef<TemplateArgument> Args) const {
assert(!Template.getAsDependentTemplateName() &&
"No dependent template names here!");
// Look through qualified template names.
if (QualifiedTemplateName *QTN = Template.getAsQualifiedTemplateName())
Template = TemplateName(QTN->getTemplateDecl());
// Build the canonical template specialization type.
TemplateName CanonTemplate = getCanonicalTemplateName(Template);
SmallVector<TemplateArgument, 4> CanonArgs;
unsigned NumArgs = Args.size();
CanonArgs.reserve(NumArgs);
for (const TemplateArgument &Arg : Args)
CanonArgs.push_back(getCanonicalTemplateArgument(Arg));
// Determine whether this canonical template specialization type already
// exists.
llvm::FoldingSetNodeID ID;
TemplateSpecializationType::Profile(ID, CanonTemplate,
CanonArgs, *this);
void *InsertPos = nullptr;
TemplateSpecializationType *Spec
= TemplateSpecializationTypes.FindNodeOrInsertPos(ID, InsertPos);
if (!Spec) {
// Allocate a new canonical template specialization type.
void *Mem = Allocate((sizeof(TemplateSpecializationType) +
sizeof(TemplateArgument) * NumArgs),
TypeAlignment);
Spec = new (Mem) TemplateSpecializationType(CanonTemplate,
CanonArgs,
QualType(), QualType());
Types.push_back(Spec);
TemplateSpecializationTypes.InsertNode(Spec, InsertPos);
}
assert(Spec->isDependentType() &&
"Non-dependent template-id type must have a canonical type");
return QualType(Spec, 0);
}
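/// getElaboratedType - Return an ElaboratedType wrapping 'NamedType' with an
/// elaboration keyword and/or nested-name-specifier. For example
/// (illustrative), the written types 'struct S' or 'ns::S' are sugar over the
/// underlying named type; the canonical type is that of 'NamedType' itself.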
QualType ASTContext::getElaboratedType(ElaboratedTypeKeyword Keyword,
NestedNameSpecifier *NNS,
QualType NamedType,
TagDecl *OwnedTagDecl) const {
llvm::FoldingSetNodeID ID;
ElaboratedType::Profile(ID, Keyword, NNS, NamedType, OwnedTagDecl);
void *InsertPos = nullptr;
ElaboratedType *T = ElaboratedTypes.FindNodeOrInsertPos(ID, InsertPos);
if (T)
return QualType(T, 0);
QualType Canon = NamedType;
if (!Canon.isCanonical()) {
Canon = getCanonicalType(NamedType);
ElaboratedType *CheckT = ElaboratedTypes.FindNodeOrInsertPos(ID, InsertPos);
assert(!CheckT && "Elaborated canonical type broken");
(void)CheckT;
}
void *Mem = Allocate(ElaboratedType::totalSizeToAlloc<TagDecl *>(!!OwnedTagDecl),
TypeAlignment);
T = new (Mem) ElaboratedType(Keyword, NNS, NamedType, Canon, OwnedTagDecl);
Types.push_back(T);
ElaboratedTypes.InsertNode(T, InsertPos);
return QualType(T, 0);
}
QualType
ASTContext::getParenType(QualType InnerType) const {
llvm::FoldingSetNodeID ID;
ParenType::Profile(ID, InnerType);
void *InsertPos = nullptr;
ParenType *T = ParenTypes.FindNodeOrInsertPos(ID, InsertPos);
if (T)
return QualType(T, 0);
QualType Canon = InnerType;
if (!Canon.isCanonical()) {
Canon = getCanonicalType(InnerType);
ParenType *CheckT = ParenTypes.FindNodeOrInsertPos(ID, InsertPos);
assert(!CheckT && "Paren canonical type broken");
(void)CheckT;
}
T = new (*this, TypeAlignment) ParenType(InnerType, Canon);
Types.push_back(T);
ParenTypes.InsertNode(T, InsertPos);
return QualType(T, 0);
}
QualType
ASTContext::getMacroQualifiedType(QualType UnderlyingTy,
const IdentifierInfo *MacroII) const {
QualType Canon = UnderlyingTy;
if (!Canon.isCanonical())
Canon = getCanonicalType(UnderlyingTy);
auto *newType = new (*this, TypeAlignment)
MacroQualifiedType(UnderlyingTy, Canon, MacroII);
Types.push_back(newType);
return QualType(newType, 0);
}
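/// getDependentNameType - Return a type for a dependent name. For example
/// (illustrative), the written type 'typename T::type' is represented with
/// Keyword = ETK_Typename, NNS = 'T::', and Name = 'type'; the canonical form
/// uses the canonical nested-name-specifier.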
QualType ASTContext::getDependentNameType(ElaboratedTypeKeyword Keyword,
NestedNameSpecifier *NNS,
const IdentifierInfo *Name,
QualType Canon) const {
if (Canon.isNull()) {
NestedNameSpecifier *CanonNNS = getCanonicalNestedNameSpecifier(NNS);
if (CanonNNS != NNS)
Canon = getDependentNameType(Keyword, CanonNNS, Name);
}
llvm::FoldingSetNodeID ID;
DependentNameType::Profile(ID, Keyword, NNS, Name);
void *InsertPos = nullptr;
DependentNameType *T
= DependentNameTypes.FindNodeOrInsertPos(ID, InsertPos);
if (T)
return QualType(T, 0);
T = new (*this, TypeAlignment) DependentNameType(Keyword, NNS, Name, Canon);
Types.push_back(T);
DependentNameTypes.InsertNode(T, InsertPos);
return QualType(T, 0);
}
QualType
ASTContext::getDependentTemplateSpecializationType(
ElaboratedTypeKeyword Keyword,
NestedNameSpecifier *NNS,
const IdentifierInfo *Name,
const TemplateArgumentListInfo &Args) const {
// TODO: avoid this copy
SmallVector<TemplateArgument, 16> ArgCopy;
for (unsigned I = 0, E = Args.size(); I != E; ++I)
ArgCopy.push_back(Args[I].getArgument());
return getDependentTemplateSpecializationType(Keyword, NNS, Name, ArgCopy);
}
QualType
ASTContext::getDependentTemplateSpecializationType(
ElaboratedTypeKeyword Keyword,
NestedNameSpecifier *NNS,
const IdentifierInfo *Name,
ArrayRef<TemplateArgument> Args) const {
assert((!NNS || NNS->isDependent()) &&
"nested-name-specifier must be dependent");
llvm::FoldingSetNodeID ID;
DependentTemplateSpecializationType::Profile(ID, *this, Keyword, NNS,
Name, Args);
void *InsertPos = nullptr;
DependentTemplateSpecializationType *T
= DependentTemplateSpecializationTypes.FindNodeOrInsertPos(ID, InsertPos);
if (T)
return QualType(T, 0);
NestedNameSpecifier *CanonNNS = getCanonicalNestedNameSpecifier(NNS);
ElaboratedTypeKeyword CanonKeyword = Keyword;
if (Keyword == ETK_None) CanonKeyword = ETK_Typename;
bool AnyNonCanonArgs = false;
unsigned NumArgs = Args.size();
SmallVector<TemplateArgument, 16> CanonArgs(NumArgs);
for (unsigned I = 0; I != NumArgs; ++I) {
CanonArgs[I] = getCanonicalTemplateArgument(Args[I]);
if (!CanonArgs[I].structurallyEquals(Args[I]))
AnyNonCanonArgs = true;
}
QualType Canon;
if (AnyNonCanonArgs || CanonNNS != NNS || CanonKeyword != Keyword) {
Canon = getDependentTemplateSpecializationType(CanonKeyword, CanonNNS,
Name,
CanonArgs);
// Find the insert position again.
DependentTemplateSpecializationTypes.FindNodeOrInsertPos(ID, InsertPos);
}
void *Mem = Allocate((sizeof(DependentTemplateSpecializationType) +
sizeof(TemplateArgument) * NumArgs),
TypeAlignment);
T = new (Mem) DependentTemplateSpecializationType(Keyword, NNS,
Name, Args, Canon);
Types.push_back(T);
DependentTemplateSpecializationTypes.InsertNode(T, InsertPos);
return QualType(T, 0);
}
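/// getInjectedTemplateArg - Build the template argument that refers directly
/// to the given template parameter. For example (illustrative), for the
/// parameter list '<typename T, int N, template<typename> class TT>' this
/// produces the type argument 'T', an expression argument naming 'N', and a
/// template argument naming 'TT'; parameter packs are wrapped as pack
/// expansions.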
TemplateArgument ASTContext::getInjectedTemplateArg(NamedDecl *Param) {
TemplateArgument Arg;
if (const auto *TTP = dyn_cast<TemplateTypeParmDecl>(Param)) {
QualType ArgType = getTypeDeclType(TTP);
if (TTP->isParameterPack())
ArgType = getPackExpansionType(ArgType, None);
Arg = TemplateArgument(ArgType);
} else if (auto *NTTP = dyn_cast<NonTypeTemplateParmDecl>(Param)) {
Expr *E = new (*this) DeclRefExpr(
*this, NTTP, /*enclosing*/ false,
NTTP->getType().getNonLValueExprType(*this),
Expr::getValueKindForType(NTTP->getType()), NTTP->getLocation());
if (NTTP->isParameterPack())
E = new (*this) PackExpansionExpr(DependentTy, E, NTTP->getLocation(),
None);
Arg = TemplateArgument(E);
} else {
auto *TTP = cast<TemplateTemplateParmDecl>(Param);
if (TTP->isParameterPack())
Arg = TemplateArgument(TemplateName(TTP), Optional<unsigned>());
else
Arg = TemplateArgument(TemplateName(TTP));
}
if (Param->isTemplateParameterPack())
Arg = TemplateArgument::CreatePackCopy(*this, Arg);
return Arg;
}
void
ASTContext::getInjectedTemplateArgs(const TemplateParameterList *Params,
SmallVectorImpl<TemplateArgument> &Args) {
Args.reserve(Args.size() + Params->size());
for (NamedDecl *Param : *Params)
Args.push_back(getInjectedTemplateArg(Param));
}
QualType ASTContext::getPackExpansionType(QualType Pattern,
Optional<unsigned> NumExpansions) {
llvm::FoldingSetNodeID ID;
PackExpansionType::Profile(ID, Pattern, NumExpansions);
// A deduced type can deduce to a pack, e.g.
// auto ...x = some_pack;
// That declaration isn't (yet) valid, but is created as part of building an
// init-capture pack:
// [...x = some_pack] {}
assert((Pattern->containsUnexpandedParameterPack() ||
Pattern->getContainedDeducedType()) &&
"Pack expansions must expand one or more parameter packs");
void *InsertPos = nullptr;
PackExpansionType *T
= PackExpansionTypes.FindNodeOrInsertPos(ID, InsertPos);
if (T)
return QualType(T, 0);
QualType Canon;
if (!Pattern.isCanonical()) {
Canon = getCanonicalType(Pattern);
// The canonical type might not contain an unexpanded parameter pack, if it
// contains an alias template specialization which ignores one of its
// parameters.
if (Canon->containsUnexpandedParameterPack()) {
Canon = getPackExpansionType(Canon, NumExpansions);
// Find the insert position again, in case we inserted an element into
// PackExpansionTypes and invalidated our insert position.
PackExpansionTypes.FindNodeOrInsertPos(ID, InsertPos);
}
}
T = new (*this, TypeAlignment)
PackExpansionType(Pattern, Canon, NumExpansions);
Types.push_back(T);
PackExpansionTypes.InsertNode(T, InsertPos);
return QualType(T, 0);
}
/// CmpProtocolNames - Comparison predicate for sorting protocols
/// alphabetically.
static int CmpProtocolNames(ObjCProtocolDecl *const *LHS,
ObjCProtocolDecl *const *RHS) {
return DeclarationName::compare((*LHS)->getDeclName(), (*RHS)->getDeclName());
}
static bool areSortedAndUniqued(ArrayRef<ObjCProtocolDecl *> Protocols) {
if (Protocols.empty()) return true;
if (Protocols[0]->getCanonicalDecl() != Protocols[0])
return false;
for (unsigned i = 1; i != Protocols.size(); ++i)
if (CmpProtocolNames(&Protocols[i - 1], &Protocols[i]) >= 0 ||
Protocols[i]->getCanonicalDecl() != Protocols[i])
return false;
return true;
}
static void
SortAndUniqueProtocols(SmallVectorImpl<ObjCProtocolDecl *> &Protocols) {
// Sort protocols, keyed by name.
llvm::array_pod_sort(Protocols.begin(), Protocols.end(), CmpProtocolNames);
// Canonicalize.
for (ObjCProtocolDecl *&P : Protocols)
P = P->getCanonicalDecl();
// Remove duplicates.
auto ProtocolsEnd = std::unique(Protocols.begin(), Protocols.end());
Protocols.erase(ProtocolsEnd, Protocols.end());
}
QualType ASTContext::getObjCObjectType(QualType BaseType,
ObjCProtocolDecl * const *Protocols,
unsigned NumProtocols) const {
return getObjCObjectType(BaseType, {},
llvm::makeArrayRef(Protocols, NumProtocols),
/*isKindOf=*/false);
}
QualType ASTContext::getObjCObjectType(
QualType baseType,
ArrayRef<QualType> typeArgs,
ArrayRef<ObjCProtocolDecl *> protocols,
bool isKindOf) const {
// If the base type is an interface and there aren't any protocols or
// type arguments to add, then the interface type will do just fine.
if (typeArgs.empty() && protocols.empty() && !isKindOf &&
isa<ObjCInterfaceType>(baseType))
return baseType;
// Look in the folding set for an existing type.
llvm::FoldingSetNodeID ID;
ObjCObjectTypeImpl::Profile(ID, baseType, typeArgs, protocols, isKindOf);
void *InsertPos = nullptr;
if (ObjCObjectType *QT = ObjCObjectTypes.FindNodeOrInsertPos(ID, InsertPos))
return QualType(QT, 0);
// Determine the type arguments to be used for canonicalization,
// which may be explicitly specified here or written on the base
// type.
ArrayRef<QualType> effectiveTypeArgs = typeArgs;
if (effectiveTypeArgs.empty()) {
if (const auto *baseObject = baseType->getAs<ObjCObjectType>())
effectiveTypeArgs = baseObject->getTypeArgs();
}
// Build the canonical type, which has the canonical base type and a
// sorted-and-uniqued list of protocols and the type arguments
// canonicalized.
QualType canonical;
bool typeArgsAreCanonical = std::all_of(effectiveTypeArgs.begin(),
effectiveTypeArgs.end(),
[&](QualType type) {
return type.isCanonical();
});
bool protocolsSorted = areSortedAndUniqued(protocols);
if (!typeArgsAreCanonical || !protocolsSorted || !baseType.isCanonical()) {
// Determine the canonical type arguments.
ArrayRef<QualType> canonTypeArgs;
SmallVector<QualType, 4> canonTypeArgsVec;
if (!typeArgsAreCanonical) {
canonTypeArgsVec.reserve(effectiveTypeArgs.size());
for (auto typeArg : effectiveTypeArgs)
canonTypeArgsVec.push_back(getCanonicalType(typeArg));
canonTypeArgs = canonTypeArgsVec;
} else {
canonTypeArgs = effectiveTypeArgs;
}
ArrayRef<ObjCProtocolDecl *> canonProtocols;
SmallVector<ObjCProtocolDecl*, 8> canonProtocolsVec;
if (!protocolsSorted) {
canonProtocolsVec.append(protocols.begin(), protocols.end());
SortAndUniqueProtocols(canonProtocolsVec);
canonProtocols = canonProtocolsVec;
} else {
canonProtocols = protocols;
}
canonical = getObjCObjectType(getCanonicalType(baseType), canonTypeArgs,
canonProtocols, isKindOf);
// Regenerate InsertPos.
ObjCObjectTypes.FindNodeOrInsertPos(ID, InsertPos);
}
unsigned size = sizeof(ObjCObjectTypeImpl);
size += typeArgs.size() * sizeof(QualType);
size += protocols.size() * sizeof(ObjCProtocolDecl *);
void *mem = Allocate(size, TypeAlignment);
auto *T =
new (mem) ObjCObjectTypeImpl(canonical, baseType, typeArgs, protocols,
isKindOf);
Types.push_back(T);
ObjCObjectTypes.InsertNode(T, InsertPos);
return QualType(T, 0);
}
/// Apply Objective-C protocol qualifiers to the given type.
/// If this is for the canonical type of a type parameter, we can apply
/// protocol qualifiers on the ObjCObjectPointerType.
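/// For example (illustrative): applying the protocol 'NSCopying' to 'id'
/// produces 'id<NSCopying>', i.e. an ObjCObjectPointerType whose pointee is
/// an ObjCObjectType carrying that protocol; applying protocols to a type
/// that cannot be protocol-qualified sets 'hasError' instead.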
QualType
ASTContext::applyObjCProtocolQualifiers(QualType type,
ArrayRef<ObjCProtocolDecl *> protocols, bool &hasError,
bool allowOnPointerType) const {
hasError = false;
if (const auto *objT = dyn_cast<ObjCTypeParamType>(type.getTypePtr())) {
return getObjCTypeParamType(objT->getDecl(), protocols);
}
// Apply protocol qualifiers to ObjCObjectPointerType.
if (allowOnPointerType) {
if (const auto *objPtr =
dyn_cast<ObjCObjectPointerType>(type.getTypePtr())) {
const ObjCObjectType *objT = objPtr->getObjectType();
// Merge protocol lists and construct ObjCObjectType.
SmallVector<ObjCProtocolDecl*, 8> protocolsVec;
protocolsVec.append(objT->qual_begin(),
objT->qual_end());
protocolsVec.append(protocols.begin(), protocols.end());
ArrayRef<ObjCProtocolDecl *> protocols = protocolsVec;
type = getObjCObjectType(
objT->getBaseType(),
objT->getTypeArgsAsWritten(),
protocols,
objT->isKindOfTypeAsWritten());
return getObjCObjectPointerType(type);
}
}
// Apply protocol qualifiers to ObjCObjectType.
if (const auto *objT = dyn_cast<ObjCObjectType>(type.getTypePtr())){
// FIXME: Check for protocols to which the class type is already
// known to conform.
return getObjCObjectType(objT->getBaseType(),
objT->getTypeArgsAsWritten(),
protocols,
objT->isKindOfTypeAsWritten());
}
// If the canonical type is ObjCObjectType, ...
if (type->isObjCObjectType()) {
// Silently overwrite any existing protocol qualifiers.
// TODO: determine whether that's the right thing to do.
// FIXME: Check for protocols to which the class type is already
// known to conform.
return getObjCObjectType(type, {}, protocols, false);
}
// id<protocol-list>
if (type->isObjCIdType()) {
const auto *objPtr = type->castAs<ObjCObjectPointerType>();
type = getObjCObjectType(ObjCBuiltinIdTy, {}, protocols,
objPtr->isKindOfType());
return getObjCObjectPointerType(type);
}
// Class<protocol-list>
if (type->isObjCClassType()) {
const auto *objPtr = type->castAs<ObjCObjectPointerType>();
type = getObjCObjectType(ObjCBuiltinClassTy, {}, protocols,
objPtr->isKindOfType());
return getObjCObjectPointerType(type);
}
hasError = true;
return type;
}
QualType
ASTContext::getObjCTypeParamType(const ObjCTypeParamDecl *Decl,
ArrayRef<ObjCProtocolDecl *> protocols) const {
// Look in the folding set for an existing type.
llvm::FoldingSetNodeID ID;
ObjCTypeParamType::Profile(ID, Decl, protocols);
void *InsertPos = nullptr;
if (ObjCTypeParamType *TypeParam =
ObjCTypeParamTypes.FindNodeOrInsertPos(ID, InsertPos))
return QualType(TypeParam, 0);
// We canonicalize to the underlying type.
QualType Canonical = getCanonicalType(Decl->getUnderlyingType());
if (!protocols.empty()) {
// Apply the protocol qualifiers.
bool hasError;
Canonical = getCanonicalType(applyObjCProtocolQualifiers(
Canonical, protocols, hasError, true /*allowOnPointerType*/));
assert(!hasError && "Error when applying protocol qualifiers to the bound type");
}
unsigned size = sizeof(ObjCTypeParamType);
size += protocols.size() * sizeof(ObjCProtocolDecl *);
void *mem = Allocate(size, TypeAlignment);
auto *newType = new (mem) ObjCTypeParamType(Decl, Canonical, protocols);
Types.push_back(newType);
ObjCTypeParamTypes.InsertNode(newType, InsertPos);
return QualType(newType, 0);
}
/// ObjCObjectAdoptsQTypeProtocols - Checks that protocols in IC's
/// protocol list adopt all protocols in QT's qualified-id protocol
/// list.
bool ASTContext::ObjCObjectAdoptsQTypeProtocols(QualType QT,
ObjCInterfaceDecl *IC) {
if (!QT->isObjCQualifiedIdType())
return false;
if (const auto *OPT = QT->getAs<ObjCObjectPointerType>()) {
// Check that the interface implements every protocol in QT's qualifier list.
for (auto *Proto : OPT->quals()) {
if (!IC->ClassImplementsProtocol(Proto, false))
return false;
}
return true;
}
return false;
}
/// QIdProtocolsAdoptObjCObjectProtocols - Checks that protocols in
/// QT's qualified-id protocol list adopt all protocols in IDecl's list
/// of protocols.
bool ASTContext::QIdProtocolsAdoptObjCObjectProtocols(QualType QT,
ObjCInterfaceDecl *IDecl) {
if (!QT->isObjCQualifiedIdType())
return false;
const auto *OPT = QT->getAs<ObjCObjectPointerType>();
if (!OPT)
return false;
if (!IDecl->hasDefinition())
return false;
llvm::SmallPtrSet<ObjCProtocolDecl *, 8> InheritedProtocols;
CollectInheritedProtocols(IDecl, InheritedProtocols);
if (InheritedProtocols.empty())
return false;
// If every protocol in the id<plist> list conforms to some protocol of
// IDecl's, then bridge casting is OK.
bool Conforms = false;
for (auto *Proto : OPT->quals()) {
Conforms = false;
for (auto *PI : InheritedProtocols) {
if (ProtocolCompatibleWithProtocol(Proto, PI)) {
Conforms = true;
break;
}
}
if (!Conforms)
break;
}
if (Conforms)
return true;
for (auto *PI : InheritedProtocols) {
// Otherwise, require that each of IDecl's inherited protocols is adopted by
// some protocol in QT's qualifier list.
bool Adopts = false;
for (auto *Proto : OPT->quals()) {
// return 'true' if 'PI' is in the inheritance hierarchy of Proto
if ((Adopts = ProtocolCompatibleWithProtocol(PI, Proto)))
break;
}
if (!Adopts)
return false;
}
return true;
}
/// getObjCObjectPointerType - Return a ObjCObjectPointerType type for
/// the given object type.
QualType ASTContext::getObjCObjectPointerType(QualType ObjectT) const {
llvm::FoldingSetNodeID ID;
ObjCObjectPointerType::Profile(ID, ObjectT);
void *InsertPos = nullptr;
if (ObjCObjectPointerType *QT =
ObjCObjectPointerTypes.FindNodeOrInsertPos(ID, InsertPos))
return QualType(QT, 0);
// Find the canonical object type.
QualType Canonical;
if (!ObjectT.isCanonical()) {
Canonical = getObjCObjectPointerType(getCanonicalType(ObjectT));
// Regenerate InsertPos.
ObjCObjectPointerTypes.FindNodeOrInsertPos(ID, InsertPos);
}
// No match.
void *Mem = Allocate(sizeof(ObjCObjectPointerType), TypeAlignment);
auto *QType =
new (Mem) ObjCObjectPointerType(Canonical, ObjectT);
Types.push_back(QType);
ObjCObjectPointerTypes.InsertNode(QType, InsertPos);
return QualType(QType, 0);
}
/// getObjCInterfaceType - Return the unique reference to the type for the
/// specified ObjC interface decl. An optional previous declaration may be
/// supplied so that both declarations share the same TypeForDecl.
QualType ASTContext::getObjCInterfaceType(const ObjCInterfaceDecl *Decl,
ObjCInterfaceDecl *PrevDecl) const {
if (Decl->TypeForDecl)
return QualType(Decl->TypeForDecl, 0);
if (PrevDecl) {
assert(PrevDecl->TypeForDecl && "previous decl has no TypeForDecl");
Decl->TypeForDecl = PrevDecl->TypeForDecl;
return QualType(PrevDecl->TypeForDecl, 0);
}
// Prefer the definition, if there is one.
if (const ObjCInterfaceDecl *Def = Decl->getDefinition())
Decl = Def;
void *Mem = Allocate(sizeof(ObjCInterfaceType), TypeAlignment);
auto *T = new (Mem) ObjCInterfaceType(Decl);
Decl->TypeForDecl = T;
Types.push_back(T);
return QualType(T, 0);
}
/// getTypeOfExprType - Unlike many "get<Type>" functions, we can't unique
/// TypeOfExprType ASTs (since expressions are never shared). For example,
/// multiple declarations that refer to "typeof(x)" all contain different
/// DeclRefExprs. This doesn't affect the type checker, since it operates
/// on canonical types (which are always unique).
QualType ASTContext::getTypeOfExprType(Expr *tofExpr) const {
TypeOfExprType *toe;
if (tofExpr->isTypeDependent()) {
llvm::FoldingSetNodeID ID;
DependentTypeOfExprType::Profile(ID, *this, tofExpr);
void *InsertPos = nullptr;
DependentTypeOfExprType *Canon
= DependentTypeOfExprTypes.FindNodeOrInsertPos(ID, InsertPos);
if (Canon) {
// We already have a "canonical" version of an identical, dependent
// typeof(expr) type. Use that as our canonical type.
toe = new (*this, TypeAlignment) TypeOfExprType(tofExpr,
QualType((TypeOfExprType*)Canon, 0));
} else {
// Build a new, canonical typeof(expr) type.
Canon
= new (*this, TypeAlignment) DependentTypeOfExprType(*this, tofExpr);
DependentTypeOfExprTypes.InsertNode(Canon, InsertPos);
toe = Canon;
}
} else {
QualType Canonical = getCanonicalType(tofExpr->getType());
toe = new (*this, TypeAlignment) TypeOfExprType(tofExpr, Canonical);
}
Types.push_back(toe);
return QualType(toe, 0);
}
/// getTypeOfType - Unlike many "get<Type>" functions, we don't unique
/// TypeOfType nodes. The only motivation to unique these nodes would be
/// memory savings. Since typeof(t) is fairly uncommon, space shouldn't be
/// an issue. This doesn't affect the type checker, since it operates
/// on canonical types (which are always unique).
QualType ASTContext::getTypeOfType(QualType tofType) const {
QualType Canonical = getCanonicalType(tofType);
auto *tot = new (*this, TypeAlignment) TypeOfType(tofType, Canonical);
Types.push_back(tot);
return QualType(tot, 0);
}
/// Unlike many "get<Type>" functions, we don't unique DecltypeType
/// nodes. This would never be helpful, since each such type has its own
/// expression, and would not give a significant memory saving, since there
/// is an Expr tree under each such type.
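/// For example (illustrative): inside 'template<typename T> void g(T a);',
/// two occurrences of 'decltype(a)' denote the same dependent type, so both
/// DecltypeType nodes share a single canonical DependentDecltypeType found
/// via the folding set below.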
QualType ASTContext::getDecltypeType(Expr *e, QualType UnderlyingType) const {
DecltypeType *dt;
// C++11 [temp.type]p2:
// If an expression e involves a template parameter, decltype(e) denotes a
// unique dependent type. Two such decltype-specifiers refer to the same
// type only if their expressions are equivalent (14.5.6.1).
if (e->isInstantiationDependent()) {
llvm::FoldingSetNodeID ID;
DependentDecltypeType::Profile(ID, *this, e);
void *InsertPos = nullptr;
DependentDecltypeType *Canon
= DependentDecltypeTypes.FindNodeOrInsertPos(ID, InsertPos);
if (!Canon) {
// Build a new, canonical decltype(expr) type.
Canon = new (*this, TypeAlignment) DependentDecltypeType(*this, e);
DependentDecltypeTypes.InsertNode(Canon, InsertPos);
}
dt = new (*this, TypeAlignment)
DecltypeType(e, UnderlyingType, QualType((DecltypeType *)Canon, 0));
} else {
dt = new (*this, TypeAlignment)
DecltypeType(e, UnderlyingType, getCanonicalType(UnderlyingType));
}
Types.push_back(dt);
return QualType(dt, 0);
}
/// getUnaryTransformType - We don't unique these, since the memory
/// savings are minimal and these are rare.
QualType ASTContext::getUnaryTransformType(QualType BaseType,
QualType UnderlyingType,
UnaryTransformType::UTTKind Kind)
const {
UnaryTransformType *ut = nullptr;
if (BaseType->isDependentType()) {
// Look in the folding set for an existing type.
llvm::FoldingSetNodeID ID;
DependentUnaryTransformType::Profile(ID, getCanonicalType(BaseType), Kind);
void *InsertPos = nullptr;
DependentUnaryTransformType *Canon
= DependentUnaryTransformTypes.FindNodeOrInsertPos(ID, InsertPos);
if (!Canon) {
// Build a new, canonical __underlying_type(type) type.
Canon = new (*this, TypeAlignment)
DependentUnaryTransformType(*this, getCanonicalType(BaseType),
Kind);
DependentUnaryTransformTypes.InsertNode(Canon, InsertPos);
}
ut = new (*this, TypeAlignment) UnaryTransformType (BaseType,
QualType(), Kind,
QualType(Canon, 0));
} else {
QualType CanonType = getCanonicalType(UnderlyingType);
ut = new (*this, TypeAlignment) UnaryTransformType (BaseType,
UnderlyingType, Kind,
CanonType);
}
Types.push_back(ut);
return QualType(ut, 0);
}
/// getAutoType - Return the uniqued reference to the 'auto' type which has been
/// deduced to the given type, or to the canonical undeduced 'auto' type, or the
/// canonical deduced-but-dependent 'auto' type.
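/// For example (illustrative): a plain undeduced, unconstrained 'auto' maps
/// to the singleton returned by getAutoDeductType(), while 'auto' deduced to
/// 'int' produces an AutoType sugaring 'int'.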
QualType
ASTContext::getAutoType(QualType DeducedType, AutoTypeKeyword Keyword,
bool IsDependent, bool IsPack,
ConceptDecl *TypeConstraintConcept,
ArrayRef<TemplateArgument> TypeConstraintArgs) const {
assert((!IsPack || IsDependent) && "only use IsPack for a dependent pack");
if (DeducedType.isNull() && Keyword == AutoTypeKeyword::Auto &&
!TypeConstraintConcept && !IsDependent)
return getAutoDeductType();
// Look in the folding set for an existing type.
void *InsertPos = nullptr;
llvm::FoldingSetNodeID ID;
AutoType::Profile(ID, *this, DeducedType, Keyword, IsDependent,
TypeConstraintConcept, TypeConstraintArgs);
if (AutoType *AT = AutoTypes.FindNodeOrInsertPos(ID, InsertPos))
return QualType(AT, 0);
void *Mem = Allocate(sizeof(AutoType) +
sizeof(TemplateArgument) * TypeConstraintArgs.size(),
TypeAlignment);
auto *AT = new (Mem) AutoType(DeducedType, Keyword, IsDependent, IsPack,
TypeConstraintConcept, TypeConstraintArgs);
Types.push_back(AT);
if (InsertPos)
AutoTypes.InsertNode(AT, InsertPos);
return QualType(AT, 0);
}
/// Return the uniqued reference to the deduced template specialization type
/// which has been deduced to the given type, or to the canonical undeduced
/// such type, or the canonical deduced-but-dependent such type.
QualType ASTContext::getDeducedTemplateSpecializationType(
TemplateName Template, QualType DeducedType, bool IsDependent) const {
// Look in the folding set for an existing type.
void *InsertPos = nullptr;
llvm::FoldingSetNodeID ID;
DeducedTemplateSpecializationType::Profile(ID, Template, DeducedType,
IsDependent);
if (DeducedTemplateSpecializationType *DTST =
DeducedTemplateSpecializationTypes.FindNodeOrInsertPos(ID, InsertPos))
return QualType(DTST, 0);
auto *DTST = new (*this, TypeAlignment)
DeducedTemplateSpecializationType(Template, DeducedType, IsDependent);
Types.push_back(DTST);
if (InsertPos)
DeducedTemplateSpecializationTypes.InsertNode(DTST, InsertPos);
return QualType(DTST, 0);
}
/// getAtomicType - Return the uniqued reference to the atomic type for
/// the given value type.
QualType ASTContext::getAtomicType(QualType T) const {
// Unique atomic types, to guarantee there is only one node for a particular
// value type.
llvm::FoldingSetNodeID ID;
AtomicType::Profile(ID, T);
void *InsertPos = nullptr;
if (AtomicType *AT = AtomicTypes.FindNodeOrInsertPos(ID, InsertPos))
return QualType(AT, 0);
// If the atomic value type isn't canonical, this won't be a canonical type
// either, so fill in the canonical type field.
QualType Canonical;
if (!T.isCanonical()) {
Canonical = getAtomicType(getCanonicalType(T));
// Get the new insert position for the node we care about.
AtomicType *NewIP = AtomicTypes.FindNodeOrInsertPos(ID, InsertPos);
assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP;
}
auto *New = new (*this, TypeAlignment) AtomicType(T, Canonical);
Types.push_back(New);
AtomicTypes.InsertNode(New, InsertPos);
return QualType(New, 0);
}
/// getAutoDeductType - Get type pattern for deducing against 'auto'.
QualType ASTContext::getAutoDeductType() const {
if (AutoDeductTy.isNull())
AutoDeductTy = QualType(
new (*this, TypeAlignment) AutoType(QualType(), AutoTypeKeyword::Auto,
/*dependent*/false, /*pack*/false,
/*concept*/nullptr, /*args*/{}),
0);
return AutoDeductTy;
}
/// getAutoRRefDeductType - Get type pattern for deducing against 'auto &&'.
QualType ASTContext::getAutoRRefDeductType() const {
if (AutoRRefDeductTy.isNull())
AutoRRefDeductTy = getRValueReferenceType(getAutoDeductType());
assert(!AutoRRefDeductTy.isNull() && "can't build 'auto &&' pattern");
return AutoRRefDeductTy;
}
/// getTagDeclType - Return the unique reference to the type for the
/// specified TagDecl (struct/union/class/enum) decl.
QualType ASTContext::getTagDeclType(const TagDecl *Decl) const {
assert(Decl);
// FIXME: What is the design on getTagDeclType when it requires casting
// away const? mutable?
return getTypeDeclType(const_cast<TagDecl*>(Decl));
}
/// getSizeType - Return the unique type for "size_t" (C99 7.17), the result
/// of the sizeof operator (C99 6.5.3.4p4). The value is target dependent and
/// needs to agree with the definition in <stddef.h>.
CanQualType ASTContext::getSizeType() const {
return getFromTargetType(Target->getSizeType());
}
/// Return the unique signed counterpart of the integer type
/// corresponding to size_t.
CanQualType ASTContext::getSignedSizeType() const {
return getFromTargetType(Target->getSignedSizeType());
}
/// getIntMaxType - Return the unique type for "intmax_t" (C99 7.18.1.5).
CanQualType ASTContext::getIntMaxType() const {
return getFromTargetType(Target->getIntMaxType());
}
/// getUIntMaxType - Return the unique type for "uintmax_t" (C99 7.18.1.5).
CanQualType ASTContext::getUIntMaxType() const {
return getFromTargetType(Target->getUIntMaxType());
}
/// getSignedWCharType - Return the type of "signed wchar_t".
/// Used when in C++, as a GCC extension.
QualType ASTContext::getSignedWCharType() const {
// FIXME: derive from "Target" ?
return WCharTy;
}
/// getUnsignedWCharType - Return the type of "unsigned wchar_t".
/// Used when in C++, as a GCC extension.
QualType ASTContext::getUnsignedWCharType() const {
// FIXME: derive from "Target" ?
return UnsignedIntTy;
}
QualType ASTContext::getIntPtrType() const {
return getFromTargetType(Target->getIntPtrType());
}
QualType ASTContext::getUIntPtrType() const {
return getCorrespondingUnsignedType(getIntPtrType());
}
/// getPointerDiffType - Return the unique type for "ptrdiff_t" (C99 7.17)
/// defined in <stddef.h>. Pointer - pointer requires this (C99 6.5.6p9).
QualType ASTContext::getPointerDiffType() const {
return getFromTargetType(Target->getPtrDiffType(0));
}
/// Return the unique unsigned counterpart of "ptrdiff_t"
/// integer type. The standard (C11 7.21.6.1p7) refers to this type
/// in the definition of %tu format specifier.
QualType ASTContext::getUnsignedPointerDiffType() const {
return getFromTargetType(Target->getUnsignedPtrDiffType(0));
}
/// Return the unique type for "pid_t" defined in
/// <sys/types.h>. We need this to compute the correct type for vfork().
QualType ASTContext::getProcessIDType() const {
return getFromTargetType(Target->getProcessIDType());
}
//===----------------------------------------------------------------------===//
// Type Operators
//===----------------------------------------------------------------------===//
CanQualType ASTContext::getCanonicalParamType(QualType T) const {
// Push qualifiers into arrays, and then discard any remaining
// qualifiers.
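// For example (illustrative): a parameter written 'const char buf[10]'
// canonicalizes to 'const char *', and a parameter of function type
// 'void (int)' canonicalizes to 'void (*)(int)'.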
T = getCanonicalType(T);
T = getVariableArrayDecayedType(T);
const Type *Ty = T.getTypePtr();
QualType Result;
if (isa<ArrayType>(Ty)) {
Result = getArrayDecayedType(QualType(Ty,0));
} else if (isa<FunctionType>(Ty)) {
Result = getPointerType(QualType(Ty, 0));
} else {
Result = QualType(Ty, 0);
}
return CanQualType::CreateUnsafe(Result);
}
QualType ASTContext::getUnqualifiedArrayType(QualType type,
Qualifiers &quals) {
SplitQualType splitType = type.getSplitUnqualifiedType();
// FIXME: getSplitUnqualifiedType() actually walks all the way to
// the unqualified desugared type and then drops it on the floor.
// We then have to strip that sugar back off with
// getUnqualifiedDesugaredType(), which is silly.
const auto *AT =
dyn_cast<ArrayType>(splitType.Ty->getUnqualifiedDesugaredType());
// If we don't have an array, just use the results in splitType.
if (!AT) {
quals = splitType.Quals;
return QualType(splitType.Ty, 0);
}
// Otherwise, recurse on the array's element type.
QualType elementType = AT->getElementType();
QualType unqualElementType = getUnqualifiedArrayType(elementType, quals);
// If that didn't change the element type, AT has no qualifiers, so we
// can just use the results in splitType.
if (elementType == unqualElementType) {
assert(quals.empty()); // from the recursive call
quals = splitType.Quals;
return QualType(splitType.Ty, 0);
}
// Otherwise, add in the qualifiers from the outermost type, then
// build the type back up.
quals.addConsistentQualifiers(splitType.Quals);
if (const auto *CAT = dyn_cast<ConstantArrayType>(AT)) {
return getConstantArrayType(unqualElementType, CAT->getSize(),
CAT->getSizeExpr(), CAT->getSizeModifier(), 0);
}
if (const auto *IAT = dyn_cast<IncompleteArrayType>(AT)) {
return getIncompleteArrayType(unqualElementType, IAT->getSizeModifier(), 0);
}
if (const auto *VAT = dyn_cast<VariableArrayType>(AT)) {
return getVariableArrayType(unqualElementType,
VAT->getSizeExpr(),
VAT->getSizeModifier(),
VAT->getIndexTypeCVRQualifiers(),
VAT->getBracketsRange());
}
const auto *DSAT = cast<DependentSizedArrayType>(AT);
return getDependentSizedArrayType(unqualElementType, DSAT->getSizeExpr(),
DSAT->getSizeModifier(), 0,
SourceRange());
}
/// Attempt to unwrap two types that may both be array types with the same bound
/// (or both be array types of unknown bound) for the purpose of comparing the
/// cv-decomposition of two types per C++ [conv.qual].
bool ASTContext::UnwrapSimilarArrayTypes(QualType &T1, QualType &T2) {
bool UnwrappedAny = false;
while (true) {
auto *AT1 = getAsArrayType(T1);
if (!AT1) return UnwrappedAny;
auto *AT2 = getAsArrayType(T2);
if (!AT2) return UnwrappedAny;
// If we don't have two array types with the same constant bound nor two
// incomplete array types, we've unwrapped everything we can.
if (auto *CAT1 = dyn_cast<ConstantArrayType>(AT1)) {
auto *CAT2 = dyn_cast<ConstantArrayType>(AT2);
if (!CAT2 || CAT1->getSize() != CAT2->getSize())
return UnwrappedAny;
} else if (!isa<IncompleteArrayType>(AT1) ||
!isa<IncompleteArrayType>(AT2)) {
return UnwrappedAny;
}
T1 = AT1->getElementType();
T2 = AT2->getElementType();
UnwrappedAny = true;
}
}
/// Attempt to unwrap two types that may be similar (C++ [conv.qual]).
///
/// If T1 and T2 are both pointer types of the same kind, or both array types
/// with the same bound, unwraps layers from T1 and T2 until a pointer type is
/// unwrapped. Top-level qualifiers on T1 and T2 are ignored.
///
/// This function will typically be called in a loop that successively
/// "unwraps" pointer and pointer-to-member types to compare them at each
/// level.
///
/// \return \c true if a pointer type was unwrapped, \c false if we reached a
/// pair of types that can't be unwrapped further.
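/// For example (illustrative): given T1 = 'const int **' and T2 = 'int **',
/// one call strips a pointer level from each, leaving 'const int *' and
/// 'int *'; a second call leaves 'const int' and 'int', after which no
/// further unwrapping is possible.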
bool ASTContext::UnwrapSimilarTypes(QualType &T1, QualType &T2) {
UnwrapSimilarArrayTypes(T1, T2);
const auto *T1PtrType = T1->getAs<PointerType>();
const auto *T2PtrType = T2->getAs<PointerType>();
if (T1PtrType && T2PtrType) {
T1 = T1PtrType->getPointeeType();
T2 = T2PtrType->getPointeeType();
return true;
}
const auto *T1MPType = T1->getAs<MemberPointerType>();
const auto *T2MPType = T2->getAs<MemberPointerType>();
if (T1MPType && T2MPType &&
hasSameUnqualifiedType(QualType(T1MPType->getClass(), 0),
QualType(T2MPType->getClass(), 0))) {
T1 = T1MPType->getPointeeType();
T2 = T2MPType->getPointeeType();
return true;
}
if (getLangOpts().ObjC) {
const auto *T1OPType = T1->getAs<ObjCObjectPointerType>();
const auto *T2OPType = T2->getAs<ObjCObjectPointerType>();
if (T1OPType && T2OPType) {
T1 = T1OPType->getPointeeType();
T2 = T2OPType->getPointeeType();
return true;
}
}
// FIXME: Block pointers, too?
return false;
}
bool ASTContext::hasSimilarType(QualType T1, QualType T2) {
while (true) {
Qualifiers Quals;
T1 = getUnqualifiedArrayType(T1, Quals);
T2 = getUnqualifiedArrayType(T2, Quals);
if (hasSameType(T1, T2))
return true;
if (!UnwrapSimilarTypes(T1, T2))
return false;
}
}
bool ASTContext::hasCvrSimilarType(QualType T1, QualType T2) {
while (true) {
Qualifiers Quals1, Quals2;
T1 = getUnqualifiedArrayType(T1, Quals1);
T2 = getUnqualifiedArrayType(T2, Quals2);
Quals1.removeCVRQualifiers();
Quals2.removeCVRQualifiers();
if (Quals1 != Quals2)
return false;
if (hasSameType(T1, T2))
return true;
if (!UnwrapSimilarTypes(T1, T2))
return false;
}
}
DeclarationNameInfo
ASTContext::getNameForTemplate(TemplateName Name,
SourceLocation NameLoc) const {
switch (Name.getKind()) {
case TemplateName::QualifiedTemplate:
case TemplateName::Template:
// DNInfo work in progress: CHECKME: what about DNLoc?
return DeclarationNameInfo(Name.getAsTemplateDecl()->getDeclName(),
NameLoc);
case TemplateName::OverloadedTemplate: {
OverloadedTemplateStorage *Storage = Name.getAsOverloadedTemplate();
// DNInfo work in progress: CHECKME: what about DNLoc?
return DeclarationNameInfo((*Storage->begin())->getDeclName(), NameLoc);
}
case TemplateName::AssumedTemplate: {
AssumedTemplateStorage *Storage = Name.getAsAssumedTemplateName();
return DeclarationNameInfo(Storage->getDeclName(), NameLoc);
}
case TemplateName::DependentTemplate: {
DependentTemplateName *DTN = Name.getAsDependentTemplateName();
DeclarationName DName;
if (DTN->isIdentifier()) {
DName = DeclarationNames.getIdentifier(DTN->getIdentifier());
return DeclarationNameInfo(DName, NameLoc);
} else {
DName = DeclarationNames.getCXXOperatorName(DTN->getOperator());
// DNInfo work in progress: FIXME: source locations?
DeclarationNameLoc DNLoc;
DNLoc.CXXOperatorName.BeginOpNameLoc = SourceLocation().getRawEncoding();
DNLoc.CXXOperatorName.EndOpNameLoc = SourceLocation().getRawEncoding();
return DeclarationNameInfo(DName, NameLoc, DNLoc);
}
}
case TemplateName::SubstTemplateTemplateParm: {
SubstTemplateTemplateParmStorage *subst
= Name.getAsSubstTemplateTemplateParm();
return DeclarationNameInfo(subst->getParameter()->getDeclName(),
NameLoc);
}
case TemplateName::SubstTemplateTemplateParmPack: {
SubstTemplateTemplateParmPackStorage *subst
= Name.getAsSubstTemplateTemplateParmPack();
return DeclarationNameInfo(subst->getParameterPack()->getDeclName(),
NameLoc);
}
}
llvm_unreachable("bad template name kind!");
}
TemplateName ASTContext::getCanonicalTemplateName(TemplateName Name) const {
switch (Name.getKind()) {
case TemplateName::QualifiedTemplate:
case TemplateName::Template: {
TemplateDecl *Template = Name.getAsTemplateDecl();
if (auto *TTP = dyn_cast<TemplateTemplateParmDecl>(Template))
Template = getCanonicalTemplateTemplateParmDecl(TTP);
// The canonical template name is the canonical template declaration.
return TemplateName(cast<TemplateDecl>(Template->getCanonicalDecl()));
}
case TemplateName::OverloadedTemplate:
case TemplateName::AssumedTemplate:
llvm_unreachable("cannot canonicalize unresolved template");
case TemplateName::DependentTemplate: {
DependentTemplateName *DTN = Name.getAsDependentTemplateName();
assert(DTN && "Non-dependent template names must refer to template decls.");
return DTN->CanonicalTemplateName;
}
case TemplateName::SubstTemplateTemplateParm: {
SubstTemplateTemplateParmStorage *subst
= Name.getAsSubstTemplateTemplateParm();
return getCanonicalTemplateName(subst->getReplacement());
}
case TemplateName::SubstTemplateTemplateParmPack: {
SubstTemplateTemplateParmPackStorage *subst
= Name.getAsSubstTemplateTemplateParmPack();
TemplateTemplateParmDecl *canonParameter
= getCanonicalTemplateTemplateParmDecl(subst->getParameterPack());
TemplateArgument canonArgPack
= getCanonicalTemplateArgument(subst->getArgumentPack());
return getSubstTemplateTemplateParmPack(canonParameter, canonArgPack);
}
}
llvm_unreachable("bad template name!");
}
bool ASTContext::hasSameTemplateName(TemplateName X, TemplateName Y) {
X = getCanonicalTemplateName(X);
Y = getCanonicalTemplateName(Y);
return X.getAsVoidPointer() == Y.getAsVoidPointer();
}
TemplateArgument
ASTContext::getCanonicalTemplateArgument(const TemplateArgument &Arg) const {
switch (Arg.getKind()) {
case TemplateArgument::Null:
return Arg;
case TemplateArgument::Expression:
return Arg;
case TemplateArgument::Declaration: {
auto *D = cast<ValueDecl>(Arg.getAsDecl()->getCanonicalDecl());
return TemplateArgument(D, Arg.getParamTypeForDecl());
}
case TemplateArgument::NullPtr:
return TemplateArgument(getCanonicalType(Arg.getNullPtrType()),
/*isNullPtr*/true);
case TemplateArgument::Template:
return TemplateArgument(getCanonicalTemplateName(Arg.getAsTemplate()));
case TemplateArgument::TemplateExpansion:
return TemplateArgument(getCanonicalTemplateName(
Arg.getAsTemplateOrTemplatePattern()),
Arg.getNumTemplateExpansions());
case TemplateArgument::Integral:
return TemplateArgument(Arg, getCanonicalType(Arg.getIntegralType()));
case TemplateArgument::Type:
return TemplateArgument(getCanonicalType(Arg.getAsType()));
case TemplateArgument::Pack: {
if (Arg.pack_size() == 0)
return Arg;
auto *CanonArgs = new (*this) TemplateArgument[Arg.pack_size()];
unsigned Idx = 0;
for (TemplateArgument::pack_iterator A = Arg.pack_begin(),
AEnd = Arg.pack_end();
A != AEnd; (void)++A, ++Idx)
CanonArgs[Idx] = getCanonicalTemplateArgument(*A);
return TemplateArgument(llvm::makeArrayRef(CanonArgs, Arg.pack_size()));
}
}
// Silence GCC warning
llvm_unreachable("Unhandled template argument kind");
}
NestedNameSpecifier *
ASTContext::getCanonicalNestedNameSpecifier(NestedNameSpecifier *NNS) const {
if (!NNS)
return nullptr;
switch (NNS->getKind()) {
case NestedNameSpecifier::Identifier:
// Canonicalize the prefix but keep the identifier the same.
return NestedNameSpecifier::Create(*this,
getCanonicalNestedNameSpecifier(NNS->getPrefix()),
NNS->getAsIdentifier());
case NestedNameSpecifier::Namespace:
// A namespace is canonical; build a nested-name-specifier with
// this namespace and no prefix.
return NestedNameSpecifier::Create(*this, nullptr,
NNS->getAsNamespace()->getOriginalNamespace());
case NestedNameSpecifier::NamespaceAlias:
// A namespace is canonical; build a nested-name-specifier with
// this namespace and no prefix.
return NestedNameSpecifier::Create(*this, nullptr,
NNS->getAsNamespaceAlias()->getNamespace()
->getOriginalNamespace());
case NestedNameSpecifier::TypeSpec:
case NestedNameSpecifier::TypeSpecWithTemplate: {
QualType T = getCanonicalType(QualType(NNS->getAsType(), 0));
// If we have some kind of dependent-named type (e.g., "typename T::type"),
// break it apart into its prefix and identifier, then reconstitute those
// as the canonical nested-name-specifier. This is required to canonicalize
// a dependent nested-name-specifier involving typedefs of dependent-name
// types, e.g.,
// typedef typename T::type T1;
// typedef typename T1::type T2;
if (const auto *DNT = T->getAs<DependentNameType>())
return NestedNameSpecifier::Create(*this, DNT->getQualifier(),
const_cast<IdentifierInfo *>(DNT->getIdentifier()));
// Otherwise, just canonicalize the type, and force it to be a TypeSpec.
// FIXME: Why are TypeSpec and TypeSpecWithTemplate distinct in the
// first place?
return NestedNameSpecifier::Create(*this, nullptr, false,
const_cast<Type *>(T.getTypePtr()));
}
case NestedNameSpecifier::Global:
case NestedNameSpecifier::Super:
// The global specifier and __super specifier are canonical and unique.
return NNS;
}
llvm_unreachable("Invalid NestedNameSpecifier::Kind!");
}
const ArrayType *ASTContext::getAsArrayType(QualType T) const {
// Handle the non-qualified case efficiently.
if (!T.hasLocalQualifiers()) {
// Handle the common positive case fast.
if (const auto *AT = dyn_cast<ArrayType>(T))
return AT;
}
// Handle the common negative case fast.
if (!isa<ArrayType>(T.getCanonicalType()))
return nullptr;
// Apply any qualifiers from the array type to the element type. This
// implements C99 6.7.3p8: "If the specification of an array type includes
// any type qualifiers, the element type is so qualified, not the array type."
// If we get here, we either have type qualifiers on the type, or we have
// sugar such as a typedef in the way. If we have type qualifiers on the type
// we must propagate them down into the element type.
SplitQualType split = T.getSplitDesugaredType();
Qualifiers qs = split.Quals;
// If we have a simple case, just return now.
const auto *ATy = dyn_cast<ArrayType>(split.Ty);
if (!ATy || qs.empty())
return ATy;
// Otherwise, we have an array and we have qualifiers on it. Push the
// qualifiers into the array element type and return a new array type.
QualType NewEltTy = getQualifiedType(ATy->getElementType(), qs);
if (const auto *CAT = dyn_cast<ConstantArrayType>(ATy))
return cast<ArrayType>(getConstantArrayType(NewEltTy, CAT->getSize(),
CAT->getSizeExpr(),
CAT->getSizeModifier(),
CAT->getIndexTypeCVRQualifiers()));
if (const auto *IAT = dyn_cast<IncompleteArrayType>(ATy))
return cast<ArrayType>(getIncompleteArrayType(NewEltTy,
IAT->getSizeModifier(),
IAT->getIndexTypeCVRQualifiers()));
if (const auto *DSAT = dyn_cast<DependentSizedArrayType>(ATy))
return cast<ArrayType>(
getDependentSizedArrayType(NewEltTy,
DSAT->getSizeExpr(),
DSAT->getSizeModifier(),
DSAT->getIndexTypeCVRQualifiers(),
DSAT->getBracketsRange()));
const auto *VAT = cast<VariableArrayType>(ATy);
return cast<ArrayType>(getVariableArrayType(NewEltTy,
VAT->getSizeExpr(),
VAT->getSizeModifier(),
VAT->getIndexTypeCVRQualifiers(),
VAT->getBracketsRange()));
}
QualType ASTContext::getAdjustedParameterType(QualType T) const {
if (T->isArrayType() || T->isFunctionType())
return getDecayedType(T);
return T;
}
QualType ASTContext::getSignatureParameterType(QualType T) const {
T = getVariableArrayDecayedType(T);
T = getAdjustedParameterType(T);
return T.getUnqualifiedType();
}
QualType ASTContext::getExceptionObjectType(QualType T) const {
// C++ [except.throw]p3:
// A throw-expression initializes a temporary object, called the exception
// object, the type of which is determined by removing any top-level
// cv-qualifiers from the static type of the operand of throw and adjusting
// the type from "array of T" or "function returning T" to "pointer to T"
// or "pointer to function returning T", [...]
T = getVariableArrayDecayedType(T);
if (T->isArrayType() || T->isFunctionType())
T = getDecayedType(T);
return T.getUnqualifiedType();
}
/// getArrayDecayedType - Return the properly qualified result of decaying the
/// specified array type to a pointer. This operation is non-trivial when
/// handling typedefs etc. The canonical type of "T" must be an array type,
/// this returns a pointer to a properly qualified element of the array.
///
/// See C99 6.7.5.3p7 and C99 6.3.2.1p3.
QualType ASTContext::getArrayDecayedType(QualType Ty) const {
// Get the element type with 'getAsArrayType' so that we don't lose any
// typedefs in the element type of the array. This also handles propagation
// of type qualifiers from the array type into the element type if present
// (C99 6.7.3p8).
const ArrayType *PrettyArrayType = getAsArrayType(Ty);
assert(PrettyArrayType && "Not an array type!");
QualType PtrTy = getPointerType(PrettyArrayType->getElementType());
// int x[restrict 4] -> int *restrict
QualType Result = getQualifiedType(PtrTy,
PrettyArrayType->getIndexTypeQualifiers());
// int x[_Nullable] -> int * _Nullable
if (auto Nullability = Ty->getNullability(*this)) {
Result = const_cast<ASTContext *>(this)->getAttributedType(
AttributedType::getNullabilityAttrKind(*Nullability), Result, Result);
}
return Result;
}
QualType ASTContext::getBaseElementType(const ArrayType *array) const {
return getBaseElementType(array->getElementType());
}
QualType ASTContext::getBaseElementType(QualType type) const {
Qualifiers qs;
while (true) {
SplitQualType split = type.getSplitDesugaredType();
const ArrayType *array = split.Ty->getAsArrayTypeUnsafe();
if (!array) break;
type = array->getElementType();
qs.addConsistentQualifiers(split.Quals);
}
return getQualifiedType(type, qs);
}
/// getConstantArrayElementCount - Returns number of constant array elements.
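/// For example (illustrative): for the type 'int[2][3]' this returns 6, the
/// product of the bounds of all nested constant array types.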
uint64_t
ASTContext::getConstantArrayElementCount(const ConstantArrayType *CA) const {
uint64_t ElementCount = 1;
do {
ElementCount *= CA->getSize().getZExtValue();
CA = dyn_cast_or_null<ConstantArrayType>(
CA->getElementType()->getAsArrayTypeUnsafe());
} while (CA);
return ElementCount;
}
/// getFloatingRank - Return a relative rank for floating point types.
/// This routine will assert if passed a built-in type that isn't a float.
static FloatingRank getFloatingRank(QualType T) {
if (const auto *CT = T->getAs<ComplexType>())
return getFloatingRank(CT->getElementType());
switch (T->castAs<BuiltinType>()->getKind()) {
default: llvm_unreachable("getFloatingRank(): not a floating type");
case BuiltinType::Float16: return Float16Rank;
case BuiltinType::Half: return HalfRank;
case BuiltinType::Float: return FloatRank;
case BuiltinType::Double: return DoubleRank;
case BuiltinType::LongDouble: return LongDoubleRank;
case BuiltinType::Float128: return Float128Rank;
}
}
/// getFloatingTypeOfSizeWithinDomain - Returns a real floating-point or a
/// complex type whose size matches 'Size' and whose domain (real vs. complex)
/// matches 'Domain'. Both arguments must be real floating-point or complex
/// types.
QualType ASTContext::getFloatingTypeOfSizeWithinDomain(QualType Size,
QualType Domain) const {
FloatingRank EltRank = getFloatingRank(Size);
if (Domain->isComplexType()) {
switch (EltRank) {
case Float16Rank:
case HalfRank: llvm_unreachable("Complex half is not supported");
case FloatRank: return FloatComplexTy;
case DoubleRank: return DoubleComplexTy;
case LongDoubleRank: return LongDoubleComplexTy;
case Float128Rank: return Float128ComplexTy;
}
}
assert(Domain->isRealFloatingType() && "Unknown domain!");
switch (EltRank) {
case Float16Rank: return HalfTy;
case HalfRank: return HalfTy;
case FloatRank: return FloatTy;
case DoubleRank: return DoubleTy;
case LongDoubleRank: return LongDoubleTy;
case Float128Rank: return Float128Ty;
}
llvm_unreachable("getFloatingRank(): illegal value for rank");
}
/// getFloatingTypeOrder - Compare the rank of the two specified floating
/// point types, ignoring the domain of the type (i.e. 'double' ==
/// '_Complex double'). If LHS > RHS, return 1. If LHS == RHS, return 0. If
/// LHS < RHS, return -1.
int ASTContext::getFloatingTypeOrder(QualType LHS, QualType RHS) const {
FloatingRank LHSR = getFloatingRank(LHS);
FloatingRank RHSR = getFloatingRank(RHS);
if (LHSR == RHSR)
return 0;
if (LHSR > RHSR)
return 1;
return -1;
}
int ASTContext::getFloatingTypeSemanticOrder(QualType LHS, QualType RHS) const {
if (&getFloatTypeSemantics(LHS) == &getFloatTypeSemantics(RHS))
return 0;
return getFloatingTypeOrder(LHS, RHS);
}
/// getIntegerRank - Return an integer conversion rank (C99 6.3.1.1p1). This
/// routine will assert if passed a built-in type that isn't an integer or enum,
/// or if it is not canonicalized.
unsigned ASTContext::getIntegerRank(const Type *T) const {
assert(T->isCanonicalUnqualified() && "T should be canonicalized");
switch (cast<BuiltinType>(T)->getKind()) {
default: llvm_unreachable("getIntegerRank(): not a built-in integer");
// Scaffold extension: abit, cbit, qbit, and qint are given the same rank as bool.
case BuiltinType::Abit:
case BuiltinType::Cbit:
case BuiltinType::Qbit:
case BuiltinType::Qint:
case BuiltinType::Bool:
return 1 + (getIntWidth(BoolTy) << 3);
case BuiltinType::Char_S:
case BuiltinType::Char_U:
case BuiltinType::SChar:
case BuiltinType::UChar:
return 2 + (getIntWidth(CharTy) << 3);
case BuiltinType::Short:
case BuiltinType::UShort:
return 3 + (getIntWidth(ShortTy) << 3);
case BuiltinType::Int:
case BuiltinType::UInt:
return 4 + (getIntWidth(IntTy) << 3);
case BuiltinType::Long:
case BuiltinType::ULong:
return 5 + (getIntWidth(LongTy) << 3);
case BuiltinType::LongLong:
case BuiltinType::ULongLong:
return 6 + (getIntWidth(LongLongTy) << 3);
case BuiltinType::Int128:
case BuiltinType::UInt128:
return 7 + (getIntWidth(Int128Ty) << 3);
}
}
/// Whether this is a promotable bitfield reference according
/// to C99 6.3.1.1p2, bullet 2 (and GCC extensions).
///
/// \returns the type this bit-field will promote to, or NULL if no
/// promotion occurs.
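/// For example (illustrative, assuming a 32-bit 'int'): a field declared
/// 'unsigned x : 5' promotes to 'int' because its width is smaller than
/// int's, while 'unsigned y : 32' promotes to 'unsigned int' because only
/// unsigned int can represent all of its values.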
QualType ASTContext::isPromotableBitField(Expr *E) const {
if (E->isTypeDependent() || E->isValueDependent())
return {};
// C++ [conv.prom]p5:
// If the bit-field has an enumerated type, it is treated as any other
// value of that type for promotion purposes.
if (getLangOpts().CPlusPlus && E->getType()->isEnumeralType())
return {};
// FIXME: We should not do this unless E->refersToBitField() is true. This
// matters in C where getSourceBitField() will find bit-fields for various
// cases where the source expression is not a bit-field designator.
FieldDecl *Field = E->getSourceBitField(); // FIXME: conditional bit-fields?
if (!Field)
return {};
QualType FT = Field->getType();
uint64_t BitWidth = Field->getBitWidthValue(*this);
uint64_t IntSize = getTypeSize(IntTy);
// C++ [conv.prom]p5:
// A prvalue for an integral bit-field can be converted to a prvalue of type
// int if int can represent all the values of the bit-field; otherwise, it
// can be converted to unsigned int if unsigned int can represent all the
// values of the bit-field. If the bit-field is larger yet, no integral
// promotion applies to it.
// C11 6.3.1.1/2:
// [For a bit-field of type _Bool, int, signed int, or unsigned int:]
// If an int can represent all values of the original type (as restricted by
// the width, for a bit-field), the value is converted to an int; otherwise,
// it is converted to an unsigned int.
//
// FIXME: C does not permit promotion of a 'long : 3' bitfield to int.
// We perform that promotion here to match GCC and C++.
// FIXME: C does not permit promotion of an enum bit-field whose rank is
// greater than that of 'int'. We perform that promotion to match GCC.
if (BitWidth < IntSize)
return IntTy;
if (BitWidth == IntSize)
return FT->isSignedIntegerType() ? IntTy : UnsignedIntTy;
// Bit-fields wider than int are not subject to promotions, and therefore act
// like the base type. GCC has some weird bugs in this area that we
// deliberately do not follow (GCC follows a pre-standard resolution to
// C's DR315 which treats bit-width as being part of the type, and this leaks
// into their semantics in some cases).
return {};
}
/// getPromotedIntegerType - Returns the type that Promotable will
/// promote to: C99 6.3.1.1p2, assuming that Promotable is a promotable
/// integer type.
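///
/// For example, on a typical target where 'int' is 32 bits wide, 'bool',
/// 'unsigned short' and 'char16_t' all promote to 'int', while 'char32_t'
/// promotes to 'unsigned int' (unsigned and exactly as wide as 'int').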
QualType ASTContext::getPromotedIntegerType(QualType Promotable) const {
assert(!Promotable.isNull());
assert(Promotable->isPromotableIntegerType());
if (const auto *ET = Promotable->getAs<EnumType>())
return ET->getDecl()->getPromotionType();
if (const auto *BT = Promotable->getAs<BuiltinType>()) {
// C++ [conv.prom]: A prvalue of type char16_t, char32_t, or wchar_t
// (3.9.1) can be converted to a prvalue of the first of the following
// types that can represent all the values of its underlying type:
// int, unsigned int, long int, unsigned long int, long long int, or
// unsigned long long int [...]
// FIXME: Is there some better way to compute this?
if (BT->getKind() == BuiltinType::WChar_S ||
BT->getKind() == BuiltinType::WChar_U ||
BT->getKind() == BuiltinType::Char8 ||
BT->getKind() == BuiltinType::Char16 ||
BT->getKind() == BuiltinType::Char32) {
bool FromIsSigned = BT->getKind() == BuiltinType::WChar_S;
uint64_t FromSize = getTypeSize(BT);
QualType PromoteTypes[] = { IntTy, UnsignedIntTy, LongTy, UnsignedLongTy,
LongLongTy, UnsignedLongLongTy };
for (size_t Idx = 0; Idx < llvm::array_lengthof(PromoteTypes); ++Idx) {
uint64_t ToSize = getTypeSize(PromoteTypes[Idx]);
if (FromSize < ToSize ||
(FromSize == ToSize &&
FromIsSigned == PromoteTypes[Idx]->isSignedIntegerType()))
return PromoteTypes[Idx];
}
llvm_unreachable("char type should fit into long long");
}
}
// At this point, we should have a signed or unsigned integer type.
if (Promotable->isSignedIntegerType())
return IntTy;
uint64_t PromotableSize = getIntWidth(Promotable);
uint64_t IntSize = getIntWidth(IntTy);
assert(Promotable->isUnsignedIntegerType() && PromotableSize <= IntSize);
return (PromotableSize != IntSize) ? IntTy : UnsignedIntTy;
}
/// Recurses in pointer/array types until it finds an objc retainable
/// type and returns its ownership.
Qualifiers::ObjCLifetime ASTContext::getInnerObjCOwnership(QualType T) const {
while (!T.isNull()) {
if (T.getObjCLifetime() != Qualifiers::OCL_None)
return T.getObjCLifetime();
if (T->isArrayType())
T = getBaseElementType(T);
else if (const auto *PT = T->getAs<PointerType>())
T = PT->getPointeeType();
else if (const auto *RT = T->getAs<ReferenceType>())
T = RT->getPointeeType();
else
break;
}
return Qualifiers::OCL_None;
}
static const Type *getIntegerTypeForEnum(const EnumType *ET) {
// Incomplete enum types are not treated as integer types.
// FIXME: In C++, enum types are never integer types.
if (ET->getDecl()->isComplete() && !ET->getDecl()->isScoped())
return ET->getDecl()->getIntegerType().getTypePtr();
return nullptr;
}
/// getIntegerTypeOrder - Returns the highest ranked integer type:
/// C99 6.3.1.8p1. If LHS > RHS, return 1. If LHS == RHS, return 0. If
/// LHS < RHS, return -1.
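///
/// For example, comparing 'int' with 'unsigned int' returns -1 (at equal
/// rank the unsigned type wins), while comparing 'long' with 'unsigned int'
/// on an LP64 target returns 1.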
int ASTContext::getIntegerTypeOrder(QualType LHS, QualType RHS) const {
const Type *LHSC = getCanonicalType(LHS).getTypePtr();
const Type *RHSC = getCanonicalType(RHS).getTypePtr();
// Unwrap enums to their underlying type.
if (const auto *ET = dyn_cast<EnumType>(LHSC))
LHSC = getIntegerTypeForEnum(ET);
if (const auto *ET = dyn_cast<EnumType>(RHSC))
RHSC = getIntegerTypeForEnum(ET);
if (LHSC == RHSC) return 0;
bool LHSUnsigned = LHSC->isUnsignedIntegerType();
bool RHSUnsigned = RHSC->isUnsignedIntegerType();
unsigned LHSRank = getIntegerRank(LHSC);
unsigned RHSRank = getIntegerRank(RHSC);
if (LHSUnsigned == RHSUnsigned) { // Both signed or both unsigned.
if (LHSRank == RHSRank) return 0;
return LHSRank > RHSRank ? 1 : -1;
}
// Otherwise, the LHS is signed and the RHS is unsigned or visa versa.
if (LHSUnsigned) {
// If the unsigned [LHS] type is larger, return it.
if (LHSRank >= RHSRank)
return 1;
// If the signed type can represent all values of the unsigned type, it
// wins. Because we are dealing with 2's complement and types that are
// powers of two larger than each other, this is always safe.
return -1;
}
// If the unsigned [RHS] type is larger, return it.
if (RHSRank >= LHSRank)
return -1;
// If the signed type can represent all values of the unsigned type, it
// wins. Because we are dealing with 2's complement and types that are
// powers of two larger than each other, this is always safe.
return 1;
}
TypedefDecl *ASTContext::getCFConstantStringDecl() const {
if (CFConstantStringTypeDecl)
return CFConstantStringTypeDecl;
assert(!CFConstantStringTagDecl &&
"tag and typedef should be initialized together");
CFConstantStringTagDecl = buildImplicitRecord("__NSConstantString_tag");
CFConstantStringTagDecl->startDefinition();
struct {
QualType Type;
const char *Name;
} Fields[5];
unsigned Count = 0;
/// Objective-C ABI
///
/// typedef struct __NSConstantString_tag {
/// const int *isa;
/// int flags;
/// const char *str;
/// long length;
/// } __NSConstantString;
///
/// Swift ABI (4.1, 4.2)
///
/// typedef struct __NSConstantString_tag {
/// uintptr_t _cfisa;
/// uintptr_t _swift_rc;
/// _Atomic(uint64_t) _cfinfoa;
/// const char *_ptr;
/// uint32_t _length;
/// } __NSConstantString;
///
/// Swift ABI (5.0)
///
/// typedef struct __NSConstantString_tag {
/// uintptr_t _cfisa;
/// uintptr_t _swift_rc;
/// _Atomic(uint64_t) _cfinfoa;
/// const char *_ptr;
/// uintptr_t _length;
/// } __NSConstantString;
const auto CFRuntime = getLangOpts().CFRuntime;
if (static_cast<unsigned>(CFRuntime) <
static_cast<unsigned>(LangOptions::CoreFoundationABI::Swift)) {
Fields[Count++] = { getPointerType(IntTy.withConst()), "isa" };
Fields[Count++] = { IntTy, "flags" };
Fields[Count++] = { getPointerType(CharTy.withConst()), "str" };
Fields[Count++] = { LongTy, "length" };
} else {
Fields[Count++] = { getUIntPtrType(), "_cfisa" };
Fields[Count++] = { getUIntPtrType(), "_swift_rc" };
    Fields[Count++] = { getFromTargetType(Target->getUInt64Type()), "_cfinfoa" };
Fields[Count++] = { getPointerType(CharTy.withConst()), "_ptr" };
if (CFRuntime == LangOptions::CoreFoundationABI::Swift4_1 ||
CFRuntime == LangOptions::CoreFoundationABI::Swift4_2)
      Fields[Count++] = { IntTy, "_length" };
else
      Fields[Count++] = { getUIntPtrType(), "_length" };
}
// Create fields
for (unsigned i = 0; i < Count; ++i) {
FieldDecl *Field =
FieldDecl::Create(*this, CFConstantStringTagDecl, SourceLocation(),
SourceLocation(), &Idents.get(Fields[i].Name),
Fields[i].Type, /*TInfo=*/nullptr,
/*BitWidth=*/nullptr, /*Mutable=*/false, ICIS_NoInit);
Field->setAccess(AS_public);
CFConstantStringTagDecl->addDecl(Field);
}
CFConstantStringTagDecl->completeDefinition();
// This type is designed to be compatible with NSConstantString, but cannot
// use the same name, since NSConstantString is an interface.
auto tagType = getTagDeclType(CFConstantStringTagDecl);
CFConstantStringTypeDecl =
buildImplicitTypedef(tagType, "__NSConstantString");
return CFConstantStringTypeDecl;
}
RecordDecl *ASTContext::getCFConstantStringTagDecl() const {
if (!CFConstantStringTagDecl)
getCFConstantStringDecl(); // Build the tag and the typedef.
return CFConstantStringTagDecl;
}
// getCFConstantStringType - Return the type used for constant CFStrings.
QualType ASTContext::getCFConstantStringType() const {
return getTypedefType(getCFConstantStringDecl());
}
QualType ASTContext::getObjCSuperType() const {
if (ObjCSuperType.isNull()) {
RecordDecl *ObjCSuperTypeDecl = buildImplicitRecord("objc_super");
TUDecl->addDecl(ObjCSuperTypeDecl);
ObjCSuperType = getTagDeclType(ObjCSuperTypeDecl);
}
return ObjCSuperType;
}
void ASTContext::setCFConstantStringType(QualType T) {
const auto *TD = T->castAs<TypedefType>();
CFConstantStringTypeDecl = cast<TypedefDecl>(TD->getDecl());
const auto *TagType =
CFConstantStringTypeDecl->getUnderlyingType()->castAs<RecordType>();
CFConstantStringTagDecl = TagType->getDecl();
}
QualType ASTContext::getBlockDescriptorType() const {
if (BlockDescriptorType)
return getTagDeclType(BlockDescriptorType);
RecordDecl *RD;
// FIXME: Needs the FlagAppleBlock bit.
RD = buildImplicitRecord("__block_descriptor");
RD->startDefinition();
QualType FieldTypes[] = {
UnsignedLongTy,
UnsignedLongTy,
};
static const char *const FieldNames[] = {
"reserved",
"Size"
};
for (size_t i = 0; i < 2; ++i) {
FieldDecl *Field = FieldDecl::Create(
*this, RD, SourceLocation(), SourceLocation(),
&Idents.get(FieldNames[i]), FieldTypes[i], /*TInfo=*/nullptr,
/*BitWidth=*/nullptr, /*Mutable=*/false, ICIS_NoInit);
Field->setAccess(AS_public);
RD->addDecl(Field);
}
RD->completeDefinition();
BlockDescriptorType = RD;
return getTagDeclType(BlockDescriptorType);
}
QualType ASTContext::getBlockDescriptorExtendedType() const {
if (BlockDescriptorExtendedType)
return getTagDeclType(BlockDescriptorExtendedType);
RecordDecl *RD;
// FIXME: Needs the FlagAppleBlock bit.
RD = buildImplicitRecord("__block_descriptor_withcopydispose");
RD->startDefinition();
QualType FieldTypes[] = {
UnsignedLongTy,
UnsignedLongTy,
getPointerType(VoidPtrTy),
getPointerType(VoidPtrTy)
};
static const char *const FieldNames[] = {
"reserved",
"Size",
"CopyFuncPtr",
"DestroyFuncPtr"
};
for (size_t i = 0; i < 4; ++i) {
FieldDecl *Field = FieldDecl::Create(
*this, RD, SourceLocation(), SourceLocation(),
&Idents.get(FieldNames[i]), FieldTypes[i], /*TInfo=*/nullptr,
/*BitWidth=*/nullptr,
/*Mutable=*/false, ICIS_NoInit);
Field->setAccess(AS_public);
RD->addDecl(Field);
}
RD->completeDefinition();
BlockDescriptorExtendedType = RD;
return getTagDeclType(BlockDescriptorExtendedType);
}
TargetInfo::OpenCLTypeKind ASTContext::getOpenCLTypeKind(const Type *T) const {
const auto *BT = dyn_cast<BuiltinType>(T);
if (!BT) {
if (isa<PipeType>(T))
return TargetInfo::OCLTK_Pipe;
return TargetInfo::OCLTK_Default;
}
switch (BT->getKind()) {
#define IMAGE_TYPE(ImgType, Id, SingletonId, Access, Suffix) \
case BuiltinType::Id: \
return TargetInfo::OCLTK_Image;
#include "clang/Basic/OpenCLImageTypes.def"
case BuiltinType::OCLClkEvent:
return TargetInfo::OCLTK_ClkEvent;
case BuiltinType::OCLEvent:
return TargetInfo::OCLTK_Event;
case BuiltinType::OCLQueue:
return TargetInfo::OCLTK_Queue;
case BuiltinType::OCLReserveID:
return TargetInfo::OCLTK_ReserveID;
case BuiltinType::OCLSampler:
return TargetInfo::OCLTK_Sampler;
default:
return TargetInfo::OCLTK_Default;
}
}
LangAS ASTContext::getOpenCLTypeAddrSpace(const Type *T) const {
return Target->getOpenCLTypeAddrSpace(getOpenCLTypeKind(T));
}
/// BlockRequiresCopying - Returns true if byref variable "D" of type "Ty"
/// requires copy/dispose. Note that this must match the logic
/// in buildByrefHelpers.
bool ASTContext::BlockRequiresCopying(QualType Ty,
const VarDecl *D) {
if (const CXXRecordDecl *record = Ty->getAsCXXRecordDecl()) {
const Expr *copyExpr = getBlockVarCopyInit(D).getCopyExpr();
if (!copyExpr && record->hasTrivialDestructor()) return false;
return true;
}
// The block needs copy/destroy helpers if Ty is non-trivial to destructively
// move or destroy.
if (Ty.isNonTrivialToPrimitiveDestructiveMove() || Ty.isDestructedType())
return true;
if (!Ty->isObjCRetainableType()) return false;
Qualifiers qs = Ty.getQualifiers();
// If we have lifetime, that dominates.
if (Qualifiers::ObjCLifetime lifetime = qs.getObjCLifetime()) {
switch (lifetime) {
case Qualifiers::OCL_None: llvm_unreachable("impossible");
// These are just bits as far as the runtime is concerned.
case Qualifiers::OCL_ExplicitNone:
case Qualifiers::OCL_Autoreleasing:
return false;
// These cases should have been taken care of when checking the type's
// non-triviality.
case Qualifiers::OCL_Weak:
case Qualifiers::OCL_Strong:
llvm_unreachable("impossible");
}
llvm_unreachable("fell out of lifetime switch!");
}
return (Ty->isBlockPointerType() || isObjCNSObjectType(Ty) ||
Ty->isObjCObjectPointerType());
}
bool ASTContext::getByrefLifetime(QualType Ty,
Qualifiers::ObjCLifetime &LifeTime,
bool &HasByrefExtendedLayout) const {
if (!getLangOpts().ObjC ||
getLangOpts().getGC() != LangOptions::NonGC)
return false;
HasByrefExtendedLayout = false;
if (Ty->isRecordType()) {
HasByrefExtendedLayout = true;
LifeTime = Qualifiers::OCL_None;
} else if ((LifeTime = Ty.getObjCLifetime())) {
// Honor the ARC qualifiers.
} else if (Ty->isObjCObjectPointerType() || Ty->isBlockPointerType()) {
// The MRR rule.
LifeTime = Qualifiers::OCL_ExplicitNone;
} else {
LifeTime = Qualifiers::OCL_None;
}
return true;
}
TypedefDecl *ASTContext::getObjCInstanceTypeDecl() {
if (!ObjCInstanceTypeDecl)
ObjCInstanceTypeDecl =
buildImplicitTypedef(getObjCIdType(), "instancetype");
return ObjCInstanceTypeDecl;
}
// This returns true if a type has been typedefed to BOOL:
// typedef <type> BOOL;
static bool isTypeTypedefedAsBOOL(QualType T) {
if (const auto *TT = dyn_cast<TypedefType>(T))
if (IdentifierInfo *II = TT->getDecl()->getIdentifier())
return II->isStr("BOOL");
return false;
}
/// getObjCEncodingTypeSize returns size of type for objective-c encoding
/// purpose.
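///
/// For example, a 'char' or 'short' argument contributes sizeof(int) bytes
/// and an array argument contributes sizeof(void *) bytes, matching how such
/// arguments are actually passed.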
CharUnits ASTContext::getObjCEncodingTypeSize(QualType type) const {
if (!type->isIncompleteArrayType() && type->isIncompleteType())
return CharUnits::Zero();
CharUnits sz = getTypeSizeInChars(type);
// Make all integer and enum types at least as large as an int
if (sz.isPositive() && type->isIntegralOrEnumerationType())
sz = std::max(sz, getTypeSizeInChars(IntTy));
// Treat arrays as pointers, since that's how they're passed in.
else if (type->isArrayType())
sz = getTypeSizeInChars(VoidPtrTy);
return sz;
}
bool ASTContext::isMSStaticDataMemberInlineDefinition(const VarDecl *VD) const {
return getTargetInfo().getCXXABI().isMicrosoft() &&
VD->isStaticDataMember() &&
VD->getType()->isIntegralOrEnumerationType() &&
!VD->getFirstDecl()->isOutOfLine() && VD->getFirstDecl()->hasInit();
}
ASTContext::InlineVariableDefinitionKind
ASTContext::getInlineVariableDefinitionKind(const VarDecl *VD) const {
if (!VD->isInline())
return InlineVariableDefinitionKind::None;
// In almost all cases, it's a weak definition.
auto *First = VD->getFirstDecl();
if (First->isInlineSpecified() || !First->isStaticDataMember())
return InlineVariableDefinitionKind::Weak;
// If there's a file-context declaration in this translation unit, it's a
// non-discardable definition.
for (auto *D : VD->redecls())
if (D->getLexicalDeclContext()->isFileContext() &&
!D->isInlineSpecified() && (D->isConstexpr() || First->isConstexpr()))
return InlineVariableDefinitionKind::Strong;
// If we've not seen one yet, we don't know.
return InlineVariableDefinitionKind::WeakUnknown;
}
static std::string charUnitsToString(const CharUnits &CU) {
return llvm::itostr(CU.getQuantity());
}
/// getObjCEncodingForBlock - Return the encoded type for this block
/// declaration.
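///
/// For example, on a typical 64-bit target a block of type 'void (^)(int)'
/// gets the (non-extended) encoding "v12@?0i8": 'v' for the void result, a
/// 12-byte argument frame, the block pointer "@?" at offset 0 and the int
/// parameter at offset 8. Exact offsets depend on the target.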
std::string ASTContext::getObjCEncodingForBlock(const BlockExpr *Expr) const {
std::string S;
const BlockDecl *Decl = Expr->getBlockDecl();
QualType BlockTy =
Expr->getType()->castAs<BlockPointerType>()->getPointeeType();
QualType BlockReturnTy = BlockTy->castAs<FunctionType>()->getReturnType();
// Encode result type.
if (getLangOpts().EncodeExtendedBlockSig)
getObjCEncodingForMethodParameter(Decl::OBJC_TQ_None, BlockReturnTy, S,
true /*Extended*/);
else
getObjCEncodingForType(BlockReturnTy, S);
// Compute size of all parameters.
// Start with computing size of a pointer in number of bytes.
  // FIXME: There might (should) be a better way of doing this computation!
CharUnits PtrSize = getTypeSizeInChars(VoidPtrTy);
CharUnits ParmOffset = PtrSize;
for (auto PI : Decl->parameters()) {
QualType PType = PI->getType();
CharUnits sz = getObjCEncodingTypeSize(PType);
if (sz.isZero())
continue;
assert(sz.isPositive() && "BlockExpr - Incomplete param type");
ParmOffset += sz;
}
// Size of the argument frame
S += charUnitsToString(ParmOffset);
// Block pointer and offset.
S += "@?0";
// Argument types.
ParmOffset = PtrSize;
for (auto PVDecl : Decl->parameters()) {
QualType PType = PVDecl->getOriginalType();
if (const auto *AT =
dyn_cast<ArrayType>(PType->getCanonicalTypeInternal())) {
// Use array's original type only if it has known number of
// elements.
if (!isa<ConstantArrayType>(AT))
PType = PVDecl->getType();
} else if (PType->isFunctionType())
PType = PVDecl->getType();
if (getLangOpts().EncodeExtendedBlockSig)
getObjCEncodingForMethodParameter(Decl::OBJC_TQ_None, PType,
S, true /*Extended*/);
else
getObjCEncodingForType(PType, S);
S += charUnitsToString(ParmOffset);
ParmOffset += getObjCEncodingTypeSize(PType);
}
return S;
}
std::string
ASTContext::getObjCEncodingForFunctionDecl(const FunctionDecl *Decl) const {
std::string S;
// Encode result type.
getObjCEncodingForType(Decl->getReturnType(), S);
CharUnits ParmOffset;
// Compute size of all parameters.
for (auto PI : Decl->parameters()) {
QualType PType = PI->getType();
CharUnits sz = getObjCEncodingTypeSize(PType);
if (sz.isZero())
continue;
assert(sz.isPositive() &&
"getObjCEncodingForFunctionDecl - Incomplete param type");
ParmOffset += sz;
}
S += charUnitsToString(ParmOffset);
ParmOffset = CharUnits::Zero();
// Argument types.
for (auto PVDecl : Decl->parameters()) {
QualType PType = PVDecl->getOriginalType();
if (const auto *AT =
dyn_cast<ArrayType>(PType->getCanonicalTypeInternal())) {
// Use array's original type only if it has known number of
// elements.
if (!isa<ConstantArrayType>(AT))
PType = PVDecl->getType();
} else if (PType->isFunctionType())
PType = PVDecl->getType();
getObjCEncodingForType(PType, S);
S += charUnitsToString(ParmOffset);
ParmOffset += getObjCEncodingTypeSize(PType);
}
return S;
}
/// getObjCEncodingForMethodParameter - Return the encoded type for a single
/// method parameter or return type. If Extended, include class names and
/// block object types.
void ASTContext::getObjCEncodingForMethodParameter(Decl::ObjCDeclQualifier QT,
QualType T, std::string& S,
bool Extended) const {
  // Encode type qualifier, 'in', 'inout', etc. for the parameter.
getObjCEncodingForTypeQualifier(QT, S);
// Encode parameter type.
ObjCEncOptions Options = ObjCEncOptions()
.setExpandPointedToStructures()
.setExpandStructures()
.setIsOutermostType();
if (Extended)
Options.setEncodeBlockParameters().setEncodeClassNames();
getObjCEncodingForTypeImpl(T, S, Options, /*Field=*/nullptr);
}
/// getObjCEncodingForMethodDecl - Return the encoded type for this method
/// declaration.
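///
/// For example, on a typical 64-bit target '- (void)setObject:(id)obj' gets
/// the (non-extended) encoding "v24@0:8@16": 'v' for the result, a 24-byte
/// argument frame, 'self' at offset 0, '_cmd' at offset 8 and the id
/// parameter at offset 16.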
std::string ASTContext::getObjCEncodingForMethodDecl(const ObjCMethodDecl *Decl,
bool Extended) const {
// FIXME: This is not very efficient.
// Encode return type.
std::string S;
getObjCEncodingForMethodParameter(Decl->getObjCDeclQualifier(),
Decl->getReturnType(), S, Extended);
// Compute size of all parameters.
// Start with computing size of a pointer in number of bytes.
  // FIXME: There might (should) be a better way of doing this computation!
CharUnits PtrSize = getTypeSizeInChars(VoidPtrTy);
// The first two arguments (self and _cmd) are pointers; account for
// their size.
CharUnits ParmOffset = 2 * PtrSize;
for (ObjCMethodDecl::param_const_iterator PI = Decl->param_begin(),
E = Decl->sel_param_end(); PI != E; ++PI) {
QualType PType = (*PI)->getType();
CharUnits sz = getObjCEncodingTypeSize(PType);
if (sz.isZero())
continue;
assert(sz.isPositive() &&
"getObjCEncodingForMethodDecl - Incomplete param type");
ParmOffset += sz;
}
S += charUnitsToString(ParmOffset);
S += "@0:";
S += charUnitsToString(PtrSize);
// Argument types.
ParmOffset = 2 * PtrSize;
for (ObjCMethodDecl::param_const_iterator PI = Decl->param_begin(),
E = Decl->sel_param_end(); PI != E; ++PI) {
const ParmVarDecl *PVDecl = *PI;
QualType PType = PVDecl->getOriginalType();
if (const auto *AT =
dyn_cast<ArrayType>(PType->getCanonicalTypeInternal())) {
// Use array's original type only if it has known number of
// elements.
if (!isa<ConstantArrayType>(AT))
PType = PVDecl->getType();
} else if (PType->isFunctionType())
PType = PVDecl->getType();
getObjCEncodingForMethodParameter(PVDecl->getObjCDeclQualifier(),
PType, S, Extended);
S += charUnitsToString(ParmOffset);
ParmOffset += getObjCEncodingTypeSize(PType);
}
return S;
}
ObjCPropertyImplDecl *
ASTContext::getObjCPropertyImplDeclForPropertyDecl(
const ObjCPropertyDecl *PD,
const Decl *Container) const {
if (!Container)
return nullptr;
if (const auto *CID = dyn_cast<ObjCCategoryImplDecl>(Container)) {
for (auto *PID : CID->property_impls())
if (PID->getPropertyDecl() == PD)
return PID;
} else {
const auto *OID = cast<ObjCImplementationDecl>(Container);
for (auto *PID : OID->property_impls())
if (PID->getPropertyDecl() == PD)
return PID;
}
return nullptr;
}
/// getObjCEncodingForPropertyDecl - Return the encoded type for this
/// property declaration. If non-NULL, Container must be either an
/// ObjCCategoryImplDecl or ObjCImplementationDecl; it should only be
/// NULL when getting encodings for protocol properties.
/// Property attributes are stored as a comma-delimited C string. The simple
/// attributes readonly and bycopy are encoded as single characters. The
/// parametrized attributes, getter=name, setter=name, and ivar=name, are
/// encoded as single characters, followed by an identifier. Property types
/// are also encoded as a parametrized attribute. The characters used to encode
/// these attributes are defined by the following enumeration:
/// @code
/// enum PropertyAttributes {
/// kPropertyReadOnly = 'R', // property is read-only.
/// kPropertyBycopy = 'C', // property is a copy of the value last assigned
/// kPropertyByref = '&', // property is a reference to the value last assigned
/// kPropertyDynamic = 'D', // property is dynamic
/// kPropertyGetter = 'G', // followed by getter selector name
/// kPropertySetter = 'S', // followed by setter selector name
/// kPropertyInstanceVariable = 'V' // followed by instance variable name
/// kPropertyType = 'T' // followed by old-style type encoding.
/// kPropertyWeak = 'W' // 'weak' property
/// kPropertyStrong = 'P' // property GC'able
/// kPropertyNonAtomic = 'N' // property non-atomic
/// };
/// @endcode
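///
/// For example, a synthesized property declared as
/// '@property (nonatomic, copy) NSString *name;' and backed by the ivar
/// '_name' is encoded roughly as: T@"NSString",C,N,V_name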
std::string
ASTContext::getObjCEncodingForPropertyDecl(const ObjCPropertyDecl *PD,
const Decl *Container) const {
// Collect information from the property implementation decl(s).
bool Dynamic = false;
ObjCPropertyImplDecl *SynthesizePID = nullptr;
if (ObjCPropertyImplDecl *PropertyImpDecl =
getObjCPropertyImplDeclForPropertyDecl(PD, Container)) {
if (PropertyImpDecl->getPropertyImplementation() == ObjCPropertyImplDecl::Dynamic)
Dynamic = true;
else
SynthesizePID = PropertyImpDecl;
}
// FIXME: This is not very efficient.
std::string S = "T";
// Encode result type.
// GCC has some special rules regarding encoding of properties which
// closely resembles encoding of ivars.
getObjCEncodingForPropertyType(PD->getType(), S);
if (PD->isReadOnly()) {
S += ",R";
if (PD->getPropertyAttributes() & ObjCPropertyDecl::OBJC_PR_copy)
S += ",C";
if (PD->getPropertyAttributes() & ObjCPropertyDecl::OBJC_PR_retain)
S += ",&";
if (PD->getPropertyAttributes() & ObjCPropertyDecl::OBJC_PR_weak)
S += ",W";
} else {
switch (PD->getSetterKind()) {
case ObjCPropertyDecl::Assign: break;
case ObjCPropertyDecl::Copy: S += ",C"; break;
case ObjCPropertyDecl::Retain: S += ",&"; break;
case ObjCPropertyDecl::Weak: S += ",W"; break;
}
}
// It really isn't clear at all what this means, since properties
// are "dynamic by default".
if (Dynamic)
S += ",D";
if (PD->getPropertyAttributes() & ObjCPropertyDecl::OBJC_PR_nonatomic)
S += ",N";
if (PD->getPropertyAttributes() & ObjCPropertyDecl::OBJC_PR_getter) {
S += ",G";
S += PD->getGetterName().getAsString();
}
if (PD->getPropertyAttributes() & ObjCPropertyDecl::OBJC_PR_setter) {
S += ",S";
S += PD->getSetterName().getAsString();
}
if (SynthesizePID) {
const ObjCIvarDecl *OID = SynthesizePID->getPropertyIvarDecl();
S += ",V";
S += OID->getNameAsString();
}
// FIXME: OBJCGC: weak & strong
return S;
}
/// getLegacyIntegralTypeEncoding -
/// Another legacy compatibility encoding: 32-bit longs are encoded as
/// 'l' or 'L', but not always. For typedefs, we need to use
/// 'i' or 'I' instead if encoding a struct field, or a pointer!
void ASTContext::getLegacyIntegralTypeEncoding (QualType &PointeeTy) const {
if (isa<TypedefType>(PointeeTy.getTypePtr())) {
if (const auto *BT = PointeeTy->getAs<BuiltinType>()) {
if (BT->getKind() == BuiltinType::ULong && getIntWidth(PointeeTy) == 32)
PointeeTy = UnsignedIntTy;
else
if (BT->getKind() == BuiltinType::Long && getIntWidth(PointeeTy) == 32)
PointeeTy = IntTy;
}
}
}
void ASTContext::getObjCEncodingForType(QualType T, std::string& S,
const FieldDecl *Field,
QualType *NotEncodedT) const {
// We follow the behavior of gcc, expanding structures which are
// directly pointed to, and expanding embedded structures. Note that
// these rules are sufficient to prevent recursive encoding of the
// same type.
getObjCEncodingForTypeImpl(T, S,
ObjCEncOptions()
.setExpandPointedToStructures()
.setExpandStructures()
.setIsOutermostType(),
Field, NotEncodedT);
}
void ASTContext::getObjCEncodingForPropertyType(QualType T,
std::string& S) const {
// Encode result type.
// GCC has some special rules regarding encoding of properties which
// closely resembles encoding of ivars.
getObjCEncodingForTypeImpl(T, S,
ObjCEncOptions()
.setExpandPointedToStructures()
.setExpandStructures()
.setIsOutermostType()
.setEncodingProperty(),
/*Field=*/nullptr);
}
static char getObjCEncodingForPrimitiveType(const ASTContext *C,
const BuiltinType *BT) {
BuiltinType::Kind kind = BT->getKind();
switch (kind) {
case BuiltinType::Void: return 'v';
case BuiltinType::Bool: return 'B';
case BuiltinType::Char8:
case BuiltinType::Char_U:
case BuiltinType::UChar: return 'C';
case BuiltinType::Char16:
case BuiltinType::UShort: return 'S';
case BuiltinType::Char32:
case BuiltinType::UInt: return 'I';
case BuiltinType::ULong:
return C->getTargetInfo().getLongWidth() == 32 ? 'L' : 'Q';
case BuiltinType::UInt128: return 'T';
case BuiltinType::ULongLong: return 'Q';
case BuiltinType::Char_S:
case BuiltinType::SChar: return 'c';
case BuiltinType::Short: return 's';
case BuiltinType::WChar_S:
case BuiltinType::WChar_U:
case BuiltinType::Int: return 'i';
case BuiltinType::Long:
return C->getTargetInfo().getLongWidth() == 32 ? 'l' : 'q';
case BuiltinType::LongLong: return 'q';
case BuiltinType::Int128: return 't';
case BuiltinType::Float: return 'f';
case BuiltinType::Double: return 'd';
case BuiltinType::LongDouble: return 'D';
case BuiltinType::NullPtr: return '*'; // like char*
// Scaffold types
  // -- FIXME: the cbit and qbit encoding characters collide with existing
  //    values, which may cause other problems.
case BuiltinType::Abit: return 'a';
case BuiltinType::Cbit: return 'l';
case BuiltinType::Qbit: return 'q';
case BuiltinType::zzBit:
case BuiltinType::zgBit:
case BuiltinType::ooBit:
case BuiltinType::ogBit:
case BuiltinType::Qint: return 'y';
case BuiltinType::Float16:
case BuiltinType::Float128:
case BuiltinType::Half:
case BuiltinType::ShortAccum:
case BuiltinType::Accum:
case BuiltinType::LongAccum:
case BuiltinType::UShortAccum:
case BuiltinType::UAccum:
case BuiltinType::ULongAccum:
case BuiltinType::ShortFract:
case BuiltinType::Fract:
case BuiltinType::LongFract:
case BuiltinType::UShortFract:
case BuiltinType::UFract:
case BuiltinType::ULongFract:
case BuiltinType::SatShortAccum:
case BuiltinType::SatAccum:
case BuiltinType::SatLongAccum:
case BuiltinType::SatUShortAccum:
case BuiltinType::SatUAccum:
case BuiltinType::SatULongAccum:
case BuiltinType::SatShortFract:
case BuiltinType::SatFract:
case BuiltinType::SatLongFract:
case BuiltinType::SatUShortFract:
case BuiltinType::SatUFract:
case BuiltinType::SatULongFract:
// FIXME: potentially need @encodes for these!
return ' ';
#define SVE_TYPE(Name, Id, SingletonId) \
case BuiltinType::Id:
#include "clang/Basic/AArch64SVEACLETypes.def"
{
DiagnosticsEngine &Diags = C->getDiagnostics();
unsigned DiagID = Diags.getCustomDiagID(
DiagnosticsEngine::Error, "cannot yet @encode type %0");
Diags.Report(DiagID) << BT->getName(C->getPrintingPolicy());
return ' ';
}
case BuiltinType::ObjCId:
case BuiltinType::ObjCClass:
case BuiltinType::ObjCSel:
llvm_unreachable("@encoding ObjC primitive type");
// OpenCL and placeholder types don't need @encodings.
#define IMAGE_TYPE(ImgType, Id, SingletonId, Access, Suffix) \
case BuiltinType::Id:
#include "clang/Basic/OpenCLImageTypes.def"
#define EXT_OPAQUE_TYPE(ExtType, Id, Ext) \
case BuiltinType::Id:
#include "clang/Basic/OpenCLExtensionTypes.def"
case BuiltinType::OCLEvent:
case BuiltinType::OCLClkEvent:
case BuiltinType::OCLQueue:
case BuiltinType::OCLReserveID:
case BuiltinType::OCLSampler:
case BuiltinType::Dependent:
#define BUILTIN_TYPE(KIND, ID)
#define PLACEHOLDER_TYPE(KIND, ID) \
case BuiltinType::KIND:
#include "clang/AST/BuiltinTypes.def"
llvm_unreachable("invalid builtin type for @encode");
}
llvm_unreachable("invalid BuiltinType::Kind value");
}
static char ObjCEncodingForEnumType(const ASTContext *C, const EnumType *ET) {
EnumDecl *Enum = ET->getDecl();
  // The encoding of a non-fixed enum type is always 'i', regardless of size.
if (!Enum->isFixed())
return 'i';
// The encoding of a fixed enum type matches its fixed underlying type.
const auto *BT = Enum->getIntegerType()->castAs<BuiltinType>();
return getObjCEncodingForPrimitiveType(C, BT);
}
static void EncodeBitField(const ASTContext *Ctx, std::string& S,
QualType T, const FieldDecl *FD) {
assert(FD->isBitField() && "not a bitfield - getObjCEncodingForTypeImpl");
S += 'b';
// The NeXT runtime encodes bit fields as b followed by the number of bits.
// The GNU runtime requires more information; bitfields are encoded as b,
// then the offset (in bits) of the first element, then the type of the
// bitfield, then the size in bits. For example, in this structure:
//
// struct
// {
// int integer;
// int flags:2;
// };
// On a 32-bit system, the encoding for flags would be b2 for the NeXT
// runtime, but b32i2 for the GNU runtime. The reason for this extra
// information is not especially sensible, but we're stuck with it for
// compatibility with GCC, although providing it breaks anything that
// actually uses runtime introspection and wants to work on both runtimes...
if (Ctx->getLangOpts().ObjCRuntime.isGNUFamily()) {
uint64_t Offset;
if (const auto *IVD = dyn_cast<ObjCIvarDecl>(FD)) {
Offset = Ctx->lookupFieldBitOffset(IVD->getContainingInterface(), nullptr,
IVD);
} else {
const RecordDecl *RD = FD->getParent();
const ASTRecordLayout &RL = Ctx->getASTRecordLayout(RD);
Offset = RL.getFieldOffset(FD->getFieldIndex());
}
S += llvm::utostr(Offset);
if (const auto *ET = T->getAs<EnumType>())
S += ObjCEncodingForEnumType(Ctx, ET);
else {
const auto *BT = T->castAs<BuiltinType>();
S += getObjCEncodingForPrimitiveType(Ctx, BT);
}
}
S += llvm::utostr(FD->getBitWidthValue(*Ctx));
}
// FIXME: Use SmallString for accumulating string.
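//
// For orientation, a few canonical @encode results on a typical target:
//   @encode(int)    -> "i"
//   @encode(char *) -> "*"
//   @encode(int *)  -> "^i"
//   @encode(id)     -> "@"
//   @encode(struct Point) -> "{Point=ii}"  (for 'struct Point { int x, y; };')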
void ASTContext::getObjCEncodingForTypeImpl(QualType T, std::string &S,
const ObjCEncOptions Options,
const FieldDecl *FD,
QualType *NotEncodedT) const {
CanQualType CT = getCanonicalType(T);
switch (CT->getTypeClass()) {
case Type::Builtin:
case Type::Enum:
if (FD && FD->isBitField())
return EncodeBitField(this, S, T, FD);
if (const auto *BT = dyn_cast<BuiltinType>(CT))
S += getObjCEncodingForPrimitiveType(this, BT);
else
S += ObjCEncodingForEnumType(this, cast<EnumType>(CT));
return;
case Type::Complex:
S += 'j';
getObjCEncodingForTypeImpl(T->castAs<ComplexType>()->getElementType(), S,
ObjCEncOptions(),
/*Field=*/nullptr);
return;
case Type::Atomic:
S += 'A';
getObjCEncodingForTypeImpl(T->castAs<AtomicType>()->getValueType(), S,
ObjCEncOptions(),
/*Field=*/nullptr);
return;
// encoding for pointer or reference types.
case Type::Pointer:
case Type::LValueReference:
case Type::RValueReference: {
QualType PointeeTy;
if (isa<PointerType>(CT)) {
const auto *PT = T->castAs<PointerType>();
if (PT->isObjCSelType()) {
S += ':';
return;
}
PointeeTy = PT->getPointeeType();
} else {
PointeeTy = T->castAs<ReferenceType>()->getPointeeType();
}
bool isReadOnly = false;
// For historical/compatibility reasons, the read-only qualifier of the
// pointee gets emitted _before_ the '^'. The read-only qualifier of
// the pointer itself gets ignored, _unless_ we are looking at a typedef!
// Also, do not emit the 'r' for anything but the outermost type!
if (isa<TypedefType>(T.getTypePtr())) {
if (Options.IsOutermostType() && T.isConstQualified()) {
isReadOnly = true;
S += 'r';
}
} else if (Options.IsOutermostType()) {
QualType P = PointeeTy;
while (auto PT = P->getAs<PointerType>())
P = PT->getPointeeType();
if (P.isConstQualified()) {
isReadOnly = true;
S += 'r';
}
}
if (isReadOnly) {
// Another legacy compatibility encoding. Some ObjC qualifier and type
// combinations need to be rearranged.
// Rewrite "in const" from "nr" to "rn"
if (StringRef(S).endswith("nr"))
S.replace(S.end()-2, S.end(), "rn");
}
if (PointeeTy->isCharType()) {
      // char pointer types should be encoded as '*' unless the pointee has
      // been typedef'd to 'BOOL'.
if (!isTypeTypedefedAsBOOL(PointeeTy)) {
S += '*';
return;
}
} else if (const auto *RTy = PointeeTy->getAs<RecordType>()) {
// GCC binary compat: Need to convert "struct objc_class *" to "#".
if (RTy->getDecl()->getIdentifier() == &Idents.get("objc_class")) {
S += '#';
return;
}
// GCC binary compat: Need to convert "struct objc_object *" to "@".
if (RTy->getDecl()->getIdentifier() == &Idents.get("objc_object")) {
S += '@';
return;
}
// fall through...
}
S += '^';
getLegacyIntegralTypeEncoding(PointeeTy);
ObjCEncOptions NewOptions;
if (Options.ExpandPointedToStructures())
NewOptions.setExpandStructures();
getObjCEncodingForTypeImpl(PointeeTy, S, NewOptions,
/*Field=*/nullptr, NotEncodedT);
return;
}
case Type::ConstantArray:
case Type::IncompleteArray:
case Type::VariableArray: {
const auto *AT = cast<ArrayType>(CT);
if (isa<IncompleteArrayType>(AT) && !Options.IsStructField()) {
// Incomplete arrays are encoded as a pointer to the array element.
S += '^';
getObjCEncodingForTypeImpl(
AT->getElementType(), S,
Options.keepingOnly(ObjCEncOptions().setExpandStructures()), FD);
} else {
S += '[';
if (const auto *CAT = dyn_cast<ConstantArrayType>(AT))
S += llvm::utostr(CAT->getSize().getZExtValue());
else {
        // Variable length arrays are encoded as a regular array with 0 elements.
assert((isa<VariableArrayType>(AT) || isa<IncompleteArrayType>(AT)) &&
"Unknown array type!");
S += '0';
}
getObjCEncodingForTypeImpl(
AT->getElementType(), S,
Options.keepingOnly(ObjCEncOptions().setExpandStructures()), FD,
NotEncodedT);
S += ']';
}
return;
}
case Type::FunctionNoProto:
case Type::FunctionProto:
S += '?';
return;
case Type::Record: {
RecordDecl *RDecl = cast<RecordType>(CT)->getDecl();
S += RDecl->isUnion() ? '(' : '{';
// Anonymous structures print as '?'
if (const IdentifierInfo *II = RDecl->getIdentifier()) {
S += II->getName();
if (const auto *Spec = dyn_cast<ClassTemplateSpecializationDecl>(RDecl)) {
const TemplateArgumentList &TemplateArgs = Spec->getTemplateArgs();
llvm::raw_string_ostream OS(S);
printTemplateArgumentList(OS, TemplateArgs.asArray(),
getPrintingPolicy());
}
} else {
S += '?';
}
if (Options.ExpandStructures()) {
S += '=';
if (!RDecl->isUnion()) {
getObjCEncodingForStructureImpl(RDecl, S, FD, true, NotEncodedT);
} else {
for (const auto *Field : RDecl->fields()) {
if (FD) {
S += '"';
S += Field->getNameAsString();
S += '"';
}
// Special case bit-fields.
if (Field->isBitField()) {
getObjCEncodingForTypeImpl(Field->getType(), S,
ObjCEncOptions().setExpandStructures(),
Field);
} else {
QualType qt = Field->getType();
getLegacyIntegralTypeEncoding(qt);
getObjCEncodingForTypeImpl(
qt, S,
ObjCEncOptions().setExpandStructures().setIsStructField(), FD,
NotEncodedT);
}
}
}
}
S += RDecl->isUnion() ? ')' : '}';
return;
}
case Type::BlockPointer: {
const auto *BT = T->castAs<BlockPointerType>();
S += "@?"; // Unlike a pointer-to-function, which is "^?".
if (Options.EncodeBlockParameters()) {
const auto *FT = BT->getPointeeType()->castAs<FunctionType>();
S += '<';
// Block return type
getObjCEncodingForTypeImpl(FT->getReturnType(), S,
Options.forComponentType(), FD, NotEncodedT);
// Block self
S += "@?";
// Block parameters
if (const auto *FPT = dyn_cast<FunctionProtoType>(FT)) {
for (const auto &I : FPT->param_types())
getObjCEncodingForTypeImpl(I, S, Options.forComponentType(), FD,
NotEncodedT);
}
S += '>';
}
return;
}
case Type::ObjCObject: {
// hack to match legacy encoding of *id and *Class
QualType Ty = getObjCObjectPointerType(CT);
if (Ty->isObjCIdType()) {
S += "{objc_object=}";
return;
}
else if (Ty->isObjCClassType()) {
S += "{objc_class=}";
return;
}
// TODO: Double check to make sure this intentionally falls through.
LLVM_FALLTHROUGH;
}
case Type::ObjCInterface: {
// Ignore protocol qualifiers when mangling at this level.
// @encode(class_name)
ObjCInterfaceDecl *OI = T->castAs<ObjCObjectType>()->getInterface();
S += '{';
S += OI->getObjCRuntimeNameAsString();
if (Options.ExpandStructures()) {
S += '=';
SmallVector<const ObjCIvarDecl*, 32> Ivars;
DeepCollectObjCIvars(OI, true, Ivars);
for (unsigned i = 0, e = Ivars.size(); i != e; ++i) {
const FieldDecl *Field = Ivars[i];
if (Field->isBitField())
getObjCEncodingForTypeImpl(Field->getType(), S,
ObjCEncOptions().setExpandStructures(),
Field);
else
getObjCEncodingForTypeImpl(Field->getType(), S,
ObjCEncOptions().setExpandStructures(), FD,
NotEncodedT);
}
}
S += '}';
return;
}
case Type::ObjCObjectPointer: {
const auto *OPT = T->castAs<ObjCObjectPointerType>();
if (OPT->isObjCIdType()) {
S += '@';
return;
}
if (OPT->isObjCClassType() || OPT->isObjCQualifiedClassType()) {
// FIXME: Consider if we need to output qualifiers for 'Class<p>'.
// Since this is a binary compatibility issue, need to consult with
// runtime folks. Fortunately, this is a *very* obscure construct.
S += '#';
return;
}
if (OPT->isObjCQualifiedIdType()) {
getObjCEncodingForTypeImpl(
getObjCIdType(), S,
Options.keepingOnly(ObjCEncOptions()
.setExpandPointedToStructures()
.setExpandStructures()),
FD);
if (FD || Options.EncodingProperty() || Options.EncodeClassNames()) {
        // Note that we do extended encoding of the protocol qualifier list
        // only when doing ivar or property encoding.
S += '"';
for (const auto *I : OPT->quals()) {
S += '<';
S += I->getObjCRuntimeNameAsString();
S += '>';
}
S += '"';
}
return;
}
S += '@';
if (OPT->getInterfaceDecl() &&
(FD || Options.EncodingProperty() || Options.EncodeClassNames())) {
S += '"';
S += OPT->getInterfaceDecl()->getObjCRuntimeNameAsString();
for (const auto *I : OPT->quals()) {
S += '<';
S += I->getObjCRuntimeNameAsString();
S += '>';
}
S += '"';
}
return;
}
// gcc just blithely ignores member pointers.
// FIXME: we should do better than that. 'M' is available.
case Type::MemberPointer:
// This matches gcc's encoding, even though technically it is insufficient.
//FIXME. We should do a better job than gcc.
case Type::Vector:
case Type::ExtVector:
// Until we have a coherent encoding of these three types, issue warning.
if (NotEncodedT)
*NotEncodedT = T;
return;
// We could see an undeduced auto type here during error recovery.
// Just ignore it.
case Type::Auto:
case Type::DeducedTemplateSpecialization:
return;
case Type::Pipe:
#define ABSTRACT_TYPE(KIND, BASE)
#define TYPE(KIND, BASE)
#define DEPENDENT_TYPE(KIND, BASE) \
case Type::KIND:
#define NON_CANONICAL_TYPE(KIND, BASE) \
case Type::KIND:
#define NON_CANONICAL_UNLESS_DEPENDENT_TYPE(KIND, BASE) \
case Type::KIND:
#include "clang/AST/TypeNodes.inc"
llvm_unreachable("@encode for dependent type!");
}
llvm_unreachable("bad type kind!");
}
void ASTContext::getObjCEncodingForStructureImpl(RecordDecl *RDecl,
std::string &S,
const FieldDecl *FD,
bool includeVBases,
QualType *NotEncodedT) const {
assert(RDecl && "Expected non-null RecordDecl");
assert(!RDecl->isUnion() && "Should not be called for unions");
if (!RDecl->getDefinition() || RDecl->getDefinition()->isInvalidDecl())
return;
const auto *CXXRec = dyn_cast<CXXRecordDecl>(RDecl);
std::multimap<uint64_t, NamedDecl *> FieldOrBaseOffsets;
const ASTRecordLayout &layout = getASTRecordLayout(RDecl);
if (CXXRec) {
for (const auto &BI : CXXRec->bases()) {
if (!BI.isVirtual()) {
CXXRecordDecl *base = BI.getType()->getAsCXXRecordDecl();
if (base->isEmpty())
continue;
uint64_t offs = toBits(layout.getBaseClassOffset(base));
FieldOrBaseOffsets.insert(FieldOrBaseOffsets.upper_bound(offs),
std::make_pair(offs, base));
}
}
}
unsigned i = 0;
for (auto *Field : RDecl->fields()) {
uint64_t offs = layout.getFieldOffset(i);
FieldOrBaseOffsets.insert(FieldOrBaseOffsets.upper_bound(offs),
std::make_pair(offs, Field));
++i;
}
if (CXXRec && includeVBases) {
for (const auto &BI : CXXRec->vbases()) {
CXXRecordDecl *base = BI.getType()->getAsCXXRecordDecl();
if (base->isEmpty())
continue;
uint64_t offs = toBits(layout.getVBaseClassOffset(base));
if (offs >= uint64_t(toBits(layout.getNonVirtualSize())) &&
FieldOrBaseOffsets.find(offs) == FieldOrBaseOffsets.end())
FieldOrBaseOffsets.insert(FieldOrBaseOffsets.end(),
std::make_pair(offs, base));
}
}
CharUnits size;
if (CXXRec) {
size = includeVBases ? layout.getSize() : layout.getNonVirtualSize();
} else {
size = layout.getSize();
}
#ifndef NDEBUG
uint64_t CurOffs = 0;
#endif
std::multimap<uint64_t, NamedDecl *>::iterator
CurLayObj = FieldOrBaseOffsets.begin();
if (CXXRec && CXXRec->isDynamicClass() &&
(CurLayObj == FieldOrBaseOffsets.end() || CurLayObj->first != 0)) {
if (FD) {
S += "\"_vptr$";
std::string recname = CXXRec->getNameAsString();
if (recname.empty()) recname = "?";
S += recname;
S += '"';
}
S += "^^?";
#ifndef NDEBUG
CurOffs += getTypeSize(VoidPtrTy);
#endif
}
if (!RDecl->hasFlexibleArrayMember()) {
// Mark the end of the structure.
uint64_t offs = toBits(size);
FieldOrBaseOffsets.insert(FieldOrBaseOffsets.upper_bound(offs),
std::make_pair(offs, nullptr));
}
for (; CurLayObj != FieldOrBaseOffsets.end(); ++CurLayObj) {
#ifndef NDEBUG
assert(CurOffs <= CurLayObj->first);
if (CurOffs < CurLayObj->first) {
uint64_t padding = CurLayObj->first - CurOffs;
// FIXME: There doesn't seem to be a way to indicate in the encoding that
      // packing/alignment of members is different than normal, in which case
// the encoding will be out-of-sync with the real layout.
// If the runtime switches to just consider the size of types without
// taking into account alignment, we could make padding explicit in the
// encoding (e.g. using arrays of chars). The encoding strings would be
      // longer in that case, though.
CurOffs += padding;
}
#endif
NamedDecl *dcl = CurLayObj->second;
if (!dcl)
break; // reached end of structure.
if (auto *base = dyn_cast<CXXRecordDecl>(dcl)) {
// We expand the bases without their virtual bases since those are going
// in the initial structure. Note that this differs from gcc which
// expands virtual bases each time one is encountered in the hierarchy,
// making the encoding type bigger than it really is.
getObjCEncodingForStructureImpl(base, S, FD, /*includeVBases*/false,
NotEncodedT);
assert(!base->isEmpty());
#ifndef NDEBUG
CurOffs += toBits(getASTRecordLayout(base).getNonVirtualSize());
#endif
} else {
const auto *field = cast<FieldDecl>(dcl);
if (FD) {
S += '"';
S += field->getNameAsString();
S += '"';
}
if (field->isBitField()) {
EncodeBitField(this, S, field->getType(), field);
#ifndef NDEBUG
CurOffs += field->getBitWidthValue(*this);
#endif
} else {
QualType qt = field->getType();
getLegacyIntegralTypeEncoding(qt);
getObjCEncodingForTypeImpl(
qt, S, ObjCEncOptions().setExpandStructures().setIsStructField(),
FD, NotEncodedT);
#ifndef NDEBUG
CurOffs += getTypeSize(field->getType());
#endif
}
}
}
}
void ASTContext::getObjCEncodingForTypeQualifier(Decl::ObjCDeclQualifier QT,
std::string& S) const {
if (QT & Decl::OBJC_TQ_In)
S += 'n';
if (QT & Decl::OBJC_TQ_Inout)
S += 'N';
if (QT & Decl::OBJC_TQ_Out)
S += 'o';
if (QT & Decl::OBJC_TQ_Bycopy)
S += 'O';
if (QT & Decl::OBJC_TQ_Byref)
S += 'R';
if (QT & Decl::OBJC_TQ_Oneway)
S += 'V';
}
TypedefDecl *ASTContext::getObjCIdDecl() const {
if (!ObjCIdDecl) {
QualType T = getObjCObjectType(ObjCBuiltinIdTy, {}, {});
T = getObjCObjectPointerType(T);
ObjCIdDecl = buildImplicitTypedef(T, "id");
}
return ObjCIdDecl;
}
TypedefDecl *ASTContext::getObjCSelDecl() const {
if (!ObjCSelDecl) {
QualType T = getPointerType(ObjCBuiltinSelTy);
ObjCSelDecl = buildImplicitTypedef(T, "SEL");
}
return ObjCSelDecl;
}
TypedefDecl *ASTContext::getObjCClassDecl() const {
if (!ObjCClassDecl) {
QualType T = getObjCObjectType(ObjCBuiltinClassTy, {}, {});
T = getObjCObjectPointerType(T);
ObjCClassDecl = buildImplicitTypedef(T, "Class");
}
return ObjCClassDecl;
}
ObjCInterfaceDecl *ASTContext::getObjCProtocolDecl() const {
if (!ObjCProtocolClassDecl) {
ObjCProtocolClassDecl
= ObjCInterfaceDecl::Create(*this, getTranslationUnitDecl(),
SourceLocation(),
&Idents.get("Protocol"),
/*typeParamList=*/nullptr,
/*PrevDecl=*/nullptr,
SourceLocation(), true);
}
return ObjCProtocolClassDecl;
}
//===----------------------------------------------------------------------===//
// __builtin_va_list Construction Functions
//===----------------------------------------------------------------------===//
static TypedefDecl *CreateCharPtrNamedVaListDecl(const ASTContext *Context,
StringRef Name) {
// typedef char* __builtin[_ms]_va_list;
QualType T = Context->getPointerType(Context->CharTy);
return Context->buildImplicitTypedef(T, Name);
}
static TypedefDecl *CreateMSVaListDecl(const ASTContext *Context) {
return CreateCharPtrNamedVaListDecl(Context, "__builtin_ms_va_list");
}
static TypedefDecl *CreateCharPtrBuiltinVaListDecl(const ASTContext *Context) {
return CreateCharPtrNamedVaListDecl(Context, "__builtin_va_list");
}
static TypedefDecl *CreateVoidPtrBuiltinVaListDecl(const ASTContext *Context) {
// typedef void* __builtin_va_list;
QualType T = Context->getPointerType(Context->VoidTy);
return Context->buildImplicitTypedef(T, "__builtin_va_list");
}
static TypedefDecl *
CreateAArch64ABIBuiltinVaListDecl(const ASTContext *Context) {
// struct __va_list
RecordDecl *VaListTagDecl = Context->buildImplicitRecord("__va_list");
if (Context->getLangOpts().CPlusPlus) {
// namespace std { struct __va_list {
NamespaceDecl *NS;
NS = NamespaceDecl::Create(const_cast<ASTContext &>(*Context),
Context->getTranslationUnitDecl(),
/*Inline*/ false, SourceLocation(),
SourceLocation(), &Context->Idents.get("std"),
/*PrevDecl*/ nullptr);
NS->setImplicit();
VaListTagDecl->setDeclContext(NS);
}
VaListTagDecl->startDefinition();
const size_t NumFields = 5;
QualType FieldTypes[NumFields];
const char *FieldNames[NumFields];
// void *__stack;
FieldTypes[0] = Context->getPointerType(Context->VoidTy);
FieldNames[0] = "__stack";
// void *__gr_top;
FieldTypes[1] = Context->getPointerType(Context->VoidTy);
FieldNames[1] = "__gr_top";
// void *__vr_top;
FieldTypes[2] = Context->getPointerType(Context->VoidTy);
FieldNames[2] = "__vr_top";
// int __gr_offs;
FieldTypes[3] = Context->IntTy;
FieldNames[3] = "__gr_offs";
// int __vr_offs;
FieldTypes[4] = Context->IntTy;
FieldNames[4] = "__vr_offs";
// Create fields
for (unsigned i = 0; i < NumFields; ++i) {
FieldDecl *Field = FieldDecl::Create(const_cast<ASTContext &>(*Context),
VaListTagDecl,
SourceLocation(),
SourceLocation(),
&Context->Idents.get(FieldNames[i]),
FieldTypes[i], /*TInfo=*/nullptr,
/*BitWidth=*/nullptr,
/*Mutable=*/false,
ICIS_NoInit);
Field->setAccess(AS_public);
VaListTagDecl->addDecl(Field);
}
VaListTagDecl->completeDefinition();
Context->VaListTagDecl = VaListTagDecl;
QualType VaListTagType = Context->getRecordType(VaListTagDecl);
// } __builtin_va_list;
return Context->buildImplicitTypedef(VaListTagType, "__builtin_va_list");
}
static TypedefDecl *CreatePowerABIBuiltinVaListDecl(const ASTContext *Context) {
// typedef struct __va_list_tag {
RecordDecl *VaListTagDecl;
VaListTagDecl = Context->buildImplicitRecord("__va_list_tag");
VaListTagDecl->startDefinition();
const size_t NumFields = 5;
QualType FieldTypes[NumFields];
const char *FieldNames[NumFields];
// unsigned char gpr;
FieldTypes[0] = Context->UnsignedCharTy;
FieldNames[0] = "gpr";
// unsigned char fpr;
FieldTypes[1] = Context->UnsignedCharTy;
FieldNames[1] = "fpr";
// unsigned short reserved;
FieldTypes[2] = Context->UnsignedShortTy;
FieldNames[2] = "reserved";
// void* overflow_arg_area;
FieldTypes[3] = Context->getPointerType(Context->VoidTy);
FieldNames[3] = "overflow_arg_area";
// void* reg_save_area;
FieldTypes[4] = Context->getPointerType(Context->VoidTy);
FieldNames[4] = "reg_save_area";
// Create fields
for (unsigned i = 0; i < NumFields; ++i) {
FieldDecl *Field = FieldDecl::Create(*Context, VaListTagDecl,
SourceLocation(),
SourceLocation(),
&Context->Idents.get(FieldNames[i]),
FieldTypes[i], /*TInfo=*/nullptr,
/*BitWidth=*/nullptr,
/*Mutable=*/false,
ICIS_NoInit);
Field->setAccess(AS_public);
VaListTagDecl->addDecl(Field);
}
VaListTagDecl->completeDefinition();
Context->VaListTagDecl = VaListTagDecl;
QualType VaListTagType = Context->getRecordType(VaListTagDecl);
// } __va_list_tag;
TypedefDecl *VaListTagTypedefDecl =
Context->buildImplicitTypedef(VaListTagType, "__va_list_tag");
QualType VaListTagTypedefType =
Context->getTypedefType(VaListTagTypedefDecl);
// typedef __va_list_tag __builtin_va_list[1];
llvm::APInt Size(Context->getTypeSize(Context->getSizeType()), 1);
QualType VaListTagArrayType
= Context->getConstantArrayType(VaListTagTypedefType,
Size, nullptr, ArrayType::Normal, 0);
return Context->buildImplicitTypedef(VaListTagArrayType, "__builtin_va_list");
}
static TypedefDecl *
CreateX86_64ABIBuiltinVaListDecl(const ASTContext *Context) {
// struct __va_list_tag {
RecordDecl *VaListTagDecl;
VaListTagDecl = Context->buildImplicitRecord("__va_list_tag");
VaListTagDecl->startDefinition();
const size_t NumFields = 4;
QualType FieldTypes[NumFields];
const char *FieldNames[NumFields];
// unsigned gp_offset;
FieldTypes[0] = Context->UnsignedIntTy;
FieldNames[0] = "gp_offset";
// unsigned fp_offset;
FieldTypes[1] = Context->UnsignedIntTy;
FieldNames[1] = "fp_offset";
// void* overflow_arg_area;
FieldTypes[2] = Context->getPointerType(Context->VoidTy);
FieldNames[2] = "overflow_arg_area";
// void* reg_save_area;
FieldTypes[3] = Context->getPointerType(Context->VoidTy);
FieldNames[3] = "reg_save_area";
// Create fields
for (unsigned i = 0; i < NumFields; ++i) {
FieldDecl *Field = FieldDecl::Create(const_cast<ASTContext &>(*Context),
VaListTagDecl,
SourceLocation(),
SourceLocation(),
&Context->Idents.get(FieldNames[i]),
FieldTypes[i], /*TInfo=*/nullptr,
/*BitWidth=*/nullptr,
/*Mutable=*/false,
ICIS_NoInit);
Field->setAccess(AS_public);
VaListTagDecl->addDecl(Field);
}
VaListTagDecl->completeDefinition();
Context->VaListTagDecl = VaListTagDecl;
QualType VaListTagType = Context->getRecordType(VaListTagDecl);
// };
// typedef struct __va_list_tag __builtin_va_list[1];
llvm::APInt Size(Context->getTypeSize(Context->getSizeType()), 1);
QualType VaListTagArrayType = Context->getConstantArrayType(
VaListTagType, Size, nullptr, ArrayType::Normal, 0);
return Context->buildImplicitTypedef(VaListTagArrayType, "__builtin_va_list");
}
static TypedefDecl *CreatePNaClABIBuiltinVaListDecl(const ASTContext *Context) {
// typedef int __builtin_va_list[4];
llvm::APInt Size(Context->getTypeSize(Context->getSizeType()), 4);
QualType IntArrayType = Context->getConstantArrayType(
Context->IntTy, Size, nullptr, ArrayType::Normal, 0);
return Context->buildImplicitTypedef(IntArrayType, "__builtin_va_list");
}
static TypedefDecl *
CreateAAPCSABIBuiltinVaListDecl(const ASTContext *Context) {
// struct __va_list
RecordDecl *VaListDecl = Context->buildImplicitRecord("__va_list");
if (Context->getLangOpts().CPlusPlus) {
// namespace std { struct __va_list {
NamespaceDecl *NS;
NS = NamespaceDecl::Create(const_cast<ASTContext &>(*Context),
Context->getTranslationUnitDecl(),
/*Inline*/false, SourceLocation(),
SourceLocation(), &Context->Idents.get("std"),
/*PrevDecl*/ nullptr);
NS->setImplicit();
VaListDecl->setDeclContext(NS);
}
VaListDecl->startDefinition();
// void * __ap;
FieldDecl *Field = FieldDecl::Create(const_cast<ASTContext &>(*Context),
VaListDecl,
SourceLocation(),
SourceLocation(),
&Context->Idents.get("__ap"),
Context->getPointerType(Context->VoidTy),
/*TInfo=*/nullptr,
/*BitWidth=*/nullptr,
/*Mutable=*/false,
ICIS_NoInit);
Field->setAccess(AS_public);
VaListDecl->addDecl(Field);
// };
VaListDecl->completeDefinition();
Context->VaListTagDecl = VaListDecl;
// typedef struct __va_list __builtin_va_list;
QualType T = Context->getRecordType(VaListDecl);
return Context->buildImplicitTypedef(T, "__builtin_va_list");
}
static TypedefDecl *
CreateSystemZBuiltinVaListDecl(const ASTContext *Context) {
// struct __va_list_tag {
RecordDecl *VaListTagDecl;
VaListTagDecl = Context->buildImplicitRecord("__va_list_tag");
VaListTagDecl->startDefinition();
const size_t NumFields = 4;
QualType FieldTypes[NumFields];
const char *FieldNames[NumFields];
// long __gpr;
FieldTypes[0] = Context->LongTy;
FieldNames[0] = "__gpr";
// long __fpr;
FieldTypes[1] = Context->LongTy;
FieldNames[1] = "__fpr";
// void *__overflow_arg_area;
FieldTypes[2] = Context->getPointerType(Context->VoidTy);
FieldNames[2] = "__overflow_arg_area";
// void *__reg_save_area;
FieldTypes[3] = Context->getPointerType(Context->VoidTy);
FieldNames[3] = "__reg_save_area";
// Create fields
for (unsigned i = 0; i < NumFields; ++i) {
FieldDecl *Field = FieldDecl::Create(const_cast<ASTContext &>(*Context),
VaListTagDecl,
SourceLocation(),
SourceLocation(),
&Context->Idents.get(FieldNames[i]),
FieldTypes[i], /*TInfo=*/nullptr,
/*BitWidth=*/nullptr,
/*Mutable=*/false,
ICIS_NoInit);
Field->setAccess(AS_public);
VaListTagDecl->addDecl(Field);
}
VaListTagDecl->completeDefinition();
Context->VaListTagDecl = VaListTagDecl;
QualType VaListTagType = Context->getRecordType(VaListTagDecl);
// };
// typedef __va_list_tag __builtin_va_list[1];
llvm::APInt Size(Context->getTypeSize(Context->getSizeType()), 1);
QualType VaListTagArrayType = Context->getConstantArrayType(
VaListTagType, Size, nullptr, ArrayType::Normal, 0);
return Context->buildImplicitTypedef(VaListTagArrayType, "__builtin_va_list");
}
static TypedefDecl *CreateVaListDecl(const ASTContext *Context,
TargetInfo::BuiltinVaListKind Kind) {
switch (Kind) {
case TargetInfo::CharPtrBuiltinVaList:
return CreateCharPtrBuiltinVaListDecl(Context);
case TargetInfo::VoidPtrBuiltinVaList:
return CreateVoidPtrBuiltinVaListDecl(Context);
case TargetInfo::AArch64ABIBuiltinVaList:
return CreateAArch64ABIBuiltinVaListDecl(Context);
case TargetInfo::PowerABIBuiltinVaList:
return CreatePowerABIBuiltinVaListDecl(Context);
case TargetInfo::X86_64ABIBuiltinVaList:
return CreateX86_64ABIBuiltinVaListDecl(Context);
case TargetInfo::PNaClABIBuiltinVaList:
return CreatePNaClABIBuiltinVaListDecl(Context);
case TargetInfo::AAPCSABIBuiltinVaList:
return CreateAAPCSABIBuiltinVaListDecl(Context);
case TargetInfo::SystemZBuiltinVaList:
return CreateSystemZBuiltinVaListDecl(Context);
}
llvm_unreachable("Unhandled __builtin_va_list type kind");
}
TypedefDecl *ASTContext::getBuiltinVaListDecl() const {
if (!BuiltinVaListDecl) {
BuiltinVaListDecl = CreateVaListDecl(this, Target->getBuiltinVaListKind());
assert(BuiltinVaListDecl->isImplicit());
}
return BuiltinVaListDecl;
}
Decl *ASTContext::getVaListTagDecl() const {
// Force the creation of VaListTagDecl by building the __builtin_va_list
// declaration.
if (!VaListTagDecl)
(void)getBuiltinVaListDecl();
return VaListTagDecl;
}
TypedefDecl *ASTContext::getBuiltinMSVaListDecl() const {
if (!BuiltinMSVaListDecl)
BuiltinMSVaListDecl = CreateMSVaListDecl(this);
return BuiltinMSVaListDecl;
}
bool ASTContext::canBuiltinBeRedeclared(const FunctionDecl *FD) const {
return BuiltinInfo.canBeRedeclared(FD->getBuiltinID());
}
void ASTContext::setObjCConstantStringInterface(ObjCInterfaceDecl *Decl) {
assert(ObjCConstantStringType.isNull() &&
"'NSConstantString' type already set!");
ObjCConstantStringType = getObjCInterfaceType(Decl);
}
/// Retrieve the template name that corresponds to a non-empty
/// lookup.
TemplateName
ASTContext::getOverloadedTemplateName(UnresolvedSetIterator Begin,
UnresolvedSetIterator End) const {
unsigned size = End - Begin;
assert(size > 1 && "set is not overloaded!");
void *memory = Allocate(sizeof(OverloadedTemplateStorage) +
size * sizeof(FunctionTemplateDecl*));
auto *OT = new (memory) OverloadedTemplateStorage(size);
NamedDecl **Storage = OT->getStorage();
for (UnresolvedSetIterator I = Begin; I != End; ++I) {
NamedDecl *D = *I;
assert(isa<FunctionTemplateDecl>(D) ||
isa<UnresolvedUsingValueDecl>(D) ||
(isa<UsingShadowDecl>(D) &&
isa<FunctionTemplateDecl>(D->getUnderlyingDecl())));
*Storage++ = D;
}
return TemplateName(OT);
}
/// Retrieve a template name representing an unqualified-id that has been
/// assumed to name a template for ADL purposes.
TemplateName ASTContext::getAssumedTemplateName(DeclarationName Name) const {
auto *OT = new (*this) AssumedTemplateStorage(Name);
return TemplateName(OT);
}
/// Retrieve the template name that represents a qualified
/// template name such as \c std::vector.
TemplateName
ASTContext::getQualifiedTemplateName(NestedNameSpecifier *NNS,
bool TemplateKeyword,
TemplateDecl *Template) const {
assert(NNS && "Missing nested-name-specifier in qualified template name");
// FIXME: Canonicalization?
llvm::FoldingSetNodeID ID;
QualifiedTemplateName::Profile(ID, NNS, TemplateKeyword, Template);
void *InsertPos = nullptr;
QualifiedTemplateName *QTN =
QualifiedTemplateNames.FindNodeOrInsertPos(ID, InsertPos);
if (!QTN) {
QTN = new (*this, alignof(QualifiedTemplateName))
QualifiedTemplateName(NNS, TemplateKeyword, Template);
QualifiedTemplateNames.InsertNode(QTN, InsertPos);
}
return TemplateName(QTN);
}
/// Retrieve the template name that represents a dependent
/// template name such as \c MetaFun::template apply.
TemplateName
ASTContext::getDependentTemplateName(NestedNameSpecifier *NNS,
const IdentifierInfo *Name) const {
assert((!NNS || NNS->isDependent()) &&
"Nested name specifier must be dependent");
llvm::FoldingSetNodeID ID;
DependentTemplateName::Profile(ID, NNS, Name);
void *InsertPos = nullptr;
DependentTemplateName *QTN =
DependentTemplateNames.FindNodeOrInsertPos(ID, InsertPos);
if (QTN)
return TemplateName(QTN);
NestedNameSpecifier *CanonNNS = getCanonicalNestedNameSpecifier(NNS);
if (CanonNNS == NNS) {
QTN = new (*this, alignof(DependentTemplateName))
DependentTemplateName(NNS, Name);
} else {
TemplateName Canon = getDependentTemplateName(CanonNNS, Name);
QTN = new (*this, alignof(DependentTemplateName))
DependentTemplateName(NNS, Name, Canon);
DependentTemplateName *CheckQTN =
DependentTemplateNames.FindNodeOrInsertPos(ID, InsertPos);
assert(!CheckQTN && "Dependent type name canonicalization broken");
(void)CheckQTN;
}
DependentTemplateNames.InsertNode(QTN, InsertPos);
return TemplateName(QTN);
}
/// Retrieve the template name that represents a dependent
/// template name such as \c MetaFun::template operator+.
TemplateName
ASTContext::getDependentTemplateName(NestedNameSpecifier *NNS,
OverloadedOperatorKind Operator) const {
assert((!NNS || NNS->isDependent()) &&
"Nested name specifier must be dependent");
llvm::FoldingSetNodeID ID;
DependentTemplateName::Profile(ID, NNS, Operator);
void *InsertPos = nullptr;
DependentTemplateName *QTN
= DependentTemplateNames.FindNodeOrInsertPos(ID, InsertPos);
if (QTN)
return TemplateName(QTN);
NestedNameSpecifier *CanonNNS = getCanonicalNestedNameSpecifier(NNS);
if (CanonNNS == NNS) {
QTN = new (*this, alignof(DependentTemplateName))
DependentTemplateName(NNS, Operator);
} else {
TemplateName Canon = getDependentTemplateName(CanonNNS, Operator);
QTN = new (*this, alignof(DependentTemplateName))
DependentTemplateName(NNS, Operator, Canon);
DependentTemplateName *CheckQTN
= DependentTemplateNames.FindNodeOrInsertPos(ID, InsertPos);
assert(!CheckQTN && "Dependent template name canonicalization broken");
(void)CheckQTN;
}
DependentTemplateNames.InsertNode(QTN, InsertPos);
return TemplateName(QTN);
}
TemplateName
ASTContext::getSubstTemplateTemplateParm(TemplateTemplateParmDecl *param,
TemplateName replacement) const {
llvm::FoldingSetNodeID ID;
SubstTemplateTemplateParmStorage::Profile(ID, param, replacement);
void *insertPos = nullptr;
SubstTemplateTemplateParmStorage *subst
= SubstTemplateTemplateParms.FindNodeOrInsertPos(ID, insertPos);
if (!subst) {
subst = new (*this) SubstTemplateTemplateParmStorage(param, replacement);
SubstTemplateTemplateParms.InsertNode(subst, insertPos);
}
return TemplateName(subst);
}
TemplateName
ASTContext::getSubstTemplateTemplateParmPack(TemplateTemplateParmDecl *Param,
const TemplateArgument &ArgPack) const {
auto &Self = const_cast<ASTContext &>(*this);
llvm::FoldingSetNodeID ID;
SubstTemplateTemplateParmPackStorage::Profile(ID, Self, Param, ArgPack);
void *InsertPos = nullptr;
SubstTemplateTemplateParmPackStorage *Subst
= SubstTemplateTemplateParmPacks.FindNodeOrInsertPos(ID, InsertPos);
if (!Subst) {
Subst = new (*this) SubstTemplateTemplateParmPackStorage(Param,
ArgPack.pack_size(),
ArgPack.pack_begin());
SubstTemplateTemplateParmPacks.InsertNode(Subst, InsertPos);
}
return TemplateName(Subst);
}
/// getFromTargetType - Given one of the integer types provided by
/// TargetInfo, produce the corresponding type. The unsigned @p Type
/// is actually a value of type @c TargetInfo::IntType.
CanQualType ASTContext::getFromTargetType(unsigned Type) const {
switch (Type) {
case TargetInfo::NoInt: return {};
case TargetInfo::SignedChar: return SignedCharTy;
case TargetInfo::UnsignedChar: return UnsignedCharTy;
case TargetInfo::SignedShort: return ShortTy;
case TargetInfo::UnsignedShort: return UnsignedShortTy;
case TargetInfo::SignedInt: return IntTy;
case TargetInfo::UnsignedInt: return UnsignedIntTy;
case TargetInfo::SignedLong: return LongTy;
case TargetInfo::UnsignedLong: return UnsignedLongTy;
case TargetInfo::SignedLongLong: return LongLongTy;
case TargetInfo::UnsignedLongLong: return UnsignedLongLongTy;
}
llvm_unreachable("Unhandled TargetInfo::IntType value");
}
//===----------------------------------------------------------------------===//
// Type Predicates.
//===----------------------------------------------------------------------===//
/// getObjCGCAttrKind - Returns one of GCNone, Weak or Strong, indicating the
/// Objective-C garbage collection attribute of the given type.
///
Qualifiers::GC ASTContext::getObjCGCAttrKind(QualType Ty) const {
if (getLangOpts().getGC() == LangOptions::NonGC)
return Qualifiers::GCNone;
assert(getLangOpts().ObjC);
Qualifiers::GC GCAttrs = Ty.getObjCGCAttr();
  // Default behaviour under Objective-C's GC is for ObjC pointers (or
  // pointers to them) to be treated as though they were declared as __strong.
if (GCAttrs == Qualifiers::GCNone) {
if (Ty->isObjCObjectPointerType() || Ty->isBlockPointerType())
return Qualifiers::Strong;
else if (Ty->isPointerType())
return getObjCGCAttrKind(Ty->castAs<PointerType>()->getPointeeType());
} else {
// It's not valid to set GC attributes on anything that isn't a
// pointer.
#ifndef NDEBUG
QualType CT = Ty->getCanonicalTypeInternal();
while (const auto *AT = dyn_cast<ArrayType>(CT))
CT = AT->getElementType();
assert(CT->isAnyPointerType() || CT->isBlockPointerType());
#endif
}
return GCAttrs;
}
//===----------------------------------------------------------------------===//
// Type Compatibility Testing
//===----------------------------------------------------------------------===//
/// areCompatVectorTypes - Return true if the two specified vector types are
/// compatible.
static bool areCompatVectorTypes(const VectorType *LHS,
const VectorType *RHS) {
assert(LHS->isCanonicalUnqualified() && RHS->isCanonicalUnqualified());
return LHS->getElementType() == RHS->getElementType() &&
LHS->getNumElements() == RHS->getNumElements();
}
bool ASTContext::areCompatibleVectorTypes(QualType FirstVec,
QualType SecondVec) {
assert(FirstVec->isVectorType() && "FirstVec should be a vector type");
assert(SecondVec->isVectorType() && "SecondVec should be a vector type");
if (hasSameUnqualifiedType(FirstVec, SecondVec))
return true;
// Treat Neon vector types and most AltiVec vector types as if they are the
// equivalent GCC vector types.
const auto *First = FirstVec->castAs<VectorType>();
const auto *Second = SecondVec->castAs<VectorType>();
if (First->getNumElements() == Second->getNumElements() &&
hasSameType(First->getElementType(), Second->getElementType()) &&
First->getVectorKind() != VectorType::AltiVecPixel &&
First->getVectorKind() != VectorType::AltiVecBool &&
Second->getVectorKind() != VectorType::AltiVecPixel &&
Second->getVectorKind() != VectorType::AltiVecBool)
return true;
return false;
}
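/// hasDirectOwnershipQualifier - Return true if the given type carries an
/// explicit ObjC ownership attribute directly, looking only through parens
/// and attributed sugar (not typedefs or typeof).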
bool ASTContext::hasDirectOwnershipQualifier(QualType Ty) const {
while (true) {
// __strong id
if (const AttributedType *Attr = dyn_cast<AttributedType>(Ty)) {
if (Attr->getAttrKind() == attr::ObjCOwnership)
return true;
Ty = Attr->getModifiedType();
// X *__strong (...)
} else if (const ParenType *Paren = dyn_cast<ParenType>(Ty)) {
Ty = Paren->getInnerType();
// We do not want to look through typedefs, typeof(expr),
// typeof(type), or any other way that the type is somehow
// abstracted.
} else {
return false;
}
}
}
//===----------------------------------------------------------------------===//
// ObjCQualifiedIdTypesAreCompatible - Compatibility testing for qualified id's.
//===----------------------------------------------------------------------===//
/// ProtocolCompatibleWithProtocol - return 'true' if 'lProto' is in the
/// inheritance hierarchy of 'rProto'.
bool
ASTContext::ProtocolCompatibleWithProtocol(ObjCProtocolDecl *lProto,
ObjCProtocolDecl *rProto) const {
if (declaresSameEntity(lProto, rProto))
return true;
for (auto *PI : rProto->protocols())
if (ProtocolCompatibleWithProtocol(lProto, PI))
return true;
return false;
}
/// ObjCQualifiedClassTypesAreCompatible - compare Class<pr,...> and
/// Class<pr1, ...>.
bool ASTContext::ObjCQualifiedClassTypesAreCompatible(
const ObjCObjectPointerType *lhs, const ObjCObjectPointerType *rhs) {
for (auto *lhsProto : lhs->quals()) {
bool match = false;
for (auto *rhsProto : rhs->quals()) {
if (ProtocolCompatibleWithProtocol(lhsProto, rhsProto)) {
match = true;
break;
}
}
if (!match)
return false;
}
return true;
}
/// ObjCQualifiedIdTypesAreCompatible - We know that one of lhs/rhs is an
/// ObjCQualifiedIDType.
bool ASTContext::ObjCQualifiedIdTypesAreCompatible(
const ObjCObjectPointerType *lhs, const ObjCObjectPointerType *rhs,
bool compare) {
// Allow id<P..> and an 'id' in all cases.
if (lhs->isObjCIdType() || rhs->isObjCIdType())
return true;
// Don't allow id<P..> to convert to Class or Class<P..> in either direction.
if (lhs->isObjCClassType() || lhs->isObjCQualifiedClassType() ||
rhs->isObjCClassType() || rhs->isObjCQualifiedClassType())
return false;
if (lhs->isObjCQualifiedIdType()) {
if (rhs->qual_empty()) {
      // If the RHS is an unqualified interface pointer "NSString*",
// make sure we check the class hierarchy.
if (ObjCInterfaceDecl *rhsID = rhs->getInterfaceDecl()) {
for (auto *I : lhs->quals()) {
// when comparing an id<P> on lhs with a static type on rhs,
// see if static class implements all of id's protocols, directly or
// through its super class and categories.
if (!rhsID->ClassImplementsProtocol(I, true))
return false;
}
}
// If there are no qualifiers and no interface, we have an 'id'.
return true;
}
// Both the right and left sides have qualifiers.
for (auto *lhsProto : lhs->quals()) {
bool match = false;
// when comparing an id<P> on lhs with a static type on rhs,
// see if static class implements all of id's protocols, directly or
// through its super class and categories.
for (auto *rhsProto : rhs->quals()) {
if (ProtocolCompatibleWithProtocol(lhsProto, rhsProto) ||
(compare && ProtocolCompatibleWithProtocol(rhsProto, lhsProto))) {
match = true;
break;
}
}
// If the RHS is a qualified interface pointer "NSString<P>*",
// make sure we check the class hierarchy.
if (ObjCInterfaceDecl *rhsID = rhs->getInterfaceDecl()) {
for (auto *I : lhs->quals()) {
// when comparing an id<P> on lhs with a static type on rhs,
// see if static class implements all of id's protocols, directly or
// through its super class and categories.
if (rhsID->ClassImplementsProtocol(I, true)) {
match = true;
break;
}
}
}
if (!match)
return false;
}
return true;
}
assert(rhs->isObjCQualifiedIdType() && "One of the LHS/RHS should be id<x>");
if (lhs->getInterfaceType()) {
    // Both the right and left sides have qualifiers.
for (auto *lhsProto : lhs->quals()) {
bool match = false;
// when comparing an id<P> on rhs with a static type on lhs,
// see if static class implements all of id's protocols, directly or
// through its super class and categories.
// First, lhs protocols in the qualifier list must be found, direct
// or indirect in rhs's qualifier list or it is a mismatch.
for (auto *rhsProto : rhs->quals()) {
if (ProtocolCompatibleWithProtocol(lhsProto, rhsProto) ||
(compare && ProtocolCompatibleWithProtocol(rhsProto, lhsProto))) {
match = true;
break;
}
}
if (!match)
return false;
}
    // The static class's protocols, or its superclass or category protocols,
    // must also be found, directly or indirectly, in rhs's qualifier list or
    // it is a mismatch.
if (ObjCInterfaceDecl *lhsID = lhs->getInterfaceDecl()) {
llvm::SmallPtrSet<ObjCProtocolDecl *, 8> LHSInheritedProtocols;
CollectInheritedProtocols(lhsID, LHSInheritedProtocols);
      // This is rather dubious but matches gcc's behavior. If lhs has
      // no type qualifiers and its class has no static protocols, assume
      // that it is a mismatch.
if (LHSInheritedProtocols.empty() && lhs->qual_empty())
return false;
for (auto *lhsProto : LHSInheritedProtocols) {
bool match = false;
for (auto *rhsProto : rhs->quals()) {
if (ProtocolCompatibleWithProtocol(lhsProto, rhsProto) ||
(compare && ProtocolCompatibleWithProtocol(rhsProto, lhsProto))) {
match = true;
break;
}
}
if (!match)
return false;
}
}
return true;
}
return false;
}
/// canAssignObjCInterfaces - Return true if the two interface types are
/// compatible for assignment from RHS to LHS. This handles validation of any
/// protocol qualifiers on the LHS or RHS.
bool ASTContext::canAssignObjCInterfaces(const ObjCObjectPointerType *LHSOPT,
const ObjCObjectPointerType *RHSOPT) {
const ObjCObjectType* LHS = LHSOPT->getObjectType();
const ObjCObjectType* RHS = RHSOPT->getObjectType();
// If either type represents the built-in 'id' type, return true.
if (LHS->isObjCUnqualifiedId() || RHS->isObjCUnqualifiedId())
return true;
// Function object that propagates a successful result or handles
// __kindof types.
auto finish = [&](bool succeeded) -> bool {
if (succeeded)
return true;
if (!RHS->isKindOfType())
return false;
// Strip off __kindof and protocol qualifiers, then check whether
// we can assign the other way.
return canAssignObjCInterfaces(RHSOPT->stripObjCKindOfTypeAndQuals(*this),
LHSOPT->stripObjCKindOfTypeAndQuals(*this));
};
// Casts from or to id<P> are allowed when the other side has compatible
// protocols.
if (LHS->isObjCQualifiedId() || RHS->isObjCQualifiedId()) {
return finish(ObjCQualifiedIdTypesAreCompatible(LHSOPT, RHSOPT, false));
}
// Verify protocol compatibility for casts from Class<P1> to Class<P2>.
if (LHS->isObjCQualifiedClass() && RHS->isObjCQualifiedClass()) {
return finish(ObjCQualifiedClassTypesAreCompatible(LHSOPT, RHSOPT));
}
// Casts from Class to Class<Foo>, or vice-versa, are allowed.
if (LHS->isObjCClass() && RHS->isObjCClass()) {
return true;
}
// If we have 2 user-defined types, fall into that path.
if (LHS->getInterface() && RHS->getInterface()) {
return finish(canAssignObjCInterfaces(LHS, RHS));
}
return false;
}
/// canAssignObjCInterfacesInBlockPointer - This routine is specifically written
/// for providing type-safety for Objective-C pointers used to pass/return
/// arguments in block literals. When passed as arguments, passing 'A*' where
/// 'id' is expected is not OK. Passing 'Sub *' where 'Super *' is expected is
/// not OK. For the return type, the opposite is not OK.
bool ASTContext::canAssignObjCInterfacesInBlockPointer(
const ObjCObjectPointerType *LHSOPT,
const ObjCObjectPointerType *RHSOPT,
bool BlockReturnType) {
// Function object that propagates a successful result or handles
// __kindof types.
auto finish = [&](bool succeeded) -> bool {
if (succeeded)
return true;
const ObjCObjectPointerType *Expected = BlockReturnType ? RHSOPT : LHSOPT;
if (!Expected->isKindOfType())
return false;
// Strip off __kindof and protocol qualifiers, then check whether
// we can assign the other way.
return canAssignObjCInterfacesInBlockPointer(
RHSOPT->stripObjCKindOfTypeAndQuals(*this),
LHSOPT->stripObjCKindOfTypeAndQuals(*this),
BlockReturnType);
};
if (RHSOPT->isObjCBuiltinType() || LHSOPT->isObjCIdType())
return true;
if (LHSOPT->isObjCBuiltinType()) {
return finish(RHSOPT->isObjCBuiltinType() ||
RHSOPT->isObjCQualifiedIdType());
}
if (LHSOPT->isObjCQualifiedIdType() || RHSOPT->isObjCQualifiedIdType())
return finish(ObjCQualifiedIdTypesAreCompatible(
(BlockReturnType ? LHSOPT : RHSOPT),
(BlockReturnType ? RHSOPT : LHSOPT), false));
const ObjCInterfaceType* LHS = LHSOPT->getInterfaceType();
const ObjCInterfaceType* RHS = RHSOPT->getInterfaceType();
if (LHS && RHS) { // We have 2 user-defined types.
if (LHS != RHS) {
if (LHS->getDecl()->isSuperClassOf(RHS->getDecl()))
return finish(BlockReturnType);
if (RHS->getDecl()->isSuperClassOf(LHS->getDecl()))
return finish(!BlockReturnType);
}
else
return true;
}
return false;
}
/// Comparison routine for Objective-C protocols to be used with
/// llvm::array_pod_sort.
static int compareObjCProtocolsByName(ObjCProtocolDecl * const *lhs,
ObjCProtocolDecl * const *rhs) {
return (*lhs)->getName().compare((*rhs)->getName());
}
/// getIntersectionOfProtocols - This routine finds the intersection of the
/// sets of protocols inherited from two distinct Objective-C pointer objects
/// with the given common base.
/// It is used to build the composite qualifier list of the composite type of
/// the conditional expression involving two Objective-C pointer objects.
static
void getIntersectionOfProtocols(ASTContext &Context,
const ObjCInterfaceDecl *CommonBase,
const ObjCObjectPointerType *LHSOPT,
const ObjCObjectPointerType *RHSOPT,
SmallVectorImpl<ObjCProtocolDecl *> &IntersectionSet) {
const ObjCObjectType* LHS = LHSOPT->getObjectType();
const ObjCObjectType* RHS = RHSOPT->getObjectType();
assert(LHS->getInterface() && "LHS must have an interface base");
assert(RHS->getInterface() && "RHS must have an interface base");
// Add all of the protocols for the LHS.
llvm::SmallPtrSet<ObjCProtocolDecl *, 8> LHSProtocolSet;
// Start with the protocol qualifiers.
for (auto proto : LHS->quals()) {
Context.CollectInheritedProtocols(proto, LHSProtocolSet);
}
// Also add the protocols associated with the LHS interface.
Context.CollectInheritedProtocols(LHS->getInterface(), LHSProtocolSet);
// Add all of the protocols for the RHS.
llvm::SmallPtrSet<ObjCProtocolDecl *, 8> RHSProtocolSet;
// Start with the protocol qualifiers.
for (auto proto : RHS->quals()) {
Context.CollectInheritedProtocols(proto, RHSProtocolSet);
}
// Also add the protocols associated with the RHS interface.
Context.CollectInheritedProtocols(RHS->getInterface(), RHSProtocolSet);
// Compute the intersection of the collected protocol sets.
for (auto proto : LHSProtocolSet) {
if (RHSProtocolSet.count(proto))
IntersectionSet.push_back(proto);
}
// Compute the set of protocols that is implied by either the common type or
// the protocols within the intersection.
llvm::SmallPtrSet<ObjCProtocolDecl *, 8> ImpliedProtocols;
Context.CollectInheritedProtocols(CommonBase, ImpliedProtocols);
// Remove any implied protocols from the list of inherited protocols.
if (!ImpliedProtocols.empty()) {
IntersectionSet.erase(
std::remove_if(IntersectionSet.begin(),
IntersectionSet.end(),
[&](ObjCProtocolDecl *proto) -> bool {
return ImpliedProtocols.count(proto) > 0;
}),
IntersectionSet.end());
}
// Sort the remaining protocols by name.
llvm::array_pod_sort(IntersectionSet.begin(), IntersectionSet.end(),
compareObjCProtocolsByName);
}
/// Determine whether the first type is a subtype of the second.
static bool canAssignObjCObjectTypes(ASTContext &ctx, QualType lhs,
QualType rhs) {
// Common case: two object pointers.
const auto *lhsOPT = lhs->getAs<ObjCObjectPointerType>();
const auto *rhsOPT = rhs->getAs<ObjCObjectPointerType>();
if (lhsOPT && rhsOPT)
return ctx.canAssignObjCInterfaces(lhsOPT, rhsOPT);
// Two block pointers.
const auto *lhsBlock = lhs->getAs<BlockPointerType>();
const auto *rhsBlock = rhs->getAs<BlockPointerType>();
if (lhsBlock && rhsBlock)
return ctx.typesAreBlockPointerCompatible(lhs, rhs);
// If either is an unqualified 'id' and the other is a block, it's
// acceptable.
if ((lhsOPT && lhsOPT->isObjCIdType() && rhsBlock) ||
(rhsOPT && rhsOPT->isObjCIdType() && lhsBlock))
return true;
return false;
}
// Check that the given Objective-C type argument lists are equivalent.
static bool sameObjCTypeArgs(ASTContext &ctx,
const ObjCInterfaceDecl *iface,
ArrayRef<QualType> lhsArgs,
ArrayRef<QualType> rhsArgs,
bool stripKindOf) {
if (lhsArgs.size() != rhsArgs.size())
return false;
ObjCTypeParamList *typeParams = iface->getTypeParamList();
for (unsigned i = 0, n = lhsArgs.size(); i != n; ++i) {
if (ctx.hasSameType(lhsArgs[i], rhsArgs[i]))
continue;
switch (typeParams->begin()[i]->getVariance()) {
case ObjCTypeParamVariance::Invariant:
if (!stripKindOf ||
!ctx.hasSameType(lhsArgs[i].stripObjCKindOfType(ctx),
rhsArgs[i].stripObjCKindOfType(ctx))) {
return false;
}
break;
case ObjCTypeParamVariance::Covariant:
if (!canAssignObjCObjectTypes(ctx, lhsArgs[i], rhsArgs[i]))
return false;
break;
case ObjCTypeParamVariance::Contravariant:
if (!canAssignObjCObjectTypes(ctx, rhsArgs[i], lhsArgs[i]))
return false;
break;
}
}
return true;
}
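/// areCommonBaseCompatible - Compute a common Objective-C pointer type for the
/// two given object pointer types by walking each side's class hierarchy,
/// keeping compatible type arguments, the intersection of their protocols, and
/// __kindof when present on either side. Returns a null type if there is no
/// common base class.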
QualType ASTContext::areCommonBaseCompatible(
const ObjCObjectPointerType *Lptr,
const ObjCObjectPointerType *Rptr) {
const ObjCObjectType *LHS = Lptr->getObjectType();
const ObjCObjectType *RHS = Rptr->getObjectType();
const ObjCInterfaceDecl* LDecl = LHS->getInterface();
const ObjCInterfaceDecl* RDecl = RHS->getInterface();
if (!LDecl || !RDecl)
return {};
// When either LHS or RHS is a kindof type, we should return a kindof type.
// For example, for common base of kindof(ASub1) and kindof(ASub2), we return
// kindof(A).
bool anyKindOf = LHS->isKindOfType() || RHS->isKindOfType();
// Follow the left-hand side up the class hierarchy until we either hit a
// root or find the RHS. Record the ancestors in case we don't find it.
llvm::SmallDenseMap<const ObjCInterfaceDecl *, const ObjCObjectType *, 4>
LHSAncestors;
while (true) {
// Record this ancestor. We'll need this if the common type isn't in the
// path from the LHS to the root.
LHSAncestors[LHS->getInterface()->getCanonicalDecl()] = LHS;
if (declaresSameEntity(LHS->getInterface(), RDecl)) {
// Get the type arguments.
ArrayRef<QualType> LHSTypeArgs = LHS->getTypeArgsAsWritten();
bool anyChanges = false;
if (LHS->isSpecialized() && RHS->isSpecialized()) {
// Both have type arguments, compare them.
if (!sameObjCTypeArgs(*this, LHS->getInterface(),
LHS->getTypeArgs(), RHS->getTypeArgs(),
/*stripKindOf=*/true))
return {};
} else if (LHS->isSpecialized() != RHS->isSpecialized()) {
// If only one has type arguments, the result will not have type
// arguments.
LHSTypeArgs = {};
anyChanges = true;
}
// Compute the intersection of protocols.
SmallVector<ObjCProtocolDecl *, 8> Protocols;
getIntersectionOfProtocols(*this, LHS->getInterface(), Lptr, Rptr,
Protocols);
if (!Protocols.empty())
anyChanges = true;
// If anything in the LHS will have changed, build a new result type.
// If we need to return a kindof type but LHS is not a kindof type, we
// build a new result type.
if (anyChanges || LHS->isKindOfType() != anyKindOf) {
QualType Result = getObjCInterfaceType(LHS->getInterface());
Result = getObjCObjectType(Result, LHSTypeArgs, Protocols,
anyKindOf || LHS->isKindOfType());
return getObjCObjectPointerType(Result);
}
return getObjCObjectPointerType(QualType(LHS, 0));
}
// Find the superclass.
QualType LHSSuperType = LHS->getSuperClassType();
if (LHSSuperType.isNull())
break;
LHS = LHSSuperType->castAs<ObjCObjectType>();
}
// We didn't find anything by following the LHS to its root; now check
// the RHS against the cached set of ancestors.
while (true) {
auto KnownLHS = LHSAncestors.find(RHS->getInterface()->getCanonicalDecl());
if (KnownLHS != LHSAncestors.end()) {
LHS = KnownLHS->second;
// Get the type arguments.
ArrayRef<QualType> RHSTypeArgs = RHS->getTypeArgsAsWritten();
bool anyChanges = false;
if (LHS->isSpecialized() && RHS->isSpecialized()) {
// Both have type arguments, compare them.
if (!sameObjCTypeArgs(*this, LHS->getInterface(),
LHS->getTypeArgs(), RHS->getTypeArgs(),
/*stripKindOf=*/true))
return {};
} else if (LHS->isSpecialized() != RHS->isSpecialized()) {
// If only one has type arguments, the result will not have type
// arguments.
RHSTypeArgs = {};
anyChanges = true;
}
// Compute the intersection of protocols.
SmallVector<ObjCProtocolDecl *, 8> Protocols;
getIntersectionOfProtocols(*this, RHS->getInterface(), Lptr, Rptr,
Protocols);
if (!Protocols.empty())
anyChanges = true;
// If we need to return a kindof type but RHS is not a kindof type, we
// build a new result type.
if (anyChanges || RHS->isKindOfType() != anyKindOf) {
QualType Result = getObjCInterfaceType(RHS->getInterface());
Result = getObjCObjectType(Result, RHSTypeArgs, Protocols,
anyKindOf || RHS->isKindOfType());
return getObjCObjectPointerType(Result);
}
return getObjCObjectPointerType(QualType(RHS, 0));
}
// Find the superclass of the RHS.
QualType RHSSuperType = RHS->getSuperClassType();
if (RHSSuperType.isNull())
break;
RHS = RHSSuperType->castAs<ObjCObjectType>();
}
return {};
}
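/// canAssignObjCInterfaces - Return true if an object of the RHS interface
/// type can be assigned to an lvalue of the LHS interface type, checking the
/// class hierarchy, protocol qualifiers and (for specialized types) type
/// arguments.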
bool ASTContext::canAssignObjCInterfaces(const ObjCObjectType *LHS,
const ObjCObjectType *RHS) {
assert(LHS->getInterface() && "LHS is not an interface type");
assert(RHS->getInterface() && "RHS is not an interface type");
// Verify that the base decls are compatible: the RHS must be a subclass of
// the LHS.
ObjCInterfaceDecl *LHSInterface = LHS->getInterface();
bool IsSuperClass = LHSInterface->isSuperClassOf(RHS->getInterface());
if (!IsSuperClass)
return false;
// If the LHS has protocol qualifiers, determine whether all of them are
// satisfied by the RHS (i.e., the RHS has a superset of the protocols in the
// LHS).
if (LHS->getNumProtocols() > 0) {
    // OK if conversion of LHS to SuperClass results in narrowing of types;
    // i.e., SuperClass may implement at least one of the protocols
    // in LHS's protocol list. For example, SuperObj<P1> = lhs<P1,P2> is OK,
    // but SuperObj<P1,P2,P3> = lhs<P1,P2> is not.
llvm::SmallPtrSet<ObjCProtocolDecl *, 8> SuperClassInheritedProtocols;
CollectInheritedProtocols(RHS->getInterface(), SuperClassInheritedProtocols);
    // Also, if RHS has explicit qualifiers, include them when comparing with
    // LHS's qualifiers.
for (auto *RHSPI : RHS->quals())
CollectInheritedProtocols(RHSPI, SuperClassInheritedProtocols);
    // If there are no protocols associated with RHS, it is not a match.
if (SuperClassInheritedProtocols.empty())
return false;
for (const auto *LHSProto : LHS->quals()) {
bool SuperImplementsProtocol = false;
for (auto *SuperClassProto : SuperClassInheritedProtocols)
if (SuperClassProto->lookupProtocolNamed(LHSProto->getIdentifier())) {
SuperImplementsProtocol = true;
break;
}
if (!SuperImplementsProtocol)
return false;
}
}
// If the LHS is specialized, we may need to check type arguments.
if (LHS->isSpecialized()) {
// Follow the superclass chain until we've matched the LHS class in the
// hierarchy. This substitutes type arguments through.
const ObjCObjectType *RHSSuper = RHS;
while (!declaresSameEntity(RHSSuper->getInterface(), LHSInterface))
RHSSuper = RHSSuper->getSuperClassType()->castAs<ObjCObjectType>();
    // If the RHS is specialized, compare type arguments.
if (RHSSuper->isSpecialized() &&
!sameObjCTypeArgs(*this, LHS->getInterface(),
LHS->getTypeArgs(), RHSSuper->getTypeArgs(),
/*stripKindOf=*/true)) {
return false;
}
}
return true;
}
bool ASTContext::areComparableObjCPointerTypes(QualType LHS, QualType RHS) {
// get the "pointed to" types
const auto *LHSOPT = LHS->getAs<ObjCObjectPointerType>();
const auto *RHSOPT = RHS->getAs<ObjCObjectPointerType>();
if (!LHSOPT || !RHSOPT)
return false;
return canAssignObjCInterfaces(LHSOPT, RHSOPT) ||
canAssignObjCInterfaces(RHSOPT, LHSOPT);
}
bool ASTContext::canBindObjCObjectType(QualType To, QualType From) {
return canAssignObjCInterfaces(
getObjCObjectPointerType(To)->getAs<ObjCObjectPointerType>(),
getObjCObjectPointerType(From)->getAs<ObjCObjectPointerType>());
}
/// typesAreCompatible - C99 6.7.3p9: For two qualified types to be compatible,
/// both shall have the identically qualified version of a compatible type.
/// C99 6.2.7p1: Two types have compatible types if their types are the
/// same. See 6.7.[2,3,5] for additional rules.
bool ASTContext::typesAreCompatible(QualType LHS, QualType RHS,
bool CompareUnqualified) {
if (getLangOpts().CPlusPlus)
return hasSameType(LHS, RHS);
return !mergeTypes(LHS, RHS, false, CompareUnqualified).isNull();
}
bool ASTContext::propertyTypesAreCompatible(QualType LHS, QualType RHS) {
return typesAreCompatible(LHS, RHS);
}
bool ASTContext::typesAreBlockPointerCompatible(QualType LHS, QualType RHS) {
return !mergeTypes(LHS, RHS, true).isNull();
}
/// mergeTransparentUnionType - If T is a transparent union type and a member
/// of T is compatible with SubType, return the merged type; otherwise return
/// QualType().
QualType ASTContext::mergeTransparentUnionType(QualType T, QualType SubType,
bool OfBlockPointer,
bool Unqualified) {
if (const RecordType *UT = T->getAsUnionType()) {
RecordDecl *UD = UT->getDecl();
if (UD->hasAttr<TransparentUnionAttr>()) {
for (const auto *I : UD->fields()) {
QualType ET = I->getType().getUnqualifiedType();
QualType MT = mergeTypes(ET, SubType, OfBlockPointer, Unqualified);
if (!MT.isNull())
return MT;
}
}
}
return {};
}
/// mergeFunctionParameterTypes - Merge two types which appear as function
/// parameter types.
QualType ASTContext::mergeFunctionParameterTypes(QualType lhs, QualType rhs,
bool OfBlockPointer,
bool Unqualified) {
  // GNU extension: two types are compatible if they appear as a function
  // argument, one of the types is a transparent union type, and the other
  // type is compatible with a union member.
QualType lmerge = mergeTransparentUnionType(lhs, rhs, OfBlockPointer,
Unqualified);
if (!lmerge.isNull())
return lmerge;
QualType rmerge = mergeTransparentUnionType(rhs, lhs, OfBlockPointer,
Unqualified);
if (!rmerge.isNull())
return rmerge;
return mergeTypes(lhs, rhs, OfBlockPointer, Unqualified);
}
QualType ASTContext::mergeFunctionTypes(QualType lhs, QualType rhs,
bool OfBlockPointer,
bool Unqualified) {
const auto *lbase = lhs->castAs<FunctionType>();
const auto *rbase = rhs->castAs<FunctionType>();
const auto *lproto = dyn_cast<FunctionProtoType>(lbase);
const auto *rproto = dyn_cast<FunctionProtoType>(rbase);
bool allLTypes = true;
bool allRTypes = true;
// Check return type
QualType retType;
if (OfBlockPointer) {
QualType RHS = rbase->getReturnType();
QualType LHS = lbase->getReturnType();
bool UnqualifiedResult = Unqualified;
if (!UnqualifiedResult)
UnqualifiedResult = (!RHS.hasQualifiers() && LHS.hasQualifiers());
retType = mergeTypes(LHS, RHS, true, UnqualifiedResult, true);
}
else
retType = mergeTypes(lbase->getReturnType(), rbase->getReturnType(), false,
Unqualified);
if (retType.isNull())
return {};
if (Unqualified)
retType = retType.getUnqualifiedType();
CanQualType LRetType = getCanonicalType(lbase->getReturnType());
CanQualType RRetType = getCanonicalType(rbase->getReturnType());
if (Unqualified) {
LRetType = LRetType.getUnqualifiedType();
RRetType = RRetType.getUnqualifiedType();
}
if (getCanonicalType(retType) != LRetType)
allLTypes = false;
if (getCanonicalType(retType) != RRetType)
allRTypes = false;
// FIXME: double check this
// FIXME: should we error if lbase->getRegParmAttr() != 0 &&
// rbase->getRegParmAttr() != 0 &&
// lbase->getRegParmAttr() != rbase->getRegParmAttr()?
FunctionType::ExtInfo lbaseInfo = lbase->getExtInfo();
FunctionType::ExtInfo rbaseInfo = rbase->getExtInfo();
// Compatible functions must have compatible calling conventions
if (lbaseInfo.getCC() != rbaseInfo.getCC())
return {};
// Regparm is part of the calling convention.
if (lbaseInfo.getHasRegParm() != rbaseInfo.getHasRegParm())
return {};
if (lbaseInfo.getRegParm() != rbaseInfo.getRegParm())
return {};
if (lbaseInfo.getProducesResult() != rbaseInfo.getProducesResult())
return {};
if (lbaseInfo.getNoCallerSavedRegs() != rbaseInfo.getNoCallerSavedRegs())
return {};
if (lbaseInfo.getNoCfCheck() != rbaseInfo.getNoCfCheck())
return {};
// FIXME: some uses, e.g. conditional exprs, really want this to be 'both'.
bool NoReturn = lbaseInfo.getNoReturn() || rbaseInfo.getNoReturn();
if (lbaseInfo.getNoReturn() != NoReturn)
allLTypes = false;
if (rbaseInfo.getNoReturn() != NoReturn)
allRTypes = false;
FunctionType::ExtInfo einfo = lbaseInfo.withNoReturn(NoReturn);
if (lproto && rproto) { // two C99 style function prototypes
assert(!lproto->hasExceptionSpec() && !rproto->hasExceptionSpec() &&
"C++ shouldn't be here");
// Compatible functions must have the same number of parameters
if (lproto->getNumParams() != rproto->getNumParams())
return {};
// Variadic and non-variadic functions aren't compatible
if (lproto->isVariadic() != rproto->isVariadic())
return {};
if (lproto->getMethodQuals() != rproto->getMethodQuals())
return {};
SmallVector<FunctionProtoType::ExtParameterInfo, 4> newParamInfos;
bool canUseLeft, canUseRight;
if (!mergeExtParameterInfo(lproto, rproto, canUseLeft, canUseRight,
newParamInfos))
return {};
if (!canUseLeft)
allLTypes = false;
if (!canUseRight)
allRTypes = false;
// Check parameter type compatibility
SmallVector<QualType, 10> types;
for (unsigned i = 0, n = lproto->getNumParams(); i < n; i++) {
QualType lParamType = lproto->getParamType(i).getUnqualifiedType();
QualType rParamType = rproto->getParamType(i).getUnqualifiedType();
QualType paramType = mergeFunctionParameterTypes(
lParamType, rParamType, OfBlockPointer, Unqualified);
if (paramType.isNull())
return {};
if (Unqualified)
paramType = paramType.getUnqualifiedType();
types.push_back(paramType);
if (Unqualified) {
lParamType = lParamType.getUnqualifiedType();
rParamType = rParamType.getUnqualifiedType();
}
if (getCanonicalType(paramType) != getCanonicalType(lParamType))
allLTypes = false;
if (getCanonicalType(paramType) != getCanonicalType(rParamType))
allRTypes = false;
}
if (allLTypes) return lhs;
if (allRTypes) return rhs;
FunctionProtoType::ExtProtoInfo EPI = lproto->getExtProtoInfo();
EPI.ExtInfo = einfo;
EPI.ExtParameterInfos =
newParamInfos.empty() ? nullptr : newParamInfos.data();
return getFunctionType(retType, types, EPI);
}
if (lproto) allRTypes = false;
if (rproto) allLTypes = false;
const FunctionProtoType *proto = lproto ? lproto : rproto;
if (proto) {
assert(!proto->hasExceptionSpec() && "C++ shouldn't be here");
if (proto->isVariadic())
return {};
// Check that the types are compatible with the types that
// would result from default argument promotions (C99 6.7.5.3p15).
// The only types actually affected are promotable integer
// types and floats, which would be passed as a different
// type depending on whether the prototype is visible.
for (unsigned i = 0, n = proto->getNumParams(); i < n; ++i) {
QualType paramTy = proto->getParamType(i);
// Look at the converted type of enum types, since that is the type used
// to pass enum values.
if (const auto *Enum = paramTy->getAs<EnumType>()) {
paramTy = Enum->getDecl()->getIntegerType();
if (paramTy.isNull())
return {};
}
if (paramTy->isPromotableIntegerType() ||
getCanonicalType(paramTy).getUnqualifiedType() == FloatTy)
return {};
}
if (allLTypes) return lhs;
if (allRTypes) return rhs;
FunctionProtoType::ExtProtoInfo EPI = proto->getExtProtoInfo();
EPI.ExtInfo = einfo;
return getFunctionType(retType, proto->getParamTypes(), EPI);
}
if (allLTypes) return lhs;
if (allRTypes) return rhs;
return getFunctionNoProtoType(retType, einfo);
}
/// Given that we have an enum type and a non-enum type, try to merge them.
static QualType mergeEnumWithInteger(ASTContext &Context, const EnumType *ET,
QualType other, bool isBlockReturnType) {
// C99 6.7.2.2p4: Each enumerated type shall be compatible with char,
// a signed integer type, or an unsigned integer type.
// Compatibility is based on the underlying type, not the promotion
// type.
QualType underlyingType = ET->getDecl()->getIntegerType();
if (underlyingType.isNull())
return {};
if (Context.hasSameType(underlyingType, other))
return other;
// In block return types, we're more permissive and accept any
// integral type of the same size.
if (isBlockReturnType && other->isIntegerType() &&
Context.getTypeSize(underlyingType) == Context.getTypeSize(other))
return other;
return {};
}
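/// mergeTypes - Merge the two given types into a single composite type if they
/// are compatible under the C rules (C99 6.2.7); returns a null QualType if
/// the types cannot be merged. When OfBlockPointer is set, block-pointer
/// specific rules are applied, and Unqualified requests the unqualified form
/// of the merged type.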
QualType ASTContext::mergeTypes(QualType LHS, QualType RHS,
bool OfBlockPointer,
bool Unqualified, bool BlockReturnType) {
// C++ [expr]: If an expression initially has the type "reference to T", the
// type is adjusted to "T" prior to any further analysis, the expression
// designates the object or function denoted by the reference, and the
// expression is an lvalue unless the reference is an rvalue reference and
// the expression is a function call (possibly inside parentheses).
assert(!LHS->getAs<ReferenceType>() && "LHS is a reference type?");
assert(!RHS->getAs<ReferenceType>() && "RHS is a reference type?");
if (Unqualified) {
LHS = LHS.getUnqualifiedType();
RHS = RHS.getUnqualifiedType();
}
QualType LHSCan = getCanonicalType(LHS),
RHSCan = getCanonicalType(RHS);
// If two types are identical, they are compatible.
if (LHSCan == RHSCan)
return LHS;
// If the qualifiers are different, the types aren't compatible... mostly.
Qualifiers LQuals = LHSCan.getLocalQualifiers();
Qualifiers RQuals = RHSCan.getLocalQualifiers();
if (LQuals != RQuals) {
// If any of these qualifiers are different, we have a type
// mismatch.
if (LQuals.getCVRQualifiers() != RQuals.getCVRQualifiers() ||
LQuals.getAddressSpace() != RQuals.getAddressSpace() ||
LQuals.getObjCLifetime() != RQuals.getObjCLifetime() ||
LQuals.hasUnaligned() != RQuals.hasUnaligned())
return {};
// Exactly one GC qualifier difference is allowed: __strong is
// okay if the other type has no GC qualifier but is an Objective
// C object pointer (i.e. implicitly strong by default). We fix
// this by pretending that the unqualified type was actually
// qualified __strong.
Qualifiers::GC GC_L = LQuals.getObjCGCAttr();
Qualifiers::GC GC_R = RQuals.getObjCGCAttr();
assert((GC_L != GC_R) && "unequal qualifier sets had only equal elements");
if (GC_L == Qualifiers::Weak || GC_R == Qualifiers::Weak)
return {};
if (GC_L == Qualifiers::Strong && RHSCan->isObjCObjectPointerType()) {
return mergeTypes(LHS, getObjCGCQualType(RHS, Qualifiers::Strong));
}
if (GC_R == Qualifiers::Strong && LHSCan->isObjCObjectPointerType()) {
return mergeTypes(getObjCGCQualType(LHS, Qualifiers::Strong), RHS);
}
return {};
}
// Okay, qualifiers are equal.
Type::TypeClass LHSClass = LHSCan->getTypeClass();
Type::TypeClass RHSClass = RHSCan->getTypeClass();
// We want to consider the two function types to be the same for these
// comparisons, just force one to the other.
if (LHSClass == Type::FunctionProto) LHSClass = Type::FunctionNoProto;
if (RHSClass == Type::FunctionProto) RHSClass = Type::FunctionNoProto;
// Same as above for arrays
if (LHSClass == Type::VariableArray || LHSClass == Type::IncompleteArray)
LHSClass = Type::ConstantArray;
if (RHSClass == Type::VariableArray || RHSClass == Type::IncompleteArray)
RHSClass = Type::ConstantArray;
// ObjCInterfaces are just specialized ObjCObjects.
if (LHSClass == Type::ObjCInterface) LHSClass = Type::ObjCObject;
if (RHSClass == Type::ObjCInterface) RHSClass = Type::ObjCObject;
// Canonicalize ExtVector -> Vector.
if (LHSClass == Type::ExtVector) LHSClass = Type::Vector;
if (RHSClass == Type::ExtVector) RHSClass = Type::Vector;
// If the canonical type classes don't match.
if (LHSClass != RHSClass) {
// Note that we only have special rules for turning block enum
// returns into block int returns, not vice-versa.
if (const auto *ETy = LHS->getAs<EnumType>()) {
return mergeEnumWithInteger(*this, ETy, RHS, false);
}
if (const EnumType* ETy = RHS->getAs<EnumType>()) {
return mergeEnumWithInteger(*this, ETy, LHS, BlockReturnType);
}
    // Allow a block pointer type to match an 'id' type.
if (OfBlockPointer && !BlockReturnType) {
if (LHS->isObjCIdType() && RHS->isBlockPointerType())
return LHS;
if (RHS->isObjCIdType() && LHS->isBlockPointerType())
return RHS;
}
return {};
}
// The canonical type classes match.
switch (LHSClass) {
#define TYPE(Class, Base)
#define ABSTRACT_TYPE(Class, Base)
#define NON_CANONICAL_UNLESS_DEPENDENT_TYPE(Class, Base) case Type::Class:
#define NON_CANONICAL_TYPE(Class, Base) case Type::Class:
#define DEPENDENT_TYPE(Class, Base) case Type::Class:
#include "clang/AST/TypeNodes.inc"
llvm_unreachable("Non-canonical and dependent types shouldn't get here");
case Type::Auto:
case Type::DeducedTemplateSpecialization:
case Type::LValueReference:
case Type::RValueReference:
case Type::MemberPointer:
llvm_unreachable("C++ should never be in mergeTypes");
case Type::ObjCInterface:
case Type::IncompleteArray:
case Type::VariableArray:
case Type::FunctionProto:
case Type::ExtVector:
llvm_unreachable("Types are eliminated above");
case Type::Pointer:
{
// Merge two pointer types, while trying to preserve typedef info
QualType LHSPointee = LHS->castAs<PointerType>()->getPointeeType();
QualType RHSPointee = RHS->castAs<PointerType>()->getPointeeType();
if (Unqualified) {
LHSPointee = LHSPointee.getUnqualifiedType();
RHSPointee = RHSPointee.getUnqualifiedType();
}
QualType ResultType = mergeTypes(LHSPointee, RHSPointee, false,
Unqualified);
if (ResultType.isNull())
return {};
if (getCanonicalType(LHSPointee) == getCanonicalType(ResultType))
return LHS;
if (getCanonicalType(RHSPointee) == getCanonicalType(ResultType))
return RHS;
return getPointerType(ResultType);
}
case Type::BlockPointer:
{
// Merge two block pointer types, while trying to preserve typedef info
QualType LHSPointee = LHS->castAs<BlockPointerType>()->getPointeeType();
QualType RHSPointee = RHS->castAs<BlockPointerType>()->getPointeeType();
if (Unqualified) {
LHSPointee = LHSPointee.getUnqualifiedType();
RHSPointee = RHSPointee.getUnqualifiedType();
}
if (getLangOpts().OpenCL) {
Qualifiers LHSPteeQual = LHSPointee.getQualifiers();
Qualifiers RHSPteeQual = RHSPointee.getQualifiers();
// Blocks can't be an expression in a ternary operator (OpenCL v2.0
// 6.12.5) thus the following check is asymmetric.
if (!LHSPteeQual.isAddressSpaceSupersetOf(RHSPteeQual))
return {};
LHSPteeQual.removeAddressSpace();
RHSPteeQual.removeAddressSpace();
LHSPointee =
QualType(LHSPointee.getTypePtr(), LHSPteeQual.getAsOpaqueValue());
RHSPointee =
QualType(RHSPointee.getTypePtr(), RHSPteeQual.getAsOpaqueValue());
}
QualType ResultType = mergeTypes(LHSPointee, RHSPointee, OfBlockPointer,
Unqualified);
if (ResultType.isNull())
return {};
if (getCanonicalType(LHSPointee) == getCanonicalType(ResultType))
return LHS;
if (getCanonicalType(RHSPointee) == getCanonicalType(ResultType))
return RHS;
return getBlockPointerType(ResultType);
}
case Type::Atomic:
{
    // Merge two atomic types, while trying to preserve typedef info
QualType LHSValue = LHS->castAs<AtomicType>()->getValueType();
QualType RHSValue = RHS->castAs<AtomicType>()->getValueType();
if (Unqualified) {
LHSValue = LHSValue.getUnqualifiedType();
RHSValue = RHSValue.getUnqualifiedType();
}
QualType ResultType = mergeTypes(LHSValue, RHSValue, false,
Unqualified);
if (ResultType.isNull())
return {};
if (getCanonicalType(LHSValue) == getCanonicalType(ResultType))
return LHS;
if (getCanonicalType(RHSValue) == getCanonicalType(ResultType))
return RHS;
return getAtomicType(ResultType);
}
case Type::ConstantArray:
{
const ConstantArrayType* LCAT = getAsConstantArrayType(LHS);
const ConstantArrayType* RCAT = getAsConstantArrayType(RHS);
if (LCAT && RCAT && RCAT->getSize() != LCAT->getSize())
return {};
QualType LHSElem = getAsArrayType(LHS)->getElementType();
QualType RHSElem = getAsArrayType(RHS)->getElementType();
if (Unqualified) {
LHSElem = LHSElem.getUnqualifiedType();
RHSElem = RHSElem.getUnqualifiedType();
}
QualType ResultType = mergeTypes(LHSElem, RHSElem, false, Unqualified);
if (ResultType.isNull())
return {};
const VariableArrayType* LVAT = getAsVariableArrayType(LHS);
const VariableArrayType* RVAT = getAsVariableArrayType(RHS);
// If either side is a variable array, and both are complete, check whether
// the current dimension is definite.
if (LVAT || RVAT) {
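      // SizeFetch reports whether a definite size is known for the dimension
      // and, if so, its value: the folded size expression for a VLA or the
      // stored size for a constant array.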
auto SizeFetch = [this](const VariableArrayType* VAT,
const ConstantArrayType* CAT)
-> std::pair<bool,llvm::APInt> {
if (VAT) {
llvm::APSInt TheInt;
Expr *E = VAT->getSizeExpr();
if (E && E->isIntegerConstantExpr(TheInt, *this))
return std::make_pair(true, TheInt);
else
return std::make_pair(false, TheInt);
} else if (CAT) {
return std::make_pair(true, CAT->getSize());
} else {
return std::make_pair(false, llvm::APInt());
}
};
bool HaveLSize, HaveRSize;
llvm::APInt LSize, RSize;
std::tie(HaveLSize, LSize) = SizeFetch(LVAT, LCAT);
std::tie(HaveRSize, RSize) = SizeFetch(RVAT, RCAT);
if (HaveLSize && HaveRSize && !llvm::APInt::isSameValue(LSize, RSize))
return {}; // Definite, but unequal, array dimension
}
if (LCAT && getCanonicalType(LHSElem) == getCanonicalType(ResultType))
return LHS;
if (RCAT && getCanonicalType(RHSElem) == getCanonicalType(ResultType))
return RHS;
if (LCAT)
return getConstantArrayType(ResultType, LCAT->getSize(),
LCAT->getSizeExpr(),
ArrayType::ArraySizeModifier(), 0);
if (RCAT)
return getConstantArrayType(ResultType, RCAT->getSize(),
RCAT->getSizeExpr(),
ArrayType::ArraySizeModifier(), 0);
if (LVAT && getCanonicalType(LHSElem) == getCanonicalType(ResultType))
return LHS;
if (RVAT && getCanonicalType(RHSElem) == getCanonicalType(ResultType))
return RHS;
if (LVAT) {
// FIXME: This isn't correct! But tricky to implement because
// the array's size has to be the size of LHS, but the type
// has to be different.
return LHS;
}
if (RVAT) {
// FIXME: This isn't correct! But tricky to implement because
// the array's size has to be the size of RHS, but the type
// has to be different.
return RHS;
}
if (getCanonicalType(LHSElem) == getCanonicalType(ResultType)) return LHS;
if (getCanonicalType(RHSElem) == getCanonicalType(ResultType)) return RHS;
return getIncompleteArrayType(ResultType,
ArrayType::ArraySizeModifier(), 0);
}
case Type::FunctionNoProto:
return mergeFunctionTypes(LHS, RHS, OfBlockPointer, Unqualified);
case Type::Record:
case Type::Enum:
return {};
case Type::Builtin:
// Only exactly equal builtin types are compatible, which is tested above.
return {};
case Type::Complex:
// Distinct complex types are incompatible.
return {};
case Type::Vector:
// FIXME: The merged type should be an ExtVector!
if (areCompatVectorTypes(LHSCan->castAs<VectorType>(),
RHSCan->castAs<VectorType>()))
return LHS;
return {};
case Type::ObjCObject: {
// Check if the types are assignment compatible.
// FIXME: This should be type compatibility, e.g. whether
// "LHS x; RHS x;" at global scope is legal.
if (canAssignObjCInterfaces(LHS->castAs<ObjCObjectType>(),
RHS->castAs<ObjCObjectType>()))
return LHS;
return {};
}
case Type::ObjCObjectPointer:
if (OfBlockPointer) {
if (canAssignObjCInterfacesInBlockPointer(
LHS->castAs<ObjCObjectPointerType>(),
RHS->castAs<ObjCObjectPointerType>(), BlockReturnType))
return LHS;
return {};
}
if (canAssignObjCInterfaces(LHS->castAs<ObjCObjectPointerType>(),
RHS->castAs<ObjCObjectPointerType>()))
return LHS;
return {};
case Type::Pipe:
assert(LHS != RHS &&
"Equivalent pipe types should have already been handled!");
return {};
}
llvm_unreachable("Invalid Type::Class!");
}
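/// mergeExtParameterInfo - Merge the ExtParameterInfo lists of two function
/// prototypes. Returns false if the infos cannot be merged; otherwise fills
/// NewParamInfos (left empty when no extra info is needed) and reports via
/// CanUseFirst/CanUseSecond whether each original type can be reused as-is.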
bool ASTContext::mergeExtParameterInfo(
const FunctionProtoType *FirstFnType, const FunctionProtoType *SecondFnType,
bool &CanUseFirst, bool &CanUseSecond,
SmallVectorImpl<FunctionProtoType::ExtParameterInfo> &NewParamInfos) {
assert(NewParamInfos.empty() && "param info list not empty");
CanUseFirst = CanUseSecond = true;
bool FirstHasInfo = FirstFnType->hasExtParameterInfos();
bool SecondHasInfo = SecondFnType->hasExtParameterInfos();
// Fast path: if the first type doesn't have ext parameter infos,
// we match if and only if the second type also doesn't have them.
if (!FirstHasInfo && !SecondHasInfo)
return true;
bool NeedParamInfo = false;
size_t E = FirstHasInfo ? FirstFnType->getExtParameterInfos().size()
: SecondFnType->getExtParameterInfos().size();
for (size_t I = 0; I < E; ++I) {
FunctionProtoType::ExtParameterInfo FirstParam, SecondParam;
if (FirstHasInfo)
FirstParam = FirstFnType->getExtParameterInfo(I);
if (SecondHasInfo)
SecondParam = SecondFnType->getExtParameterInfo(I);
// Cannot merge unless everything except the noescape flag matches.
if (FirstParam.withIsNoEscape(false) != SecondParam.withIsNoEscape(false))
return false;
bool FirstNoEscape = FirstParam.isNoEscape();
bool SecondNoEscape = SecondParam.isNoEscape();
bool IsNoEscape = FirstNoEscape && SecondNoEscape;
NewParamInfos.push_back(FirstParam.withIsNoEscape(IsNoEscape));
if (NewParamInfos.back().getOpaqueValue())
NeedParamInfo = true;
if (FirstNoEscape != IsNoEscape)
CanUseFirst = false;
if (SecondNoEscape != IsNoEscape)
CanUseSecond = false;
}
if (!NeedParamInfo)
NewParamInfos.clear();
return true;
}
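/// ResetObjCLayout - Discard the cached layout for the given Objective-C
/// container so it is recomputed the next time it is requested.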
void ASTContext::ResetObjCLayout(const ObjCContainerDecl *CD) {
ObjCLayouts[CD] = nullptr;
}
/// mergeObjCGCQualifiers - This routine merges the ObjC GC attributes of 'LHS'
/// and 'RHS' and returns the merged version, including for function
/// return types.
QualType ASTContext::mergeObjCGCQualifiers(QualType LHS, QualType RHS) {
QualType LHSCan = getCanonicalType(LHS),
RHSCan = getCanonicalType(RHS);
// If two types are identical, they are compatible.
if (LHSCan == RHSCan)
return LHS;
if (RHSCan->isFunctionType()) {
if (!LHSCan->isFunctionType())
return {};
QualType OldReturnType =
cast<FunctionType>(RHSCan.getTypePtr())->getReturnType();
QualType NewReturnType =
cast<FunctionType>(LHSCan.getTypePtr())->getReturnType();
QualType ResReturnType =
mergeObjCGCQualifiers(NewReturnType, OldReturnType);
if (ResReturnType.isNull())
return {};
if (ResReturnType == NewReturnType || ResReturnType == OldReturnType) {
// id foo(); ... __strong id foo(); or: __strong id foo(); ... id foo();
// In either case, use OldReturnType to build the new function type.
const auto *F = LHS->castAs<FunctionType>();
if (const auto *FPT = cast<FunctionProtoType>(F)) {
FunctionProtoType::ExtProtoInfo EPI = FPT->getExtProtoInfo();
EPI.ExtInfo = getFunctionExtInfo(LHS);
QualType ResultType =
getFunctionType(OldReturnType, FPT->getParamTypes(), EPI);
return ResultType;
}
}
return {};
}
// If the qualifiers are different, the types can still be merged.
Qualifiers LQuals = LHSCan.getLocalQualifiers();
Qualifiers RQuals = RHSCan.getLocalQualifiers();
if (LQuals != RQuals) {
// If any of these qualifiers are different, we have a type mismatch.
if (LQuals.getCVRQualifiers() != RQuals.getCVRQualifiers() ||
LQuals.getAddressSpace() != RQuals.getAddressSpace())
return {};
// Exactly one GC qualifier difference is allowed: __strong is
// okay if the other type has no GC qualifier but is an Objective
// C object pointer (i.e. implicitly strong by default). We fix
// this by pretending that the unqualified type was actually
// qualified __strong.
Qualifiers::GC GC_L = LQuals.getObjCGCAttr();
Qualifiers::GC GC_R = RQuals.getObjCGCAttr();
assert((GC_L != GC_R) && "unequal qualifier sets had only equal elements");
if (GC_L == Qualifiers::Weak || GC_R == Qualifiers::Weak)
return {};
if (GC_L == Qualifiers::Strong)
return LHS;
if (GC_R == Qualifiers::Strong)
return RHS;
return {};
}
if (LHSCan->isObjCObjectPointerType() && RHSCan->isObjCObjectPointerType()) {
QualType LHSBaseQT = LHS->castAs<ObjCObjectPointerType>()->getPointeeType();
QualType RHSBaseQT = RHS->castAs<ObjCObjectPointerType>()->getPointeeType();
QualType ResQT = mergeObjCGCQualifiers(LHSBaseQT, RHSBaseQT);
if (ResQT == LHSBaseQT)
return LHS;
if (ResQT == RHSBaseQT)
return RHS;
}
return {};
}
//===----------------------------------------------------------------------===//
// Integer Predicates
//===----------------------------------------------------------------------===//
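/// getIntWidth - Return the width in bits used to represent values of the
/// given integer type; enums use their underlying integer type and bool is
/// reported as a single bit.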
unsigned ASTContext::getIntWidth(QualType T) const {
if (const auto *ET = T->getAs<EnumType>())
T = ET->getDecl()->getIntegerType();
if (T->isBooleanType())
return 1;
// For builtin types, just use the standard type sizing method
return (unsigned)getTypeSize(T);
}
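/// getCorrespondingUnsignedType - Return the unsigned counterpart of the given
/// signed integer, fixed-point, enum or vector-of-signed type.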
QualType ASTContext::getCorrespondingUnsignedType(QualType T) const {
assert((T->hasSignedIntegerRepresentation() || T->isSignedFixedPointType()) &&
"Unexpected type");
// Turn <4 x signed int> -> <4 x unsigned int>
if (const auto *VTy = T->getAs<VectorType>())
return getVectorType(getCorrespondingUnsignedType(VTy->getElementType()),
VTy->getNumElements(), VTy->getVectorKind());
// For enums, we return the unsigned version of the base type.
if (const auto *ETy = T->getAs<EnumType>())
T = ETy->getDecl()->getIntegerType();
switch (T->castAs<BuiltinType>()->getKind()) {
case BuiltinType::Char_S:
case BuiltinType::SChar:
return UnsignedCharTy;
case BuiltinType::Short:
return UnsignedShortTy;
case BuiltinType::Int:
return UnsignedIntTy;
case BuiltinType::Long:
return UnsignedLongTy;
case BuiltinType::LongLong:
return UnsignedLongLongTy;
case BuiltinType::Int128:
return UnsignedInt128Ty;
case BuiltinType::ShortAccum:
return UnsignedShortAccumTy;
case BuiltinType::Accum:
return UnsignedAccumTy;
case BuiltinType::LongAccum:
return UnsignedLongAccumTy;
case BuiltinType::SatShortAccum:
return SatUnsignedShortAccumTy;
case BuiltinType::SatAccum:
return SatUnsignedAccumTy;
case BuiltinType::SatLongAccum:
return SatUnsignedLongAccumTy;
case BuiltinType::ShortFract:
return UnsignedShortFractTy;
case BuiltinType::Fract:
return UnsignedFractTy;
case BuiltinType::LongFract:
return UnsignedLongFractTy;
case BuiltinType::SatShortFract:
return SatUnsignedShortFractTy;
case BuiltinType::SatFract:
return SatUnsignedFractTy;
case BuiltinType::SatLongFract:
return SatUnsignedLongFractTy;
default:
llvm_unreachable("Unexpected signed integer or fixed point type");
}
}
ASTMutationListener::~ASTMutationListener() = default;
void ASTMutationListener::DeducedReturnType(const FunctionDecl *FD,
QualType ReturnType) {}
//===----------------------------------------------------------------------===//
// Builtin Type Computation
//===----------------------------------------------------------------------===//
/// DecodeTypeFromStr - This decodes one type descriptor from Str, advancing the
/// pointer over the consumed characters. This returns the resultant type. If
/// AllowTypeModifiers is false then modifiers like * are not parsed, just basic
/// types. This allows "v2i*" to be parsed as a pointer to a v2i instead of
/// a vector of "i*".
///
/// RequiresICE is filled in on return to indicate whether the value is required
/// to be an Integer Constant Expression.
static QualType DecodeTypeFromStr(const char *&Str, const ASTContext &Context,
ASTContext::GetBuiltinTypeError &Error,
bool &RequiresICE,
bool AllowTypeModifiers) {
// Modifiers.
int HowLong = 0;
bool Signed = false, Unsigned = false;
RequiresICE = false;
// Read the prefixed modifiers first.
bool Done = false;
#ifndef NDEBUG
bool IsSpecial = false;
#endif
while (!Done) {
switch (*Str++) {
default: Done = true; --Str; break;
case 'I':
RequiresICE = true;
break;
case 'S':
assert(!Unsigned && "Can't use both 'S' and 'U' modifiers!");
assert(!Signed && "Can't use 'S' modifier multiple times!");
Signed = true;
break;
case 'U':
assert(!Signed && "Can't use both 'S' and 'U' modifiers!");
assert(!Unsigned && "Can't use 'U' modifier multiple times!");
Unsigned = true;
break;
case 'L':
assert(!IsSpecial && "Can't use 'L' with 'W', 'N', 'Z' or 'O' modifiers");
assert(HowLong <= 2 && "Can't have LLLL modifier");
++HowLong;
break;
case 'N':
    // 'N' behaves like 'L' for all non-LP64 targets and 'int' otherwise.
assert(!IsSpecial && "Can't use two 'N', 'W', 'Z' or 'O' modifiers!");
assert(HowLong == 0 && "Can't use both 'L' and 'N' modifiers!");
#ifndef NDEBUG
IsSpecial = true;
#endif
if (Context.getTargetInfo().getLongWidth() == 32)
++HowLong;
break;
case 'W':
// This modifier represents int64 type.
assert(!IsSpecial && "Can't use two 'N', 'W', 'Z' or 'O' modifiers!");
assert(HowLong == 0 && "Can't use both 'L' and 'W' modifiers!");
#ifndef NDEBUG
IsSpecial = true;
#endif
switch (Context.getTargetInfo().getInt64Type()) {
default:
llvm_unreachable("Unexpected integer type");
case TargetInfo::SignedLong:
HowLong = 1;
break;
case TargetInfo::SignedLongLong:
HowLong = 2;
break;
}
break;
case 'Z':
// This modifier represents int32 type.
assert(!IsSpecial && "Can't use two 'N', 'W', 'Z' or 'O' modifiers!");
assert(HowLong == 0 && "Can't use both 'L' and 'Z' modifiers!");
#ifndef NDEBUG
IsSpecial = true;
#endif
switch (Context.getTargetInfo().getIntTypeByWidth(32, true)) {
default:
llvm_unreachable("Unexpected integer type");
case TargetInfo::SignedInt:
HowLong = 0;
break;
case TargetInfo::SignedLong:
HowLong = 1;
break;
case TargetInfo::SignedLongLong:
HowLong = 2;
break;
}
break;
case 'O':
assert(!IsSpecial && "Can't use two 'N', 'W', 'Z' or 'O' modifiers!");
assert(HowLong == 0 && "Can't use both 'L' and 'O' modifiers!");
#ifndef NDEBUG
IsSpecial = true;
#endif
if (Context.getLangOpts().OpenCL)
HowLong = 1;
else
HowLong = 2;
break;
}
}
QualType Type;
// Read the base type.
switch (*Str++) {
default: llvm_unreachable("Unknown builtin type letter!");
// Scaffold qbit type for builtin gates
case 'l':
assert(HowLong == 0 && !Signed && !Unsigned &&
"Bad modifiers used with 'l'!");
Type = Context.CbitTy;
break;
case 'q':
assert(HowLong == 0 && !Signed && !Unsigned &&
"Bad modifiers used with 'q'!");
Type = Context.QbitTy;
break;
case 'y':
assert(HowLong == 0 && !Signed && !Unsigned &&
"Bad modifiers used with 'y'!");
Type = Context.QintTy;
break;
case 'v':
assert(HowLong == 0 && !Signed && !Unsigned &&
"Bad modifiers used with 'v'!");
Type = Context.VoidTy;
break;
case 'h':
assert(HowLong == 0 && !Signed && !Unsigned &&
"Bad modifiers used with 'h'!");
Type = Context.HalfTy;
break;
case 'f':
assert(HowLong == 0 && !Signed && !Unsigned &&
"Bad modifiers used with 'f'!");
Type = Context.FloatTy;
break;
case 'd':
assert(HowLong < 3 && !Signed && !Unsigned &&
"Bad modifiers used with 'd'!");
if (HowLong == 1)
Type = Context.LongDoubleTy;
else if (HowLong == 2)
Type = Context.Float128Ty;
else
Type = Context.DoubleTy;
break;
case 's':
assert(HowLong == 0 && "Bad modifiers used with 's'!");
if (Unsigned)
Type = Context.UnsignedShortTy;
else
Type = Context.ShortTy;
break;
case 'i':
if (HowLong == 3)
Type = Unsigned ? Context.UnsignedInt128Ty : Context.Int128Ty;
else if (HowLong == 2)
Type = Unsigned ? Context.UnsignedLongLongTy : Context.LongLongTy;
else if (HowLong == 1)
Type = Unsigned ? Context.UnsignedLongTy : Context.LongTy;
else
Type = Unsigned ? Context.UnsignedIntTy : Context.IntTy;
break;
case 'c':
assert(HowLong == 0 && "Bad modifiers used with 'c'!");
if (Signed)
Type = Context.SignedCharTy;
else if (Unsigned)
Type = Context.UnsignedCharTy;
else
Type = Context.CharTy;
break;
case 'b': // boolean
assert(HowLong == 0 && !Signed && !Unsigned && "Bad modifiers for 'b'!");
Type = Context.BoolTy;
break;
case 'z': // size_t.
assert(HowLong == 0 && !Signed && !Unsigned && "Bad modifiers for 'z'!");
Type = Context.getSizeType();
break;
case 'w': // wchar_t.
assert(HowLong == 0 && !Signed && !Unsigned && "Bad modifiers for 'w'!");
Type = Context.getWideCharType();
break;
case 'F':
Type = Context.getCFConstantStringType();
break;
case 'G':
Type = Context.getObjCIdType();
break;
case 'H':
Type = Context.getObjCSelType();
break;
case 'M':
Type = Context.getObjCSuperType();
break;
case 'a':
Type = Context.getBuiltinVaListType();
assert(!Type.isNull() && "builtin va list type not initialized!");
break;
case 'A':
// This is a "reference" to a va_list; however, what exactly
// this means depends on how va_list is defined. There are two
// different kinds of va_list: ones passed by value, and ones
// passed by reference. An example of a by-value va_list is
// x86, where va_list is a char*. An example of by-ref va_list
// is x86-64, where va_list is a __va_list_tag[1]. For x86,
// we want this argument to be a char*&; for x86-64, we want
// it to be a __va_list_tag*.
Type = Context.getBuiltinVaListType();
assert(!Type.isNull() && "builtin va list type not initialized!");
if (Type->isArrayType())
Type = Context.getArrayDecayedType(Type);
else
Type = Context.getLValueReferenceType(Type);
break;
case 'V': {
char *End;
unsigned NumElements = strtoul(Str, &End, 10);
assert(End != Str && "Missing vector size");
Str = End;
QualType ElementType = DecodeTypeFromStr(Str, Context, Error,
RequiresICE, false);
assert(!RequiresICE && "Can't require vector ICE");
// TODO: No way to make AltiVec vectors in builtins yet.
Type = Context.getVectorType(ElementType, NumElements,
VectorType::GenericVector);
break;
}
case 'E': {
char *End;
unsigned NumElements = strtoul(Str, &End, 10);
assert(End != Str && "Missing vector size");
Str = End;
QualType ElementType = DecodeTypeFromStr(Str, Context, Error, RequiresICE,
false);
Type = Context.getExtVectorType(ElementType, NumElements);
break;
}
case 'X': {
QualType ElementType = DecodeTypeFromStr(Str, Context, Error, RequiresICE,
false);
assert(!RequiresICE && "Can't require complex ICE");
Type = Context.getComplexType(ElementType);
break;
}
case 'Y':
Type = Context.getPointerDiffType();
break;
case 'P':
Type = Context.getFILEType();
if (Type.isNull()) {
Error = ASTContext::GE_Missing_stdio;
return {};
}
break;
case 'J':
if (Signed)
Type = Context.getsigjmp_bufType();
else
Type = Context.getjmp_bufType();
if (Type.isNull()) {
Error = ASTContext::GE_Missing_setjmp;
return {};
}
break;
case 'K':
assert(HowLong == 0 && !Signed && !Unsigned && "Bad modifiers for 'K'!");
Type = Context.getucontext_tType();
if (Type.isNull()) {
Error = ASTContext::GE_Missing_ucontext;
return {};
}
break;
case 'p':
Type = Context.getProcessIDType();
break;
}
// If there are modifiers and if we're allowed to parse them, go for it.
Done = !AllowTypeModifiers;
while (!Done) {
switch (char c = *Str++) {
default: Done = true; --Str; break;
case '*':
case '&': {
// Both pointers and references can have their pointee types
// qualified with an address space.
char *End;
unsigned AddrSpace = strtoul(Str, &End, 10);
if (End != Str) {
// Note AddrSpace == 0 is not the same as an unspecified address space.
Type = Context.getAddrSpaceQualType(
Type,
Context.getLangASForBuiltinAddressSpace(AddrSpace));
Str = End;
}
if (c == '*')
Type = Context.getPointerType(Type);
else
Type = Context.getLValueReferenceType(Type);
break;
}
// FIXME: There's no way to have a built-in with an rvalue ref arg.
case 'C':
Type = Type.withConst();
break;
case 'D':
Type = Context.getVolatileType(Type);
break;
case 'R':
Type = Type.withRestrict();
break;
}
}
assert((!RequiresICE || Type->isIntegralOrEnumerationType()) &&
"Integer constant 'I' type must be an integer");
return Type;
}
/// GetBuiltinType - Return the type for the specified builtin.
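///
/// The type string is decoded with DecodeTypeFromStr: the first descriptor is
/// the return type, the remaining descriptors are the parameter types, and a
/// trailing '.' marks the builtin as variadic.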
QualType ASTContext::GetBuiltinType(unsigned Id,
GetBuiltinTypeError &Error,
unsigned *IntegerConstantArgs) const {
const char *TypeStr = BuiltinInfo.getTypeString(Id);
if (TypeStr[0] == '\0') {
Error = GE_Missing_type;
return {};
}
SmallVector<QualType, 8> ArgTypes;
bool RequiresICE = false;
Error = GE_None;
QualType ResType = DecodeTypeFromStr(TypeStr, *this, Error,
RequiresICE, true);
if (Error != GE_None)
return {};
assert(!RequiresICE && "Result of intrinsic cannot be required to be an ICE");
while (TypeStr[0] && TypeStr[0] != '.') {
QualType Ty = DecodeTypeFromStr(TypeStr, *this, Error, RequiresICE, true);
if (Error != GE_None)
return {};
// If this argument is required to be an IntegerConstantExpression and the
// caller cares, fill in the bitmask we return.
if (RequiresICE && IntegerConstantArgs)
*IntegerConstantArgs |= 1 << ArgTypes.size();
// Do array -> pointer decay. The builtin should use the decayed type.
if (Ty->isArrayType())
Ty = getArrayDecayedType(Ty);
ArgTypes.push_back(Ty);
}
if (Id == Builtin::BI__GetExceptionInfo)
return {};
assert((TypeStr[0] != '.' || TypeStr[1] == 0) &&
"'.' should only occur at end of builtin type list!");
bool Variadic = (TypeStr[0] == '.');
FunctionType::ExtInfo EI(getDefaultCallingConvention(
Variadic, /*IsCXXMethod=*/false, /*IsBuiltin=*/true));
if (BuiltinInfo.isNoReturn(Id)) EI = EI.withNoReturn(true);
// We really shouldn't be making a no-proto type here.
if (ArgTypes.empty() && Variadic && !getLangOpts().CPlusPlus)
return getFunctionNoProtoType(ResType, EI);
FunctionProtoType::ExtProtoInfo EPI;
EPI.ExtInfo = EI;
EPI.Variadic = Variadic;
if (getLangOpts().CPlusPlus && BuiltinInfo.isNoThrow(Id))
EPI.ExceptionSpec.Type =
getLangOpts().CPlusPlus11 ? EST_BasicNoexcept : EST_DynamicNone;
return getFunctionType(ResType, ArgTypes, EPI);
}
static GVALinkage basicGVALinkageForFunction(const ASTContext &Context,
const FunctionDecl *FD) {
if (!FD->isExternallyVisible())
return GVA_Internal;
// Non-user-provided functions get emitted as weak definitions with every
// use, no matter whether they've been explicitly instantiated etc.
if (const auto *MD = dyn_cast<CXXMethodDecl>(FD))
if (!MD->isUserProvided())
return GVA_DiscardableODR;
GVALinkage External;
switch (FD->getTemplateSpecializationKind()) {
case TSK_Undeclared:
case TSK_ExplicitSpecialization:
External = GVA_StrongExternal;
break;
case TSK_ExplicitInstantiationDefinition:
return GVA_StrongODR;
// C++11 [temp.explicit]p10:
// [ Note: The intent is that an inline function that is the subject of
// an explicit instantiation declaration will still be implicitly
// instantiated when used so that the body can be considered for
// inlining, but that no out-of-line copy of the inline function would be
// generated in the translation unit. -- end note ]
case TSK_ExplicitInstantiationDeclaration:
return GVA_AvailableExternally;
case TSK_ImplicitInstantiation:
External = GVA_DiscardableODR;
break;
}
if (!FD->isInlined())
return External;
if ((!Context.getLangOpts().CPlusPlus &&
!Context.getTargetInfo().getCXXABI().isMicrosoft() &&
!FD->hasAttr<DLLExportAttr>()) ||
FD->hasAttr<GNUInlineAttr>()) {
// FIXME: This doesn't match gcc's behavior for dllexport inline functions.
// GNU or C99 inline semantics. Determine whether this symbol should be
// externally visible.
if (FD->isInlineDefinitionExternallyVisible())
return External;
// C99 inline semantics, where the symbol is not externally visible.
return GVA_AvailableExternally;
}
// Functions specified with extern and inline in -fms-compatibility mode
// forcibly get emitted. While the body of the function cannot be later
// replaced, the function definition cannot be discarded.
if (FD->isMSExternInline())
return GVA_StrongODR;
return GVA_DiscardableODR;
}
static GVALinkage adjustGVALinkageForAttributes(const ASTContext &Context,
const Decl *D, GVALinkage L) {
// See http://msdn.microsoft.com/en-us/library/xa0d9ste.aspx
// dllexport/dllimport on inline functions.
if (D->hasAttr<DLLImportAttr>()) {
if (L == GVA_DiscardableODR || L == GVA_StrongODR)
return GVA_AvailableExternally;
} else if (D->hasAttr<DLLExportAttr>()) {
if (L == GVA_DiscardableODR)
return GVA_StrongODR;
} else if (Context.getLangOpts().CUDA && Context.getLangOpts().CUDAIsDevice &&
D->hasAttr<CUDAGlobalAttr>()) {
// Device-side functions with __global__ attribute must always be
// visible externally so they can be launched from host.
if (L == GVA_DiscardableODR || L == GVA_Internal)
return GVA_StrongODR;
}
return L;
}
/// Adjust the GVALinkage for a declaration based on what an external AST source
/// knows about whether there can be other definitions of this declaration.
static GVALinkage
adjustGVALinkageForExternalDefinitionKind(const ASTContext &Ctx, const Decl *D,
GVALinkage L) {
ExternalASTSource *Source = Ctx.getExternalSource();
if (!Source)
return L;
switch (Source->hasExternalDefinitions(D)) {
case ExternalASTSource::EK_Never:
// Other translation units rely on us to provide the definition.
if (L == GVA_DiscardableODR)
return GVA_StrongODR;
break;
case ExternalASTSource::EK_Always:
return GVA_AvailableExternally;
case ExternalASTSource::EK_ReplyHazy:
break;
}
return L;
}
GVALinkage ASTContext::GetGVALinkageForFunction(const FunctionDecl *FD) const {
return adjustGVALinkageForExternalDefinitionKind(*this, FD,
adjustGVALinkageForAttributes(*this, FD,
basicGVALinkageForFunction(*this, FD)));
}
static GVALinkage basicGVALinkageForVariable(const ASTContext &Context,
const VarDecl *VD) {
if (!VD->isExternallyVisible())
return GVA_Internal;
if (VD->isStaticLocal()) {
const DeclContext *LexicalContext = VD->getParentFunctionOrMethod();
while (LexicalContext && !isa<FunctionDecl>(LexicalContext))
LexicalContext = LexicalContext->getLexicalParent();
// ObjC Blocks can create local variables that don't have a FunctionDecl
// LexicalContext.
if (!LexicalContext)
return GVA_DiscardableODR;
// Otherwise, let the static local variable inherit its linkage from the
// nearest enclosing function.
auto StaticLocalLinkage =
Context.GetGVALinkageForFunction(cast<FunctionDecl>(LexicalContext));
// Itanium ABI 5.2.2: "Each COMDAT group [for a static local variable] must
// be emitted in any object with references to the symbol for the object it
// contains, whether inline or out-of-line."
// Similar behavior is observed with MSVC. An alternative ABI could use
// StrongODR/AvailableExternally to match the function, but none are
// known/supported currently.
if (StaticLocalLinkage == GVA_StrongODR ||
StaticLocalLinkage == GVA_AvailableExternally)
return GVA_DiscardableODR;
return StaticLocalLinkage;
}
// MSVC treats in-class initialized static data members as definitions.
// By giving them non-strong linkage, out-of-line definitions won't
// cause link errors.
if (Context.isMSStaticDataMemberInlineDefinition(VD))
return GVA_DiscardableODR;
// Most non-template variables have strong linkage; inline variables are
// linkonce_odr or (occasionally, for compatibility) weak_odr.
GVALinkage StrongLinkage;
switch (Context.getInlineVariableDefinitionKind(VD)) {
case ASTContext::InlineVariableDefinitionKind::None:
StrongLinkage = GVA_StrongExternal;
break;
case ASTContext::InlineVariableDefinitionKind::Weak:
case ASTContext::InlineVariableDefinitionKind::WeakUnknown:
StrongLinkage = GVA_DiscardableODR;
break;
case ASTContext::InlineVariableDefinitionKind::Strong:
StrongLinkage = GVA_StrongODR;
break;
}
switch (VD->getTemplateSpecializationKind()) {
case TSK_Undeclared:
return StrongLinkage;
case TSK_ExplicitSpecialization:
return Context.getTargetInfo().getCXXABI().isMicrosoft() &&
VD->isStaticDataMember()
? GVA_StrongODR
: StrongLinkage;
case TSK_ExplicitInstantiationDefinition:
return GVA_StrongODR;
case TSK_ExplicitInstantiationDeclaration:
return GVA_AvailableExternally;
case TSK_ImplicitInstantiation:
return GVA_DiscardableODR;
}
llvm_unreachable("Invalid Linkage!");
}
GVALinkage ASTContext::GetGVALinkageForVariable(const VarDecl *VD) {
return adjustGVALinkageForExternalDefinitionKind(*this, VD,
adjustGVALinkageForAttributes(*this, VD,
basicGVALinkageForVariable(*this, VD)));
}
bool ASTContext::DeclMustBeEmitted(const Decl *D) {
if (const auto *VD = dyn_cast<VarDecl>(D)) {
if (!VD->isFileVarDecl())
return false;
// Global named register variables (GNU extension) are never emitted.
if (VD->getStorageClass() == SC_Register)
return false;
if (VD->getDescribedVarTemplate() ||
isa<VarTemplatePartialSpecializationDecl>(VD))
return false;
} else if (const auto *FD = dyn_cast<FunctionDecl>(D)) {
// We never need to emit an uninstantiated function template.
if (FD->getTemplatedKind() == FunctionDecl::TK_FunctionTemplate)
return false;
} else if (isa<PragmaCommentDecl>(D))
return true;
else if (isa<PragmaDetectMismatchDecl>(D))
return true;
else if (isa<OMPThreadPrivateDecl>(D))
return !D->getDeclContext()->isDependentContext();
else if (isa<OMPAllocateDecl>(D))
return !D->getDeclContext()->isDependentContext();
else if (isa<OMPDeclareReductionDecl>(D) || isa<OMPDeclareMapperDecl>(D))
return !D->getDeclContext()->isDependentContext();
else if (isa<ImportDecl>(D))
return true;
else
return false;
if (D->isFromASTFile() && !LangOpts.BuildingPCHWithObjectFile) {
assert(getExternalSource() && "It's from an AST file; must have a source.");
// On Windows, PCH files are built together with an object file. If this
// declaration comes from such a PCH and DeclMustBeEmitted would return
// true, it would have returned true and the decl would have been emitted
// into that object file, so it doesn't need to be emitted here.
// Note that decls are still emitted if they're referenced, as usual;
// DeclMustBeEmitted is used to decide whether a decl must be emitted even
// if it's not referenced.
//
// Explicit template instantiation definitions are tricky. If there was an
// explicit template instantiation decl in the PCH before, it will look like
// the definition comes from there, even if that was just the declaration.
// (Explicit instantiation defs of variable templates always get emitted.)
bool IsExpInstDef =
isa<FunctionDecl>(D) &&
cast<FunctionDecl>(D)->getTemplateSpecializationKind() ==
TSK_ExplicitInstantiationDefinition;
// Implicit member function definitions, such as operator= might not be
// marked as template specializations, since they're not coming from a
// template but synthesized directly on the class.
IsExpInstDef |=
isa<CXXMethodDecl>(D) &&
cast<CXXMethodDecl>(D)->getParent()->getTemplateSpecializationKind() ==
TSK_ExplicitInstantiationDefinition;
if (getExternalSource()->DeclIsFromPCHWithObjectFile(D) && !IsExpInstDef)
return false;
}
// If this is a member of a class template, we do not need to emit it.
if (D->getDeclContext()->isDependentContext())
return false;
// Weak references don't produce any output by themselves.
if (D->hasAttr<WeakRefAttr>())
return false;
// Aliases and used decls are required.
if (D->hasAttr<AliasAttr>() || D->hasAttr<UsedAttr>())
return true;
if (const auto *FD = dyn_cast<FunctionDecl>(D)) {
// Forward declarations aren't required.
if (!FD->doesThisDeclarationHaveABody())
return FD->doesDeclarationForceExternallyVisibleDefinition();
// Constructors and destructors are required.
if (FD->hasAttr<ConstructorAttr>() || FD->hasAttr<DestructorAttr>())
return true;
// The key function for a class is required. This rule only comes
// into play when inline functions can be key functions, though.
if (getTargetInfo().getCXXABI().canKeyFunctionBeInline()) {
if (const auto *MD = dyn_cast<CXXMethodDecl>(FD)) {
const CXXRecordDecl *RD = MD->getParent();
if (MD->isOutOfLine() && RD->isDynamicClass()) {
const CXXMethodDecl *KeyFunc = getCurrentKeyFunction(RD);
if (KeyFunc && KeyFunc->getCanonicalDecl() == MD->getCanonicalDecl())
return true;
}
}
}
GVALinkage Linkage = GetGVALinkageForFunction(FD);
// static, static inline, always_inline, and extern inline functions can
// always be deferred. Normal inline functions can be deferred in C99/C++.
// Implicit template instantiations can also be deferred in C++.
return !isDiscardableGVALinkage(Linkage);
}
const auto *VD = cast<VarDecl>(D);
assert(VD->isFileVarDecl() && "Expected file scoped var");
// If the decl is marked as `declare target to`, it should be emitted for the
// host and for the device.
if (LangOpts.OpenMP &&
OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD))
return true;
if (VD->isThisDeclarationADefinition() == VarDecl::DeclarationOnly &&
!isMSStaticDataMemberInlineDefinition(VD))
return false;
// Variables that can be needed in other TUs are required.
auto Linkage = GetGVALinkageForVariable(VD);
if (!isDiscardableGVALinkage(Linkage))
return true;
// We never need to emit a variable that is available in another TU.
if (Linkage == GVA_AvailableExternally)
return false;
// Variables that have destruction with side-effects are required.
if (VD->needsDestruction(*this))
return true;
// Variables that have initialization with side-effects are required.
if (VD->getInit() && VD->getInit()->HasSideEffects(*this) &&
// We can get a value-dependent initializer during error recovery.
(VD->getInit()->isValueDependent() || !VD->evaluateValue()))
return true;
// Likewise, variables with tuple-like bindings are required if their
// bindings have side-effects.
if (const auto *DD = dyn_cast<DecompositionDecl>(VD))
for (const auto *BD : DD->bindings())
if (const auto *BindingVD = BD->getHoldingVar())
if (DeclMustBeEmitted(BindingVD))
return true;
return false;
}
void ASTContext::forEachMultiversionedFunctionVersion(
const FunctionDecl *FD,
llvm::function_ref<void(FunctionDecl *)> Pred) const {
assert(FD->isMultiVersion() && "Only valid for multiversioned functions");
llvm::SmallDenseSet<const FunctionDecl*, 4> SeenDecls;
FD = FD->getMostRecentDecl();
for (auto *CurDecl :
FD->getDeclContext()->getRedeclContext()->lookup(FD->getDeclName())) {
FunctionDecl *CurFD = CurDecl->getAsFunction()->getMostRecentDecl();
if (CurFD && hasSameType(CurFD->getType(), FD->getType()) &&
std::end(SeenDecls) == llvm::find(SeenDecls, CurFD)) {
SeenDecls.insert(CurFD);
Pred(CurFD);
}
}
}
CallingConv ASTContext::getDefaultCallingConvention(bool IsVariadic,
bool IsCXXMethod,
bool IsBuiltin) const {
// Pass through to the C++ ABI object
if (IsCXXMethod)
return ABI->getDefaultMethodCallConv(IsVariadic);
// Builtins ignore user-specified default calling convention and remain the
// Target's default calling convention.
if (!IsBuiltin) {
switch (LangOpts.getDefaultCallingConv()) {
case LangOptions::DCC_None:
break;
case LangOptions::DCC_CDecl:
return CC_C;
case LangOptions::DCC_FastCall:
if (getTargetInfo().hasFeature("sse2") && !IsVariadic)
return CC_X86FastCall;
break;
case LangOptions::DCC_StdCall:
if (!IsVariadic)
return CC_X86StdCall;
break;
case LangOptions::DCC_VectorCall:
// __vectorcall cannot be applied to variadic functions.
if (!IsVariadic)
return CC_X86VectorCall;
break;
case LangOptions::DCC_RegCall:
// __regcall cannot be applied to variadic functions.
if (!IsVariadic)
return CC_X86RegCall;
break;
}
}
return Target->getDefaultCallingConv();
}
bool ASTContext::isNearlyEmpty(const CXXRecordDecl *RD) const {
// Pass through to the C++ ABI object
return ABI->isNearlyEmpty(RD);
}
VTableContextBase *ASTContext::getVTableContext() {
if (!VTContext.get()) {
if (Target->getCXXABI().isMicrosoft())
VTContext.reset(new MicrosoftVTableContext(*this));
else
VTContext.reset(new ItaniumVTableContext(*this));
}
return VTContext.get();
}
MangleContext *ASTContext::createMangleContext(const TargetInfo *T) {
if (!T)
T = Target;
switch (T->getCXXABI().getKind()) {
case TargetCXXABI::Fuchsia:
case TargetCXXABI::GenericAArch64:
case TargetCXXABI::GenericItanium:
case TargetCXXABI::GenericARM:
case TargetCXXABI::GenericMIPS:
case TargetCXXABI::iOS:
case TargetCXXABI::iOS64:
case TargetCXXABI::WebAssembly:
case TargetCXXABI::WatchOS:
return ItaniumMangleContext::create(*this, getDiagnostics());
case TargetCXXABI::Microsoft:
return MicrosoftMangleContext::create(*this, getDiagnostics());
}
llvm_unreachable("Unsupported ABI");
}
CXXABI::~CXXABI() = default;
size_t ASTContext::getSideTableAllocatedMemory() const {
return ASTRecordLayouts.getMemorySize() +
llvm::capacity_in_bytes(ObjCLayouts) +
llvm::capacity_in_bytes(KeyFunctions) +
llvm::capacity_in_bytes(ObjCImpls) +
llvm::capacity_in_bytes(BlockVarCopyInits) +
llvm::capacity_in_bytes(DeclAttrs) +
llvm::capacity_in_bytes(TemplateOrInstantiation) +
llvm::capacity_in_bytes(InstantiatedFromUsingDecl) +
llvm::capacity_in_bytes(InstantiatedFromUsingShadowDecl) +
llvm::capacity_in_bytes(InstantiatedFromUnnamedFieldDecl) +
llvm::capacity_in_bytes(OverriddenMethods) +
llvm::capacity_in_bytes(Types) +
llvm::capacity_in_bytes(VariableArrayTypes);
}
/// getIntTypeForBitwidth -
/// sets integer QualTy according to specified details:
/// bitwidth, signed/unsigned.
/// Returns an empty type if there is no appropriate target type.
QualType ASTContext::getIntTypeForBitwidth(unsigned DestWidth,
unsigned Signed) const {
TargetInfo::IntType Ty = getTargetInfo().getIntTypeByWidth(DestWidth, Signed);
CanQualType QualTy = getFromTargetType(Ty);
if (!QualTy && DestWidth == 128)
return Signed ? Int128Ty : UnsignedInt128Ty;
return QualTy;
}
/// getRealTypeForBitwidth -
/// sets floating point QualTy according to specified bitwidth.
/// Returns an empty type if there is no appropriate target type.
QualType ASTContext::getRealTypeForBitwidth(unsigned DestWidth) const {
TargetInfo::RealType Ty = getTargetInfo().getRealTypeByWidth(DestWidth);
switch (Ty) {
case TargetInfo::Float:
return FloatTy;
case TargetInfo::Double:
return DoubleTy;
case TargetInfo::LongDouble:
return LongDoubleTy;
case TargetInfo::Float128:
return Float128Ty;
case TargetInfo::NoFloat:
return {};
}
llvm_unreachable("Unhandled TargetInfo::RealType value");
}
void ASTContext::setManglingNumber(const NamedDecl *ND, unsigned Number) {
if (Number > 1)
MangleNumbers[ND] = Number;
}
unsigned ASTContext::getManglingNumber(const NamedDecl *ND) const {
auto I = MangleNumbers.find(ND);
return I != MangleNumbers.end() ? I->second : 1;
}
void ASTContext::setStaticLocalNumber(const VarDecl *VD, unsigned Number) {
if (Number > 1)
StaticLocalNumbers[VD] = Number;
}
unsigned ASTContext::getStaticLocalNumber(const VarDecl *VD) const {
auto I = StaticLocalNumbers.find(VD);
return I != StaticLocalNumbers.end() ? I->second : 1;
}
MangleNumberingContext &
ASTContext::getManglingNumberContext(const DeclContext *DC) {
assert(LangOpts.CPlusPlus); // We don't need mangling numbers for plain C.
std::unique_ptr<MangleNumberingContext> &MCtx = MangleNumberingContexts[DC];
if (!MCtx)
MCtx = createMangleNumberingContext();
return *MCtx;
}
MangleNumberingContext &
ASTContext::getManglingNumberContext(NeedExtraManglingDecl_t, const Decl *D) {
assert(LangOpts.CPlusPlus); // We don't need mangling numbers for plain C.
std::unique_ptr<MangleNumberingContext> &MCtx =
ExtraMangleNumberingContexts[D];
if (!MCtx)
MCtx = createMangleNumberingContext();
return *MCtx;
}
std::unique_ptr<MangleNumberingContext>
ASTContext::createMangleNumberingContext() const {
return ABI->createMangleNumberingContext();
}
const CXXConstructorDecl *
ASTContext::getCopyConstructorForExceptionObject(CXXRecordDecl *RD) {
return ABI->getCopyConstructorForExceptionObject(
cast<CXXRecordDecl>(RD->getFirstDecl()));
}
void ASTContext::addCopyConstructorForExceptionObject(CXXRecordDecl *RD,
CXXConstructorDecl *CD) {
return ABI->addCopyConstructorForExceptionObject(
cast<CXXRecordDecl>(RD->getFirstDecl()),
cast<CXXConstructorDecl>(CD->getFirstDecl()));
}
void ASTContext::addTypedefNameForUnnamedTagDecl(TagDecl *TD,
TypedefNameDecl *DD) {
return ABI->addTypedefNameForUnnamedTagDecl(TD, DD);
}
TypedefNameDecl *
ASTContext::getTypedefNameForUnnamedTagDecl(const TagDecl *TD) {
return ABI->getTypedefNameForUnnamedTagDecl(TD);
}
void ASTContext::addDeclaratorForUnnamedTagDecl(TagDecl *TD,
DeclaratorDecl *DD) {
return ABI->addDeclaratorForUnnamedTagDecl(TD, DD);
}
DeclaratorDecl *ASTContext::getDeclaratorForUnnamedTagDecl(const TagDecl *TD) {
return ABI->getDeclaratorForUnnamedTagDecl(TD);
}
void ASTContext::setParameterIndex(const ParmVarDecl *D, unsigned int index) {
ParamIndices[D] = index;
}
unsigned ASTContext::getParameterIndex(const ParmVarDecl *D) const {
ParameterIndexTable::const_iterator I = ParamIndices.find(D);
assert(I != ParamIndices.end() &&
"ParmIndices lacks entry set by ParmVarDecl");
return I->second;
}
QualType ASTContext::getStringLiteralArrayType(QualType EltTy,
unsigned Length) const {
// A C++ string literal has a const-qualified element type (C++ 2.13.4p1).
if (getLangOpts().CPlusPlus || getLangOpts().ConstStrings)
EltTy = EltTy.withConst();
EltTy = adjustStringLiteralBaseType(EltTy);
// Get an array type for the string, according to C99 6.4.5. This includes
// the null terminator character.
return getConstantArrayType(EltTy, llvm::APInt(32, Length + 1), nullptr,
ArrayType::Normal, /*IndexTypeQuals*/ 0);
}
StringLiteral *
ASTContext::getPredefinedStringLiteralFromCache(StringRef Key) const {
StringLiteral *&Result = StringLiteralCache[Key];
if (!Result)
Result = StringLiteral::Create(
*this, Key, StringLiteral::Ascii,
/*Pascal*/ false, getStringLiteralArrayType(CharTy, Key.size()),
SourceLocation());
return Result;
}
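// Returns true for an atomic operation that would require a library call which
// old Darwin targets (iOS < 7, macOS < 10.9) do not provide: the operation
// cannot be inlined when its size and alignment differ or when it is wider
// than the target's maximum inline atomic width.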
bool ASTContext::AtomicUsesUnsupportedLibcall(const AtomicExpr *E) const {
const llvm::Triple &T = getTargetInfo().getTriple();
if (!T.isOSDarwin())
return false;
if (!(T.isiOS() && T.isOSVersionLT(7)) &&
!(T.isMacOSX() && T.isOSVersionLT(10, 9)))
return false;
QualType AtomicTy = E->getPtr()->getType()->getPointeeType();
CharUnits sizeChars = getTypeSizeInChars(AtomicTy);
uint64_t Size = sizeChars.getQuantity();
CharUnits alignChars = getTypeAlignInChars(AtomicTy);
unsigned Align = alignChars.getQuantity();
unsigned MaxInlineWidthInBits = getTargetInfo().getMaxAtomicInlineWidth();
return (Size != Align || toBits(sizeChars) > MaxInlineWidthInBits);
}
/// Template specializations to abstract away from pointers and TypeLocs.
/// @{
template <typename T>
static ast_type_traits::DynTypedNode createDynTypedNode(const T &Node) {
return ast_type_traits::DynTypedNode::create(*Node);
}
template <>
ast_type_traits::DynTypedNode createDynTypedNode(const TypeLoc &Node) {
return ast_type_traits::DynTypedNode::create(Node);
}
template <>
ast_type_traits::DynTypedNode
createDynTypedNode(const NestedNameSpecifierLoc &Node) {
return ast_type_traits::DynTypedNode::create(Node);
}
/// @}
/// A \c RecursiveASTVisitor that builds a map from nodes to their
/// parents as defined by the \c RecursiveASTVisitor.
///
/// Note that the relationship described here is purely in terms of AST
/// traversal - there are other relationships (for example declaration context)
/// in the AST that are better modeled by special matchers.
///
/// FIXME: Currently only builds up the map using \c Stmt and \c Decl nodes.
class ASTContext::ParentMap::ASTVisitor
: public RecursiveASTVisitor<ASTVisitor> {
public:
ASTVisitor(ParentMap &Map, ASTContext &Context)
: Map(Map), Context(Context) {}
private:
friend class RecursiveASTVisitor<ASTVisitor>;
using VisitorBase = RecursiveASTVisitor<ASTVisitor>;
bool shouldVisitTemplateInstantiations() const { return true; }
bool shouldVisitImplicitCode() const { return true; }
template <typename T, typename MapNodeTy, typename BaseTraverseFn,
typename MapTy>
bool TraverseNode(T Node, MapNodeTy MapNode, BaseTraverseFn BaseTraverse,
MapTy *Parents) {
if (!Node)
return true;
if (ParentStack.size() > 0) {
// FIXME: Currently we add the same parent multiple times, but only
// when no memoization data is available for the type.
// For example when we visit all subexpressions of template
// instantiations; this is suboptimal, but benign: the only way to
// visit those is with hasAncestor / hasParent, and those do not create
// new matches.
// The plan is to enable DynTypedNode to be storable in a map or hash
// map. The main problem there is to implement hash functions /
// comparison operators for all types that DynTypedNode supports that
// do not have pointer identity.
auto &NodeOrVector = (*Parents)[MapNode];
if (NodeOrVector.isNull()) {
if (const auto *D = ParentStack.back().get<Decl>())
NodeOrVector = D;
else if (const auto *S = ParentStack.back().get<Stmt>())
NodeOrVector = S;
else
NodeOrVector = new ast_type_traits::DynTypedNode(ParentStack.back());
} else {
if (!NodeOrVector.template is<ParentVector *>()) {
auto *Vector = new ParentVector(
1, getSingleDynTypedNodeFromParentMap(NodeOrVector));
delete NodeOrVector
.template dyn_cast<ast_type_traits::DynTypedNode *>();
NodeOrVector = Vector;
}
auto *Vector = NodeOrVector.template get<ParentVector *>();
// Skip duplicates for types that have memoization data.
// We must check that the type has memoization data before calling
// std::find() because DynTypedNode::operator== can't compare all
// types.
bool Found = ParentStack.back().getMemoizationData() &&
std::find(Vector->begin(), Vector->end(),
ParentStack.back()) != Vector->end();
if (!Found)
Vector->push_back(ParentStack.back());
}
}
ParentStack.push_back(createDynTypedNode(Node));
bool Result = BaseTraverse();
ParentStack.pop_back();
return Result;
}
bool TraverseDecl(Decl *DeclNode) {
return TraverseNode(
DeclNode, DeclNode, [&] { return VisitorBase::TraverseDecl(DeclNode); },
&Map.PointerParents);
}
bool TraverseStmt(Stmt *StmtNode) {
Stmt *FilteredNode = StmtNode;
if (auto *ExprNode = dyn_cast_or_null<Expr>(FilteredNode))
FilteredNode = Context.traverseIgnored(ExprNode);
return TraverseNode(FilteredNode, FilteredNode,
[&] { return VisitorBase::TraverseStmt(FilteredNode); },
&Map.PointerParents);
}
bool TraverseTypeLoc(TypeLoc TypeLocNode) {
return TraverseNode(
TypeLocNode, ast_type_traits::DynTypedNode::create(TypeLocNode),
[&] { return VisitorBase::TraverseTypeLoc(TypeLocNode); },
&Map.OtherParents);
}
bool TraverseNestedNameSpecifierLoc(NestedNameSpecifierLoc NNSLocNode) {
return TraverseNode(
NNSLocNode, ast_type_traits::DynTypedNode::create(NNSLocNode),
[&] { return VisitorBase::TraverseNestedNameSpecifierLoc(NNSLocNode); },
&Map.OtherParents);
}
ParentMap ⤅
ASTContext &Context;
llvm::SmallVector<ast_type_traits::DynTypedNode, 16> ParentStack;
};
ASTContext::ParentMap::ParentMap(ASTContext &Ctx) {
ASTVisitor(*this, Ctx).TraverseAST(Ctx);
}
ASTContext::DynTypedNodeList
ASTContext::getParents(const ast_type_traits::DynTypedNode &Node) {
std::unique_ptr<ParentMap> &P = Parents[Traversal];
if (!P)
// We build the parent map for the traversal scope (usually whole TU), as
// hasAncestor can escape any subtree.
P = std::make_unique<ParentMap>(*this);
return P->getParents(Node);
}
bool
ASTContext::ObjCMethodsAreEqual(const ObjCMethodDecl *MethodDecl,
const ObjCMethodDecl *MethodImpl) {
  // No point trying to match an unavailable/deprecated method.
if (MethodDecl->hasAttr<UnavailableAttr>()
|| MethodDecl->hasAttr<DeprecatedAttr>())
return false;
if (MethodDecl->getObjCDeclQualifier() !=
MethodImpl->getObjCDeclQualifier())
return false;
if (!hasSameType(MethodDecl->getReturnType(), MethodImpl->getReturnType()))
return false;
if (MethodDecl->param_size() != MethodImpl->param_size())
return false;
for (ObjCMethodDecl::param_const_iterator IM = MethodImpl->param_begin(),
IF = MethodDecl->param_begin(), EM = MethodImpl->param_end(),
EF = MethodDecl->param_end();
IM != EM && IF != EF; ++IM, ++IF) {
const ParmVarDecl *DeclVar = (*IF);
const ParmVarDecl *ImplVar = (*IM);
if (ImplVar->getObjCDeclQualifier() != DeclVar->getObjCDeclQualifier())
return false;
if (!hasSameType(DeclVar->getType(), ImplVar->getType()))
return false;
}
return (MethodDecl->isVariadic() == MethodImpl->isVariadic());
}
uint64_t ASTContext::getTargetNullPointerValue(QualType QT) const {
LangAS AS;
if (QT->getUnqualifiedDesugaredType()->isNullPtrType())
AS = LangAS::Default;
else
AS = QT->getPointeeType().getAddressSpace();
return getTargetInfo().getNullPointerValue(AS);
}
unsigned ASTContext::getTargetAddressSpace(LangAS AS) const {
if (isTargetAddressSpace(AS))
return toTargetAddressSpace(AS);
else
return (*AddrSpaceMap)[(unsigned)AS];
}
QualType ASTContext::getCorrespondingSaturatedType(QualType Ty) const {
assert(Ty->isFixedPointType());
if (Ty->isSaturatedFixedPointType()) return Ty;
switch (Ty->castAs<BuiltinType>()->getKind()) {
default:
llvm_unreachable("Not a fixed point type!");
case BuiltinType::ShortAccum:
return SatShortAccumTy;
case BuiltinType::Accum:
return SatAccumTy;
case BuiltinType::LongAccum:
return SatLongAccumTy;
case BuiltinType::UShortAccum:
return SatUnsignedShortAccumTy;
case BuiltinType::UAccum:
return SatUnsignedAccumTy;
case BuiltinType::ULongAccum:
return SatUnsignedLongAccumTy;
case BuiltinType::ShortFract:
return SatShortFractTy;
case BuiltinType::Fract:
return SatFractTy;
case BuiltinType::LongFract:
return SatLongFractTy;
case BuiltinType::UShortFract:
return SatUnsignedShortFractTy;
case BuiltinType::UFract:
return SatUnsignedFractTy;
case BuiltinType::ULongFract:
return SatUnsignedLongFractTy;
}
}
LangAS ASTContext::getLangASForBuiltinAddressSpace(unsigned AS) const {
if (LangOpts.OpenCL)
return getTargetInfo().getOpenCLBuiltinAddressSpace(AS);
if (LangOpts.CUDA)
return getTargetInfo().getCUDABuiltinAddressSpace(AS);
return getLangASFromTargetAS(AS);
}
// Explicitly instantiate this in case a Redeclarable<T> is used from a TU that
// doesn't include ASTContext.h
template
clang::LazyGenerationalUpdatePtr<
const Decl *, Decl *, &ExternalASTSource::CompleteRedeclChain>::ValueType
clang::LazyGenerationalUpdatePtr<
const Decl *, Decl *, &ExternalASTSource::CompleteRedeclChain>::makeValue(
const clang::ASTContext &Ctx, Decl *Value);
unsigned char ASTContext::getFixedPointScale(QualType Ty) const {
assert(Ty->isFixedPointType());
const TargetInfo &Target = getTargetInfo();
switch (Ty->castAs<BuiltinType>()->getKind()) {
default:
llvm_unreachable("Not a fixed point type!");
case BuiltinType::ShortAccum:
case BuiltinType::SatShortAccum:
return Target.getShortAccumScale();
case BuiltinType::Accum:
case BuiltinType::SatAccum:
return Target.getAccumScale();
case BuiltinType::LongAccum:
case BuiltinType::SatLongAccum:
return Target.getLongAccumScale();
case BuiltinType::UShortAccum:
case BuiltinType::SatUShortAccum:
return Target.getUnsignedShortAccumScale();
case BuiltinType::UAccum:
case BuiltinType::SatUAccum:
return Target.getUnsignedAccumScale();
case BuiltinType::ULongAccum:
case BuiltinType::SatULongAccum:
return Target.getUnsignedLongAccumScale();
case BuiltinType::ShortFract:
case BuiltinType::SatShortFract:
return Target.getShortFractScale();
case BuiltinType::Fract:
case BuiltinType::SatFract:
return Target.getFractScale();
case BuiltinType::LongFract:
case BuiltinType::SatLongFract:
return Target.getLongFractScale();
case BuiltinType::UShortFract:
case BuiltinType::SatUShortFract:
return Target.getUnsignedShortFractScale();
case BuiltinType::UFract:
case BuiltinType::SatUFract:
return Target.getUnsignedFractScale();
case BuiltinType::ULongFract:
case BuiltinType::SatULongFract:
return Target.getUnsignedLongFractScale();
}
}
unsigned char ASTContext::getFixedPointIBits(QualType Ty) const {
assert(Ty->isFixedPointType());
const TargetInfo &Target = getTargetInfo();
switch (Ty->castAs<BuiltinType>()->getKind()) {
default:
llvm_unreachable("Not a fixed point type!");
case BuiltinType::ShortAccum:
case BuiltinType::SatShortAccum:
return Target.getShortAccumIBits();
case BuiltinType::Accum:
case BuiltinType::SatAccum:
return Target.getAccumIBits();
case BuiltinType::LongAccum:
case BuiltinType::SatLongAccum:
return Target.getLongAccumIBits();
case BuiltinType::UShortAccum:
case BuiltinType::SatUShortAccum:
return Target.getUnsignedShortAccumIBits();
case BuiltinType::UAccum:
case BuiltinType::SatUAccum:
return Target.getUnsignedAccumIBits();
case BuiltinType::ULongAccum:
case BuiltinType::SatULongAccum:
return Target.getUnsignedLongAccumIBits();
case BuiltinType::ShortFract:
case BuiltinType::SatShortFract:
case BuiltinType::Fract:
case BuiltinType::SatFract:
case BuiltinType::LongFract:
case BuiltinType::SatLongFract:
case BuiltinType::UShortFract:
case BuiltinType::SatUShortFract:
case BuiltinType::UFract:
case BuiltinType::SatUFract:
case BuiltinType::ULongFract:
case BuiltinType::SatULongFract:
return 0;
}
}
FixedPointSemantics ASTContext::getFixedPointSemantics(QualType Ty) const {
assert((Ty->isFixedPointType() || Ty->isIntegerType()) &&
"Can only get the fixed point semantics for a "
"fixed point or integer type.");
if (Ty->isIntegerType())
return FixedPointSemantics::GetIntegerSemantics(getIntWidth(Ty),
Ty->isSignedIntegerType());
bool isSigned = Ty->isSignedFixedPointType();
return FixedPointSemantics(
static_cast<unsigned>(getTypeSize(Ty)), getFixedPointScale(Ty), isSigned,
Ty->isSaturatedFixedPointType(),
!isSigned && getTargetInfo().doUnsignedFixedPointTypesHavePadding());
}
APFixedPoint ASTContext::getFixedPointMax(QualType Ty) const {
assert(Ty->isFixedPointType());
return APFixedPoint::getMax(getFixedPointSemantics(Ty));
}
APFixedPoint ASTContext::getFixedPointMin(QualType Ty) const {
assert(Ty->isFixedPointType());
return APFixedPoint::getMin(getFixedPointSemantics(Ty));
}
QualType ASTContext::getCorrespondingSignedFixedPointType(QualType Ty) const {
assert(Ty->isUnsignedFixedPointType() &&
"Expected unsigned fixed point type");
switch (Ty->castAs<BuiltinType>()->getKind()) {
case BuiltinType::UShortAccum:
return ShortAccumTy;
case BuiltinType::UAccum:
return AccumTy;
case BuiltinType::ULongAccum:
return LongAccumTy;
case BuiltinType::SatUShortAccum:
return SatShortAccumTy;
case BuiltinType::SatUAccum:
return SatAccumTy;
case BuiltinType::SatULongAccum:
return SatLongAccumTy;
case BuiltinType::UShortFract:
return ShortFractTy;
case BuiltinType::UFract:
return FractTy;
case BuiltinType::ULongFract:
return LongFractTy;
case BuiltinType::SatUShortFract:
return SatShortFractTy;
case BuiltinType::SatUFract:
return SatFractTy;
case BuiltinType::SatULongFract:
return SatLongFractTy;
default:
llvm_unreachable("Unexpected unsigned fixed point type");
}
}
ParsedTargetAttr
ASTContext::filterFunctionTargetAttrs(const TargetAttr *TD) const {
assert(TD != nullptr);
ParsedTargetAttr ParsedAttr = TD->parse();
ParsedAttr.Features.erase(
llvm::remove_if(ParsedAttr.Features,
[&](const std::string &Feat) {
return !Target->isValidFeatureName(
StringRef{Feat}.substr(1));
}),
ParsedAttr.Features.end());
return ParsedAttr;
}
void ASTContext::getFunctionFeatureMap(llvm::StringMap<bool> &FeatureMap,
const FunctionDecl *FD) const {
if (FD)
getFunctionFeatureMap(FeatureMap, GlobalDecl().getWithDecl(FD));
else
Target->initFeatureMap(FeatureMap, getDiagnostics(),
Target->getTargetOpts().CPU,
Target->getTargetOpts().Features);
}
// Fills in the supplied string map with the set of target features for the
// passed in function.
void ASTContext::getFunctionFeatureMap(llvm::StringMap<bool> &FeatureMap,
GlobalDecl GD) const {
StringRef TargetCPU = Target->getTargetOpts().CPU;
const FunctionDecl *FD = GD.getDecl()->getAsFunction();
if (const auto *TD = FD->getAttr<TargetAttr>()) {
ParsedTargetAttr ParsedAttr = filterFunctionTargetAttrs(TD);
// Make a copy of the features as passed on the command line into the
// beginning of the additional features from the function to override.
ParsedAttr.Features.insert(
ParsedAttr.Features.begin(),
Target->getTargetOpts().FeaturesAsWritten.begin(),
Target->getTargetOpts().FeaturesAsWritten.end());
if (ParsedAttr.Architecture != "" &&
Target->isValidCPUName(ParsedAttr.Architecture))
TargetCPU = ParsedAttr.Architecture;
// Now populate the feature map, first with the TargetCPU which is either
// the default or a new one from the target attribute string. Then we'll use
// the passed in features (FeaturesAsWritten) along with the new ones from
// the attribute.
Target->initFeatureMap(FeatureMap, getDiagnostics(), TargetCPU,
ParsedAttr.Features);
} else if (const auto *SD = FD->getAttr<CPUSpecificAttr>()) {
llvm::SmallVector<StringRef, 32> FeaturesTmp;
Target->getCPUSpecificCPUDispatchFeatures(
SD->getCPUName(GD.getMultiVersionIndex())->getName(), FeaturesTmp);
std::vector<std::string> Features(FeaturesTmp.begin(), FeaturesTmp.end());
Target->initFeatureMap(FeatureMap, getDiagnostics(), TargetCPU, Features);
} else {
Target->initFeatureMap(FeatureMap, getDiagnostics(), TargetCPU,
Target->getTargetOpts().Features);
}
}<|fim▁end|> |
// If the pipe element type isn't canonical, this won't be a canonical type |
<|file_name|>t145_longjmp3.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from runtest import TestBase
class TestCase(TestBase):
def __init__(self):
TestBase.__init__(self, 'longjmp3', """
# DURATION TID FUNCTION
1.164 us [ 4107] | __monstartup();
0.657 us [ 4107] | __cxa_atexit();
[ 4107] | main() {
0.705 us [ 4107] | _setjmp() = 0;
1.823 us [ 4107] | getpid();
0.182 us [ 4107] | _setjmp() = 0;
[ 4107] | foo() {
[ 4107] | __longjmp_chk(1) {
8.790 us [ 4107] | } = 1; /* _setjmp */
0.540 us [ 4107] | getpid();
[ 4107] | bar() {
[ 4107] | baz() {
[ 4107] | __longjmp_chk(2) {<|fim▁hole|> 1.282 us [ 4107] | } = 2; /* _setjmp */
0.540 us [ 4107] | getpid();
[ 4107] | foo() {
[ 4107] | __longjmp_chk(3) {
0.578 us [ 4107] | } = 3; /* _setjmp */
[ 4107] | bar() {
[ 4107] | baz() {
[ 4107] | __longjmp_chk(4) {
0.642 us [ 4107] | } = 4; /* _setjmp */
18.019 us [ 4107] | } /* main */
""")
def build(self, name, cflags='', ldflags=''):
return TestBase.build(self, name, cflags + ' -D_FORTIFY_SOURCE=2', ldflags)
def runcmd(self):
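        # -A records the 2nd argument of the longjmp-family calls and
        # -R records the return value of the setjmp-family calls shown above.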
args = '-A .?longjmp@arg2 -R .?setjmp@retval'
return '%s %s %s' % (TestBase.ftrace, args, 't-' + self.name)
def fixup(self, cflags, result):
return result.replace('__longjmp_chk', "longjmp")<|fim▁end|> | |
<|file_name|>reader.js<|end_file_name|><|fim▁begin|>module.exports = {
readFiles: readFiles
};
var filewalker = require('filewalker');<|fim▁hole|> return new Promise(function(resolve, reject) {
var files = [];
filewalker(directory)
.on('file', function(file) {
if (file.endsWith(fileSuffix)) {
files.push(file);
}
})
.on('done', function() {
var addLazyReaderCurried = _.curry(addLazyReader);
resolve(_.fmap(addLazyReaderCurried(directory), files));
})
.walk();
});
}
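// Wraps a file path in a lazy handle: the file is only read from disk when
// getContents() is invoked.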
function addLazyReader(directory, file) {
return {
name: directory+file,
getContents: function() {
return fs.readFileSync(directory+file, 'utf8');
}
};
}<|fim▁end|> | var _ = require('kling/kling.js');
var fs = require('fs');
function readFiles(directory, fileSuffix) { |
<|file_name|>create_user.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from django.contrib.auth.models import UserManager
from django.utils.timezone import now as timezone_now
from zerver.models import UserProfile, Recipient, Subscription, Realm, Stream
import base64
import ujson
import os
import string
from six.moves import range
from typing import Optional, Text
def random_api_key():
# type: () -> Text
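    # 24 random bytes encode to a 32-character base64 key; the two random
    # altchars replace '+' and '/' in the output alphabet.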
choices = string.ascii_letters + string.digits
altchars = ''.join([choices[ord(os.urandom(1)) % 62] for _ in range(2)]).encode("utf-8")
return base64.b64encode(os.urandom(24), altchars=altchars).decode("utf-8")
# create_user_profile is based on Django's User.objects.create_user,
# except that we don't save to the database so it can be used in
# bulk_creates
#
# Only use this for bulk_create -- for normal usage one should use
# create_user (below) which will also make the Subscription and
# Recipient objects
def create_user_profile(realm, email, password, active, bot_type, full_name,
short_name, bot_owner, is_mirror_dummy, tos_version,
timezone, tutorial_status=UserProfile.TUTORIAL_WAITING,
enter_sends=False):
# type: (Realm, Text, Optional[Text], bool, Optional[int], Text, Text, Optional[UserProfile], bool, Text, Optional[Text], Optional[Text], bool) -> UserProfile
now = timezone_now()
email = UserManager.normalize_email(email)
user_profile = UserProfile(email=email, is_staff=False, is_active=active,
full_name=full_name, short_name=short_name,
last_login=now, date_joined=now, realm=realm,
pointer=-1, is_bot=bool(bot_type), bot_type=bot_type,
bot_owner=bot_owner, is_mirror_dummy=is_mirror_dummy,
tos_version=tos_version, timezone=timezone,
tutorial_status=tutorial_status,
enter_sends=enter_sends,
onboarding_steps=ujson.dumps([]),
default_language=realm.default_language)
if bot_type or not active:
password = None
user_profile.set_password(password)
user_profile.api_key = random_api_key()
return user_profile
def create_user(email, password, realm, full_name, short_name,
active=True, bot_type=None, bot_owner=None, tos_version=None,
timezone=u"", avatar_source=UserProfile.AVATAR_FROM_GRAVATAR,
is_mirror_dummy=False, default_sending_stream=None,
default_events_register_stream=None,
default_all_public_streams=None, user_profile_id=None):
# type: (Text, Optional[Text], Realm, Text, Text, bool, Optional[int], Optional[UserProfile], Optional[Text], Text, Text, bool, Optional[Stream], Optional[Stream], Optional[bool], Optional[int]) -> UserProfile
user_profile = create_user_profile(realm, email, password, active, bot_type,
full_name, short_name, bot_owner,
is_mirror_dummy, tos_version, timezone)
user_profile.avatar_source = avatar_source
user_profile.timezone = timezone
user_profile.default_sending_stream = default_sending_stream
user_profile.default_events_register_stream = default_events_register_stream
# Allow the ORM default to be used if not provided<|fim▁hole|> user_profile.id = user_profile_id
user_profile.save()
recipient = Recipient.objects.create(type_id=user_profile.id,
type=Recipient.PERSONAL)
Subscription.objects.create(user_profile=user_profile, recipient=recipient)
return user_profile<|fim▁end|> | if default_all_public_streams is not None:
user_profile.default_all_public_streams = default_all_public_streams
if user_profile_id is not None: |
<|file_name|>61_replace_step_location_with_plan_id.go<|end_file_name|><|fim▁begin|>package migrations
import "github.com/BurntSushi/migration"
func ReplaceStepLocationWithPlanID(tx migration.LimitedTx) error {
_, err := tx.Exec(`
ALTER TABLE containers DROP COLUMN step_location;
`)
if err != nil {
return err
}
_, err = tx.Exec(`
ALTER TABLE containers ADD COLUMN plan_id text;
`)<|fim▁hole|>}<|fim▁end|> |
return err |
<|file_name|>floatingrendererwindow.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# codimension - graphics python two-way code editor and analyzer
# Copyright (C) 2010-2019 Sergey Satskiy <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
<|fim▁hole|>
from utils.globals import GlobalData
from .qt import (QMainWindow, QTimer, QStackedWidget, QLabel, QVBoxLayout,
QWidget, QPalette, Qt, QFrame)
from .mainwindowtabwidgetbase import MainWindowTabWidgetBase
class DetachedRendererWindow(QMainWindow):
"""Detached flow ui/markdown renderer window"""
def __init__(self, settings, em):
QMainWindow.__init__(self, None)
self.settings = settings
self.em = em
self.__widgets = QStackedWidget(self)
self.__widgets.setContentsMargins(1, 1, 1, 1)
self.__noRenderLabel = QLabel('\nNo rendering available for the current tab')
self.__noRenderLabel.setFrameShape(QFrame.StyledPanel)
self.__noRenderLabel.setAlignment(Qt.AlignHCenter)
self.__noRenderLabel.setAutoFillBackground(True)
font = self.__noRenderLabel.font()
font.setPointSize(font.pointSize() + 4)
self.__noRenderLabel.setFont(font)
palette = self.__noRenderLabel.palette()
palette.setColor(QPalette.Background,
GlobalData().skin['nolexerPaper'])
self.__noRenderLabel.setPalette(palette)
self.__widgets.addWidget(self.__noRenderLabel)
self.setCentralWidget(self.__widgets)
self.__ideClosing = False
self.__initialisation = True
# The size restore is done twice to avoid huge flickering
# This one is approximate, the one in restoreWindowPosition()
# is precise
screenSize = GlobalData().application.desktop().screenGeometry()
if screenSize.width() != settings['screenwidth'] or \
screenSize.height() != settings['screenheight']:
# The screen resolution has been changed, use the default pos
defXPos, defYpos, \
defWidth, defHeight = settings.getDefaultRendererWindowGeometry()
self.resize(defWidth, defHeight)
self.move(defXPos, defYpos)
else:
# No changes in the screen resolution
self.resize(settings['rendererwidth'], settings['rendererheight'])
self.move(settings['rendererxpos'] + settings['xdelta'],
settings['rendererypos'] + settings['ydelta'])
def closeEvent(self, event):
"""Renderer is closed: explicit close via X or IDE is closed"""
if not self.__ideClosing:
# Update the IDE button and memorize the setting
self.settings['floatingRenderer'] = not self.settings['floatingRenderer']
GlobalData().mainWindow.floatingRendererButton.setChecked(False)
self.hide()
return
def __registerWidget(self, widget):
"""Registers one widget basing on info from the editors manager"""
renderLayout = QVBoxLayout()
renderLayout.setContentsMargins(0, 0, 0, 0)
renderLayout.setSpacing(0)
for wid in widget.popRenderingWidgets():
renderLayout.addWidget(wid)
renderWidget = QWidget()
renderWidget.setLayout(renderLayout)
renderWidget.setObjectName(widget.getUUID())
self.__widgets.addWidget(renderWidget)
def show(self):
"""Overwritten show method"""
self.__connectSignals()
# grab the widgets
for index in range(self.em.count()):
widget = self.em.widget(index)
if widget.getType() == MainWindowTabWidgetBase.PlainTextEditor:
self.__registerWidget(widget)
self.updateCurrent()
QMainWindow.show(self)
if self.__initialisation:
self.restoreWindowPosition()
def hide(self):
"""Overwritten hide method"""
QMainWindow.hide(self)
self.__disconnectSignals()
# return widgets
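        # Each stacked page holds widgets borrowed from an editor tab; match the
        # page's objectName (the tab UUID) to hand them back to their owner.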
while self.__widgets.count() > 1:
widget = self.__widgets.widget(1)
uuid = widget.objectName()
toBeReturned = []
layout = widget.layout()
for index in range(layout.count()):
w = layout.itemAt(index).widget()
if w is not None:
toBeReturned.append(w)
for w in toBeReturned:
layout.removeWidget(w)
self.__widgets.removeWidget(widget)
for index in range(self.em.count()):
widget = self.em.widget(index)
if widget.getUUID() == uuid:
widget.pushRenderingWidgets(toBeReturned)
break
def __connectSignals(self):
"""Connects to all the required sugnals"""
self.em.sigTabClosed.connect(self.__onTabClosed)
self.em.currentChanged.connect(self.__onCurrentTabChanged)
self.em.sigTextEditorTabAdded.connect(self.__onTextEditorTabAdded)
self.em.sigFileTypeChanged.connect(self.__onFileTypeChanged)
self.em.sigFileUpdated.connect(self.__onFileUpdated)
self.em.sigBufferSavedAs.connect(self.__onBufferSavedAs)
def __disconnectSignals(self):
"""Disconnects the signals"""
self.em.sigBufferSavedAs.disconnect(self.__onBufferSavedAs)
self.em.sigFileUpdated.disconnect(self.__onFileUpdated)
self.em.sigTextEditorTabAdded.disconnect(self.__onTextEditorTabAdded)
self.em.currentChanged.disconnect(self.__onCurrentTabChanged)
self.em.sigTabClosed.disconnect(self.__onTabClosed)
self.em.sigFileTypeChanged.disconnect(self.__onFileTypeChanged)
def resizeEvent(self, resizeEv):
"""Triggered when the window is resized"""
del resizeEv # unused argument
QTimer.singleShot(1, self.__resizeEventdelayed)
def __resizeEventdelayed(self):
"""Memorizes the new window size"""
if self.__initialisation or self.__guessMaximized():
return
self.settings['rendererwidth'] = self.width()
self.settings['rendererheight'] = self.height()
def moveEvent(self, moveEv):
"""Triggered when the window is moved"""
del moveEv # unused argument
QTimer.singleShot(1, self.__moveEventDelayed)
def __moveEventDelayed(self):
"""Memorizes the new window position"""
if not self.__initialisation and not self.__guessMaximized():
self.settings['rendererxpos'] = self.x()
self.settings['rendererypos'] = self.y()
def __guessMaximized(self):
"""True if the window is maximized"""
# Ugly but I don't see any better way.
# It is impossible to catch the case when the main window is maximized.
# Especially when networked XServer is used (like xming)
# So, make a wild guess instead and do not save the status if
# maximized.
availGeom = GlobalData().application.desktop().availableGeometry()
if self.width() + abs(self.settings['xdelta']) > availGeom.width() or \
self.height() + abs(self.settings['ydelta']) > availGeom.height():
return True
return False
def restoreWindowPosition(self):
"""Makes sure that the window frame delta is proper"""
screenSize = GlobalData().application.desktop().screenGeometry()
if screenSize.width() != self.settings['screenwidth'] or \
screenSize.height() != self.settings['screenheight']:
# The screen resolution has been changed, save the new values
self.settings['screenwidth'] = screenSize.width()
self.settings['screenheight'] = screenSize.height()
self.settings['xdelta'] = self.settings['xpos'] - self.x()
self.settings['ydelta'] = self.settings['ypos'] - self.y()
self.settings['rendererxpos'] = self.x()
self.settings['rendererypos'] = self.y()
else:
# Screen resolution is the same as before
if self.settings['rendererxpos'] != self.x() or \
self.settings['rendererypos'] != self.y():
# The saved delta is incorrect, update it
self.settings['xdelta'] = self.settings['rendererxpos'] - self.x() + \
self.settings['xdelta']
self.settings['ydelta'] = self.settings['rendererypos'] - self.y() + \
self.settings['ydelta']
self.settings['rendererxpos'] = self.x()
self.settings['rendererypos'] = self.y()
self.__initialisation = False
def close(self):
"""Overwritten close method. Called when the IDE is closed"""
self.__ideClosing = True
while self.__widgets.count() > 0:
self.__widgets.removeWidget(self.__widgets.widget(0))
QMainWindow.close(self)
def __onTabClosed(self, tabUUID):
"""Triggered when the editor tab is closed"""
for index in range(self.__widgets.count()):
if self.__widgets.widget(index).objectName() == tabUUID:
self.__widgets.removeWidget(self.__widgets.widget(index))
self.updateCurrent()
return
def __onCurrentTabChanged(self, index):
"""Triggered when the current tab is changed"""
        del index  # unused argument
self.updateCurrent()
def __onTextEditorTabAdded(self, index):
"""Triggered when a new text editor window was added"""
widget = self.em.widget(index)
if widget.getType() == MainWindowTabWidgetBase.PlainTextEditor:
self.__registerWidget(widget)
self.updateCurrent()
def __onFileTypeChanged(self, fname, uuid, mime):
"""Triggered when a file type is changed"""
for index in range(self.__widgets.count()):
if self.__widgets.widget(index).objectName() == uuid:
self.updateCurrent()
return
def __onBufferSavedAs(self, fname, uuid):
"""Triggered when the file was saved under another name"""
for index in range(self.__widgets.count()):
if self.__widgets.widget(index).objectName() == uuid:
self.updateCurrent()
return
def __onFileUpdated(self, fname, uuid):
"""Triggered when the file is overwritten"""
for index in range(self.__widgets.count()):
if self.__widgets.widget(index).objectName() == uuid:
self.updateCurrent()
return
def updateCurrent(self):
"""Updates the window title and switches to the proper widget"""
widget = self.em.widget(self.em.currentIndex())
if widget is None:
            # May happen when there are no widgets in the em
return
widgetType = widget.getType()
if widgetType == MainWindowTabWidgetBase.PlainTextEditor:
editor = widget.getEditor()
isPython = editor.isPythonBuffer()
isMarkdown = editor.isMarkdownBuffer()
if isPython or isMarkdown:
title = 'Floating renderer: '
if isPython:
title += 'python buffer ('
else:
title += 'markdown buffer ('
title += widget.getShortName() + ')'
self.setWindowTitle(title)
uuid = widget.getUUID()
for index in range(self.__widgets.count()):
if self.__widgets.widget(index).objectName() == uuid:
self.__widgets.setCurrentIndex(index)
break
return
# Not python, not markdown, i.e. no renderer
self.__widgets.setCurrentIndex(0)
self.setWindowTitle('Floating renderer: no renderer for the current tab')<|fim▁end|> | """Detached renderer window"""
|
<|file_name|>material.ts<|end_file_name|><|fim▁begin|>import { css, ThemedStyledProps } from 'styled-components';
import { ripple } from '../../utils/animations';
import { Theme } from '../../themes/default';
import { StyleProps } from './default';
export const style = ({
theme,
main,
}: ThemedStyledProps<StyleProps, Theme>) => css`
display: flex;
flex: 0 0 1;
padding-left: 1px;
background-color: ${theme.base01};
width: 100%;
overflow: hidden;
${!main &&
`
border-top: 1px solid ${theme.base01};
border-bottom: 1px solid ${theme.base02};
`}
> div {
display: flex;
align-items: flex-end;
flex-wrap: nowrap;
button {
background-color: ${theme.base01};
color: ${theme.base07};
min-height: 30px;
padding: 0 2em;
${main && 'text-transform: uppercase;'}
cursor: pointer;<|fim▁hole|> text-align: center;
overflow: hidden;
outline: 0;
transition: all 0.5s;
&:hover,
&:focus {
border-bottom: 2px solid ${theme.base03};
color: ${theme.base04};
}
&.collapsed {
display: none;
}
${ripple(theme)}
}
> [data-selected] {
border-bottom: 2px solid ${theme.base0D};
}
}
> div:nth-child(2) {
display: block;
z-index: 10;
button {
display: block;
background: ${theme.base00};
width: 100%;
}
}
`;<|fim▁end|> | border: none;
border-bottom: 2px solid transparent; |
<|file_name|>lachas.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | showWord(["n. "," aktivite pou al chase bèt sovaj pou vyann nan osnon pou po. Mwen pa janm al lachas, paske mwen pa gen kè pou mwen touye okenn bèt."
]) |
<|file_name|>Counter.py<|end_file_name|><|fim▁begin|>"""Counter.py
Part of the AQuA Cesium Controller software package
author=Martin Lichtman
created=2013-10-19
modified>=2015-05-11
This file holds everything to model a National Instruments DAQmx counter.
It communicates with LabView via the higher-up LabView(Instrument) class.
Saving of returned data is handled in the LabView class.
"""
from __future__ import division
__author__ = 'Martin Lichtman'
import logging
import numpy as np
from atom.api import Str, Float, Typed, Member, Bool, Int, List
from cs_instruments import Instrument
from instrument_property import Prop, ListProp
from analysis import AnalysisWithFigure
from sklearn import mixture
from scipy.optimize import curve_fit
from scipy.special import erf
import matplotlib.gridspec as gridspec
import time
logger = logging.getLogger(__name__)
gs = gridspec.GridSpec(2, 2)
gmix = mixture.GaussianMixture(n_components=2)
class Counters(Instrument):
version = '2015.05.11'
counters = Typed(ListProp)
def __init__(self, name, experiment, description=''):
super(Counters, self).__init__(name, experiment, description)
# start with a blank list of counters
self.counters = ListProp('counters', experiment, listElementType=Counter, listElementName='counter')
self.properties += ['version', 'counters']
class Counter(Prop):
"""Each individual counter has a field for the signal source, clock source, and clock rate (in Hz,
used only for internal clocking).
"""
counter_source = Str()
clock_source = Str()
clock_rate = Float()
def __init__(self, name, experiment, description=''):
super(Counter, self).__init__(name, experiment, description)
self.properties += ['counter_source', 'clock_source', 'clock_rate']
class CounterAnalysis(AnalysisWithFigure):
counter_array = Member()
binned_array = Member()
meas_analysis_path = Str()
meas_data_path = Str()
iter_analysis_path = Str()
update_lock = Bool(False)
iterationonly = Bool(False)
enable = Bool()
drops = Int(3)
bins = Int(25)
shots = Int(2)
ROIs = List([0])
graph_roi = Int(0)
def __init__(self, name, experiment, description=''):
super(CounterAnalysis, self).__init__(name, experiment, description)
self.meas_analysis_path = 'analysis/counter_data'
self.meas_data_path = 'data/counter/data'
self.iter_analysis_path = 'shotData'
self.properties += ['enable', 'drops', 'bins', 'shots', 'graph_roi','draw_fig','iterationonly']
def preIteration(self, iterationResults, experimentResults):
self.counter_array = []
self.binned_array = None
def format_data(self, array):
"""Formats raw 2D counter data into the required 4D format.
        Formats raw 2D counter data with implicit structure:
[ # counter 0
[ dropped_bins shot_time_series dropped_bins shot_time_series ... ],
# counter 1
[ dropped_bins shot_time_series dropped_bins shot_time_series ... ]
]
        into the 4D format expected by the subsequent analyses:
[ # measurements, can have different lengths run-to-run
[ # shots array, fixed size
[ # roi list, shot 0
[ time_series_roi_0 ],
[ time_series_roi_1 ],
...
],
[ # roi list, shot 1
[ time_series_roi_0 ],
[ time_series_roi_1 ],
...
],
...
],
...
]
"""
rois, bins = array.shape[:2]
bins_per_shot = self.drops + self.bins # self.bins is data bins per shot
# calculate the number of shots dynamically
num_shots = int(bins/(bins_per_shot))
# calculate the number of measurements contained in the raw data
# there may be extra shots if we get branching implemented
num_meas = num_shots//self.shots
        # build a mask for selecting the valid data bins in each shot
shot_mask = ([False]*self.drops + [True]*self.bins)
good_shots = self.shots*num_meas
# mask for the roi
        ctr_mask = np.array(shot_mask*good_shots + [False]*len(shot_mask)*(num_shots-good_shots), dtype='bool')
        # apply the mask and partially reshape the data
array = array[:, ctr_mask].reshape((rois, num_meas, self.shots, self.bins))
array = array.swapaxes(0, 1) # swap rois and measurement axes
array = array.swapaxes(1, 2) # swap rois and shots axes
return array
def analyzeMeasurement(self, measurementResults, iterationResults, experimentResults):
if self.enable:
'''# number of shots is hard coded right now
bins_per_shot = self.drops + self.bins
num_shots = int(len(self.counter_array[-1])/bins_per_shot)
#if self.draw_fig:
# print "Number of shots: {}".format(num_shots)
# print "Bins per shot: {}".format(bins_per_shot)
# print "Length of counter array: {}".format(int(len(self.counter_array[-1])))
# counter array is appended every measurement so the counter hists can be calculated
# updated every cycle
# WARNING: counter_array only works with a single counter right now
self.binned_array = np.array([
self.counter_array[:, s*bins_per_shot + self.drops:(s+1)*bins_per_shot].sum(1)
for s in range(num_shots)
])'''
# MFE 2018/01: this analysis has been generalized such that multiple sub measurements can occur
# in the same traditional measurement
array = measurementResults[self.meas_data_path][()]
try:
# package data into an array with shape (sub measurements, shots, counters, time series data)
array = self.format_data(array)
                # flatten the sub_measurements by converting top level to normal list and concatenating
self.counter_array += list(array)
except ValueError:
errmsg = "Error retrieving counter data. Offending counter data shape: {}"
logger.exception(errmsg.format(array.shape))
except:
logger.exception('Unhandled counter data exception')
# write this cycle's data into hdf5 file so that the threshold analysis can read it
# when multiple counter support is enabled, the ROIs parameter will hold the count
# Note the constant 1 is for the roi column parameter, all counters get entered in a single row
n_meas, n_shots, n_rois, bins = array.shape
sum_array = array.sum(axis=3).reshape((n_meas, n_shots, n_rois, 1))
measurementResults[self.meas_analysis_path] = sum_array
# put the sum data in the expected format for display
if self.binned_array is None:
self.binned_array = np.array([sum_array.reshape((n_meas, n_shots, n_rois))])
else:
self.binned_array = np.concatenate((
self.binned_array,
[sum_array.reshape((n_meas, n_shots, n_rois))]
))
if not self.iterationonly:
self.updateFigure()
def analyzeIteration(self, iterationResults, experimentResults):
if self.enable:
# recalculate binned_array to get rid of cut data
# iterationResults[self.iter_analysis_path] = self.binned_array
meas = map(int, iterationResults['measurements'].keys())
meas.sort()
path = 'measurements/{}/' + self.meas_analysis_path
try:
res = np.array([iterationResults[path.format(m)] for m in meas])
except KeyError:
# I was having problem with the file maybe not being ready
logger.warning("Issue reading hdf5 file. Waiting then repeating.")
time.sleep(0.1) # try again in a little
res = []
for m in meas:
try:
res.append(iterationResults[path.format(m)])
except KeyError:
msg = (
"Reading from hdf5 file during measurement `{}`"
" failed."
).format(m)
logger.exception(msg)
res = np.array(res)
total_meas = len(self.binned_array)
# drop superfluous ROI_columns dimension
self.binned_array = res.reshape(res.shape[:4])
logger.info('cut data: {}'.format(total_meas -
len(self.binned_array)))
iterationResults[self.iter_analysis_path] = self.binned_array
if self.iterationonly:
self.updateFigure()
return
def updateFigure(self):
if self.draw_fig:
if self.enable:
if not self.update_lock:
try:
self.update_lock = True
# There are two figures in an AnalysisWithFigure. Draw to the offscreen figure.
fig = self.backFigure
# Clear figure.
fig.clf()
# make one plot
# Single shot
ax = fig.add_subplot(221)
# PREVIOUS HYBRID VERSION. COMMENTING OUT IN CASE IT IS NEEDED.
# Drop first 3 bins
'''bins_per_shot = self.drops + self.bins
num_shots = int(len(self.counter_array[-1])/bins_per_shot)
dropped_array = self.counter_array[:, self.drops:self.drops+self.bins]
for i in range(1,num_shots):
dropped_array=np.append(dropped_array,self.counter_array[:, self.drops*(i+1)+self.bins*i:self.drops*i+self.bins*(i+1)],axis=1)
ax.bar(np.arange(len(dropped_array[-1])), dropped_array[-1])
ax.set_title('Shot: {}'.format(len(self.counter_array)))#Singlt shot
ax = fig.add_subplot(222)
#ax.bar(np.arange(len(self.counter_array[-1, self.drops:])), self.counter_array[:, self.drops:].mean(0))
ax.bar(np.arange(len(dropped_array[-1])), dropped_array.mean(0))
ax.set_title('Iteration average') #Average over all shots/iteration
ax = fig.add_subplot(223)
ax.plot(self.binned_array.transpose(),'.')
#ax.legend(['shot 1', 'shot 2'], fontsize='small', loc=0)'''
#merge conflict
# Average over all shots/iteration
ax2 = fig.add_subplot(222)
ptr = 0
ca = np.array(self.counter_array)
for s in range(self.shots):
xs = np.arange(ptr, ptr + self.bins)
ax.bar(xs, ca[-1, s, self.graph_roi])
ax2.bar(xs, ca[:, s, self.graph_roi].mean(0))
ptr += max(1.05*self.bins, self.bins+1)
ax.set_title('Measurement: {}'.format(len(ca)))
ax2.set_title('Iteration average')
# time series of sum data
ax = fig.add_subplot(223)
# histogram of sum data
ax2 = fig.add_subplot(224)
n_shots = self.binned_array.shape[2]
legends = []
for roi in range(self.binned_array.shape[3]):
for s in range(n_shots):
ax.plot(self.binned_array[:, :, s, roi].flatten(), '.')
                                # bins = max + 2 takes care of the case where all entries are 0, which causes
# an error in the plot
ax2.hist(
self.binned_array[:, :, s, roi].flatten(),
bins=80,
range=(1.0,self.binned_array[:, :, s, roi].flatten().max()),
histtype='step'
)
legends.append("c{}_s{}".format(roi, s))
#end merge conflict
ax.set_title('Binned Data')
ax2.legend(legends, fontsize='small', loc=0)
super(CounterAnalysis, self).updateFigure()
except:
logger.exception('Problem in CounterAnalysis.updateFigure()')
finally:
self.update_lock = False
class CounterHistogramAnalysis(AnalysisWithFigure):
'''
Takes in shot data, generates histograms, fits histograms,
and then plots various attributes as a function of iteration along with histograms with fit overplotted.
'''
# =====================Fit Functions================= #
def intersection(self, A0,A1,m0,m1,s0,s1):
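        # Added note: x-position where the two fitted Gaussian components cross,
        # i.e. the solution of A0*exp(-(x-m0)^2/(2*s0^2)) = A1*exp(-(x-m1)^2/(2*s1^2));
        # the closed form below assumes s0 != s1.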
return (m1*s0**2-m0*s1**2-np.sqrt(s0**2*s1**2*(m0**2-2*m0*m1+m1**2+2*np.log(A0/A1)*(s1**2-s0**2))))/(s0**2-s1**2)
def area(self,A0,A1,m0,m1,s0,s1):
return np.sqrt(np.pi/2)*(A0*s0+A0*s0*erf(m0/np.sqrt(2)/s0)+A1*s1+A1*s1*erf(m1/np.sqrt(2)/s1))
# Normed Overlap for arbitrary cut point
def overlap(self,xc,A0,A1,m0,m1,s0,s1):
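        # Added note: fraction of the total fitted area lying on the "wrong" side of
        # the cut xc (component 0 above xc plus component 1 between 0 and xc).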
err0=A0*np.sqrt(np.pi/2)*s0*(1-erf((xc-m0)/np.sqrt(2)/s0))
err1=A1*np.sqrt(np.pi/2)*s1*(erf((xc-m1)/np.sqrt(2)/s1)+erf(m1/np.sqrt(2)/s1))
return (err0+err1)/self.area(A0,A1,m0,m1,s0,s1)
# Relative Fraction in 1
def frac(self, A0,A1,m0,m1,s0,s1):
return 1/(1+A0*s0*(1+erf(m0/np.sqrt(2)/s0))/A1/s1/(1+erf(m1/np.sqrt(2)/s1)))
def dblgauss(self, x,A0,A1,m0,m1,s0,s1):
return A0*np.exp(-(x-m0)**2 / (2*s0**2)) + A1*np.exp(-(x-m1)**2 / (2*s1**2))
# ==================================================== #
update_lock = Bool(False)
enable = Bool(False)
hbins = Int(30)
hist1 = None
hist2 = None
def __init__(self, name, experiment, description=''):
super(CounterHistogramAnalysis, self).__init__(name, experiment, description)
self.properties += ['enable']
def preExperiment(self, experimentResults):
# self.hist_rec = np.recarray(1,)
return
def analyzeMeasurement(self, measurementResults, iterationResults, experimentResults):
return
def analyzeIteration(self, iterationResults, experimentResults):
if self.enable:
histout = [] # amplitudes, edges
# Overlap, fraction, cutoff
fitout = np.recarray(2, [('overlap', float), ('fraction', float), ('cutoff', float)])
optout = np.recarray(2, [('A0', float), ('A1', float), ('m0', float), ('m1', float), ('s0', float), ('s1', float)])
shots = iterationResults['shotData'][()]
# make shot number the primary axis
shots = shots.reshape(-1, *shots.shape[2:]).swapaxes(0, 1)
shots = shots[:, :, 0] # pick out first roi only
hbins = self.hbins
if self.hbins < 0:
hbins = np.arange(np.max(shots)+1)
for i in range(shots.shape[0]):
gmix.fit(np.array([shots[i]]).transpose())
h = np.histogram(shots[i], bins=hbins, normed=True)
histout.append((h[1][:-1], h[0]))
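                # Added note: initial guesses for the double-Gaussian fit; amplitudes from
                # the mixture weights, means from the fitted components, and widths of
                # roughly sqrt(mean), assuming near-Poissonian counting statistics.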
est = [
gmix.weights_.max()/10,
gmix.weights_.min()/10,
gmix.means_.min(),
gmix.means_.max(),
np.sqrt(gmix.means_.min()),
np.sqrt(gmix.means_.max())
]
try:
popt, pcov = curve_fit(self.dblgauss, h[1][1:], h[0], est)
# popt=[A0,A1,m0,m1,s0,s1] : Absolute value
popt = np.abs(popt)
xc = self.intersection(*popt)
if np.isnan(xc):
logger.warning('Bad Cut on Shot: {}'.format(i))
fitout[i] = np.nan, np.nan, np.nan
optout[i] = popt*np.nan
else:
fitout[i] = self.overlap(xc, *popt), self.frac(*popt), xc
optout[i] = popt
except (RuntimeError, RuntimeWarning, TypeError):
logger.exception('Bad fit on Shot: {} '.format(i))
fitout[i] = np.nan, np.nan, np.nan
optout[i] = np.ones(6)*np.nan
iterationResults['analysis/dblGaussPopt'] = optout
iterationResults['analysis/dblGaussFit'] = fitout
logger.info("histout: {}".format(histout))
iterationResults['analysis/histogram'] = np.array(histout,
dtype='uint32')
self.updateFigure(iterationResults)
return
def updateFigure(self, iterationResults):
if self.draw_fig:
if self.enable:
if not self.update_lock:
try:
self.update_lock = True
# There are two figures in an AnalysisWithFigure. Draw to the offscreen figure.
fig = self.backFigure
# Clear figure.
fig.clf()
shots = iterationResults['shotData'][()]
# flatten sub-measurement dimension
# make shot number the primary axis (not measurement)
shots = shots.reshape(-1, *shots.shape[2:]).swapaxes(0, 1)
roi = 0
shots = shots[:, :, roi] # pick out first roi only
popts = iterationResults['analysis/dblGaussPopt']
# fits = iterationResults['analysis/dblGaussFit']
# make one plot
for i in range(len(shots)):
<|fim▁hole|> hbins = np.arange(np.max(shots[i, :])+1)
h = ax.hist(shots[i], bins=hbins, histtype='step', normed=True)
ax.plot(h[1][1:]-.5, self.dblgauss(h[1][1:], *popts[i]))
if i == 1:
ax.set_yscale('log', nonposy='clip')
ax.set_ylim(10**int(-np.log10(len(shots[i]))-1), 1)
else:
ax.set_ylim(0, 1.05*np.max(h[0]))
super(CounterHistogramAnalysis, self).updateFigure()
except:
logger.exception('Problem in CounterHistogramAnalysis.updateFigure().')
finally:
self.update_lock = False<|fim▁end|> | ax = fig.add_subplot('{}1{}'.format(len(shots), 1+i))
hbins = self.hbins
if self.hbins < 0:
# use explicit bins
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import<|fim▁hole|>from .read_only import ReadOnlyCell<|fim▁end|> | # Copyright (c) 2010-2017 openpyxl
from .cell import Cell, WriteOnlyCell |
<|file_name|>test_segmenttail.py<|end_file_name|><|fim▁begin|>'''<|fim▁hole|>
from mock import Mock
from fakelargefile.segmenttail import OverlapSearcher
def test_index_iter_stop():
os = OverlapSearcher("asdf")
segment = Mock()
segment.start = 11
try:
os.index_iter(segment, stop=10).next()
except ValueError:
assert True
else:
assert False<|fim▁end|> | Created on Nov 10, 2014
@author: lauritz
''' |
<|file_name|>helpers.cpp<|end_file_name|><|fim▁begin|>#include <string.h>
#include <sys/time.h>
#include <sys/resource.h>
///////////////////////////////////////
// Function: trim - Remove "\n" //
///////////////////////////////////////
int trim(char *line) {
int end_pos = strlen(line) - 1;
if (line[end_pos] == '\n') {
line[end_pos] = '\0';
return 1;
}
return 0;
}
///////////////////////////////////////////////////////<|fim▁hole|>long get_time_cpu() {
struct rusage ru;
getrusage(RUSAGE_SELF, &ru);
return ru.ru_utime.tv_sec;
}
///////////////////////////////////////////////////////
// Function: get_time - Get time //
///////////////////////////////////////////////////////
long get_time() {
struct timeval tv;
gettimeofday(&tv, NULL);
return tv.tv_sec;
}
/////////////////////////////////////////
// Function: count_digit - count digit //
/////////////////////////////////////////
int count_digit(long num) {
int digit = 1;
int quotient;
quotient = int(num / 10);
while (quotient != 0) {
digit ++;
quotient = int(quotient / 10);
}
return digit;
}
//////////////////////////////////////////////////////
// Function: revcomp - convert to reverse complement//
//////////////////////////////////////////////////////
void revcomp(char* str) {
int i, len;
char c;
len = strlen(str);
for(i=0; i<len/2; i++) {
c = str[i];
str[i] = str[len-i-1];
str[len-i-1] = c;
}
for(i=0; i<len; i++) {
if (str[i] == 'A') {
str[i] = 'T';
} else if (str[i] == 'T') {
str[i] = 'A';
} else if (str[i] == 'G') {
str[i] = 'C';
} else if (str[i] == 'C') {
str[i] = 'G';
}
}
}<|fim▁end|> | // Function: get_time_cpu - Get CPU time //
///////////////////////////////////////////////////////
|
<|file_name|>animation.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! CSS transitions and animations.
use clock_ticks;
use flow::{self, Flow};
use gfx::display_list::OpaqueNode;
use incremental::{self, RestyleDamage};
use layout_task::{LayoutTask, LayoutTaskData};
use msg::constellation_msg::{AnimationState, Msg, PipelineId};
use script::layout_interface::Animation;
use script_traits::ConstellationControlMsg;
use std::collections::HashMap;
use std::collections::hash_map::Entry;
use std::sync::Arc;
use std::sync::mpsc::Sender;
use style::animation::{GetMod, PropertyAnimation};
use style::properties::ComputedValues;
/// Inserts transitions into the queue of running animations as applicable for the given style
/// difference. This is called from the layout worker threads.
pub fn start_transitions_if_applicable(new_animations_sender: &Sender<Animation>,
node: OpaqueNode,
old_style: &ComputedValues,
new_style: &mut ComputedValues) {
for i in 0..new_style.get_animation().transition_property.0.len() {
// Create any property animations, if applicable.
let property_animations = PropertyAnimation::from_transition(i, old_style, new_style);
for property_animation in property_animations {
// Set the property to the initial value.
property_animation.update(new_style, 0.0);
// Kick off the animation.
let now = clock_ticks::precise_time_s();
let animation_style = new_style.get_animation();
let start_time =
now + (animation_style.transition_delay.0.get_mod(i).seconds() as f64);
new_animations_sender.send(Animation {
node: node.id(),
property_animation: property_animation,
start_time: start_time,
end_time: start_time +
(animation_style.transition_duration.0.get_mod(i).seconds() as f64),
}).unwrap()
}
}
}
/// Processes any new animations that were discovered after style recalculation.
/// Also expire any old animations that have completed.
pub fn update_animation_state(rw_data: &mut LayoutTaskData, pipeline_id: PipelineId) {
let mut new_running_animations = Vec::new();
while let Ok(animation) = rw_data.new_animations_receiver.try_recv() {
new_running_animations.push(animation)
}
let mut running_animations_hash = (*rw_data.running_animations).clone();
// Expire old running animations.
let now = clock_ticks::precise_time_s();
let mut keys_to_remove = Vec::new();
for (key, running_animations) in &mut running_animations_hash {
running_animations.retain(|running_animation| {
now < running_animation.end_time
});
if running_animations.len() == 0 {
keys_to_remove.push(*key);
}
}
for key in keys_to_remove {
running_animations_hash.remove(&key).unwrap();
}
// Add new running animations.
for new_running_animation in new_running_animations {
match running_animations_hash.entry(OpaqueNode(new_running_animation.node)) {
Entry::Vacant(entry) => {
entry.insert(vec![new_running_animation]);
}
Entry::Occupied(mut entry) => entry.get_mut().push(new_running_animation),
}
}
rw_data.running_animations = Arc::new(running_animations_hash);<|fim▁hole|> } else {
animation_state = AnimationState::AnimationsPresent;
}
rw_data.constellation_chan
.0
.send(Msg::ChangeRunningAnimationsState(pipeline_id, animation_state))
.unwrap();
}
/// Recalculates style for a set of animations. This does *not* run with the DOM lock held.
pub fn recalc_style_for_animations(flow: &mut Flow,
animations: &HashMap<OpaqueNode, Vec<Animation>>) {
let mut damage = RestyleDamage::empty();
flow.mutate_fragments(&mut |fragment| {
if let Some(ref animations) = animations.get(&OpaqueNode(fragment.node.id())) {
for animation in *animations {
let now = clock_ticks::precise_time_s();
let mut progress = (now - animation.start_time) / animation.duration();
if progress > 1.0 {
progress = 1.0
}
if progress <= 0.0 {
continue
}
let mut new_style = fragment.style.clone();
animation.property_animation.update(&mut *Arc::make_mut(&mut new_style),
progress);
damage.insert(incremental::compute_damage(&Some(fragment.style.clone()),
&new_style));
fragment.style = new_style
}
}
});
let base = flow::mut_base(flow);
base.restyle_damage.insert(damage);
for kid in base.children.iter_mut() {
recalc_style_for_animations(kid, animations)
}
}
/// Handles animation updates.
pub fn tick_all_animations(layout_task: &LayoutTask, rw_data: &mut LayoutTaskData) {
layout_task.tick_animations(rw_data);
layout_task.script_chan
.send(ConstellationControlMsg::TickAllAnimations(layout_task.id))
.unwrap();
}<|fim▁end|> |
let animation_state;
if rw_data.running_animations.is_empty() {
animation_state = AnimationState::NoAnimationsPresent; |
<|file_name|>file-upload.js<|end_file_name|><|fim▁begin|>const mtype = require('@lib/mediatype');
const model = require('@lib/model');
const fs = require('fs');
const mcdir = require('@lib/mcdir');
module.exports = function(r) {
const db = require('../db')(r);
async function addFileToDirectoryInProject(fileToUpload, directoryId, projectId, userId) {
let fileEntry = {
// Create the id for the file being uploaded as this will determine
// the location of the uploaded file in our object store.
id: await r.uuid(),
name: fileToUpload.name,
checksum: fileToUpload.hash,
mediatype: mtype.mediaTypeDescriptionsFromMime(fileToUpload.type),
size: fileToUpload.size,
path: fileToUpload.path,
owner: userId,
parentId: '',
};
let file = await getFileByNameInDirectory(fileToUpload.name, directoryId);
if (!file) {
// There is no existing file with this name in the directory.
fileEntry.usesid = await findMatchingFileIdByChecksum(fileEntry.checksum);
return await loadNewFileIntoDirectory(fileEntry, directoryId, projectId);
} else if (file.checksum !== fileEntry.checksum) {
// There is an existing file in the directory but it has a different
// checksum, so we have to do a little book keeping to make this file
// the current file, set its parent entry back to the existing, as well
// as do the usual steps for uploading a file into the object store.
fileEntry.usesid = await findMatchingFileIdByChecksum(fileEntry.checksum);
return await loadExistingFileIntoDirectory(file, fileEntry, directoryId, projectId);
} else {
// If we are here then there is a file with the same name in the directory
// and it has the same checksum. In that case there is nothing to load
// into the database as the user is attempting to upload an existing
// file (name and checksum match the existing file).
removeFile(fileEntry.path);
return file;
}
}
// getFileByNameInDirectory will return the current file in the directory
// that matches the filename. This is used to construct multiple versions
// of a file, with only one version being the current one.
async function getFileByNameInDirectory(fileName, directoryId) {
let file = await r.table('datadir2datafile').getAll(directoryId, {index: 'datadir_id'})
.eqJoin('datafile_id', r.table('datafiles')).zip()
.filter({name: fileName, current: true});
if (file) {
return file[0]; // query returns an array of 1 entry.
}
return null;
}
async function loadNewFileIntoDirectory(fileEntry, directoryId, projectId) {
await addToObjectStore(fileEntry);
return await createFile(fileEntry, directoryId, projectId);
}
async function loadExistingFileIntoDirectory(parent, fileEntry, directoryId, projectId) {
await addToObjectStore(fileEntry);
fileEntry.parentId = parent.id;
let created = await createFile(fileEntry, directoryId, projectId);
// Parent is no longer the current file
await r.table('datafiles').get(parent.id).update({current: false});
return created;
}
async function addToObjectStore(fileEntry) {
if (fileEntry.usesid === '') {
// This is a brand new file so move into the object store.
await mcdir.moveIntoStore(fileEntry.path, fileEntry.id);
} else {
// There is already a file in the store with the same checksum
// so delete uploaded file.
removeFile(fileEntry.path);<|fim▁hole|> // was uploaded with the name checksum or "" if there is no match.
async function findMatchingFileIdByChecksum(checksum) {
let matching = await r.table('datafiles').getAll(checksum, {index: 'checksum'});
if (matching.length) {
// Multiple entries have been found that have the same checksum. In the database
// a file has a usesid which points to the original entry that was first uploaded
// with the matching checksum. So, we take the first entry in the list, it is
// either this original upload, or a file with a usesid that points to the original
// upload. We can determine this by checking if usesid === "". If it is return the
// id, otherwise return the usesid.
return matching[0].usesid === '' ? matching[0].id : matching[0].usesid;
}
// If we are here then there was no match found, so just return "" to signify no match.
return '';
}
function removeFile(path) {
try {
fs.unlinkSync(path);
} catch (e) {
return false;
}
}
async function createFile(fileEntry, directoryId, projectId) {
let file = new model.DataFile(fileEntry.name, fileEntry.owner);
file.mediatype = fileEntry.mediatype;
file.size = fileEntry.size;
file.uploaded = file.size;
file.checksum = fileEntry.checksum;
file.usesid = fileEntry.usesid;
file.id = fileEntry.id;
file.parent = fileEntry.parentId ? fileEntry.parentId : '';
let created = await db.insert('datafiles', file);
await addFileToDirectory(created.id, directoryId);
await addFileToProject(created.id, projectId);
return created;
}
async function addFileToDirectory(fileId, directoryId) {
const dd2df = new model.DataDir2DataFile(directoryId, fileId);
await r.table('datadir2datafile').insert(dd2df);
}
async function addFileToProject(fileId, projectId) {
let p2df = new model.Project2DataFile(projectId, fileId);
await r.table('project2datafile').insert(p2df);
}
return {
addFileToDirectoryInProject,
createFile,
};
};<|fim▁end|> | }
}
// findMatchingFileIdByChecksum will return the id of the file that |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>def main(i):
ins = i.split("\n")
arr = [int(i) for i in ins]
index =0
iterations = 0
while (index>=0 and index < len(arr)):
arr[index], index = arr[index] + 1, index + arr[index];
iterations = iterations+1
print(iterations)
def main2(i):
ins = i.split("\n")
arr = [int(i) for i in ins]
index =0
iterations = 0
while (index>=0 and index < len(arr)):
if arr[index] >= 3:
arr[index], index = arr[index] - 1, index + arr[index];
else:
arr[index], index = arr[index] + 1, index + arr[index];
iterations = iterations + 1
print(iterations)
i = """0
1
0
1
0
-1
0
1
2
2
-8
-7
-3
1
0
-2
-6
-7
-11
2
-11
0
-18
0
-18
-1
1
-16
-3
-28
-10
-6
-11
-6
-17
-20
-15
-31
-37
-34
-14
-35
-34
-17
-28
-20
-12
-41
-29
-8
-1
-50
-46
-26
-41
-33
-17
0
-28
-52
-38
-28
-29
-60
-23
-60
-55
-28
-43
-57
-66
-35
-48
-71
-25
-6
-27
-47
-77
-68
-21
2
-39
-82
-2
-59
-61
-67
-26
-11
0
-68
-85
-10
-62
-49
-28
-15
-34
-55
-92
-92
-37
-82
-49
-86
-25
-24
-81
-86
-6
-48
-79
-22
-30
-1
-63
-77
-64
-70
-86
-118
-36
-44
-50
-70
-76
-5
-72
-72
-84
-1
-104
-116
-18
-69
-78
-23
-99
-69
-32
-26
-4
-134
-22
-18
-70
-95
-13
-136
-73
-131
-24
-101
-136
-29
-132
-154
-108
-127
-48
-134
-122
-162
-2
-61
-9
-4
-126
-146
-161
-157
-116
-95
-83
-36
-86
-57
-42
-103
-73
1
0
-28
-156
-67
-178
-36
-169
-46
-16
-97
-86
-112
-186
-111
-69
-158
-37
-75
-109
-186
-16
-84
-73
-83
-139
-54
-89
-191
-126
-15
-158
-19
-116
-73
-13
-184
-121
-14
-116
-167
-174
-103
-66
-128
-156
-5
-174
-220
-213
-96
-139
-22
-102
-33
-118
-163
-184
-17
-76
-72
-96
-106
-203
-55
-181
-207
-40
-235
-139
-5
-127
-21
-155
-183
-51
-54
-38
-247
-218
-56
-34
-173
-241
-187
-38
-13
-172
-2
-235
-167
-191
-250
-150
-34
-151
-183
-119
-90
-21
-93
-275
-168
-160
-97
-100
-25
-273
-245
-44
-223
-201
-156
-12
-55
-189
-181
-10
-92
-152
-90
-217
-68
-81
-76
-86
-48
-287
-281
-63
-83
-66
-50
-49
-310
-254
-121
-294
-132
-53
-30
-223
-85
-297
-264
-58
-51
-294
-283
-3
0
-262
-33
-136
-14
-238
-6
-312
-17
-328
-299
-245
-266
-6
-330
-117
-172
-260
-224
-139
-156
-165
-13
-243
-173
-42
-67
-7
-148
-1
-105
-205
-223
-122
-82
-221
-317
-330
-240
-189
-12
-268
-243
-177
-120
-320
-127
-351
-178
-219
-351
-128
-28
-227
-188
-195
-205
-204
-283
-316
-276
-319
-312
-337
-318
-136
-33
-307
-397
-387
-303
-12
-347
-112
-171
-222
-358
-215
-71
-99
-108
-24
-291
-344
-97
-99
-6
-270
-327
-32
-387
-402
-13
-175
-243
-374
-422
-382
-152
-420
-266
-326
-37
-215
-357
-423
-16
-272
-357
-87
-184
-21
-351
-300
-219
-390
-12
-15
-78
-69
-35
-308
-303
-300
-265
-440
-19
-117
-87
-218
-163
-317
-42
-55
-185
-245
-196
-183
-327
-467
-102
-432
-162
-202
-39
-179
-301
-237
-299
-33
-198
-127
-138
-454
-46
-87
-362
-448
-382
-42
-358
-475
-350
-50
-380
-316
-380
-463
-108
-405
-139
-480
-30
-212
-308
-239
-223
-306
-81
-89
-172
-304
-87
-380
-394
-507
-392
-98
-403
-155
-13
-197
-66
-244
-401
-278
-391
-64
-460
-368
-178
-145
-440
-49
-369
-418
-332
-200
-294
-495
-104
-5
-261
-168
-392
-230
-154
-472
-404
-472
-307
-256
-169
-330
-500
-365
-146
-133
-84
-336
-405
-555
-74
-68
-354
-552
-108
-80
-406
-164
-119
-487
-151
-113
-244
-471
-80
-312
-495
-556
-76
-24
-546
-493
-340
-464
-328
-7
-474
-246
-237
-40
-199
-346
-330
-139
-284
-435
-83
-210
-423
-361
-56
-271
-140
-162
-232
-391
-42
-99
-590
2
-271
-101
-114
-117
-310
-502
-287
-319
-323
-362
-551
-439
-533
-183
-404
-401
-343
-36
-89
-454
-128
-611
-6
-619
-110
-389
-290
-270
-375
-283
-472
-65
-195
-129
-61
-548
-151
-74
-612
-156
-371
-42
-447
-565
-394
-550
-476
-592
-262
-96
-529
-395
-204
-491
-167
-186
-527
-508
-245
-455
-552
-672
-338
-269
-104
-240
-77
-303
-227
-453
-126
-294
-572
-8
-527
-361
-438
-457
-513
-560
-442
-649
-321
-123
-52
-166
-320
-301
-570
-684
-325
-515
-547
-52
-221
-488
-182
-618
-109
-497
-167
-288
-358
-334
-313
-288
-102
-409
-143
-204
-216
-681
-512
-245
-301
-35
-262
-239
-405
-682
-715
-438
-314
-179
-611
-667
-622
-511
-463
-370
-338
-434
-580
-637
-201
-213
-357
-443
-382
-315
-483
-399
-624
-318
-226
-652
-638
-743
-330
-647
-146
-138
-698
-511
-173
-663
-333
-564
-160
-239
-243
-91
-65
-468
-256
-197
-210
-575
-420
-715
-681
-454
-226
-226
-339
-473
-737
-62
-149
-351
-770
-313
-216
-491
-511
-269
-628
-391
-429
-110
-199
-409
-516
-7
-433
-405
-792
-685
-615
-287
-385
-627
-527
-426
-626
-164
-767
-794
-115
-483
-323
-371
-679
-772
-808
-2
-16
-459
-749
-569
-139
-7
-555
-161
-613
-230
-771
-825
-241
-579
-710
-73
-790
-653
-655
-394
-218
-711
-467
-774
-694
-664
-357
-29
-121
-643
-742
-388
-633
-440
-755
-581
-661
-653
-536
-596
-10
-796
-230
-813
-125
-540
-584
-389
-144
-346
-213
-444
-205
-712
-651
-670
-139
-60
-620
-49
-284
-212
-452
-520
-243
-356
-348
-442
-585
-202
-207
-222
-47
-49
-408
-571
-154
-695
-802
-524
-523
-617
-615
-571
-92
-344
-675
-613
-759
-29
-833
-662
-223
-46
-156
-373
-412
-848
-93
-695
-250
-810
-477
-150
-282
-789
-193
-443
-193
-159
-840
-755
-508
-404
-307
-80
-320
-14<|fim▁hole|>-552
-323
-366
-45
-16
-335
-852
-46
-459
-461
-537
-547
-180
-842
-213
-447
-712
-633
-362
-953
-407
-47
0
-466
-107
-648
-528
-413
-828
-217
-484
-969
-121
-858
-208
-618
-384
-16
-91
-662
-348
-675
-63
-713
-966
-678
-293
-827
-445
-387
-212
-763
-847
-756
-299
-443
-80
-286
-954
-521
-394
-357
-861
-530
-649
-671
-437
-884
-606
-73
-452
-354
-729
-927
-248
-2
-738
-521
-440
-435
-291
-104
-402
-375
-875
-686
-812
-539
-934
-536
-924
-924
-365"""
# i = """0
# 3
# 0
# 1
# -3"""
main(i)
main2(i)<|fim▁end|> | -245
-746
-610
-855 |
<|file_name|>xensm.py<|end_file_name|><|fim▁begin|># Copyright (c) 2011 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import exception
from nova import flags
from nova.openstack.common import log as logging
from nova import utils
from nova.virt.xenapi import driver as xenapi_conn
from nova.virt.xenapi import volumeops
import nova.volume.driver
LOG = logging.getLogger(__name__)
FLAGS = flags.FLAGS
class XenSMDriver(nova.volume.driver.VolumeDriver):
def _convert_config_params(self, conf_str):
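        # Added note: splits a whitespace-separated "key=value" string into a dict,
        # e.g. (illustrative) "name_label=mysr server=10.0.0.1" ->
        # {'name_label': 'mysr', 'server': '10.0.0.1'}.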
params = dict([item.split("=") for item in conf_str.split()])
return params
def _get_introduce_sr_keys(self, params):
if 'name_label' in params:
del params['name_label']
keys = params.keys()
keys.append('sr_type')
return keys
def _create_storage_repo(self, context, backend_ref):
"""Either creates or introduces SR on host
depending on whether it exists in xapi db."""
params = self._convert_config_params(backend_ref['config_params'])
if 'name_label' in params:
label = params['name_label']
del params['name_label']
else:
label = 'SR-' + str(backend_ref['id'])
params['sr_type'] = backend_ref['sr_type']
if backend_ref['sr_uuid'] is None:
# run the sr create command
try:
LOG.debug(_('SR name = %s') % label)
LOG.debug(_('Params: %s') % str(params))
sr_uuid = self._volumeops.create_sr(label, params)
# update sr_uuid and created in db
except Exception as ex:
LOG.debug(_("Failed to create sr %s...continuing") %
str(backend_ref['id']))
msg = _('Create failed')
raise exception.VolumeBackendAPIException(data=msg)
LOG.debug(_('SR UUID of new SR is: %s') % sr_uuid)
try:
self.db.sm_backend_conf_update(context,
backend_ref['id'],
dict(sr_uuid=sr_uuid))
except Exception as ex:
LOG.exception(ex)
msg = _("Failed to update db")
raise exception.VolumeBackendAPIException(data=msg)
else:
# sr introduce, if not already done
try:
self._volumeops.introduce_sr(backend_ref['sr_uuid'], label,
params)
except Exception as ex:
LOG.exception(ex)
LOG.debug(_("Failed to introduce sr %s...continuing")
% str(backend_ref['id']))
def _create_storage_repos(self, context):
"""Create/Introduce storage repositories at start."""
backends = self.db.sm_backend_conf_get_all(context)
for backend in backends:
try:
self._create_storage_repo(context, backend)
except Exception as ex:
LOG.exception(ex)
msg = _('Failed to reach backend %d') % backend['id']
raise exception.VolumeBackendAPIException(data=msg)
def __init__(self, *args, **kwargs):
"""Connect to the hypervisor."""
# This driver leverages Xen storage manager, and hence requires
# hypervisor to be Xen
if FLAGS.connection_type != 'xenapi':
msg = _('XenSMDriver requires xenapi connection')
raise exception.VolumeBackendAPIException(data=msg)
url = FLAGS.xenapi_connection_url
username = FLAGS.xenapi_connection_username
password = FLAGS.xenapi_connection_password
try:
session = xenapi_conn.XenAPISession(url, username, password)
self._volumeops = volumeops.VolumeOps(session)
except Exception as ex:
LOG.exception(ex)
msg = _("Failed to initiate session")
raise exception.VolumeBackendAPIException(data=msg)
super(XenSMDriver, self).__init__(execute=utils.execute,
sync_exec=utils.execute,
*args, **kwargs)
def do_setup(self, ctxt):
"""Setup includes creating or introducing storage repos
existing in the database and destroying deleted ones."""
# TODO(renukaapte) purge storage repos
self.ctxt = ctxt
self._create_storage_repos(ctxt)
def create_volume(self, volume):
"""Creates a logical volume. Can optionally return a Dictionary of
changes to the volume object to be persisted."""
# For now the scheduling logic will be to try to fit the volume in
# the first available backend.
# TODO(renukaapte) better scheduling once APIs are in place
sm_vol_rec = None
backends = self.db.sm_backend_conf_get_all(self.ctxt)
for backend in backends:
# Ensure that storage repo exists, if not create.
# This needs to be done because if nova compute and
# volume are both running on this host, then, as a
# part of detach_volume, compute could potentially forget SR
self._create_storage_repo(self.ctxt, backend)
sm_vol_rec = self._volumeops.create_volume_for_sm(volume,
backend['sr_uuid'])
if sm_vol_rec:
LOG.debug(_('Volume will be created in backend - %d')
% backend['id'])
break
if sm_vol_rec:
# Update db
sm_vol_rec['id'] = volume['id']
sm_vol_rec['backend_id'] = backend['id']
try:
self.db.sm_volume_create(self.ctxt, sm_vol_rec)
except Exception as ex:
LOG.exception(ex)
msg = _("Failed to update volume in db")
raise exception.VolumeBackendAPIException(data=msg)
else:
msg = _('Unable to create volume')
raise exception.VolumeBackendAPIException(data=msg)
def delete_volume(self, volume):
vol_rec = self.db.sm_volume_get(self.ctxt, volume['id'])
if not vol_rec:
raise exception.NotFound(_("Volume %s does not exist"),
volume['id'])
try:
# If compute runs on this node, detach could have disconnected SR
backend_ref = self.db.sm_backend_conf_get(self.ctxt,
vol_rec['backend_id'])
self._create_storage_repo(self.ctxt, backend_ref)
self._volumeops.delete_volume_for_sm(vol_rec['vdi_uuid'])
except Exception as ex:
LOG.exception(ex)
msg = _("Failed to delete vdi")
raise exception.VolumeBackendAPIException(data=msg)
try:
self.db.sm_volume_delete(self.ctxt, volume['id'])
except Exception as ex:
LOG.exception(ex)
msg = _("Failed to delete volume in db")
raise exception.VolumeBackendAPIException(data=msg)
def local_path(self, volume):
return str(volume['id'])
def undiscover_volume(self, volume):
"""Undiscover volume on a remote host."""
pass
def discover_volume(self, context, volume):
return str(volume['id'])
def check_for_setup_error(self):
pass
def create_export(self, context, volume):
"""Exports the volume."""
pass<|fim▁hole|>
def ensure_export(self, context, volume):
"""Safely, synchronously recreates an export for a logical volume."""
pass
def initialize_connection(self, volume, connector):
try:
xensm_properties = dict(self.db.sm_volume_get(self.ctxt,
volume['id']))
except Exception as ex:
LOG.exception(ex)
msg = _("Failed to find volume in db")
raise exception.VolumeBackendAPIException(data=msg)
# Keep the volume id key consistent with what ISCSI driver calls it
xensm_properties['volume_id'] = xensm_properties['id']
del xensm_properties['id']
try:
backend_conf = self.db.sm_backend_conf_get(self.ctxt,
xensm_properties['backend_id'])
except Exception as ex:
LOG.exception(ex)
msg = _("Failed to find backend in db")
raise exception.VolumeBackendAPIException(data=msg)
params = self._convert_config_params(backend_conf['config_params'])
xensm_properties['flavor_id'] = backend_conf['flavor_id']
xensm_properties['sr_uuid'] = backend_conf['sr_uuid']
xensm_properties['sr_type'] = backend_conf['sr_type']
xensm_properties.update(params)
_introduce_sr_keys = self._get_introduce_sr_keys(params)
xensm_properties['introduce_sr_keys'] = _introduce_sr_keys
return {
'driver_volume_type': 'xensm',
'data': xensm_properties
}
def terminate_connection(self, volume, connector):
pass<|fim▁end|> |
def remove_export(self, context, volume):
"""Removes an export for a logical volume."""
pass |
<|file_name|>win_msg.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Jon Hawkesworth (@jhawkesworth) <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name<|fim▁hole|>
DOCUMENTATION = r'''
---
module: win_msg
version_added: "2.3"
short_description: Sends a message to logged in users on Windows hosts.
description:
- Wraps the msg.exe command in order to send messages to Windows hosts.
options:
to:
description:
- Who to send the message to. Can be a username, sessionname or sessionid.
default: '*'
display_seconds:
description:
- How long to wait for receiver to acknowledge message, in seconds.
default: 10
wait:
description:
- Whether to wait for users to respond. Module will only wait for the number of seconds specified in display_seconds or 10 seconds if not specified.
However, if I(wait) is true, the message is sent to each logged on user in turn, waiting for the user to either press 'ok' or for
the timeout to elapse before moving on to the next user.
type: bool
default: 'no'
msg:
description:
- The text of the message to be displayed.
- The message must be less than 256 characters.
default: Hello world!
author:
- Jon Hawkesworth (@jhawkesworth)
notes:
- This module must run on a windows host, so ensure your play targets windows
hosts, or delegates to a windows host.
- Messages are only sent to the local host where the module is run.
- The module does not support sending to users listed in a file.
- Setting wait to true can result in long run times on systems with many logged in users.
'''
EXAMPLES = r'''
- name: Warn logged in users of impending upgrade
win_msg:
display_seconds: 60
msg: Automated upgrade about to start. Please save your work and log off before {{ deployment_start_time }}
'''
RETURN = r'''
msg:
    description: Text of the message that was sent.
returned: changed
type: string
sample: Automated upgrade about to start. Please save your work and log off before 22 July 2016 18:00:00
display_seconds:
description: Value of display_seconds module parameter.
returned: success
type: string
sample: 10
rc:
description: The return code of the API call
returned: always
type: int
sample: 0
runtime_seconds:
description: How long the module took to run on the remote windows host.
returned: success
type: string
sample: 22 July 2016 17:45:51
sent_localtime:
description: local time from windows host when the message was sent.
returned: success
type: string
sample: 22 July 2016 17:45:51
wait:
description: Value of wait module parameter.
returned: success
type: boolean
sample: false
'''<|fim▁end|> |
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'} |
<|file_name|>tag_ports_during_bulk_creation.py<|end_file_name|><|fim▁begin|># Copyright (c) 2019 Verizon Media
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
ALIAS = 'tag-ports-during-bulk-creation'
IS_SHIM_EXTENSION = True
IS_STANDARD_ATTR_EXTENSION = False
NAME = 'Tag Ports During Bulk Creation'
DESCRIPTION = 'Allow to tag ports during bulk creation'
UPDATED_TIMESTAMP = '2019-12-29T19:00:00-00:00'
RESOURCE_ATTRIBUTE_MAP = {}
SUB_RESOURCE_ATTRIBUTE_MAP = {}
ACTION_MAP = {}<|fim▁hole|>REQUIRED_EXTENSIONS = []
OPTIONAL_EXTENSIONS = []
ACTION_STATUS = {}<|fim▁end|> | |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from models import Post, PostForm, Department
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404, render, redirect
from django.template.loader import render_to_string
import os
from django.core.mail import send_mail
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
import urllib
from timboektu.books.config import NOTIFY_THRESHOLD, DELETE_THRESHOLD
from django.db.models import Count
import sys
# TODO combine with index, optional department id
def department(request, department_id):
department = get_object_or_404(Department, pk=department_id)
return index(request, department)
def index(request, department = None):
query = request.POST.get('query')
if not query:
query = request.GET.get('query')
order_by = request.GET.get('order_by')
if not order_by:
order_by = '-crdate'
page = request.GET.get('page')
# Get posts for query
#TODO extend .order_by for case insensitivity: .extra(select={'title': 'lower(title)'})
posts = []
if query:
posts = Post.objects.query_filter(query).order_by(order_by)
else:
posts = Post.objects.all().order_by(order_by)
# Filter for department
if department:
posts = posts.filter(departments__id=department.id)
# Paging
num_per_page = 15 if query or department else 5
paginator = Paginator(posts, num_per_page)
try:
posts = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
posts = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
posts = paginator.page(paginator.num_pages)
return render(request, 'index.html', {
'posts': posts,
'departments': Department.objects.annotate(my_count=Count('post')),
'current_department': department,
'query' : query,
'title_order_by' : '-title' if order_by == 'title' else 'title',
'title_order_class' : 'dec' if order_by == 'title' else 'asc' if order_by == '-title' else '',
'price_order_by' : '-price' if order_by == 'price' else 'price',
'price_order_class' : 'dec' if order_by == 'price' else 'asc' if order_by == '-price' else '',
})
def detail(request, post_id):
p = get_object_or_404(Post, pk=post_id)
email = urllib.quote(render_to_string('emails/purchase.html', {'post': p}))
subject = urllib.quote("Interest in your advertisement on TimBoekTU")
mailto = p.email + '?subject=' + subject + '&body=' + email
return render(request, 'detail.html', {'post': p, 'mailto': mailto })
def edit(request, post_hash):
p = get_object_or_404(Post, hash=post_hash)
# Update
if request.method == 'POST':
form = PostForm(request.POST, instance=p)
if form.is_valid():
p.set_isbn_int()
p.save()
return HttpResponseRedirect(reverse('timboektu.books.views.detail', kwargs={'post_id': p.id}))
# Edit
else:
form = PostForm(instance=p)
return render(request, 'edit.html', {
'form' : form,
'post' : p,
'delete' : DELETE_THRESHOLD
})
def new(request):
# Create
if request.method == 'POST':
form = PostForm(request.POST)
if form.is_valid():
p = form.save()
p.hash = (os.urandom(16)).encode('hex')
p.set_isbn_int()
# Send edit link to user
send_mail(
'TimBoekTU edit link for ' + p.title,
render_to_string('emails/edit.html', {'post' : p}),
'[email protected]',
[p.email],
fail_silently=True)
p.save()
return HttpResponseRedirect(reverse('timboektu.books.views.confirm', kwargs={'post_hash': p.hash}))
# New
else:
form = PostForm()
return render(request, 'edit.html', {
'form' : form,
'delete' : DELETE_THRESHOLD
})
def confirm(request, post_hash):
p = get_object_or_404(Post, hash=post_hash)
return render(request, 'confirm.html', {
'post' : p,
})
def renew(request, post_hash):
p = get_object_or_404(Post, hash=post_hash)
p.save() # Updates mdate, notified
return render(request, 'renew.html', {
'post' : p,
})
def delete(request):
post_hash = request.GET.get('hash')
p = get_object_or_404(Post, hash=post_hash)
p.delete()
return render(request, 'delete.html')
def about(request):
return render(request, 'about.html')
<|fim▁hole|> return render(request, 'contribute.html')
def locations(request):
return render(request, 'locations.html')<|fim▁end|> | def contribute(request): |
<|file_name|>feature-gate-const_eval_limit.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>//~^ ERROR the `#[const_eval_limit]` attribute is an experimental feature [E0658]
const CONSTANT: usize = limit();
fn main() {
assert_eq!(CONSTANT, 1764);
}
const fn limit() -> usize {
let x = 42;
x * 42
}<|fim▁end|> | #![const_eval_limit="42"] |
<|file_name|>segos_mv.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
'''
FanFilm Add-on
Copyright (C) 2016 mrknow
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse, json, base64
from resources.lib.libraries import cleantitle
from resources.lib.libraries import client
from resources.lib.libraries import control
from resources.lib import resolvers
class source:
def __init__(self):
self.base_link = 'http://segos.es'
self.search_link = '/?search=%s'
#self.episode_link = '-Season-%01d-Episode-%01d'
def get_movie(self, imdb, title, year):
try:
query = self.search_link % (urllib.quote_plus(cleantitle.query2(title)))
query = urlparse.urljoin(self.base_link, query)
result = client.request(query)
title = cleantitle.movie(title)
result = client.parseDOM(result, 'div', attrs={'style':'overflow: hidden; margin-top: 15px;'})
result = [(
client.parseDOM(i, 'a', ret='href')[0],
client.parseDOM(i, 'a')[1],
str(re.findall(r"(\d{4})", client.parseDOM(i, 'a')[1])[0])) for i in result]
years = ['%s' % str(year), '%s' % str(int(year)+1), '%s' % str(int(year)-1)]
result = [i for i in result if title in cleantitle.movie(i[1])]
result = [i[0] for i in result if any(x in i[2] for x in years)][0]
try: url = re.compile('//.+?(/.+)').findall(result)[0]
except: url = result
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
control.log('Segos URL %s' % url)
return url
except:
return
def get_show(self, imdb, tvdb, tvshowtitle, year):
try:
query = self.moviesearch_link % (urllib.unquote(tvshowtitle))
query = urlparse.urljoin(self.base_link, query)
result = client.source(query)
result = json.loads(result)
tvshowtitle = cleantitle.tv(tvshowtitle)
years = ['%s' % str(year), '%s' % str(int(year)+1), '%s' % str(int(year)-1)]
result = [(client.parseDOM(i, 'a', ret='href')[0], client.parseDOM(i, 'h2', ret='title')[0], client.parseDOM(i, 'span', attrs = {'itemprop': 'copyrightYear'})) for i in result]
result = [i for i in result if len(i[2]) > 0]
result = [i for i in result if tvshowtitle == cleantitle.tv(i[1])]
result = [i[0] for i in result if any(x in i[2][0] for x in years)][0]
try: url = re.compile('//.+?(/.+)').findall(result)[0]
except: url = result
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
return
def get_episode(self, url, imdb, tvdb, title, date, season, episode):
if url == None: return
url += self.episode_link % (int(season), int(episode))
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
def get_sources(self, url, hosthdDict, hostDict, locDict):
try:
sources = []
if url == None: return sources
url = urlparse.urljoin(self.base_link, url)
result = client.request(url)
vtype = re.findall('<div class="col-lg-9 col-md-9 col-sm-9">\s.*<b>Język</b>:(.*?)\.*</div>',result)[0].strip()
q = re.findall('<div class="col-lg-9 col-md-9 col-sm-9">\s.*<b>Jakość</b>:(.*?)\.*</div>', result)[0].strip()
quality = 'SD'
if '720' in q: quality = 'HD'
if '1080' in q: quality = '1080p'
links = client.parseDOM(result, 'div', attrs={'id':'Film'})
links = [client.parseDOM(i, 'a', ret='href', attrs={'target':'_blank'})[0] for i in links]
for i in links:
try:
host = urlparse.urlparse(i).netloc
host = host.split('.')
host = host[-2]+"."+host[-1]
host = host.lower()
host = client.replaceHTMLCodes(host)
host = host.encode('utf-8')
sources.append({'source': host, 'quality': quality, 'provider': 'SEGOS', 'url': i, 'vtype':vtype})
except:
pass
return sources
except:<|fim▁hole|>
def resolve(self, url):
        control.log('SEGOS RESOLVE URL %s' % url)
try:
url = resolvers.request(url)
return url
except:
return<|fim▁end|> | return sources |
<|file_name|>ShapefileIndexReader.py<|end_file_name|><|fim▁begin|>"""
This script exposes a class used to read the Shapefile Index format
used in conjunction with a shapefile. The Index file gives the record
number and content length for every record stored in the main shapefile.
This is useful if you need to extract specific features from a shapefile
without reading the entire file.
How to use:
from ShapefileIndexReader import ShapefileIndex
shx = ShapefileIndex('Path/To/index.shx')
shx.read()
<|fim▁hole|> 1) Path - the path given to the shapefile, if it exists
2) Offsets - an array of byte offsets for each record in the main shapefile
3) Lengths - an array of 16-bit word lengths for each record in the main shapefile
"""
import os
__author__ = 'Sean Taylor Hutchison'
__license__ = 'MIT'
__version__ = '0.1.0'
__maintainer__ = 'Sean Taylor Hutchison'
__email__ = '[email protected]'
__status__ = 'Development'
class ShapefileIndex:
Records = []  # note: defined at class level, so all instances share this list unless it is reassigned per instance
def __bytes_to_index_records(self,file_bytes):
file_length = len(file_bytes)
num_records = int((file_length - 100) / 8)
for record_counter in range(0,num_records):
byte_position = 100 + (record_counter * 8)
offset = int.from_bytes(file_bytes[byte_position:byte_position+4], byteorder='big')
length = int.from_bytes(file_bytes[byte_position+4:byte_position+8], byteorder='big')
self.Records.append([offset,length])
def read(self):
with open(self.Path, 'rb') as shpindex:
self.__bytes_to_index_records(shpindex.read())
def __init__(self, path=None):
if path and os.path.exists(path) and os.path.splitext(path)[1] == '.shx':
self.Path = path
else:
raise FileNotFoundError<|fim▁end|> | The 'shx' object will expose three properties |
<|file_name|>Shannon.py<|end_file_name|><|fim▁begin|># This script will calculate Shannon entropy from a MSA.
# Dependencies:
# Biopython, Matplotlib, Math
"""
Shannon's entropy equation (latex format):
H = -\sum_{i=1}^{M} P_i \, \log_2 P_i
Entropy is a measure of the uncertainty of a probability distribution (p1, ..., pM)
https://stepic.org/lesson/Scoring-Motifs-157/step/7?course=Bioinformatics-Algorithms&unit=436
Where Pi is the fraction of nucleotide bases of nucleotide base type i,
and M is the number of nucleotide base types (A, T, G or C).
H ranges from 0 (only one base/residue is present at that position) to 4.322 (all 20 residues are equally
represented in that position).
Typically, positions with H > 2.0 are considered variable, whereas those with H < 2.0 are considered conserved.
Highly conserved positions are those with H <1.0 (Litwin and Jores, 1992).
A minimum number of sequences is however required (~100) for H to describe the diversity of a protein family.
"""
import os
import sys
import warnings
import traceback
__author__ = "Joe R. J. Healey"
__version__ = "1.0.0"
__title__ = "ShannonMSA"
__license__ = "GPLv3"
__author_email__ = "[email protected]"
def parseArgs():
"""Parse command line arguments"""
import argparse
try:
parser = argparse.ArgumentParser(
description='Compute per base/residue Shannon entropy of a Multiple Sequence Alignment.')
parser.add_argument('-a',
'--alignment',
action='store',
required=True,
help='The multiple sequence alignment (MSA) in any of the formats supported by Biopython\'s AlignIO.')
parser.add_argument('-f',
'--alnformat',
action='store',
default='fasta',
help='Specify the format of the input MSA to be passed in to AlignIO.')
parser.add_argument('-v',
'--verbose',
action='count',
default=0,
help='Verbose behaviour, printing parameters of the script.')
parser.add_argument('-m',
'--runningmean',
action='store',
type=int,
default=0,
help='Return the running mean (a.k.a moving average) of the MSAs Shannon Entropy. Makes for slightly smoother plots. Providing the number of points to average over switches this on.')
parser.add_argument('--makeplot',
action='store_true',
help='Plot the results via Matplotlib.')
except:
print "An exception occurred with argument parsing. Check your provided options."
traceback.print_exc()
return parser.parse_args()
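# Typical invocation (file name of the alignment is illustrative):
#     python Shannon.py -a alignment.fasta -f fasta -m 10 --makeplot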
def parseMSA(msa, alnformat, verbose):
"""Parse in the MSA file using Biopython's AlignIO"""
from Bio import AlignIO
alignment = AlignIO.read(msa, alnformat)
# Do a little sanity checking:
seq_lengths_list = []
for record in alignment:
seq_lengths_list.append(len(record))
seq_lengths = set(seq_lengths_list)
if verbose > 0: print("Alignment length is:" + str(list(seq_lengths)))
if len(seq_lengths) != 1:
sys.stderr.write("Your alignment lengths aren't equal. Check your alignment file.")
sys.exit(1)
index = range(1, list(seq_lengths)[0]+1)
return alignment, list(seq_lengths), index
##################################################################<|fim▁hole|># Gaps and N's are included in the calculation
##################################################################
def shannon_entropy(list_input):
"""Calculate Shannon's Entropy per column of the alignment (H=-\sum_{i=1}^{M} P_i\,log_2\,P_i)"""
import math
unique_base = set(list_input)
M = len(list_input)
entropy_list = []
# Number of residues in column
for base in unique_base:
n_i = list_input.count(base) # Number of residues of type i
P_i = n_i/float(M) # n_i(Number of residues of type i) / M(Number of residues in column)
entropy_i = P_i*(math.log(P_i,2))
entropy_list.append(entropy_i)
sh_entropy = -(sum(entropy_list))
return sh_entropy
def shannon_entropy_list_msa(alignment):
"""Calculate Shannon Entropy across the whole MSA"""
shannon_entropy_list = []
for col_no in xrange(len(list(alignment[0]))):
list_input = list(alignment[:, col_no])
shannon_entropy_list.append(shannon_entropy(list_input))
return shannon_entropy_list
def plot(index, sel, verbose):
""""Create a quick plot via matplotlib to visualise the extended spectrum"""
import matplotlib.pyplot as plt
if verbose > 0: print("Plotting data...")
plt.plot(index, sel)
plt.xlabel('MSA Position Index', fontsize=16)
plt.ylabel('Shannon Entropy', fontsize=16)
plt.show()
def running_mean(l, N):
sum = 0
result = list(0 for x in l)
for i in range( 0, N ):
sum = sum + l[i]
result[i] = sum / (i+1)
for i in range( N, len(l) ):
sum = sum - l[i-N] + l[i]
result[i] = sum / N
return result
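# Quick sanity check of the windowed average (window N = 2):
#     running_mean([1.0, 2.0, 3.0, 4.0], 2) -> [1.0, 1.5, 2.5, 3.5]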
def main():
"""Compute Shannon Entropy from a provided MSA."""
# Parse arguments
args = parseArgs()
# Convert object elements to standard variables for functions
msa = args.alignment
alnformat = args.alnformat
verbose = args.verbose
makeplot = args.makeplot
runningmean = args.runningmean
# Start calling functions to do the heavy lifting
alignment, seq_lengths, index = parseMSA(msa, alnformat, verbose)
sel = shannon_entropy_list_msa(alignment)
if runningmean > 0:
sel = running_mean(sel, runningmean)
if makeplot is True:
plot(index, sel, verbose)
if verbose > 0: print("Index" + '\t' + "Entropy")
for c1, c2 in zip(index, sel):
print(str(c1) + '\t' + str(c2))
if __name__ == '__main__':
main()<|fim▁end|> | # Function to calcuate the Shannon's entropy per alignment column
# H=-\sum_{i=1}^{M} P_i\,log_2\,P_i (http://imed.med.ucm.es/Tools/svs_help.html) |
<|file_name|>httpapi.py<|end_file_name|><|fim▁begin|># (c) 2018 Red Hat Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
---
author: Ansible Networking Team
connection: httpapi
short_description: Use httpapi to run command on network appliances
description:
- This connection plugin provides a connection to remote devices over a
HTTP(S)-based api.
version_added: "2.6"
options:
host:
description:
- Specifies the remote device FQDN or IP address to establish the HTTP(S)
connection to.
default: inventory_hostname
vars:
- name: ansible_host
port:
type: int
description:
- Specifies the port on the remote device that is listening for connections
when establishing the HTTP(S) connection.
When unspecified, will pick 80 or 443 based on the value of use_ssl
ini:
- section: defaults
key: remote_port
env:
- name: ANSIBLE_REMOTE_PORT
vars:
- name: ansible_httpapi_port
network_os:
description:
- Configures the device platform network operating system. This value is
used to load the correct httpapi and cliconf plugins to communicate
with the remote device
vars:
- name: ansible_network_os
remote_user:
description:
- The username used to authenticate to the remote device when the API
connection is first established. If the remote_user is not specified,
the connection will use the username of the logged in user.
- Can be configured from the CLI via the C(--user) or C(-u) options
ini:
- section: defaults
key: remote_user
env:
- name: ANSIBLE_REMOTE_USER
vars:
- name: ansible_user
password:
description:
- Secret used to authenticate
vars:
- name: ansible_password
- name: ansible_httpapi_pass
use_ssl:
description:
- Whether to connect using SSL (HTTPS) or not (HTTP)
default: False
vars:
- name: ansible_httpapi_use_ssl
validate_certs:
version_added: '2.7'
description:
- Whether to validate SSL certificates
default: True
vars:
- name: ansible_httpapi_validate_certs
timeout:
type: int
description:
- Sets the connection time, in seconds, for communicating with the
remote device. This timeout is used as the default timeout value for
commands when issuing a command to the network CLI. If the command
does not return in timeout seconds, an error is generated.
default: 120
become:
type: boolean
description:
- The become option will instruct the CLI session to attempt privilege
escalation on platforms that support it. Normally this means
transitioning from user mode to C(enable) mode in the CLI session.
If become is set to True and the remote device does not support
privilege escalation or the privilege has already been elevated, then
this option is silently ignored
- Can be configured from the CLI via the C(--become) or C(-b) options
default: False
ini:
section: privilege_escalation
key: become
env:
- name: ANSIBLE_BECOME
vars:
- name: ansible_become
become_method:
description:
- This option allows the become method to be specified for handling
privilege escalation. Typically the become_method value is set to
C(enable) but could be defined as other values.
default: sudo
ini:
section: privilege_escalation
key: become_method
env:
- name: ANSIBLE_BECOME_METHOD
vars:
- name: ansible_become_method
persistent_connect_timeout:
type: int
description:
- Configures, in seconds, the amount of time to wait when trying to
initially establish a persistent connection. If this value expires<|fim▁hole|> before the connection to the remote device is completed, the connection
will fail
default: 30
ini:
- section: persistent_connection
key: connect_timeout
env:
- name: ANSIBLE_PERSISTENT_CONNECT_TIMEOUT
persistent_command_timeout:
type: int
description:
- Configures, in seconds, the amount of time to wait for a command to
return from the remote device. If this timer is exceeded before the
command returns, the connection plugin will raise an exception and
close
default: 10
ini:
- section: persistent_connection
key: command_timeout
env:
- name: ANSIBLE_PERSISTENT_COMMAND_TIMEOUT
"""
from ansible.errors import AnsibleConnectionFailure
from ansible.module_utils._text import to_bytes
from ansible.module_utils.six import PY3, BytesIO
from ansible.module_utils.six.moves import cPickle
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import open_url
from ansible.playbook.play_context import PlayContext
from ansible.plugins.loader import cliconf_loader, httpapi_loader
from ansible.plugins.connection import NetworkConnectionBase
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class Connection(NetworkConnectionBase):
'''Network API connection'''
transport = 'httpapi'
has_pipelining = True
def __init__(self, play_context, new_stdin, *args, **kwargs):
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
self._url = None
self._auth = None
if not self._network_os:
raise AnsibleConnectionFailure(
'Unable to automatically determine host network os. Please '
'manually configure ansible_network_os value for this host'
)
display.display('network_os is set to %s' % self._network_os, log_only=True)
def update_play_context(self, pc_data):
"""Updates the play context information for the connection"""
pc_data = to_bytes(pc_data)
if PY3:
pc_data = cPickle.loads(pc_data, encoding='bytes')
else:
pc_data = cPickle.loads(pc_data)
play_context = PlayContext()
play_context.deserialize(pc_data)
messages = ['updating play_context for connection']
if self._play_context.become ^ play_context.become:
self.set_become(play_context)
if play_context.become is True:
messages.append('authorizing connection')
else:
messages.append('deauthorizing connection')
self._play_context = play_context
return messages
def _connect(self):
if not self.connected:
protocol = 'https' if self.get_option('use_ssl') else 'http'
host = self.get_option('host')
port = self.get_option('port') or (443 if protocol == 'https' else 80)
self._url = '%s://%s:%s' % (protocol, host, port)
httpapi = httpapi_loader.get(self._network_os, self)
if httpapi:
display.vvvv('loaded API plugin for network_os %s' % self._network_os, host=self._play_context.remote_addr)
self._implementation_plugins.append(httpapi)
httpapi.set_become(self._play_context)
httpapi.login(self.get_option('remote_user'), self.get_option('password'))
else:
raise AnsibleConnectionFailure('unable to load API plugin for network_os %s' % self._network_os)
cliconf = cliconf_loader.get(self._network_os, self)
if cliconf:
display.vvvv('loaded cliconf plugin for network_os %s' % self._network_os, host=host)
self._implementation_plugins.append(cliconf)
else:
display.vvvv('unable to load cliconf for network_os %s' % self._network_os)
self._connected = True
def close(self):
'''
Close the active session to the device
'''
# only close the connection if its connected.
if self._connected:
display.vvvv("closing http(s) connection to device", host=self._play_context.remote_addr)
self.logout()
super(Connection, self).close()
def send(self, path, data, **kwargs):
'''
Sends the command to the device over api
'''
url_kwargs = dict(
timeout=self.get_option('timeout'), validate_certs=self.get_option('validate_certs'),
headers={},
)
url_kwargs.update(kwargs)
if self._auth:
# Avoid modifying passed-in headers
headers = dict(kwargs.get('headers', {}))
headers.update(self._auth)
url_kwargs['headers'] = headers
else:
url_kwargs['url_username'] = self.get_option('remote_user')
url_kwargs['url_password'] = self.get_option('password')
try:
response = open_url(self._url + path, data=data, **url_kwargs)
except HTTPError as exc:
is_handled = self.handle_httperror(exc)
if is_handled is True:
return self.send(path, data, **kwargs)
elif is_handled is False:
raise AnsibleConnectionFailure('Could not connect to {0}: {1}'.format(self._url + path, exc.reason))
else:
raise
except URLError as exc:
raise AnsibleConnectionFailure('Could not connect to {0}: {1}'.format(self._url + path, exc.reason))
response_buffer = BytesIO()
response_buffer.write(response.read())
# Try to assign a new auth token if one is given
self._auth = self.update_auth(response, response_buffer) or self._auth
return response, response_buffer<|fim▁end|> | |
<|file_name|>MapManager.js<|end_file_name|><|fim▁begin|>//Create an event node
var Event = (function($) { return function(properties) {
this.properties = properties;
this.blip = null;
this.listing = null;
this.className = properties.event_type_name.replace(/[^\w]/ig,"-").toLowerCase();
this.LatLng = [parseFloat(this.properties.latitude),
parseFloat(this.properties.longitude)];
this.startTime = moment(this.properties.start_dt)._d;
this.endTime = this.properties.end_dt ? moment(this.properties.end_dt)._d : null;
this.visible = true;
if (this.properties.capacity) {
this.properties.capacity = parseInt(this.properties.capacity);
}
if (this.properties.attendee_count) {
this.properties.attendee_count = parseInt(this.properties.attendee_count);
}
this.isFull = this.properties.attendee_count &&
this.properties.capacity > 0 &&
this.properties.attendee_count >= this.properties.capacity;
this.render = function (distance, zipcode) {
var that = this;
var moreThan5RSVP = that.properties.attendee_count && parseInt(that.properties.attendee_count) > 5 ? true : false;
if (!that.properties.attendee_count) { moreThan5RSVP = false; }
var datetime = that.properties.id_obfuscated && that.properties.id_obfuscated == '4gw5k' ? 'Mar 20 (Sun) 11:00am' : moment(that.properties.start_dt).format("MMM DD (ddd) h:mma")
var lat = that.properties.latitude
var lon = that.properties.longitude
var endtime = that.endTime ? moment(that.endTime).format("h:mma") : null;
var shiftElems = null;
if ( that.properties.shift_details ) {
var shiftList = that.properties.shift_details.map(
function(item) {
var current = moment();
var start = moment(item.start);
var end = moment(item.end);
if (end.isBefore(current)) { return; }
return $("<li />")
.append($("<input type='checkbox' value='" + item.event_shift_id + "' id='" + item.event_shift_id + "' name='shift_id[]'>"))
.append("<label for='" + item.event_shift_id + "'>" + start.format("h:mma") + " - " + end.format("h:mma"))
}
);
shiftElems = $("<div class='shift-details'/>")
.append("<h5>Shifts</h5>")
.append($("<ul/>").append(shiftList))
} // end of creating shift items
var rendered = $("<div class='lato'/>")
.addClass('event-item ' + that.className)
.append($("<div />").addClass('event-item lato ' + that.className+'').attr("lat",lat).attr("lon",lon) //appended lat-lon attributes to this class for marker highlighting
.append(that.properties.is_campaign_office ? $("<a class='office-image' href='" + (that.properties.opening_event ? that.properties.opening_event : that.properties.url) + "' />").append($("<img src='" + that.properties.image + "'>")) : "")
.append($("<h5 class='time-info'/>").html((distance ? ("<span class='time-info-dist'>" + distance + "mi </span>") : "") + datetime + (endtime ? " - " + endtime : "" )))
.append($("<h3/>").html("<a target='_blank' href='" + (that.properties.opening_event ? that.properties.opening_event : that.properties.url) + "'>" + that.properties.name + "</a>"))
.append(that.properties.is_official ? $("<h5 class='official-tag'/>").text("Official Event") : "")
.append($("<span/>").addClass("label-icon"))
.append($("<h5 class='event-type'/>").text(that.properties.event_type_name))
.append($("<p/>").html(that.properties.location))
.append(that.properties.phone && that.properties.phone != "-" ? $("<p/>").text("Phone: " + that.properties.phone) : "")
.append(that.properties.notes ? that.properties.notes : "")
//Append RSVP Form
.append($("<div class='event-rsvp-activity' />")
.append($("<form class='event-form lato'>")
.append($("<h4/>").html("RSVP to <strong>" + that.properties.name + "</strong>"))
.append($("<div class='event-error' />"))
.append(shiftElems ? shiftElems : "")
// .append($("<input type='text' name='name' placeholder='Name'/>"))
.append($("<input type='hidden' name='has_shift'/>").val(shiftElems != null))
.append($("<input type='hidden' name='zipcode'/>").val(zipcode?zipcode:that.properties.venue_zip))
.append($("<input type='hidden' name='id_obfuscated'/>").val(that.properties.id_obfuscated))
.append($("<input type='text' name='phone' placeholder='Phone Number'/>"))
.append($("<input type='text' name='email' placeholder='Email Address'/>"))
.append($("<input type='submit' class='lato' value='Confirm RSVP' />"))
)
)
.append(
$("<div class='social-area' />")
.addClass(moreThan5RSVP ? "more-than-5" : "")
.append(
$("<a class='rsvp-link'/>")
.attr("href", that.properties.is_campaign_office ? (that.properties.opening_event ? that.properties.opening_event : that.properties.url) : "javascript: void(null) ")
.attr("onclick", that.properties.is_campaign_office ? null: "$('.event-rsvp-activity').hide(); $(document).trigger('show-event-form', [this])")
// .attr('target', 'blank')
// .attr("href", that.properties.is_campaign_office ? (that.properties.opening_event ? that.properties.opening_event : that.properties.url) : that.properties.url)
.attr("data-id", that.properties.id_obfuscated)
.attr("data-url", (that.properties.opening_event ? that.properties.opening_event : that.properties.url))
.text(that.isFull ? "FULL" : that.properties.is_campaign_office ? (that.properties.opening_event ? "RSVP" : "Get Directions") : "RSVP")
)
.append(
$("<span class='rsvp-count'/>").text(that.properties.attendee_count + " SIGN UPS")
)
)
.append($("<div class='rsvp-attending'/>").html('<a href="https://go.berniesanders.com/page/event/myevents" target="_blank">You are attending this event</a>'))
);
return rendered.html();
};
}
})(jQuery); //End of events
// /****
// * Campaign Offices
// */
// var CampaignOffices = (function($) {
// return function(properties) {
// this.properties = properties;
// this.render = function (distance) {
// var that = this;
// var moreThan5RSVP = that.properties.attendee_count && parseInt(that.properties.attendee_count) > 5 ? true : false;
// if (!that.properties.attendee_count) { moreThan5RSVP = false; }
// var datetime = moment(that.properties.start_dt).format("MMM DD (ddd) h:mma")
// var rendered = $("<div class='lato'/>")
// .addClass('event-item ' + that.className)
// .append($("<h5 class='time-info'/>").html((distance ? (distance + "mi ") : "") + datetime))
// .append($("<h3/>").html("<a target='_blank' href='" + that.properties.url + "'>" + that.properties.name + "</a>"))
// .append(that.properties.is_official ? $("<h5 class='official-tag'/>").text("Official Event") : "")
// .append($("<span/>").addClass("label-icon"))
// .append($("<h5 class='event-type'/>").text(that.properties.event_type_name))
// .append($("<p/>").text(that.properties.location))
// .append(
// $("<div class='social-area'/>")
// .addClass(moreThan5RSVP ? "more-than-5" : "")
// .append(
// $("<a class='rsvp-link' target='_blank'/>")
// .attr("href", that.properties.url)
// .text(that.isFull ? "FULL" : "RSVP")
// )
// .append(
// $("<span class='rsvp-count'/>").text(that.properties.attendee_count + " SIGN UPS")
// )
// );
// return rendered.html();
// };
// };
// })(jQuery);
/****
* MapManager proper
*/
var MapManager = (function($, d3, leaflet) {
return (
function(eventData, campaignOffices, zipcodes, options) {
var allFilters = window.eventTypeFilters.map(function(i) { return i.id; });
var popup = L.popup();
var options = options;
var zipcodes = zipcodes.reduce(function(zips, item) { zips[item.zip] = item; return zips; }, {});
var current_filters = [], current_zipcode = "", current_distance = "", current_sort = "";
var originalEventList = eventData.map(function(d) { return new Event(d); });
var eventsList = originalEventList.slice(0);
// var officeList = campaignOffices.map(function(d) { return new CampaignOffices(d); });
leaflet.mapbox.accessToken = "pk.eyJ1IjoiemFja2V4bGV5IiwiYSI6Ijc2OWFhOTE0ZDllODZiMTUyNDYyOGM5MTk1Y2NmZmEyIn0.mfl6MGaSrMmNv5o5D5WBKw";
var mapboxTiles = leaflet.tileLayer('http://{s}.tiles.mapbox.com/v4/mapbox.streets/{z}/{x}/{y}.png?access_token=' + leaflet.mapbox.accessToken, { attribution: '<a href="http://www.openstreetmap.org/copyright" target="_blank">© OpenStreetMap contributors</a>'});
var CAMPAIGN_OFFICE_ICON = L.icon({
iconUrl: '//dcxc7a0ls04u1.cloudfront.net/img/icon/star.png',
iconSize: [17, 14], // size of the icon
// shadowSize: [50, 64], // size of the shadow
// iconAnchor: [22, 94], // point of the icon which will correspond to marker's location
// shadowAnchor: [4, 62], // the same for the shadow
// popupAnchor: [-3, -76] // point from which the popup should open relative to the iconAnchor
});
var GOTV_CENTER_ICON = L.icon({
iconUrl: '//dcxc7a0ls04u1.cloudfront.net/img/icon/gotv-star.png',
iconSize: [13, 10], // size of the icon
});
var defaultCoord = options&&options.defaultCoord ? options.defaultCoord : {center: [37.8, -96.9], zoom: 4};
var centralMap = new leaflet
.Map("map-container", window.customMapCoord ? window.customMapCoord : defaultCoord)
.addLayer(mapboxTiles);
if(centralMap) {}
var overlays = L.layerGroup().addTo(centralMap);
var offices = L.layerGroup().addTo(centralMap);
var gotvCenter = L.layerGroup().addTo(centralMap);
var campaignOfficeLayer = L.layerGroup().addTo(centralMap);
//initialize map
var filteredEvents = [];
var module = {};
var _popupEvents = function(event) {
var target = event.target._latlng;
var filtered = eventsList.filter(function(d) {
return target.lat == d.LatLng[0] &&
target.lng == d.LatLng[1] &&
(!current_filters || current_filters.length == 0
|| $(d.properties.filters).not(current_filters).length != d.properties.filters.length);
}).sort(function(a, b) { return a.startTime - b.startTime; });
var div = $("<div />")
.append(filtered.length > 1 ? "<h3 class='sched-count'>" + filtered.length + " Scheduled Events</h3>" : "")
.append(
$("<div class='popup-list-container'/>")
.append($("<ul class='popup-list'>")
.append(
filtered.map(function(d) {
return $("<li class='lato'/>")
.attr('data-attending', (function(prop) {
var email = Cookies.get('map.bernie.email');
var events_attended_raw = Cookies.get('map.bernie.eventsJoined.' + email);
var events_attended = events_attended_raw ? JSON.parse(events_attended_raw) : [];
return $.inArray(prop.id_obfuscated, events_attended) > -1;
})(d.properties))
.addClass(d.isFull?"is-full":"not-full")
.addClass(d.visible ? "is-visible" : "not-visible")
.append(d.render());
})
)
)
);
setTimeout(
function() { L.popup()
.setLatLng(event.target._latlng)
.setContent(div.html())
.openOn(centralMap);
}
, 100);
};
/***
* Initialization
*/
var initialize = function() {
var uniqueLocs = eventsList.reduce(function(arr, item){
var className = item.properties.filters.join(" ");
if ( arr.indexOf(item.properties.latitude + "||" + item.properties.longitude + "||" + className) >= 0 ) {
return arr;
} else {
arr.push(item.properties.latitude + "||" + item.properties.longitude + "||" + className);
return arr;
}
}, []);
uniqueLocs = uniqueLocs.map(function(d) {
var split = d.split("||");
return { latLng: [ parseFloat(split[0]), parseFloat(split[1])],
className: split[2] };
});
uniqueLocs.forEach(function(item) {
// setTimeout(function() {
if (item.className == "campaign-office") {
L.marker(item.latLng, {icon: CAMPAIGN_OFFICE_ICON, className: item.className})
.on('click', function(e) { _popupEvents(e); })
.addTo(offices);
} else if (item.className == "gotv-center") {
L.marker(item.latLng, {icon: GOTV_CENTER_ICON, className: item.className})
.on('click', function(e) { _popupEvents(e); })
.addTo(gotvCenter);
}else if (item.className.match(/bernie\-event/ig)) {
L.circleMarker(item.latLng, { radius: 12, className: item.className, color: 'white', fillColor: '#F55B5B', opacity: 0.8, fillOpacity: 0.7, weight: 2 })
.on('click', function(e) { _popupEvents(e); })
.addTo(overlays);
} else {
L.circleMarker(item.latLng, { radius: 5, className: item.className, color: 'white', fillColor: '#1462A2', opacity: 0.8, fillOpacity: 0.7, weight: 2 })
.on('click', function(e) { _popupEvents(e); })
.addTo(overlays);
}
// }, 10);
});
// $(".leaflet-overlay-pane").find(".bernie-event").parent().prependTo('.leaflet-zoom-animated');
}; // End of initialize
var toMile = function(meter) { return meter * 0.00062137; };
var filterEventsByCoords = function (center, distance, filterTypes) {
var zipLatLng = leaflet.latLng(center);
var filtered = eventsList.filter(function(d) {
var dist = toMile(zipLatLng.distanceTo(d.LatLng));
if (dist < distance) {
d.distance = Math.round(dist*10)/10;
//If no filter was a match on the current filter
if (options && options.defaultCoord && !filterTypes) {
return true;
}
if($(d.properties.filters).not(filterTypes).length == d.properties.filters.length) {
return false;
}
return true;
}
return false;
});
return filtered;
};
var filterEvents = function (zipcode, distance, filterTypes) {
return filterEventsByCoords([parseFloat(zipcode.lat), parseFloat(zipcode.lon)], distance, filterTypes)
};
var sortEvents = function(filteredEvents, sortType) {
switch (sortType) {
case 'distance':
filteredEvents = filteredEvents.sort(function(a,b) { return a.distance - b.distance; });
break;
default:
filteredEvents = filteredEvents.sort(function(a,b) { return a.startTime - b.startTime; });
break;
}
// filteredEvents = filteredEvents.sort(function(a, b) {
// var aFull = a.isFull();
// var bFull = b.isFull();
// if (aFull && bFull) { return 0; }
// else if (aFull && !bFull) { return 1; }
// else if (!aFull && bFull) { return -1; }
// });
//sort by fullness;
//..
return filteredEvents;
};
setTimeout(function(){
initialize();
}, 10);
module._eventsList = eventsList;
module._zipcodes = zipcodes;
module._options = options;
/*
* Refresh map with new events map
*/
var _refreshMap = function() {
overlays.clearLayers();
initialize();
};
module.filterByType = function(type) {
if ($(filters).not(type).length != 0 || $(type).not(filters).length != 0) {
current_filters = type;
//Filter only items in the list<|fim▁hole|> // return unmatch.length != eventItem.properties.filters.length;
// });
// var target = type.map(function(i) { return "." + i }).join(",");
// $(".leaflet-overlay-pane").find("path:not("+type.map(function(i) { return "." + i }).join(",") + ")")
var toHide = $(allFilters).not(type);
if (toHide && toHide.length > 0) {
toHide = toHide.splice(0,toHide.length);
$(".leaflet-overlay-pane").find("." + toHide.join(",.")).hide();
}
if (type && type.length > 0) {
$(".leaflet-overlay-pane").find("." + type.join(",.")).show();
// _refreshMap();
}
//Specifically for campaign office
if (!type) {
centralMap.removeLayer(offices);
} else if (type && type.indexOf('campaign-office') < 0) {
centralMap.removeLayer(offices);
} else {
centralMap.addLayer(offices);
}
//For gotv-centers
if (!type) {
centralMap.removeLayer(gotvCenter);
} else if (type && type.indexOf('gotv-center') < 0) {
centralMap.removeLayer(gotvCenter);
} else {
centralMap.addLayer(gotvCenter);
}
}
return;
};
module.filterByCoords = function(coords, distance, sort, filterTypes) {
//Remove list
d3.select("#event-list")
.selectAll("li").remove();
var filtered = filterEventsByCoords(coords, parseInt(distance), filterTypes);
//Sort event
filtered = sortEvents(filtered, sort, filterTypes);
//Check cookies
var email = Cookies.get('map.bernie.email');
var events_attended_raw = Cookies.get('map.bernie.eventsJoined.' + email);
var events_attended = events_attended_raw ? JSON.parse(events_attended_raw) : [];
//Render event
var eventList = d3.select("#event-list")
.selectAll("li")
.data(filtered, function(d){ return d.properties.id_obfuscated; });
eventList.enter()
.append("li")
.attr("data-attending", function(d, id) { return $.inArray(d.properties.id_obfuscated, events_attended) > -1; })
.attr("class", function(d) { return (d.isFull ? 'is-full' : 'not-full') + " " + (this.visible ? "is-visible" : "not-visible")})
.classed("lato", true)
.html(function(d){ return d.render(d.distance); });
eventList.exit().remove();
//add a highlighted marker
function addhighlightedMarker(lat,lon){
var highlightedMarker = new L.circleMarker([lat,lon],{radius: 5, color: '#ea504e', fillColor: '#1462A2', opacity: 0.8, fillOpacity: 0.7, weight: 2}).addTo(centralMap);
// event listener to remove highlighted markers
$(".not-full").mouseout(function(){
centralMap.removeLayer(highlightedMarker)
})
}
// event listener to get the mouseover
$(".not-full" ).mouseover(function(){
$(this).toggleClass("highlight")
var cMarkerLat = $(this).children('div').attr('lat')
var cMarkerLon = $(this).children('div').attr('lon')
// function call to add highlighted marker
addhighlightedMarker(cMarkerLat,cMarkerLon);
})
//Push all full items to end of list
$("div#event-list-container ul#event-list li.is-full").appendTo("div#event-list-container ul#event-list");
//Move campaign offices to
var officeCount = $("div#event-list-container ul#event-list li .campaign-office").length;
$("#hide-show-office").attr("data-count", officeCount);
$("#campaign-off-count").text(officeCount);
$("section#campaign-offices ul#campaign-office-list *").remove();
$("div#event-list-container ul#event-list li .campaign-office").parent().appendTo("section#campaign-offices ul#campaign-office-list");
}
/***
* FILTER() -- When the user submits query, we will look at this.
*/
module.filter = function(zipcode, distance, sort, filterTypes) {
//Check type filter
if (!zipcode || zipcode == "") { return; };
//Start if other filters changed
var targetZipcode = zipcodes[zipcode];
//Remove list
d3.select("#event-list")
.selectAll("li").remove();
if (targetZipcode == undefined || !targetZipcode) {
$("#event-list").append("<li class='error lato'>Zipcode does not exist. <a href=\"https://go.berniesanders.com/page/event/search_results?orderby=zip_radius&zip_radius%5b0%5d=" + zipcode + "&zip_radius%5b1%5d=100&country=US&radius_unit=mi\">Try our events page</a></li>");
return;
}
//Calibrate map
var zoom = 4;
switch(parseInt(distance))
{
case 5 : zoom = 12; break;
case 10: zoom = 11; break;
case 20: zoom = 10; break;
case 50: zoom = 9; break;
case 100: zoom = 8; break;
case 250: zoom = 7; break;
case 500: zoom = 5; break;
case 750: zoom = 5; break;
case 1000: zoom = 4; break;
case 2000: zoom = 4; break;
case 3000: zoom = 3; break;
}
if (!(targetZipcode.lat && targetZipcode.lat != "")) {
return;
}
if (current_zipcode != zipcode || current_distance != distance) {
current_zipcode = zipcode;
current_distance = distance;
centralMap.setView([parseFloat(targetZipcode.lat), parseFloat(targetZipcode.lon)], zoom);
}
var filtered = filterEvents(targetZipcode, parseInt(distance), filterTypes);
//Sort event
filtered = sortEvents(filtered, sort, filterTypes);
//Check cookies
var email = Cookies.get('map.bernie.email');
var events_attended_raw = Cookies.get('map.bernie.eventsJoined.' + email);
var events_attended = events_attended_raw ? JSON.parse(events_attended_raw) : [];
//Render event
var eventList = d3.select("#event-list")
.selectAll("li")
.data(filtered, function(d){ return d.properties.id_obfuscated; });
eventList.enter()
.append("li")
.attr("data-attending", function(d, id) { return $.inArray(d.properties.id_obfuscated, events_attended) > -1; })
.attr("class", function(d) { return (d.isFull ? 'is-full' : 'not-full') + " " + (this.visible ? "is-visible" : "not-visible")})
.classed("lato", true)
.html(function(d){ return d.render(d.distance); });
eventList.exit().remove();
//add a highlighted marker
function addhighlightedMarker(lat,lon){
var highlightedMarker = new L.circleMarker([lat,lon],{radius: 5, color: '#ea504e', fillColor: '#1462A2', opacity: 0.8, fillOpacity: 0.7, weight: 2}).addTo(centralMap);
// event listener to remove highlighted markers
$(".not-full").mouseout(function(){
centralMap.removeLayer(highlightedMarker)
})
}
// event listener to get the mouseover
$(".not-full" ).mouseover(function(){
$(this).toggleClass("highlight")
var cMarkerLat = $(this).children('div').attr('lat')
var cMarkerLon = $(this).children('div').attr('lon')
// function call to add highlighted marker
addhighlightedMarker(cMarkerLat,cMarkerLon);
})
//Push all full items to end of list
$("div#event-list-container ul#event-list li.is-full").appendTo("div#event-list-container ul#event-list");
//Move campaign offices to
var officeCount = $("div#event-list-container ul#event-list li .campaign-office").length;
$("#hide-show-office").attr("data-count", officeCount);
$("#campaign-off-count").text(officeCount);
$("section#campaign-offices ul#campaign-office-list *").remove();
$("div#event-list-container ul#event-list li .campaign-office").parent().appendTo("section#campaign-offices ul#campaign-office-list");
};
module.toMapView = function () {
$("body").removeClass("list-view").addClass("map-view");
centralMap.invalidateSize();
centralMap._onResize();
}
module.toListView = function () {
$("body").removeClass("map-view").addClass("list-view");
}
module.getMap = function() {
return centralMap;
}
return module;
});
})(jQuery, d3, L);
var VotingInfoManager = (function($) {
return (function(votingInfo) {
var votingInfo = votingInfo;
var module = {};
function buildRegistrationMessage(state) {
var $msg = $("<div class='registration-msg'/>").append($("<h3/>").text("Registration deadline: " + moment(new Date(state.registration_deadline)).format("MMM D")))
.append($("<p />").html(state.name + " has <strong>" + state.is_open + " " + state.type + "</strong>. " + state.you_must))
.append($("<p />").html("Find out where and how to register at <a target='_blank' href='https://vote.berniesanders.com/" + state.state + "'>vote.berniesanders.com</a>"))
return $msg;
}
function buildPrimaryInfo(state) {
var $msg = $("<div class='registration-msg'/>").append($("<h3/>").text("Primary day: " + moment(new Date(state.voting_day)).format("MMM D")))
.append($("<p />").html(state.name + " has <strong>" + state.is_open + " " + state.type + "</strong>. " + state.you_must))
.append($("<p />").html("Find out where and how to vote at <a target='_blank' href='https://vote.berniesanders.com/" + state.state + "'>vote.berniesanders.com</a>"))
return $msg;
}
function buildCaucusInfo(state) {
var $msg = $("<div class='registration-msg'/>").append($("<h3/>").text("Caucus day: " + moment(new Date(state.voting_day)).format("MMM D")))
.append($("<p />").html(state.name + " has <strong>" + state.is_open + " " + state.type + "</strong>. " + state.you_must))
.append($("<p />").html("Find out where and how to caucus at <a target='_blank' href='https://vote.berniesanders.com/" + state.state + "'>vote.berniesanders.com</a>"))
return $msg;
}
module.getInfo = function(state) {
var targetState = votingInfo.filter(function(d) { return d.state == state })[0]; //return first
if(!targetState) return null;
var today = new Date();
today.setDate(today.getDate() - 1);
if(today <= new Date(targetState.registration_deadline)) {
return buildRegistrationMessage(targetState);
} else if (today <= new Date(targetState.voting_day)) {
if (targetState.type == "primaries") {
return buildPrimaryInfo(targetState);
} else { //
return buildCaucusInfo(targetState);
}
} else {
return null;
}
}
return module;
});
})(jQuery);
// More events
(function($) {
$(document).on("click", function(event, params) {
$(".event-rsvp-activity").hide();
});
$(document).on("click", ".rsvp-link, .event-rsvp-activity", function(event, params) {
event.stopPropagation();
});
//Show email
$(document).on("show-event-form", function(events, target) {
var form = $(target).closest(".event-item").find(".event-rsvp-activity");
if (Cookies.get('map.bernie.email')) {
form.find("input[name=email]").val(Cookies.get('map.bernie.email'));
}
if (Cookies.get('map.bernie.phone')) {
form.find("input[name=phone]").val(Cookies.get('map.bernie.phone'));
}
// var params = $.deparam(window.location.hash.substring(1) || "");
// form.find("input[name=zipcode]").val(params.zipcode ? params.zipcode : Cookies.get('map.bernie.zipcode'));
form.fadeIn(100);
});
$(document).on("submit", "form.event-form", function() {
var query = $.deparam($(this).serialize());
var params = $.deparam(window.location.hash.substring(1) || "");
query['zipcode'] = params['zipcode'] || query['zipcode'];
var $error = $(this).find(".event-error");
var $container = $(this).closest(".event-rsvp-activity");
if (query['has_shift'] == 'true' && (!query['shift_id'] || query['shift_id'].length == 0)) {
$error.text("You must pick a shift").show();
return false;
}
var shifts = null;
var guests = 0;
if (query['shift_id']) {
shifts = query['shift_id'].join();
}
if (!query['phone'] || query['phone'] == '') {
$error.text("Phone number is required").show();
return false;
}
if (!query['email'] || query['email'] == '') {
$error.text("Email is required").show();
return false;
}
if (!query['email'].toUpperCase().match(/[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,}$/)) {
$error.text("Please input valid email").show();
return false;
}
// if (!query['name'] || query['name'] == "") {
// $error.text("Please include your name").show();
// return false;
// }
$(this).find(".event-error").hide();
var $this = $(this)
$.ajax({
type: 'POST',
url: 'https://organize.berniesanders.com/events/add-rsvp',
// url: 'https://bernie-ground-control-staging.herokuapp.com/events/add-rsvp',
crossDomain: true,
dataType: 'json',
data: {
// name: query['name'],
phone: query['phone'],
email: query['email'],
zip: query['zipcode'],
shift_ids: shifts,
event_id_obfuscated: query['id_obfuscated']
},
success: function(data) {
Cookies.set('map.bernie.zipcode', query['zipcode'], {expires: 7});
Cookies.set('map.bernie.email', query['email'], {expires: 7});
Cookies.set('map.bernie.name', query['name'], {expires: 7});
if (query['phone'] != '') {
Cookies.set('map.bernie.phone', query['phone'], {expires: 7});
}
//Storing the events joined
var events_joined = JSON.parse(Cookies.get('map.bernie.eventsJoined.' + query['email']) || "[]") || [];
events_joined.push(query['id_obfuscated']);
Cookies.set('map.bernie.eventsJoined.' + query['email'], events_joined, {expires: 7});
$this.closest("li").attr("data-attending", true);
$this.html("<h4 style='border-bottom: none'>RSVP Successful! Thank you for joining to this event!</h4>");
$container.delay(1000).fadeOut('fast')
}
})
return false;
});
})(jQuery);<|fim▁end|> | // eventsList = originalEventList.filter(function(eventItem) {
// var unmatch = $(eventItem.properties.filters).not(filters); |
<|file_name|>ListGroupItem.d.ts<|end_file_name|><|fim▁begin|>import * as React from 'react';
import { CSSModule } from './index';
export interface ListGroupItemProps extends React.HTMLAttributes<HTMLElement> {
[key: string]: any;
tag?: React.ElementType;
active?: boolean;
disabled?: boolean;
color?: string;
action?: boolean;<|fim▁hole|> cssModule?: CSSModule;
href?: string;
}
declare class ListGroupItem extends React.Component<ListGroupItemProps> {}
export default ListGroupItem;<|fim▁end|> | |
<|file_name|>benches.rs<|end_file_name|><|fim▁begin|>#![feature(plugin,test)]
#![plugin(power_assert)]
extern crate ip2location;
extern crate test;
use ip2location::Ip2Location;<|fim▁hole|>use test::Bencher;
#[bench] fn ip2country (bencher: &mut Bencher) {
let ip2location = Ip2Location::open ("/usr/share/ip2location/IP2LOCATION-LITE-DB1.BIN") .expect ("!open");
bencher.iter (|| {
assert_eq! (ip2location.ip2country ("8.8.8.8") .expect ("!ip2country"), Some (*b"US"));})}
#[bench] fn location (bencher: &mut Bencher) {
let ip2location = Ip2Location::open ("/usr/share/ip2location/IP-COUNTRY-REGION-CITY-LATITUDE-LONGITUDE-SAMPLE.BIN") .expect ("!open");
bencher.iter (|| {
let location = ip2location.location ("8.8.8.8") .expect ("!location") .expect ("!8.8.8.8");
power_assert! (location.longitude.round() == -122.0);})}<|fim▁end|> | |
<|file_name|>parserTest.py<|end_file_name|><|fim▁begin|>import unittest
from rmgpy.molecule.molecule import Molecule
from .parser import *
class ParserTest(unittest.TestCase):
def test_fromAugmentedInChI(self):
aug_inchi = 'InChI=1S/CH4/h1H4'
mol = fromAugmentedInChI(Molecule(), aug_inchi)
self.assertTrue(not mol.InChI == '')
aug_inchi = 'InChI=1/CH4/h1H4'
mol = fromAugmentedInChI(Molecule(), aug_inchi)
self.assertTrue(not mol.InChI == '')
def test_toRDKitMol(self):
"""
Test that toRDKitMol returns correct indices and atom mappings.
"""
bondOrderDict = {'SINGLE':'S','DOUBLE':'D','TRIPLE':'T','AROMATIC':'B'}
mol = fromSMILES(Molecule(), 'C1CCC=C1C=O')
rdkitmol, rdAtomIndices = mol.toRDKitMol(removeHs=False, returnMapping=True, sanitize=True)
for atom in mol.atoms:
# Check that all atoms are found in mapping
self.assertTrue(atom in rdAtomIndices)
# Check that all bonds are in rdkitmol with correct mapping and order
for connectedAtom, bond in atom.bonds.iteritems():
bondType = str(rdkitmol.GetBondBetweenAtoms(rdAtomIndices[atom],rdAtomIndices[connectedAtom]).GetBondType())
rdkitBondOrder = bondOrderDict[bondType]
self.assertEqual(bond.order, rdkitBondOrder)
# Test for removeHs = True
rdkitmol2, rdAtomIndices2 = mol.toRDKitMol(removeHs=True, returnMapping=True, sanitize=True)
for atom in mol.atoms:
# Check that all non-hydrogen atoms are found in mapping
if atom.symbol != 'H':<|fim▁hole|> # Check that all bonds connected to non-hydrogen have the correct mapping and order
for connectedAtom, bond in atom.bonds.iteritems():
if connectedAtom.symbol != 'H':
bondType = str(rdkitmol.GetBondBetweenAtoms(rdAtomIndices[atom],rdAtomIndices[connectedAtom]).GetBondType())
rdkitBondOrder = bondOrderDict[bondType]
self.assertEqual(bond.order, rdkitBondOrder)
class ResetLonePairsTest(unittest.TestCase):
def test_Methane(self):
smi = 'C'
mol = Molecule().fromSMILES(smi)
p_indices = []
reset_lone_pairs(mol, p_indices)
for at in mol.atoms:
self.assertEquals(at.lonePairs, 0)
def test_SingletMethylene(self):
adjlist = """
multiplicity 1
1 C u0 p1 c0 {2,S} {3,S}
2 H u0 p0 c0 {1,S}
3 H u0 p0 c0 {1,S}
"""
mol = Molecule().fromAdjacencyList(adjlist)
p_indices = [1]
reset_lone_pairs(mol, p_indices)
for at in mol.atoms:
if at.symbol == 'C':
self.assertEquals(at.lonePairs, 1)
else:
self.assertEquals(at.lonePairs, 0)<|fim▁end|> | self.assertTrue(atom in rdAtomIndices) |
<|file_name|>OglofusProtectionStaff.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2014-2015 Nikos Grammatikos
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://raw.githubusercontent.com/nikosgram13/OglofusProtection/master/LICENSE
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.nikosgram.oglofus.protection;
import com.google.common.base.Optional;
import com.sk89q.intake.argument.ArgumentException;
import com.sk89q.intake.argument.ArgumentParseException;
import com.sk89q.intake.argument.CommandArgs;
import com.sk89q.intake.parametric.ProvisionException;
import me.nikosgram.oglofus.protection.api.ActionResponse;
import me.nikosgram.oglofus.protection.api.CommandExecutor;
import me.nikosgram.oglofus.protection.api.entity.User;
import me.nikosgram.oglofus.protection.api.message.MessageType;
import me.nikosgram.oglofus.protection.api.region.ProtectionRank;
import me.nikosgram.oglofus.protection.api.region.ProtectionRegion;
import me.nikosgram.oglofus.protection.api.region.ProtectionStaff;
import org.apache.commons.lang3.ClassUtils;
import org.spongepowered.api.entity.player.Player;
import org.spongepowered.api.service.user.UserStorage;
import org.spongepowered.api.util.command.CommandSource;
import javax.annotation.Nullable;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;<|fim▁hole|>import java.util.List;
import java.util.Map;
import java.util.UUID;
public class OglofusProtectionStaff implements ProtectionStaff {
private final List<User> staff = new ArrayList<User>();
private final Map<UUID, ProtectionRank> ranks = new HashMap<UUID, ProtectionRank>();
private final User owner;
private final ProtectionRegion region;
private final OglofusSponge sponge;
protected OglofusProtectionStaff(ProtectionRegion region, OglofusSponge sponge) {
this.region = region;
this.sponge = sponge;
owner = sponge.getUserManager().getUser(UUID.fromString(sponge.connector.getString(
"oglofus_regions", "uuid", region.getUuid().toString(), "owner"
).get())).get();
Map<String, String> staff = sponge.connector.getStringMap(
"oglofus_regions", "uuid", region.getUuid().toString(), new String[]{"player", "rank"}
);
for (String uid : staff.keySet()) {
UUID uuid = UUID.fromString(uid);
this.staff.add(sponge.getUserManager().getUser(uuid).get());
ranks.put(uuid, ProtectionRank.valueOf(staff.get(uid)));
}
}
@Override
public UUID getOwnerUuid() {
return owner.getUuid();
}
@Override
public User getOwner() {
return owner;
}
@Override
@SuppressWarnings("unchecked")
public <T> Optional<T> getOwnerAs(Class<T> tClass) {
if (ClassUtils.isAssignable(tClass, Player.class)) {
return (Optional<T>) sponge.server.getPlayer(owner.getUuid());
} else if (ClassUtils.isAssignable(tClass, User.class)) {
UserStorage storage;
if ((storage = sponge.game.getServiceManager().provide(UserStorage.class).orNull()) !=
null) {
return (Optional<T>) storage.get(owner.getUuid()).orNull();
}
}
return Optional.absent();
}
@Override
@SuppressWarnings("unchecked")
public <T> Collection<T> getOfficersAs(Class<T> tClass) {
List<T> returned = new ArrayList<T>();
if (ClassUtils.isAssignable(tClass, Player.class)) {
for (UUID uuid : getOfficersUuid()) {
Player player;
if ((player = sponge.server.getPlayer(uuid).orNull()) != null) {
returned.add((T) player);
}
}
}
return returned;
}
@Override
public Collection<UUID> getOfficersUuid() {
List<UUID> returned = new ArrayList<UUID>();
for (User user : getOfficers()) {
returned.add(user.getUuid());
}
return returned;
}
@Override
public Collection<User> getOfficers() {
List<User> returned = new ArrayList<User>();
for (User user : this) {
if (ranks.get(user.getUuid()).equals(ProtectionRank.Officer)) {
returned.add(user);
}
}
return returned;
}
@Override
@SuppressWarnings("unchecked")
public <T> Collection<T> getMembersAs(Class<T> tClass) {
List<T> returned = new ArrayList<T>();
if (ClassUtils.isAssignable(tClass, Player.class)) {
for (UUID uuid : getMembersUuid()) {
Player player;
if ((player = sponge.server.getPlayer(uuid).orNull()) != null) {
returned.add((T) player);
}
}
}
return returned;
}
@Override
public Collection<UUID> getMembersUuid() {
List<UUID> returned = new ArrayList<UUID>();
for (User user : getMembers()) {
returned.add(user.getUuid());
}
return returned;
}
@Override
public Collection<User> getMembers() {
List<User> returned = new ArrayList<User>();
for (User user : this) {
if (ranks.get(user.getUuid()).equals(ProtectionRank.Member)) {
returned.add(user);
}
}
return returned;
}
@Override
@SuppressWarnings("unchecked")
public <T> Collection<T> getStaffAs(Class<T> tClass) {
List<T> returned = new ArrayList<T>();
if (ClassUtils.isAssignable(tClass, Player.class)) {
for (User user : this) {
Player player;
if ((player = sponge.server.getPlayer(user.getUuid()).orNull()) != null) {
returned.add((T) player);
}
}
}
return returned;
}
@Override
public Collection<UUID> getStaffUuid() {
Collection<UUID> returned = new ArrayList<UUID>();
for (User user : this) {
returned.add(user.getUuid());
}
return returned;
}
@Override
public boolean isOwner(UUID target) {
return owner.getUuid().equals(target);
}
@Override
public boolean isOwner(User target) {
return owner.getUuid().equals(target.getUuid());
}
@Override
public boolean isOfficer(UUID target) {
return ranks.containsKey(target) && ranks.get(target).equals(ProtectionRank.Officer);
}
@Override
public boolean isOfficer(User target) {
return ranks.containsKey(target.getUuid()) && ranks.get(target.getUuid()).equals(ProtectionRank.Officer);
}
@Override
public boolean isMember(UUID target) {
return ranks.containsKey(target) && ranks.get(target).equals(ProtectionRank.Member);
}
@Override
public boolean isMember(User target) {
return ranks.containsKey(target.getUuid()) && ranks.get(target.getUuid()).equals(ProtectionRank.Member);
}
@Override
public boolean isStaff(UUID target) {
return ranks.containsKey(target);
}
@Override
public boolean isStaff(User target) {
return ranks.containsKey(target.getUuid());
}
@Override
public boolean hasOwnerAccess(UUID target) {
return isOwner(target) || sponge.getUserManager().getUser(target).get().hasPermission("oglofus.protection.bypass.owner");
}
@Override
public boolean hasOwnerAccess(User target) {
return isOwner(target) || target.hasPermission("oglofus.protection.bypass.owner");
}
@Override
public boolean hasOfficerAccess(UUID target) {
return isOfficer(target) || sponge.getUserManager().getUser(target).get().hasPermission("oglofus.protection.bypass.officer");
}
@Override
public boolean hasOfficerAccess(User target) {
return isOfficer(target) || target.hasPermission("oglofus.protection.bypass.officer");
}
@Override
public boolean hasMemberAccess(UUID target) {
return isMember(target) || sponge.getUserManager().getUser(target).get().hasPermission("oglofus.protection.bypass.officer");
}
@Override
public boolean hasMemberAccess(User target) {
return isMember(target) || target.hasPermission("oglofus.protection.bypass.member");
}
@Override
public ProtectionRank getRank(UUID target) {
return ranks.containsKey(target) ? ranks.get(target) : ProtectionRank.None;
}
@Override
public ProtectionRank getRank(User target) {
return ranks.containsKey(target.getUuid()) ? ranks.get(target.getUuid()) : ProtectionRank.None;
}
@Override
public void broadcast(String message) {
broadcast(MessageType.CHAT, message);
}
@Override
public void broadcast(String message, ProtectionRank rank) {
broadcast(MessageType.CHAT, message, rank);
}
@Override
public void broadcast(MessageType type, String message) {
for (User user : this) {
user.sendMessage(type, message);
}
}
@Override
public void broadcast(MessageType type, String message, ProtectionRank rank) {
switch (rank) {
case Member:
for (User user : getMembers()) {
user.sendMessage(type, message);
}
break;
case Officer:
for (User user : getOfficers()) {
user.sendMessage(type, message);
}
break;
case Owner:
owner.sendMessage(type, message);
break;
}
}
@Override
public void broadcastRaw(Object message) {
for (User user : this) {
user.sendMessage(message);
}
}
@Override
public void broadcastRaw(Object message, ProtectionRank rank) {
switch (rank) {
case Member:
for (User user : getMembers()) {
user.sendMessage(message);
}
break;
case Officer:
for (User user : getOfficers()) {
user.sendMessage(message);
}
break;
case Owner:
owner.sendMessage(message);
break;
}
}
@Override
public void broadcastRaw(MessageType type, Object message) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public void broadcastRaw(MessageType type, Object message, ProtectionRank rank) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public ActionResponse reFlag() {
//TODO: make it.
return null;
}
@Override
public ActionResponse invite(Object sender, UUID target) {
return sponge.getUserManager().invite(sender, target, region);
}
@Override
public ActionResponse invite(CommandExecutor sender, UUID target) {
return null;
}
@Override
public ActionResponse invite(Object sender, User target) {
return null;
}
@Override
public ActionResponse invite(CommandExecutor sender, User target) {
return null;
}
@Override
public ActionResponse invite(UUID target) {
return sponge.getUserManager().invite(target, region);
}
@Override
public ActionResponse invite(User target) {
return null;
}
@Override
public ActionResponse kick(Object sender, UUID target) {
if (sender instanceof CommandSource) {
if (sender instanceof Player) {
if (region.getProtectionStaff().hasOwnerAccess(((Player) sender).getUniqueId())) {
//TODO: call the handler PlayerKickHandler.
return kick(target);
}
return ActionResponse.Failure.setMessage("access");
}
if (((CommandSource) sender).hasPermission("oglofus.protection.bypass")) {
return kick(target);
}
return ActionResponse.Failure.setMessage("access");
}
return ActionResponse.Failure.setMessage("object");
}
@Override
public ActionResponse kick(CommandExecutor sender, UUID target) {
return null;
}
@Override
public ActionResponse kick(Object sender, User target) {
return null;
}
@Override
public ActionResponse kick(CommandExecutor sender, User target) {
return null;
}
@Override
public ActionResponse kick(UUID target) {
//TODO: call the handler PlayerKickHandler.
return null;
}
@Override
public ActionResponse kick(User target) {
return null;
}
@Override
public ActionResponse promote(Object sender, UUID target) {
return null;
}
@Override
public ActionResponse promote(CommandExecutor sender, UUID target) {
return null;
}
@Override
public ActionResponse promote(Object sender, User target) {
return null;
}
@Override
public ActionResponse promote(CommandExecutor sender, User target) {
return null;
}
@Override
public ActionResponse promote(UUID target) {
return null;
}
@Override
public ActionResponse promote(User target) {
return null;
}
@Override
public ActionResponse demote(Object sender, UUID target) {
return null;
}
@Override
public ActionResponse demote(CommandExecutor sender, UUID target) {
return null;
}
@Override
public ActionResponse demote(Object sender, User target) {
return null;
}
@Override
public ActionResponse demote(CommandExecutor sender, User target) {
return null;
}
@Override
public ActionResponse demote(UUID target) {
return null;
}
@Override
public ActionResponse demote(User target) {
return null;
}
@Override
public ActionResponse changeRank(Object sender, UUID target, ProtectionRank rank) {
return null;
}
@Override
public ActionResponse changeRank(CommandExecutor sender, UUID target, ProtectionRank rank) {
return null;
}
@Override
public ActionResponse changeRank(Object sender, User target, ProtectionRank rank) {
return null;
}
@Override
public ActionResponse changeRank(CommandExecutor sender, User target, ProtectionRank rank) {
return null;
}
@Override
public ActionResponse changeRank(UUID target, ProtectionRank rank) {
return null;
}
@Override
public ActionResponse changeRank(User target, ProtectionRank rank) {
return null;
}
@Override
public Iterator<User> iterator() {
return staff.iterator();
}
@Override
public boolean isProvided() {
return false;
}
@Nullable
@Override
public User get(CommandArgs arguments, List<? extends Annotation> modifiers) throws ArgumentException, ProvisionException {
String name = arguments.next();
Optional<User> user = sponge.getUserManager().getUser(name);
if (user.isPresent() && isStaff(user.get())) {
return user.get();
} else {
throw new ArgumentParseException(String.format("I can't find the Staff with name '%s'.", name));
}
}
@Override
public List<String> getSuggestions(String prefix) {
List<String> returned = new ArrayList<String>();
for (User user : this) {
if (user.getName().startsWith(prefix)) {
returned.add(user.getName());
}
}
return returned;
}
}<|fim▁end|> | import java.util.Iterator; |
<|file_name|>access.py<|end_file_name|><|fim▁begin|>"""Accessors for Amber TI datasets.
"""
from os.path import dirname, join
from glob import glob
from .. import Bunch
def load_bace_improper():
"""Load Amber Bace improper solvated vdw example
Returns
-------
data: Bunch
Dictionary-like object, the interesting attributes are:
- 'data' : the data files for improper solvated vdw alchemical leg
"""
module_path = dirname(__file__)
data = {'vdw': glob(join(module_path, 'bace_improper/solvated/vdw/*/ti-*.out.bz2'))}
with open(join(module_path, 'bace_improper', 'descr.rst')) as rst_file:
fdescr = rst_file.read()
return Bunch(data=data,
DESCR=fdescr)
def load_bace_example():
"""Load Amber Bace example perturbation.
Returns
-------
data: Bunch
Dictionary-like object, the interesting attributes are:
- 'data' : the data files by system and alchemical leg
"""
module_path = dirname(__file__)
data = {'complex':
{'decharge': glob(join(module_path, 'bace_CAT-13d~CAT-17a/complex/decharge/*/ti-*.out.bz2')),
'recharge': glob(join(module_path, 'bace_CAT-13d~CAT-17a/complex/recharge/*/ti-*.out.bz2')),
'vdw': glob(join(module_path, 'bace_CAT-13d~CAT-17a/complex/vdw/*/ti-*.out.bz2'))
},
'solvated':
{'decharge': glob(join(module_path, 'bace_CAT-13d~CAT-17a/solvated/decharge/*/ti-*.out.bz2')),
'recharge': glob(join(module_path, 'bace_CAT-13d~CAT-17a/solvated/recharge/*/ti-*.out.bz2')),
'vdw': glob(join(module_path, 'bace_CAT-13d~CAT-17a/solvated/vdw/*/ti-*.out.bz2'))
}
}
with open(join(module_path, 'bace_CAT-13d~CAT-17a', 'descr.rst')) as rst_file:
fdescr = rst_file.read()
return Bunch(data=data,
DESCR=fdescr)
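# Usage sketch (added for illustration, not part of the original module):
#
#     bace = load_bace_example()
#     vdw_files = bace.data['complex']['vdw']   # list of ti-*.out.bz2 paths
#     print(bace.DESCR)
#
# The 'complex'/'solvated' and 'decharge'/'recharge'/'vdw' keys mirror the dict
# built above; Bunch is dictionary-like, per the docstring, so attribute access
# on 'data' and 'DESCR' is assumed here.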
def load_simplesolvated():
"""Load the Amber solvated dataset.
Returns
-------
data : Bunch
Dictionary-like object, the interesting attributes are:
- 'data' : the data files by alchemical leg
- 'DESCR': the full description of the dataset
"""
module_path = dirname(__file__)
data = {'charge': glob(join(module_path, 'simplesolvated/charge/*/ti-*.out')),
'vdw': glob(join(module_path, 'simplesolvated/vdw/*/ti-*.out'))}
with open(join(module_path, 'simplesolvated', 'descr.rst')) as rst_file:
fdescr = rst_file.read()
return Bunch(data=data,
DESCR=fdescr)
def load_invalidfiles():
"""Load the invalid files.
Returns<|fim▁hole|> data : Bunch
Dictionary-like object, the interesting attributes are:
- 'data' : the example of invalid data files
- 'DESCR': the full description of the dataset
"""
module_path = dirname(__file__)
data = [glob(join(module_path, 'invalidfiles/*.out.bz2'))]
with open(join(module_path, 'invalidfiles', 'descr.rst')) as rst_file:
fdescr = rst_file.read()
return Bunch(data=data,
DESCR=fdescr)<|fim▁end|> | ------- |
<|file_name|>size.py<|end_file_name|><|fim▁begin|># ============================================================================
# FILE: size.py
# AUTHOR: Shougo Matsushita <Shougo.Matsu at gmail.com>
# License: MIT license
# ============================================================================
from defx.base.column import Base, Highlights
from defx.context import Context
from defx.util import Nvim, readable, Candidate
import typing
class Column(Base):
def __init__(self, vim: Nvim) -> None:
super().__init__(vim)
self.name = 'size'
self.has_get_with_highlights = True
self._length = 9
def get_with_highlights(
self, context: Context, candidate: Candidate
) -> typing.Tuple[str, Highlights]:
path = candidate['action__path']
if not readable(path) or path.is_dir():
return (' ' * self._length, [])
size = self._get_size(path.stat().st_size)
text = '{:>6s}{:>3s}'.format(size[0], size[1])
return (text, [(self.highlight_name, self.start, self._length)])
def _get_size(self, size: float) -> typing.Tuple[str, str]:
multiple = 1024
suffixes = ['KB', 'MB', 'GB', 'TB']<|fim▁hole|> if size < multiple:
return (str(size), 'B')
for suffix in suffixes:
size /= multiple
if size < multiple:
return ('{:.1f}'.format(size), suffix)
return ('INF', '')
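    # Note added for clarity (illustration only): _get_size renders byte counts for
    # the column, e.g. 512 -> ('512', 'B') and 2048 -> ('2.0', 'KB'); sizes of a
    # pebibyte (1024 TB) or more fall through to ('INF', '').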
def length(self, context: Context) -> int:
return self._length
def highlight_commands(self) -> typing.List[str]:
commands: typing.List[str] = []
commands.append(
f'highlight default link {self.highlight_name} Constant')
return commands<|fim▁end|> | |
<|file_name|>TrueRangeValueSource.java<|end_file_name|><|fim▁begin|>/*
###############################
# Copyright (C) 2012 Jon Schang
#
# This file is part of jSchangLib, released under the LGPLv3
#
# jSchangLib is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# jSchangLib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with jSchangLib. If not, see <http://www.gnu.org/licenses/>.
###############################
*/
package com.jonschang.investing.valuesource;
import com.jonschang.investing.model.*;
/**
 * determine the True Range of the stock period
* @author schang
*/
@SuppressWarnings(value={"unchecked"})
public class TrueRangeValueSource<Q extends Quote<I>,I extends Quotable>
extends AbstractQuoteValueSource<Q,I> {
/**
     * the true range of a stock requires only the last two quotes
*/
public int getPeriods() {
return 2;
}
/**
     * get the true range of the current quote: the largest of (high - low),
     * (high - previous close) and (low - previous close)
*/
public double getValue() throws TooFewQuotesException
{
if( this.quotes.size()<2 )
throw new TooFewQuotesException("there were too few quotes: "+this.quotes.size()+" available, 2 quotes needed.");
Quote today = this.quotes.get( this.quotes.size()-1 );
Quote yesterday = this.quotes.get( this.quotes.size()-2 );
double diffCurHighCurLow = today.getPriceHigh() - today.getPriceLow();
double diffCurHighPrevClose = today.getPriceHigh() - yesterday.getPriceClose();
double diffCurLowPrevClose = today.getPriceLow() - yesterday.getPriceClose();
double atr = 0;
atr = diffCurHighCurLow > atr ? diffCurHighCurLow : atr;<|fim▁hole|> }
}<|fim▁end|> | atr = diffCurHighPrevClose > atr ? diffCurHighPrevClose : atr;
atr = diffCurLowPrevClose > atr ? diffCurLowPrevClose : atr;
return atr; |
<|file_name|>visuals.js<|end_file_name|><|fim▁begin|>var map, layer, districtLayer;
var projectAPIKey = 'AIzaSyDdCELFax8-q-dUCHt9hn5Fbf_7ywY6yvA';// Personal Account
// var citiesTableID = '1CU4KNOJYGWoCkUZWrxgvleGq-k6PFFUO6qfqTCid';
var citiesTableID = '1cKfYbbWs6JJujJPk-lJfdBLWVaRRSMxfXNx6K6_y';
var districtsTableID = '1BYUolX-kQGfeEckoXMMBDk1Xh2llj8dhf-XpzJ7i';
var attributeNameX = "M-Achievement (percentage)";
var attributeNameY = "ULB Name";
var mode = "grade";
var districtName = "ANANTAPUR";
var regionName = "ANANTAPUR";
var gradeName = "G3";
var chartType = "ColumnChart";
var vizType = "split";
var sortType = "";
var unitOfIndicator = "";
var dataTable;
var tooltipValue, tooltipValueCol;
var centerAfterQuery, zoomAfterQuery;
var markers = [];
var chartInfoBubble1;
var chartInfoBubble2;
var chartInfoMarker;
var searchBarULBs;
var COLUMN_STYLES = {};
var timer;
var mainIndic = 1,
subMainIndic = 0;
var placeChartQuery = "";
var cumulativeValues = [];
var globalBucket;
var titleText = "";
var reportTitleText = "";
var generateReportQuery = "";
var multiSeries = false;
var subIndicators = [];
var overallSelected = false;
function initialize() {
var eGovAttributeList = [
"UPM-Target",
"UPM-Achievement",
"UPM-Achievement (percentage)",
"UPM-Marks",
"M-Target",
"M-Achievement",
"M-Achievement (percentage)",
"M-Marks",
"C-Target",
"C-Achievement",
"C-Achievement (percentage)",
"C-Marks",
"UPM-Rank",
"M-Rank",
"C-Rank",
"Annual Target"
];
var mapStyles3 = [{
"featureType": "road",
"stylers": [{
"visibility": "off"
}]
}, {
"featureType": "poi",
"stylers": [{
"visibility": "off"
}]
}, {
"featureType": "administrative",
"stylers": [{
"visibility": "off"
}]
}];
var styleOptions2 = [{
'min': 0,
'max': 100,
// 'color': '#FF1700',
'color': '#e74c3c',
'opacity': 1
}, {
'min': 100,
'max': 200,
// 'color': '#FFC200',
'color': '#f1c40f',
'opacity': 1
}, {
'min': 200,
'max': 500,
// 'color': '#27E833',
'color': '#2ecc71',
'opacity': 1
}];
for (var i = 0; i < eGovAttributeList.length; i++) {
COLUMN_STYLES[eGovAttributeList[i]] = styleOptions2;
}
var styledMap = new google.maps.StyledMapType(mapStyles3, {
name: "Styled Map"
});
var andhra = new google.maps.LatLng(16.0000, 80.6400);
map = new google.maps.Map(document.getElementById('map-canvas'), {
center: andhra,
scrollwheel: false,
zoomControl: true,
zoomControlOptions: {
style: google.maps.ZoomControlStyle.LARGE,
position: google.maps.ControlPosition.LEFT_CENTER
},
streetViewControl: false,
mapTypeControl: false,
disableDefaultUI: true,
mapTypeId: google.maps.MapTypeId.TERRAIN,
zoom: 6
});
// map.mapTypes.set('map_style_2', styledMap);
// map.setMapTypeId('map_style_2');
zoomAfterQuery = 6;
centerAfterQuery = andhra;
chartInfoBubble1 = new InfoBubble({
map: map,
shadowStyle: 1,
padding: 10,
backgroundColor: 'rgb(255,255,255)',
borderRadius: 0,
arrowSize: 25,
minWidth: 300,
borderWidth: 0,
borderColor: '#2c2c2c',
disableAutoPan: false,
hideCloseButton: false,
arrowPosition: 50,
arrowStyle: 0
});
chartInfoBubble2 = new InfoBubble({
map: map,
shadowStyle: 1,
padding: 10,
backgroundColor: 'rgb(255,255,255)',
borderRadius: 0,
arrowSize: 25,
minWidth: 300,
borderWidth: 0,
borderColor: '#2c2c2c',
disableAutoPan: false,
hideCloseButton: false,
arrowPosition: 50,
arrowStyle: 0
});
chartInfoBubble1 = new InfoBubble({
minHeight: 160
});
chartInfoBubble2 = new InfoBubble({
minHeight: 225
});
chartInfoMarker = new google.maps.Marker({
map: map,
icon: 'http://chart.googleapis.com/chart?chst=d_map_pin_letter&chld=|0000FF'
});
layer = new google.maps.FusionTablesLayer({
query: {
select: 'Polygon',
from: citiesTableID
},
map: map,
suppressInfoWindows: false
});
districtLayer = new google.maps.FusionTablesLayer({
query: {
select: '\'Geocodable address\'',
from: districtsTableID
},
map: map,
styleId: 1,
suppressInfoWindows: false
});
google.maps.event.addListener(layer, 'click', function(e) {
var cityChosen = e.row['ULB Name'].value.toString();
e.infoWindowHtml = "<div ><font size='2'>"
e.infoWindowHtml += "<b>" + cityChosen + "</b><br>" + "District: <b>" + e.row['District'].value.toString() + "</b><br>" + "Region: <b>" + e.row['Region'].value.toString() + "</b>";
e.infoWindowHtml += "</font></div>";
});
layer.setMap(map);
//layer.setMap(null);
google.maps.event.addListener(districtLayer, 'click', function(e) {
var districtTooltip = e.row['DISTRICT_2011'].value.toString();
var regionTooltip = e.row['Region'].value.toString();
e.infoWindowHtml = "<div ><font size='2'>"
e.infoWindowHtml += "District: <b>" + districtTooltip + "</b><br>" + "Region: <b>" + regionTooltip + "</b>";
e.infoWindowHtml += "</font></div>";
});
//drawChart();
applyStyle(layer, attributeNameX);
}
function drawChart() {
if (gradeName == "regwise") {
if(attributeNameX == 'UPM-Achievement (percentage)')
{
attributeNameX = 'UPM-Marks';
}else if(attributeNameX == 'M-Achievement (percentage)')
{
attributeNameX = 'M-Marks';
}else if(attributeNameX == 'C-Achievement (percentage)')
{
attributeNameX = 'C-Marks';
}
}
if (chartType == "Table" || chartType == "PieChart") {
document.getElementById('chartControl').style.display = "none";
} else {
document.getElementById('chartControl').style.display = "block";
}
placeChartQuery = "";
var chartModeTitle = "All ULBs";
var cumulativeChartQuery = "";
if (chartType == "MultiChart") {
sortType = "ORDER BY '" + attributeNameY + "' ASC";
}
if (chartType == "MultiChart" && gradeName == 'regwise') {
sortType = "ORDER BY 'Region' ASC";
}else if(gradeName == 'regwise'){
sortType = "ORDER BY AVERAGE('" + attributeNameX + "') DESC";
}
if (mode == "city") {
placeChartQuery = "SELECT '" + attributeNameY + "','" + attributeNameX + "' FROM " + citiesTableID + " " + sortType;
chartModeTitle = "All ULBs";
layer.setOptions({
query: {
select: 'Polygon',
from: citiesTableID
}
});
districtLayer.setOptions({
query: {
select: '\'Geocodable address\'',
from: districtsTableID
}
});
}
if (mode == "district") {
chartModeTitle = "District: " + districtName;
placeChartQuery = "SELECT '" + attributeNameY + "', '" + attributeNameX + "' FROM " + citiesTableID + " WHERE 'District' = '" + districtName + "'" + sortType;
layer.setOptions({
query: {
select: 'Polygon',
from: citiesTableID,
where: "District = '" + districtName + "'"
}
});
districtLayer.setOptions({
query: {
select: '\'Geocodable address\'',
from: districtsTableID,
where: "'DISTRICT_2011' = '" + districtName + "'"
}
});
}
if (mode == "region") {
chartModeTitle = "Region: " + regionName;
placeChartQuery = "SELECT '" + attributeNameY + "', '" + attributeNameX + "' FROM " + citiesTableID + " WHERE 'Region' = '" + regionName + "'" + sortType;
layer.setOptions({
query: {
select: 'Polygon',
from: citiesTableID,
where: "Region = '" + regionName + "'"
}
});
districtLayer.setOptions({
query: {
select: '\'Geocodable address\'',
from: districtsTableID,
where: "'Region' = '" + regionName + "'"
}
});
}
var layerWhereClause;
if (mode == "grade") {
if (gradeName == "G1") {
placeChartQuery = "SELECT '" + attributeNameY + "', '" + attributeNameX + "' FROM " + citiesTableID + " WHERE 'Grade' IN ('Special','Selection')" + sortType;
chartModeTitle = "Grade: Special, Selection";
layerWhereClause = "'Grade' IN ('Special','Selection')";
} else if (gradeName == "G2") {
placeChartQuery = "SELECT '" + attributeNameY + "', '" + attributeNameX + "' FROM " + citiesTableID + " WHERE 'Grade' IN ('I','II')" + sortType;
chartModeTitle = "Grade: I, II";
layerWhereClause = "'Grade' IN ('I','II')";
} else if (gradeName == "G3") {
placeChartQuery = "SELECT '" + attributeNameY + "', '" + attributeNameX + "' FROM " + citiesTableID + " WHERE 'Grade' IN ('III','NP')" + sortType;
chartModeTitle = "Grade: III, NP";
layerWhereClause = "'Grade' IN ('III','NP')";
} else if (gradeName == "G4") {
placeChartQuery = "SELECT '" + attributeNameY + "', '" + attributeNameX + "' FROM " + citiesTableID + " WHERE 'Grade' = 'Corp'" + sortType;
chartModeTitle = "Grade: Corp";
layerWhereClause = "'Grade' = 'Corp'";
} else if (gradeName == "elevenulb") {
placeChartQuery = "SELECT '" + attributeNameY + "', '" + attributeNameX + "' FROM " + citiesTableID + " WHERE 'ULB Name' IN ('TIRUPATI','KURNOOL','VISAKHAPATNAM','SRIKAKULAM','GUNTUR','KAKINADA','NELLIMARLA','RAJAM NP','KANDUKUR','ONGOLE CORP.','RAJAMPET')" + sortType;
chartModeTitle = "11 ULBs";
layerWhereClause = "'ULB Name' IN ('TIRUPATI','KURNOOL','VISAKHAPATNAM','SRIKAKULAM','GUNTUR','KAKINADA','NELLIMARLA','RAJAM NP','KANDUKUR','ONGOLE CORP.','RAJAMPET')";
} else if (gradeName == "regwise") {
placeChartQuery = "SELECT 'Region', AVERAGE('" + attributeNameX + "') FROM " + citiesTableID + " WHERE 'Region' IN ('ANANTAPUR','GUNTUR','RAJAHMUNDRY','VISAKHAPATNAM') GROUP BY 'Region' " + sortType;
chartModeTitle = "Region-wise";
layerWhereClause = "'Region' IN ('ANANTAPUR','GUNTUR','RAJAHMUNDRY','VISAKHAPATNAM') GROUP BY 'Region' ";
} else {
placeChartQuery = "SELECT '" + attributeNameY + "', '" + attributeNameX + "' FROM " + citiesTableID + " WHERE 'Grade' = '" + gradeName + "'" + sortType;
chartModeTitle = "Grade: " + gradeName;
layerWhereClause = "'Grade' = '" + gradeName + "'";
}
layer.setOptions({
query: {
select: 'Polygon',
from: citiesTableID,
where: layerWhereClause
}
});
districtLayer.setOptions({
query: {
select: '\'Geocodable address\'',
from: districtsTableID
}
});
}
cumulativeChartQuery = placeChartQuery.substring(placeChartQuery.indexOf("FROM"));
if (gradeName == "regwise") {
cumulativeChartQuery = "SELECT 'Region', AVERAGE('C-Marks') " + cumulativeChartQuery;
}else{
cumulativeChartQuery = "SELECT '" + attributeNameY + "','C-Achievement (percentage)' " + cumulativeChartQuery;
}
generateReportQuery = placeChartQuery.substring(placeChartQuery.indexOf("FROM"), placeChartQuery.indexOf("ORDER"));
//console.log(generateReportQuery);
if (!overallSelected) {
if (chartType == "MultiChart") {
if(gradeName == 'regwise'){
generateReportQuery = "SELECT 'Region',AVERAGE('Annual Target'),AVERAGE('C-Target'),AVERAGE('C-Achievement'),AVERAGE('C-Achievement (percentage)'),AVERAGE('C-Marks'),AVERAGE('Max Marks'),AVERAGE('M-Target'),AVERAGE('M-Achievement'),AVERAGE('M-Achievement (percentage)'),AVERAGE('M-Marks') " + generateReportQuery + "ORDER BY 'Region' ASC ";
}else{
generateReportQuery = "SELECT '" + attributeNameY + "','Annual Target','C-Target','C-Achievement','C-Achievement (percentage)','C-Marks','Max Marks', 'C-Rank','M-Target','M-Achievement','M-Achievement (percentage)','M-Marks','M-Rank' " + generateReportQuery + " ORDER BY '" + attributeNameY + "' ASC";
}
multiSeries = true;
} else {
if(gradeName == 'regwise'){
if (attributeNameX.indexOf("C-") > -1) {
generateReportQuery = "SELECT 'Region',AVERAGE('Annual Target'),AVERAGE('C-Target'),AVERAGE('C-Achievement'),AVERAGE('C-Achievement (percentage)'),AVERAGE('C-Marks'),AVERAGE('Max Marks') " + generateReportQuery + "ORDER BY AVERAGE('" + attributeNameX + "') DESC ";
} else if (attributeNameX.indexOf("UPM-") > -1) {
generateReportQuery = "SELECT 'Region',AVERAGE('Annual Target'),AVERAGE('UPM-Target'),AVERAGE('UPM-Achievement'),AVERAGE('UPM-Achievement (percentage)'),AVERAGE('UPM-Marks'),AVERAGE('Max Marks') " + generateReportQuery + "ORDER BY AVERAGE('" + attributeNameX + "') DESC ";
} else if (attributeNameX.indexOf("M-") > -1) {
generateReportQuery = "SELECT 'Region',AVERAGE('Annual Target'),AVERAGE('M-Target'),AVERAGE('M-Achievement'),AVERAGE('M-Achievement (percentage)'),AVERAGE('M-Marks'),AVERAGE('Max Marks') " + generateReportQuery + "ORDER BY AVERAGE('" + attributeNameX + "') DESC ";
}
}else{
<|fim▁hole|> generateReportQuery = "SELECT '" + attributeNameY + "','Annual Target','UPM-Target','UPM-Achievement','UPM-Achievement (percentage)','UPM-Marks','Max Marks','UPM-Rank' " + generateReportQuery + " ORDER BY 'UPM-Rank' ASC";
} else if (attributeNameX.indexOf("M-") > -1) {
generateReportQuery = "SELECT '" + attributeNameY + "','Annual Target','M-Target','M-Achievement','M-Achievement (percentage)','M-Marks','Max Marks','M-Rank' " + generateReportQuery + " ORDER BY 'M-Rank' ASC";
}
}
}
} else {
if (chartType == "MultiChart") {
if(gradeName == 'regwise'){
generateReportQuery = "SELECT 'Region',AVERAGE('C-Marks'),AVERAGE('M-Marks'),AVERAGE('Max Marks') " + generateReportQuery + "ORDER BY 'Region' ASC "
}else{
generateReportQuery = "SELECT '" + attributeNameY + "','C-Marks','C-Rank','M-Marks','M-Rank','Max Marks' " + generateReportQuery + " ORDER BY '" + attributeNameY + "' ASC";
}
multiSeries = true;
} else {
if(gradeName == 'regwise'){
if (attributeNameX.indexOf("C-") > -1) {
generateReportQuery = "SELECT 'Region',AVERAGE('C-Marks'),AVERAGE('Max Marks') " + generateReportQuery + "ORDER BY AVERAGE('" + attributeNameX + "') DESC ";
} else if (attributeNameX.indexOf("UPM-") > -1) {
generateReportQuery = "SELECT 'Region',AVERAGE('UPM-Marks'),AVERAGE('Max Marks') " + generateReportQuery + "ORDER BY AVERAGE('" + attributeNameX + "') DESC ";
} else if (attributeNameX.indexOf("M-") > -1) {
generateReportQuery = "SELECT 'Region',AVERAGE('M-Marks'),AVERAGE('Max Marks') " + generateReportQuery + "ORDER BY AVERAGE('" + attributeNameX + "') DESC ";
}
}else{
if (attributeNameX.indexOf("C-") > -1) {
generateReportQuery = "SELECT '" + attributeNameY + "','C-Marks','Max Marks','C-Rank' " + generateReportQuery + " ORDER BY 'C-Rank' ASC";
} else if (attributeNameX.indexOf("UPM-") > -1) {
generateReportQuery = "SELECT '" + attributeNameY + "','UPM-Marks','Max Marks','UPM-Rank' " + generateReportQuery + " ORDER BY 'UPM-Rank' ASC";
} else if (attributeNameX.indexOf("M-") > -1) {
generateReportQuery = "SELECT '" + attributeNameY + "','M-Marks','Max Marks','M-Rank' " + generateReportQuery + " ORDER BY 'M-Rank' ASC";
}
}
}
}
var query = new google.visualization.Query('http://www.google.com/fusiontables/gvizdata?tq=');
//console.log('cumulativeChartQuery:'+cumulativeChartQuery);
query.setQuery(cumulativeChartQuery);
query.send(getCumulativeValues);
function getCumulativeValues(response) {
cumulativeValues = [];
//console.log('Error in query: ' + response.getMessage() + ' ' + response.getDetailedMessage());
//console.log('First Response is:'+JSON.stringify(response));
for (var i = 0; i < response.getDataTable().getNumberOfRows(); i++) {
cumulativeValues.push(response.getDataTable().getValue(i, 1));
}
var query = new google.visualization.Query('http://www.google.com/fusiontables/gvizdata?tq=');
// Apply query language statement.
//console.log('placeChartQuery:'+placeChartQuery);
query.setQuery(placeChartQuery);
// Send the query with a callback function.
query.send(handleQueryResponse);
}
function handleQueryResponse(response) {
/* if (response.isError()) {
alert('Error in query: ' + response.getMessage() + ' ' + response.getDetailedMessage());
return;
}
*/
dataTable = response.getDataTable();
//console.log('chartType:'+chartType);
if (chartType != "MultiChart") {
dataTable.addColumn({
type: 'string',
role: 'style'
});
//alert(parseFloat(globalBucket[0][1]) + "," + globalBucket[1][1]);
var lastColorColumn = parseInt(dataTable.getNumberOfColumns());
var numberOfRowsInQuery = parseInt(dataTable.getNumberOfRows());
//console.log('Rows and columns is:'+numberOfRowsInQuery+'<--->'+lastColorColumn);
if(gradeName == 'regwise'){
for (var i = 0; i < numberOfRowsInQuery; i++) {
var color = 'rgb(0, 0, 0)';
if (dataTable.getValue(i, 0) == 'RAJAHMUNDRY') {
color = 'rgb(231, 76, 60)'; //red
} else if (dataTable.getValue(i, 0) == 'VISAKHAPATNAM') {
color = 'rgb(241, 196, 15)'; //amber
} else if (dataTable.getValue(i, 0) == 'GUNTUR') {
color = 'rgb(46, 204, 113)'; //green
} else if (dataTable.getValue(i, 0) == 'ANANTAPUR'){
color = 'rgb(0, 153, 204)'; //blue
}
dataTable.setValue(i, lastColorColumn - 2, (dataTable.getValue(i, 1)).toFixed(2));
dataTable.setValue(i, lastColorColumn - 1, color);
}
}else{
for (var i = 0; i < numberOfRowsInQuery; i++) {
var color = 'rgb(0, 0, 0)';
if (dataTable.getValue(i, 1) < globalBucket[0][1]) {
color = 'rgb(231, 76, 60)'; //red
} else if (dataTable.getValue(i, 1) < globalBucket[1][1]) {
color = 'rgb(241, 196, 15)'; //amber
} else {
color = 'rgb(46, 204, 113)'; //green
}
dataTable.setValue(i, lastColorColumn - 1, color);
}
}
} else {
//Multichart
if(gradeName == 'regwise'){
dataTable.addColumn('number', 'AVERAGE(C-Marks)');
}else{
dataTable.addColumn('number', 'Cumulative Achievement (percentage)');
}
var lastColumn = parseInt(dataTable.getNumberOfColumns());
var numberOfRowsInQuery = parseInt(dataTable.getNumberOfRows());
for (var i = 0; i < numberOfRowsInQuery; i++) {
dataTable.setValue(i, lastColumn - 2, dataTable.getValue(i,1).toFixed(2));
dataTable.setValue(i, lastColumn - 1, cumulativeValues[i].toFixed(2));
}
chartType = "ColumnChart";
}
var MAX;
var MIN;
if (attributeNameY == "ULB Name") {
MAX = dataTable.getNumberOfRows();
} else {
MAX = dataTable.getColumnRange(0).max;
}
if (MAX < 4) {
MIN = 2;
} else if (MAX < 10) {
MIN = 5;
} else if (MAX < 30) {
MIN = 10;
} else if (MAX < 50) {
MIN = 15;
} else if (MAX < 115) {
MIN = 30;
} else if (MAX < 1000) {
MIN = 250;
} else if (MAX < 10000) {
MIN = 2500;
} else if (MAX < 50000) {
MIN = 12500;
} else if (MAX < 100000) {
MIN = 25000;
} else if (MAX < 200000) {
MIN = 50000;
} else if (MAX < 500000) {
MIN = 100000;
} else {
MIN = 150000;
}
var prevButton = document.getElementById('chartPrev');
var nextButton = document.getElementById('chartNext');
var changeZoomButton = document.getElementById('chartZoom');
prevButton.disabled = true;
nextButton.disabled = true;
changeZoomButton.disabled = true;
//alert($('#chartVizDiv').width());
var chartDivWidth = $('#chartVizDiv').width() - 100;
var chartDivHeight = $('#chartVizDiv').height() - 215;
var myOptions = {
chartArea: {
top: 70,
left: 85,
width: chartDivWidth,
height: chartDivHeight
},
axisTitlesPosition: 'out',
// title: chartModeTitle,
title: '',
series: {
0: {
color: '#F5861F'
},
1: {
color: '#6B4F2C'
},
2: {
color: '#17365D'
},
3: {
color: '#FFC000'
}
},
titlePosition: 'start',
titleTextStyle: {
color: '#000',
fontSize: 20,
fontName: 'Open Sans'
},
animation: {
duration: 1500,
easing: 'linear',
startup: true
},
annotations: {
alwaysOutside: true,
/* boxStyle: {
// Color of the box outline.
stroke: '#000',
// Thickness of the box outline.
strokeWidth: 1,
gradient: {
color1: '#FFFFFF',
color2: '#FFFFFF',
x1: '0%', y1: '0%',
x2: '100%', y2: '100%',
useObjectBoundingBoxUnits: true
}
},*/
textStyle: {
fontName: 'Times-Roman',
fontSize: 14,
bold: true,
opacity: 1
}
},
/* explorer: {
keepInBounds:true
},*/
vAxis: {
title: '',
textStyle: {
fontSize: 14,
color: '#000',
fontName: 'Open Sans'
},
titleTextStyle: {
color: '#000',
fontSize: 18,
italic: false,
bold: false
},
baselineColor: '#000',
gridlines: {
count: 5
},
viewWindowMode: 'pretty'
},
trendlines: {
0: {
type: 'linear',
visibleInLegend: true,
color: 'purple',
lineWidth: 3,
opacity: 1,
showR2: true
}
},
hAxis: {
title: '',
viewWindow: {
min: 0,
max: MAX
},
textStyle: {
fontSize: 14,
color: '#000',
fontName: 'Open Sans'
},
titleTextStyle: {
color: '#000',
fontSize: 18,
italic: false,
bold: false
},
baselineColor: '#000',
gridlines: {
count: 5
},
viewWindowMode: 'pretty'
},
tooltip: {
isHtml: false
},
areaOpacity: 0.5,
backgroundColor: '#FFFFFF',
legend: {
textStyle: {
color: 'black',
fontSize: 14,
fontName: 'Open Sans'
},
position: 'none',
alignment: 'end'
}
};
var hisBarOptions = {
chartArea: {
// left:10,
// top:100,
left: 75,
width: '90%',
height: '70%'
},
// title: chartModeTitle,
title: '',
titleTextStyle: {
color: '#000000',
fontSize: 20,
fontName: 'Open Sans'
},
animation: {
duration: 1500,
easing: 'linear',
startup: true
},
vAxis: {
title: attributeNameY,
viewWindow: {
min: 0,
max: MAX
},
textStyle: {
fontSize: 14,
color: '#000000',
fontName: 'Open Sans'
},
titleTextStyle: {
color: '#000000',
fontSize: 18
}
},
hAxis: {
title: attributeNameX,
textStyle: {
fontSize: 14,
color: '#000000',
fontName: 'Open Sans'
},
titleTextStyle: {
color: '#000000',
fontSize: 18
}
},
tooltip: {
isHtml: false
},
backgroundColor: '#FFFFFF',
legend: {
textStyle: {
color: 'black',
fontSize: 14,
fontName: 'Open Sans'
},
position: 'none',
alignment: 'end'
}
};
var wrapper = new google.visualization.ChartWrapper({
containerId: "chartVizDiv",
//dataSourceUrl: "http://www.google.com/fusiontables/gvizdata?tq=",
//query: placeChartQuery,
dataTable: dataTable,
chartType: chartType,
options: (chartType == "Histogram" || chartType == "BarChart") ? hisBarOptions : myOptions
});
google.visualization.events.addListener(wrapper, 'ready', onReady);
wrapper.draw();
function onReady() {
google.visualization.events.addListener(wrapper.getChart(), 'onmouseover', barMouseOver);
google.visualization.events.addListener(wrapper.getChart(), 'onmouseout', barMouseOut);
google.visualization.events.addListener(wrapper.getChart(), 'select', barSelect);
prevButton.disabled = hisBarOptions.vAxis.viewWindow.min <= 0;
nextButton.disabled = hisBarOptions.vAxis.viewWindow.max >= MAX;
prevButton.disabled = myOptions.hAxis.viewWindow.min <= 0;
nextButton.disabled = myOptions.hAxis.viewWindow.max >= MAX;
changeZoomButton.disabled = false;
}
prevButton.onclick = function() {
myOptions.hAxis.viewWindow.min -= MIN - 1;
myOptions.hAxis.viewWindow.max -= MIN - 1;
hisBarOptions.vAxis.viewWindow.min -= MIN - 1;
hisBarOptions.vAxis.viewWindow.max -= MIN - 1;
wrapper.draw();
}
nextButton.onclick = function() {
myOptions.hAxis.viewWindow.min += MIN - 1;
myOptions.hAxis.viewWindow.max += MIN - 1;
hisBarOptions.vAxis.viewWindow.min += MIN - 1;
hisBarOptions.vAxis.viewWindow.max += MIN - 1;
wrapper.draw();
}
var zoomed = true;
changeZoomButton.onclick = function() {
if (zoomed) {
myOptions.hAxis.viewWindow.min = 0;
myOptions.hAxis.viewWindow.max = MIN;
hisBarOptions.vAxis.viewWindow.min = 0;
hisBarOptions.vAxis.viewWindow.max = MIN;
} else {
myOptions.hAxis.viewWindow.min = 0;
myOptions.hAxis.viewWindow.max = MAX;
hisBarOptions.vAxis.viewWindow.min = 0;
hisBarOptions.vAxis.viewWindow.max = MAX;
}
zoomed = !zoomed;
wrapper.draw();
}
function barSelect(e) {
//alert(tooltipValue);
//var selectedItem = wrapper.getChart().getSelection()[0];
//tooltipValue = dataTable.getValue(e.row, 0);
//addTooltipScript('https://www.googleapis.com/fusiontables/v2/query?sql=');
}
function barMouseOver(e) {
timer = setTimeout(function() {
// do your stuff here
if (e.row < dataTable.getNumberOfRows()) {
tooltipValue = dataTable.getValue(e.row, 0);
tooltipValueCol = dataTable.getValue(e.row, 1);
//setMapOnAll(null);
//if overall and combined show report in modal else show in marker
if(mainIndic == 0 || (mainIndic == 1 && subMainIndic == 0) || (mainIndic == 3 && subMainIndic == 0) || (mainIndic == 4 && subMainIndic == 0) || (mainIndic == 5 && subMainIndic == 0) || (mainIndic == 6 && subMainIndic == 0)|| (mainIndic == 8 && subMainIndic == 0)|| (mainIndic == 9 && subMainIndic == 0)|| (mainIndic == 12 && subMainIndic == 0)){
ulbsummary('https://www.googleapis.com/fusiontables/v2/query?sql=',tooltipValue);
}else{
addTooltipScript('https://www.googleapis.com/fusiontables/v2/query?sql=', false);
}
$("#chosenNames").attr("placeholder", "Search ULBs").val("").focus().blur();
$('#chosenNames').chosen().trigger("chosen:updated");
}
}, 1500);
}
function barMouseOut(e) {
// on mouse out, cancel the timer
clearTimeout(timer);
chartInfoBubble2.close();
chartInfoMarker.setMap(null);
//setMapOnAll(map);
}
}
}
var rankAttri;
var targetAttri;
var achievedAttri;
var marksObtained;
function ulbsummary(src,ulb){
$('#ulbsummary-title').html('<b>'+ulb+' - '+titleText.split('-')[0]+'</b>');
$('#ulbreport_table').hide();
$('#ulbreport').modal('show');
$('#loadingIndicatorforreport').show();
if (attributeNameX.indexOf("C-") > -1) {
rankAttri = "C-Rank";
targetAttri = "C-Target";
achievedAttri = "C-Achievement";
marksObtained = "C-Marks";
} else if (attributeNameX.indexOf("UPM-") > -1) {
rankAttri = "UPM-Rank";
targetAttri = "UPM-Target";
achievedAttri = "UPM-Achievement";
marksObtained = "UPM-Marks";
} else if (attributeNameX.indexOf("M-") > -1) {
rankAttri = "M-Rank";
targetAttri = "M-Target";
achievedAttri = "M-Achievement";
marksObtained = "M-Marks";
}
var selectText = "SELECT+'ULB Name',Centroid,'" + attributeNameX + "'" + ",'" + attributeNameY + "','Grade','Annual Target','" + rankAttri + "','" + targetAttri + "','" + achievedAttri + "','" + marksObtained + "','Max Marks'";
var tableIDString = "+from+" + citiesTableID;
var whereText = "+where+'" + attributeNameY + "'='" + ulb + "'";
var key_callback_string = "&key=" + projectAPIKey;
var source = src + selectText + tableIDString + whereText + key_callback_string;
//console.log(src);
var ulbPoint;
$.ajax({url: source, async: false, success: function(response){
ulbPoint = response.rows[0];
}});
if(gradeName == 'regwise'){
}else{
$('#ulbreport_table').html('');
$('#ulbreport_table').append('<table class="table table-bordered table-hover"> <tbody> <tr> <th>Parameter</th> <th>Annual Target</th> <th>Target</th> <th>Achievement</th> <th>Marks</th> <th>Weightage</th> <th>Rank</th> </tr> <tr> <td>'+titleText.split('-')[1]+'</td><td>'+((typeof(ulbPoint[5]) == 'number') ? (ulbPoint[5].toFixed(2)) : ulbPoint[5])+'</td> <td>'+((typeof(ulbPoint[7]) == 'number') ? (ulbPoint[7].toFixed(2)) : ulbPoint[7])+'</td> <td>'+((typeof(ulbPoint[8]) == 'number') ? (ulbPoint[8].toFixed(2)) : ulbPoint[8])+'</td> <td>'+((typeof(ulbPoint[9]) == 'number') ? (ulbPoint[9].toFixed(2)) : ulbPoint[9])+'</td> <td>'+((typeof(ulbPoint[10]) == 'number') ? (ulbPoint[10].toFixed(2)) : ulbPoint[10])+'</td> <td>'+((typeof(ulbPoint[6]) == 'number') ? (ulbPoint[6].toFixed(2)) : ulbPoint[6])+'</td> </tr> </tbody> </table>');
//Show Parameter Data
if(mainIndic == 3 && subMainIndic == 0){//Solid Waste Management
var tablearray = [{"tableid":"1_nR3f6Z1TzTgCJ5UT0Do6QYf9Ok0hVfxkKf2vAfG","text":"Door To Door Collection"},
{"tableid":"1HlptexkOhseTkl7ujc13LYb7uELXJBQduRM6QmLu","text":"Garbage Lifting"}];
queryhandling(tablearray,src,ulb);
}else if(mainIndic == 4 && subMainIndic == 0){//Property Tax
var tablearray = [{"tableid":"1Ft7BVfp-V8hpucsmWoW3Zal7p1qc5o6FwPSw3i4O","text":"Collection Efficiency"},
{"tableid":"175Ocis9sGqWTBLhXd2wVIawnlddbpKE1fvB-j_SZ","text":"Demand Increase"}];
queryhandling(tablearray,src,ulb);
}else if(mainIndic == 5 && subMainIndic == 0){//Citizen Services
var tablearray = [{"tableid":"1K6vPTSthe2-X__IHsi42Roq5RReNZ9xy-nVTcgMc","text":"Citizen Charter"},
{"tableid":"1SbLuxSFUquS7q-mmLKp8_zYeKbdwvbbV3fMVmL5W","text":"Grievance Redressal"}];
queryhandling(tablearray,src,ulb);
}else if(mainIndic == 6 && subMainIndic == 0){//Finance
var tablearray = [{"tableid":"1t3_EJG6Ppn4apIrONT0Wz1b6OYMix1OZkenzEcOd","text":"Double Entry Accounting"},
{"tableid":"10591kbl5tAaWG4Kamh9QCQ1HWjY4-ESWRDQ1GQZ0","text":"Pending Accounts and Audits"}];
queryhandling(tablearray,src,ulb);
}else if(mainIndic == 0){//Combined
if(hod == 1){//DMA - Combined
var tablearray = [{"tableid":"1BgiIsyij_n9vB7cuCFRn6UgE9Cq0rgCZ57FePIWm","text":"Solid Waste Management"},
{"tableid":"1gidez_jsV4mxBSZ0a_lfo6cwunZXsSUxlRpNb_Ut","text":"Property Tax"},
{"tableid":"1XXalUDbRkTKbNbv7Dntueqd-BB7Pz5y_-ZxRqDvF","text":"Citizen Services"},
{"tableid":"1q7GNaD1WoY8g2acTpXq9DbOggJnW-crbIxd7ixRY","text":"Finance"},
{"tableid":"1UOltn1AicEOL-FkG4mKsay6pEi8SZQKmf5y5xX9m","text":"Education"}];
queryhandling(tablearray,src,ulb);
}else if(hod == 2){//CE - Combined
var tablearray = [{"tableid":"1KVFlQd2zfJ5soZv_kJrMsxZNPEzZSdCzvJoKAGlE","text":"Water Supply"},
{"tableid":"1WjL0SBK8k3NgOMS8YjiirnuA1JgnqOuQjAAfSKZ-","text":"Street Lighting"}];
queryhandling(tablearray,src,ulb);
}else if(hod == 3){//DTCP - Combined
//No need
}else if(hod == 4){//MEPMA - Combined
//No need
}else if(hod == 5){//Swach Andhra - Combined
//No need
}else if(hod == 6){//Greening Corporation - Combined
//No need
}else if(hod == 7){//Combined - Combined
var tablearray = [{"tableid":"15PCNLfKkPZGc35wtThugjW0FBTlK2U9hCKIFNLTL","text":"DMA"},
{"tableid":"1AMkLyA2vz2xNXTHTX5JnxOZwnrVS6PNqu9xkhS7L","text":"CE"},
{"tableid":"1xCuO37vnXEN0Ake02ErGetRTZUo8W6mueNugmdhq","text":"DTCP"},
{"tableid":"1ufZzYeUN40B-5u0Msggo8UIHddJ-jQMvES8IAqWL","text":"MEPMA"},
{"tableid":"10DDREC-__XHoPjL1FFVZ5G6Beh-Bs3yzuP59t5hL","text":"Swacha Andhra"},
{"tableid":"13zBQvJvzrdj8vf63MnvUcJOgo5pG8MYcqYP1hVjh","text":"Greening Corp."},];
queryhandling(tablearray,src,ulb);
}
}else if(mainIndic == 8 && subMainIndic == 0){//Water Supply
var tablearray = [{"tableid":"1dHEUFs9Edz-pfbBmX7dDczXZXdvHyhIT50681RiI","text":"Connections Coverage"},
{"tableid":"1f6ZA4wqY7V3gJAOhz3M2jMi9VMpVtQFGG6_ExJH-","text":"Cost Recovery"}];
queryhandling(tablearray,src,ulb);
}else if(mainIndic == 9 && subMainIndic == 0){//Street Lighting
var tablearray = [{"tableid":"1XiO6lKhyPdCLTR6E_9ltEBaM20wQWDgt3X0E6Xqk","text":"LED Coverage"},
{"tableid":"1SJZL2t_DchzylwR2zoSE-Zk1NOPVrQ-hitSn8KXx","text":"Additional Fixtures"}];
queryhandling(tablearray,src,ulb);
}else if(mainIndic == 12 && subMainIndic == 0){//Community Development
var tablearray = [{"tableid":"1ShLFRlL4D_O05ant_kRkkSprShJPYb_nQ8S4MCvT","text":"SHG Bank Linkage"},
{"tableid":"1QjN7go-OdeLVtKnart_yuwWuKavxEJP_lSy9tyV4","text":"Liveihood"},
{"tableid":"1Oua3hYGMx3knhsK7yf36TspEvV_rJbE2lsCEWqLT","text":"Skill Training Programmes"}];
queryhandling(tablearray,src,ulb);
}else if(mainIndic == 1 && subMainIndic == 0){//Swach Andhra
var tablearray = [{"tableid":"1VlRSa6bRH67nzwoZNNg5Hi7RrADs6nrpL9XGKZxk","text":"Household Toilet Coverage"},
{"tableid":"1gEkwIO7LC2ga5nS7fNiSZjrFaUyVcgQORdMAHs0d","text":"Community Toilet Coverage"}];
queryhandling(tablearray,src,ulb);
}
$('#loadingIndicatorforreport').hide();
$('#ulbreport_table').show();
}
}
function queryhandling(tablearray,src,ulb){
//Common Query
var selectText = "SELECT+'ULB Name',Centroid,'" + attributeNameX + "'" + ",'" + attributeNameY + "','Grade','Annual Target','" + rankAttri + "','" + targetAttri + "','" + achievedAttri + "','" + marksObtained + "','Max Marks'";
var whereText = "+where+'" + attributeNameY + "'='" + ulb + "'";
var key_callback_string = "&key=" + projectAPIKey;
$.each(tablearray, function(k, v) {
//display the key and value pair
var ts = v.tableid;
var tableIDString = "from " + ts;
var parameter_src = src + selectText + tableIDString + whereText + key_callback_string;
parameter_report(parameter_src, v.text);//Function call
});
}
function parameter_report(source, parameter_text){
var ulbpoint;
$.ajax({url: source, async: false, success: function(response){
ulbPoint = response.rows[0];
}});
$('#ulbreport_table table tbody').append('<tr> <td>'+parameter_text+'</td> <td>'+((typeof(ulbPoint[5]) == 'number') ? (ulbPoint[5].toFixed(2)) : ulbPoint[5])+'</td> <td>'+((typeof(ulbPoint[7]) == 'number') ? (ulbPoint[7].toFixed(2)) : ulbPoint[7])+'</td> <td>'+((typeof(ulbPoint[8]) == 'number') ? (ulbPoint[8].toFixed(2)) : ulbPoint[8])+'</td> <td>'+((typeof(ulbPoint[9]) == 'number') ? (ulbPoint[9].toFixed(2)) : ulbPoint[9])+'</td> <td>'+((typeof(ulbPoint[10]) == 'number') ? (ulbPoint[10].toFixed(2)) : ulbPoint[10])+'</td> <td>'+((typeof(ulbPoint[6]) == 'number') ? (ulbPoint[6].toFixed(2)) : ulbPoint[6])+'</td> </tr>');
}
function addTooltipScript(src, fromSearchBar) {
var rankAttri;
var targetAttri;
var achievedAttri;
var marksObtained;
if (attributeNameX.indexOf("C-") > -1) {
rankAttri = "C-Rank";
targetAttri = "C-Target";
achievedAttri = "C-Achievement";
marksObtained = "C-Marks";
} else if (attributeNameX.indexOf("UPM-") > -1) {
rankAttri = "UPM-Rank";
targetAttri = "UPM-Target";
achievedAttri = "UPM-Achievement";
marksObtained = "UPM-Marks";
} else if (attributeNameX.indexOf("M-") > -1) {
rankAttri = "M-Rank";
targetAttri = "M-Target";
achievedAttri = "M-Achievement";
marksObtained = "M-Marks";
}
var selectText = "SELECT+'ULB Name',Centroid,'" + attributeNameX + "'" + ",'" + attributeNameY + "','Grade','Annual Target','" + rankAttri + "','" + targetAttri + "','" + achievedAttri + "','" + marksObtained + "','Max Marks'";
var tableIDString = "+from+" + citiesTableID;
var whereText = "+where+'" + attributeNameY + "'='" + tooltipValue + "'";
if (fromSearchBar) {
whereText = "+where+'" + 'ULB Name' + "'='" + tooltipValue + "'";
}
var key_callback_string = "&key=" + projectAPIKey + "&callback=tooltiphandler";
src = src + selectText + tableIDString + whereText + key_callback_string;
var s = document.createElement('script');
s.setAttribute('src', src);
document.body.appendChild(s);
}
function tooltiphandler(response) {
var ulbPoint = response.rows[0];
if(gradeName == 'regwise'){
}else{
showULBonMap(ulbPoint[0], ulbPoint[1], ulbPoint[2], ulbPoint[3], ulbPoint[4], ulbPoint[5], ulbPoint[6], ulbPoint[7], ulbPoint[8], ulbPoint[9], ulbPoint[10]);
}
}
function showULBonMap(ulb, centroid, ttValueY, ttValueX, grad, annualTar, rank, target, achieved, marks, maxMarks) {
var bubbleLoc = centroid.split(',');
if (chartInfoBubble1.isOpen()) { // unless the InfoBubble is open, we open it
chartInfoBubble1.close();
}
if (unitOfIndicator == "-") {
target = " NA";
achieved = " NA";
annualTar = " NA";
}
chartInfoBubble1.setContent("<div ><font size='2'><b>" + ulb + "</b><br/>Grade: <b>" + grad + "</b><br/>Unit of indicator: <b>" + unitOfIndicator + "</b></br><table border='1' class='infoWindowTable' style='width=100%;'><tr><th>Annual Target</th><th>Target</th><th>Achievement</th><th>" + attributeNameX + "</th><th>Marks (" + maxMarks + ")</th><th>Rank</th></tr><tr><td><b>" + ((typeof(annualTar) == 'number') ? (annualTar.toFixed(2)) : annualTar) + "</b></td><td><b>" + ((typeof(target) == 'number') ? (target.toFixed(2)) : target) + "</b></td><td><b>" + ((typeof(achieved) == 'number') ? (achieved.toFixed(2)) : achieved) + "</b></td><td><b>" + ((typeof(ttValueY) == 'number') ? (ttValueY.toFixed(2)) : ttValueY) + "</b></td><td><b>" + ((typeof(marks) == 'number') ? (marks.toFixed(2)) : marks) + "</b></td><td><b>" + rank + "</b></td></tr></table></font></div>");
chartInfoBubble1.setPosition(new google.maps.LatLng(parseFloat(bubbleLoc[1]) + zoomLevelBasedBubblePos(), parseFloat(bubbleLoc[0])));
if (!chartInfoBubble1.isOpen()) { // unless the InfoBubble is open, we open it
chartInfoBubble1.open(map);
}
}
function zoomLevelBasedBubblePos() {
switch (map.getZoom()) {
case 6:
return 0.7;
case 7:
return 0.35;
case 8:
return 0.18;
}
return 0.04;
}
function setMapOnAll(map) {
for (var i = 0; i < markers.length; i++) {
markers[i].setMap(map);
}
}
function bucketGenerator(myar) {
myar.sort(function(a, b) {
return a - b;
});
var array1 = [];
for (i = 0; i < myar.length; i++) {
if (myar[i] != null)
array1.push(myar[i]);
}
var array = [];
for (i = 0; i < array1.length; i++) {
array.push(array1[i]);
while (array1[i] == 0)
i++;
}
var buck = 3;
var len1 = Math.floor(array.length * (0.2));
var len2 = Math.ceil(array.length * (0.6));
var len3 = array.length - (len1 + len2);
var bucket = [];
var bucketMin = [];
var bucketMax = [];
var max1 = array
// if(array[0]==0)
// bucket.push([0.01, array[len1 - 1]]);
// else
bucket.push([array[0], array[len1 - 1]]);
bucket.push([array[len1], array[len1 + len2 - 1]]);
bucket.push([array[len1 + len2], array[array.length - 1]]);
var bucketF = [];
var max;
var min = parseFloat(bucket[0][0], 3);
for (i = 0; i < buck; i++) {
if (i < (buck - 1)) {
var x = parseFloat(bucket[i + 1][0], 3);
max = Math.ceil(parseFloat((parseFloat(bucket[i][1], 3) + parseFloat(bucket[i + 1][0], 3)) / 2));
if (x < max) {
var max1 = (parseFloat(bucket[i][1], 3) + parseFloat(bucket[i + 1][0], 3)) / 2;
max = parseFloat(max1).toFixed(2);
}
} else
max = Math.ceil(parseFloat(bucket[i][1], 3));
bucketF.push([min, max]);
min = max;
}
return bucketF;
}
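// Note added for clarity (not in the original source): bucketGenerator splits the
// sorted, non-null values into three bands of roughly 20% / 60% / 20% and returns
// their [min, max] boundaries; handler() then widens each band by 0.01 to build the
// red/amber/green polygon styles and the legend ranges. For example, for the ten
// values 1..10 it works out to [[1, 3], [3, 9], [9, 10]].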
function applyStyle(layer, column) {
addScript('https://www.googleapis.com/fusiontables/v2/query?sql=');
}
function addScript(src) {
var selectText = "SELECT '" + attributeNameX + "',Centroid,'ULB Name','Grade','" + attributeNameY + "','" + "Annual Target','C-Rank','M-Rank','UPM-Rank','C-Marks','UPM-Marks','M-Marks','Max Marks','Region'";
var whereText;
if (mode == "city") {
whereText = '';
} else if (mode == 'district') {
whereText = " WHERE " + "District" + "='" + districtName + "'";
} else if (mode == 'region') {
whereText = " WHERE " + "Region" + "='" + regionName + "'";
} else if (mode == 'grade') {
if (gradeName == "G1") {
whereText = " WHERE " + "'Grade' IN ('Special','Selection')";
} else if (gradeName == "G2") {
whereText = " WHERE " + "'Grade' IN ('I','II')";
} else if (gradeName == "G3") {
whereText = " WHERE " + "'Grade' IN ('III','NP')";
} else if (gradeName == "G4") {
whereText = " WHERE " + "Grade" + "='Corp'";
} else if (gradeName == "elevenulb") {
whereText = " WHERE " + "'ULB Name' IN ('TIRUPATI','KURNOOL','VISAKHAPATNAM','SRIKAKULAM','GUNTUR','KAKINADA','NELLIMARLA','RAJAM NP','KANDUKUR','ONGOLE CORP.','RAJAMPET')";
}else if (gradeName == "regwise") {
whereText = " WHERE " + "'Region' IN ('ANANTAPUR','GUNTUR','RAJAHMUNDRY','VISAKHAPATNAM')";
} else {
whereText = " WHERE " + "Grade" + "='" + gradeName + "'";
}
} else {
whereText = '';
}
var tableIDString = " from " + citiesTableID;
var key_callback_string = "&key=" + projectAPIKey + "&callback=handler";
src = src + selectText + tableIDString + whereText + key_callback_string;
var s = document.createElement('script');
s.setAttribute('src', src);
document.body.appendChild(s);
}
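// Note added for clarity (not in the original source): applyStyle()/addScript() fetch
// the Fusion Tables rows JSONP-style -- the REST query URL (select/from/where plus the
// API key and "&callback=handler") is injected as a <script> tag, so the response is
// passed straight to the global handler() function defined below.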
//var dummycount = 0;
function handler(response) {
markerLocations = [];
searchBarULBs = [];
var anantapur_count = 0, guntur_count = 0, raj_count = 0, vizag_count = 0;
var anantapur=0, guntur=0, rajahmundry=0, vizag=0;
var hisArray = [];
for (var i = 0; i < response.rows.length; i++) {
var attValX = response.rows[i][0];//timeframe
hisArray.push(attValX);
var pos = response.rows[i][1].toString().split(",");//latlong
var lat = pos[1];
var lon = pos[0];
var city = (response.rows[i])[4].toString();//ulbname
var grad = (response.rows[i])[3].toString();
var attValY = (response.rows[i])[4];//ulbname
var annualTarget = (response.rows[i])[5];
var cRank = (response.rows[i])[6];
var mRank = (response.rows[i])[7];
var upmRank = (response.rows[i])[8];
var cMarks = (response.rows[i])[9];
var mMarks = (response.rows[i])[10];
var upmMarks = (response.rows[i])[11];
var maxMarks = (response.rows[i])[12];
var ulb_region = (response.rows[i])[13];
/*if((response.rows[i])[10] == 0 )
dummycount+= 1;*/
//do it only when region wise chart
/*if (gradeName == "regwise") {
if(ulb_region == 'ANANTAPUR'){
anantapur+=attValX;
anantapur_count++;//total sum of 'attValX' by this anantapur_count
}else if(ulb_region == 'GUNTUR'){
guntur+=attValX;
guntur_count++;//total sum of 'attValX' by this guntur_count
}else if(ulb_region == 'RAJAHMUNDRY'){
rajahmundry+=attValX;
raj_count++;//total sum of 'attValX' by this raj_count
}else if(ulb_region == 'VISAKHAPATNAM'){
vizag+=attValX;
vizag_count++;//total sum of 'attValX' by this vizag_count
}
markerLocations.push([lat, lon, city, grad, attValX, attValY, annualTarget, cRank, mRank, upmRank, cMarks, upmMarks, mMarks, maxMarks,ulb_region]);
searchBarULBs.push(city);
}else{*/
markerLocations.push([lat, lon, city, grad, attValX, attValY, annualTarget, cRank, mRank, upmRank, cMarks, upmMarks, mMarks, maxMarks,ulb_region]);
searchBarULBs.push(city);
//}
}
//console.log('dummycount'+dummycount);
/*if((attributeNameX.indexOf("UPM-") > -1) && (dummycount == 110))
document.getElementById("titleText").innerHTML = "<b style='color:red'>Since financial year starts from April. Previous month data won't be available for this month</b>";
*/
//console.log(anantapur_count+'<-->'+guntur_count+'<-->'+raj_count+'<-->'+vizag_count);
globalBucket = bucketGenerator(hisArray);
//console.log(globalBucket);
if (globalBucket[2][1] == 0 || isNaN(globalBucket[2][1])) {
globalBucket[2][1] = 100;
}
if (isNaN(globalBucket[2][0])) {
globalBucket[2][0] = globalBucket[2][1];
}
if (isNaN(globalBucket[1][1])) {
globalBucket[1][1] = globalBucket[2][1];
}
if (isNaN(globalBucket[0][0])) {
globalBucket[0][0] = 0;
}
if (isNaN(globalBucket[0][1])) {
globalBucket[0][1] = 0;
}
if (isNaN(globalBucket[1][0])) {
globalBucket[1][0] = 0;
}
/*
if (globalBucket[1][1] == 100.00) {
globalBucket[1][1] = 99.99;
globalBucket[2][0] = 100;
globalBucket[2][1] = 100;
}
*/
var select = document.getElementById("chosenNames");
select.options.length = 0;
//$('#chosenNames').append("<option value='"+searchBarULBs.length+"'>"+searchBarULBs.length+" ULBs</option>");
for (var i = 0; i < searchBarULBs.length; i++) {
$('#chosenNames').append("<option value='" + searchBarULBs[i] + "'>" + searchBarULBs[i] + "</option>");
}
$("#chosenNames").attr("placeholder", "Search ULBs").val("").focus().blur();
$('#chosenNames').chosen().trigger("chosen:updated");
drop(markerLocations, globalBucket);
/*
var columnStyle = COLUMN_STYLES[attributeNameX]; // was column previously
var styles = [];
for (var i in columnStyle) {
var style = columnStyle[i];
style.min = bucket[i][0]-0.0001;
style.max = bucket[i][1]+0.0001;
styles.push({
where: generateWhere(attributeNameX, style.min, style.max),
polygonOptions: {
fillColor: style.color,
fillOpacity: style.opacity ? style.opacity : 0.8
}
});
}
*/
var styles = [];
for (var i in COLUMN_STYLES[attributeNameX]) {
var style = COLUMN_STYLES[attributeNameX][i];
// style.min = parseFloat(bucket[i][0],2) - 0.01;
// style.max = parseFloat(bucket[i][1],2) + 0.01;
style.min = parseFloat(globalBucket[i][0], 2) - 0.01;
style.max = parseFloat(globalBucket[i][1], 2) + 0.01;
styles.push({
where: generateWhere(attributeNameX, style.min, style.max),
polygonOptions: {
fillColor: style.color,
fillOpacity: style.opacity ? style.opacity : 0.8
}
});
}
if(gradeName == 'regwise'){
layer.setMap(null);
$('#searchChosenDiv').hide();
}else{
layer.set('styles', styles);
}
changeMap();
drawChart();
}
function generateWhere(columnName, low, high) {
var whereClause = [];
whereClause.push("'");
whereClause.push(columnName);
whereClause.push("' > ");
whereClause.push(low);
whereClause.push(" AND '");
whereClause.push(columnName);
whereClause.push("' <= ");
whereClause.push(high);
return whereClause.join('');
}
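// Note added for clarity (not in the original source): generateWhere('M-Marks', 0.99, 5.01)
// returns "'M-Marks' > 0.99 AND 'M-Marks' <= 5.01", i.e. the half-open numeric range used
// for each style bucket built in handler() above.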
function changeMap() {
var query = "";
if (mode == "city") {
query = "SELECT 'geometry' FROM " + districtsTableID;
districtLayer.setOptions({
query: {
from: districtsTableID,
select: 'geometry'
},
styleId: 1,
});
}
if (mode == "district") {
query = "SELECT 'geometry' FROM " + districtsTableID + " WHERE 'DISTRICT_2011' = '" + districtName + "'";
districtLayer.setOptions({
query: {
from: districtsTableID,
select: 'geometry',
where: "'DISTRICT_2011' = '" + districtName + "'"
},
styleId: 1
});
}
if (mode == "region") {
query = "SELECT 'geometry' FROM " + districtsTableID + " WHERE 'Region' = '" + regionName + "'";
districtLayer.setOptions({
query: {
from: districtsTableID,
select: 'geometry',
where: "'Region' = '" + regionName + "'"
},
styleId: 1
});
}
if (mode == "grade") {
query = "SELECT 'geometry' FROM " + districtsTableID;
districtLayer.setOptions({
query: {
from: districtsTableID,
select: 'geometry'
},
styleId: 1
});
}
zoom2query(query);
}
function zoom2query(query) {
map.controls[google.maps.ControlPosition.RIGHT_BOTTOM].push(document.getElementById('legendWrapper'));
var column = attributeNameX;
$('#legendTitle').html(column);
var columnStyle = COLUMN_STYLES[column];
var style = columnStyle[0];
$('#legendItem1Content').html((parseFloat(style.min, 2) + 0.01).toFixed(2) + ' - ' + (parseFloat(style.max, 2) - 0.01).toFixed(2));
style = columnStyle[1];
$('#legendItem2Content').html((parseFloat(style.min, 2) + 0.01).toFixed(2) + ' - ' + (parseFloat(style.max, 2) - 0.01).toFixed(2));
style = columnStyle[2];
$('#legendItem3Content').html((parseFloat(style.min, 2) + 0.01).toFixed(2) + ' - ' + (parseFloat(style.max, 2) - 0.01).toFixed(2));
$('#legendItem4Content').html(0 + ' or ' + 'undefined');
$('#legendWrapper').show();
// zoom and center map on query results
//set the query using the parameter
var queryText = encodeURIComponent(query);
var query = new google.visualization.Query('http://www.google.com/fusiontables/gvizdata?tq=' + queryText);
//set the callback function
query.send(zoomTo);
}
function centerMap() {
map.setZoom(zoomAfterQuery);
map.panTo(centerAfterQuery);
//wrapper.getChart().setSelection([{row:0, column:null}]);
//updateLegend(attributeNameX);
}
function zoomTo(response) {
if (!response) {
//alert('no response');
return;
}
if (response.isError()) {
//alert('Error in query: ' + response.getMessage() + ' ' + response.getDetailedMessage());
return;
}
FTresponse = response;
//for more information on the response object, see the documentation
//http://code.google.com/apis/visualization/documentation/reference.html#QueryResponse
numRows = response.getDataTable().getNumberOfRows();
numCols = response.getDataTable().getNumberOfColumns();
// handle multiple matches
var bounds = new google.maps.LatLngBounds();
for (var i = 0; i < numRows; i++) {
var kml = FTresponse.getDataTable().getValue(i, 0);
// create a geoXml3 parser for the click handlers
var geoXml = new geoXML3.parser({
map: map,
zoom: false
});
geoXml.parseKmlString("<Placemark>" + kml + "</Placemark>");
// handle all possible kml placmarks
if (geoXml.docs[0].gpolylines.length > 0) {
geoXml.docs[0].gpolylines[0].setMap(null);
if (i == 0) var bounds = geoXml.docs[0].gpolylines[0].bounds;
else bounds.union(geoXml.docs[0].gpolylines[0].bounds);
} else if (geoXml.docs[0].markers.length > 0) {
geoXml.docs[0].markers[0].setMap(null);
if (i == 0) bounds.extend(geoXml.docs[0].markers[0].getPosition());
else bounds.extend(geoXml.docs[0].markers[0].getPosition());
} else if (geoXml.docs[0].gpolygons.length > 0) {
geoXml.docs[0].gpolygons[0].setMap(null);
if (i == 0) var bounds = geoXml.docs[0].gpolygons[0].bounds;
else bounds.union(geoXml.docs[0].gpolygons[0].bounds);
}
}
map.fitBounds(bounds);
centerAfterQuery = map.getCenter();
zoomAfterQuery = map.getZoom();
}
function getQueryStrings() {
var assoc = {};
var decode = function(s) {
return decodeURIComponent(s.replace(/\+/g, " "));
};
var queryString = location.search.substring(1);
var keyValues = queryString.split('&');
for (var i in keyValues) {
var key = keyValues[i].split('=');
if (key.length > 1) {
assoc[decode(key[0])] = decode(key[1]);
}
}
return assoc;
}
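// Note added for clarity (not in the original source): getQueryStrings() turns the page's
// query string into a plain object, e.g. "?X=C-Marks&area=grade&grade=G3" becomes
// {X: "C-Marks", area: "grade", grade: "G3"}; '+' signs are decoded to spaces, so a value
// like "Y=ULB+Name" comes back as "ULB Name".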
function fetchDataFromBrowserQueryAndUpdateUI() {
var qs = getQueryStrings();
var status = qs["status"]; //if you want to use ahead
/* if (status=="init") {
parent.document.getElementById('visuals').src = "visuals.html?X=C-Achievement&Y=ULB Name&area=grade&grade=G3&chart=ColumnChart&sort=XDESC&viz=split&status=start";
return;
}
*/
var sort = qs["sort"];
attributeNameX = qs["X"];
attributeNameY = qs["Y"];
chartType = qs["chart"];
mode = qs["area"];
districtName = qs["dis"];
regionName = qs["reg"];
gradeName = qs["grade"];
vizType = qs["viz"];
mainIndic = qs["main"];
subMainIndic = qs["sub"];
hod = qs["hod"];
//document.getElementById("titleText").textContent = attributeNameX + " for " + attributeNameY;
overallSelected = false;
if (mainIndic == 0 || (mainIndic == 1 && subMainIndic == 0) || (mainIndic == 3 && subMainIndic == 0) || (mainIndic == 4 && subMainIndic == 0) || (mainIndic == 5 && subMainIndic == 0) || (mainIndic == 6 && subMainIndic == 0) || (mainIndic == 8 && subMainIndic == 0) || (mainIndic == 9 && subMainIndic == 0) || (mainIndic == 12 && subMainIndic == 0) ) {
overallSelected = true;
}
if (sort == "XASC") {
sortType = "ORDER BY '" + attributeNameX + "' ASC";
} else if (sort == "XDESC") {
sortType = "ORDER BY '" + attributeNameX + "' DESC";
} else if (sort == "YASC") {
sortType = "ORDER BY '" + attributeNameY + "' ASC";
} else if (sort == "YDESC") {
sortType = "ORDER BY '" + attributeNameY + "' DESC";
} else if (sort == "NOSORT") {
sortType = "";
}
if (vizType == "split") {
        $('.mapsection').show().removeClass('col-xs-12').addClass('col-xs-6');
        $('.chartsection').show().removeClass('col-xs-12').addClass('col-xs-6');
} else if (vizType == "map") {
$('.mapsection').show().removeClass('col-xs-6').addClass('col-xs-12');
$('.chartsection').hide();
} else if (vizType == "chart") {
$('.mapsection').hide();
$('.chartsection').show().removeClass('col-xs-6').addClass('col-xs-12');
}
// alert(mainIndic+","+subMainIndic);
citiesTableID = '';
if (mainIndic == 0) {
if(hod == 1){
citiesTableID = '15PCNLfKkPZGc35wtThugjW0FBTlK2U9hCKIFNLTL';
titleText = "DMA Overall - Combined";
unitOfIndicator = "-";
}else if(hod == 2){
citiesTableID = '1AMkLyA2vz2xNXTHTX5JnxOZwnrVS6PNqu9xkhS7L';
titleText = "CE Overall - Combined";
unitOfIndicator = "-";
}else if(hod == 7){
citiesTableID = '1oh1JDpEiha7iByWwEo96IQpc_K3zkfdmJx-aE-Li';
titleText = "Combined - Combined";
unitOfIndicator = "-";
}
}else if (mainIndic == 1) {
if (subMainIndic == 0) {
citiesTableID = '10DDREC-__XHoPjL1FFVZ5G6Beh-Bs3yzuP59t5hL';
titleText = "Swachcha Andhra - Overall";
unitOfIndicator = "-";
}else if (subMainIndic == 1) {
citiesTableID = '1VlRSa6bRH67nzwoZNNg5Hi7RrADs6nrpL9XGKZxk';
titleText = "Swachcha Andhra - Individual Household Toilets (IHT) coverage";
unitOfIndicator = "No. of Toilets";
}else if (subMainIndic == 2) {
citiesTableID = '1gEkwIO7LC2ga5nS7fNiSZjrFaUyVcgQORdMAHs0d';
titleText = "Swachcha Andhra - Community Toilets coverage";
unitOfIndicator = "No. of Toilets";
}
}else if (mainIndic == 2) {
if (subMainIndic == 0) {
citiesTableID = '13zBQvJvzrdj8vf63MnvUcJOgo5pG8MYcqYP1hVjh';
titleText = "Greenery - Tree Plantation";
unitOfIndicator = "No. of plantations";
}
}else if (mainIndic == 3) {
if (subMainIndic == 0) {
citiesTableID = '1BgiIsyij_n9vB7cuCFRn6UgE9Cq0rgCZ57FePIWm';
titleText = "Solid Waste Management - Overall";
unitOfIndicator = "-";
} else if (subMainIndic == 1) {
citiesTableID = '1_nR3f6Z1TzTgCJ5UT0Do6QYf9Ok0hVfxkKf2vAfG';
titleText = "Solid Waste Management - Door to Door Garbage Collection";
unitOfIndicator = "No. of Households";
} else if (subMainIndic == 2) {
citiesTableID = '1HlptexkOhseTkl7ujc13LYb7uELXJBQduRM6QmLu';
titleText = "Solid Waste Management - Garbage Lifting";
unitOfIndicator = "Metric tonnes";
}
}else if (mainIndic == 4) {
if (subMainIndic == 0) {
citiesTableID = '1gidez_jsV4mxBSZ0a_lfo6cwunZXsSUxlRpNb_Ut';
titleText = "Property Tax - Overall";
unitOfIndicator = "-";
} else if (subMainIndic == 1) {
citiesTableID = '1Ft7BVfp-V8hpucsmWoW3Zal7p1qc5o6FwPSw3i4O';
titleText = "Property Tax - Collection Efficiency";
unitOfIndicator = "Rupees (in lakhs)";
} else if (subMainIndic == 2) {
citiesTableID = '175Ocis9sGqWTBLhXd2wVIawnlddbpKE1fvB-j_SZ';
titleText = "Property Tax - Demand Increase";
unitOfIndicator = "Rupees (in lakhs)";
}
}else if (mainIndic == 5) {
if (subMainIndic == 0) {
citiesTableID = '1XXalUDbRkTKbNbv7Dntueqd-BB7Pz5y_-ZxRqDvF';
titleText = "Citizen Services - Overall";
unitOfIndicator = "-";
} else if (subMainIndic == 1) {
citiesTableID = '1K6vPTSthe2-X__IHsi42Roq5RReNZ9xy-nVTcgMc';
titleText = "Citizen Services - Citizen Charter (office)";
unitOfIndicator = "No. of applications";
} else if (subMainIndic == 2) {
citiesTableID = '1SbLuxSFUquS7q-mmLKp8_zYeKbdwvbbV3fMVmL5W';
titleText = "Citizen Services - Grievances Redressal (field)";
unitOfIndicator = "No. of grievances";
}
}else if (mainIndic == 6) {
if (subMainIndic == 0) {
citiesTableID = '1q7GNaD1WoY8g2acTpXq9DbOggJnW-crbIxd7ixRY';
titleText = "Finance - Overall";
unitOfIndicator = "-";
} else if (subMainIndic == 1) {
citiesTableID = '1t3_EJG6Ppn4apIrONT0Wz1b6OYMix1OZkenzEcOd';
titleText = "Finance - Double Entry Accounting";
unitOfIndicator = "No. of days";
} else if (subMainIndic == 2) {
citiesTableID = '10591kbl5tAaWG4Kamh9QCQ1HWjY4-ESWRDQ1GQZ0';
titleText = "Finance - Pending Accounts and Audit";
unitOfIndicator = "No. of years";
}
}else if (mainIndic == 7) {
citiesTableID = '1UOltn1AicEOL-FkG4mKsay6pEi8SZQKmf5y5xX9m';
titleText = "Education - High schools with IIT foundation";
unitOfIndicator = "No. of High schools";
}else if (mainIndic == 8) {
if (subMainIndic == 0) {
citiesTableID = '1KVFlQd2zfJ5soZv_kJrMsxZNPEzZSdCzvJoKAGlE';
titleText = "Water Supply Connections - Overall Coverage";
unitOfIndicator = "-";
} else if (subMainIndic == 1) {
citiesTableID = '1dHEUFs9Edz-pfbBmX7dDczXZXdvHyhIT50681RiI';
titleText = "Water Supply - Connections Coverage";
unitOfIndicator = "No. of Connections";
} else if (subMainIndic == 2) {
citiesTableID = '1f6ZA4wqY7V3gJAOhz3M2jMi9VMpVtQFGG6_ExJH-';
titleText = "Water Supply per month - Cost Recovery";
unitOfIndicator = "Rupees (in Lakhs)";
}
}else if (mainIndic == 9) {
if (subMainIndic == 0) {
citiesTableID = '1WjL0SBK8k3NgOMS8YjiirnuA1JgnqOuQjAAfSKZ-';
titleText = "Street Lighting - Overall";
unitOfIndicator = "-";
} else if (subMainIndic == 1) {
citiesTableID = '1XiO6lKhyPdCLTR6E_9ltEBaM20wQWDgt3X0E6Xqk';
titleText = "Street Lighting - LED Coverage";
unitOfIndicator = "No. of LEDs";
} else if (subMainIndic == 2) {
citiesTableID = '1SJZL2t_DchzylwR2zoSE-Zk1NOPVrQ-hitSn8KXx';
titleText = "Street Lighting - Additional Fixtures";
unitOfIndicator = "No. of Fixtures";
}
}else if (mainIndic == 11) {
if (subMainIndic == 0) {
citiesTableID = '1xCuO37vnXEN0Ake02ErGetRTZUo8W6mueNugmdhq';
titleText = "Town Planning Activities - Building Online Permissions";
unitOfIndicator = "No.of Applications";
}
}else if (mainIndic == 12) {
if (subMainIndic == 0) {
citiesTableID = '1ufZzYeUN40B-5u0Msggo8UIHddJ-jQMvES8IAqWL';
titleText = "Community Development - Overall";
unitOfIndicator = "-";
} else if (subMainIndic == 1) {
citiesTableID = '1ShLFRlL4D_O05ant_kRkkSprShJPYb_nQ8S4MCvT';
titleText = "Community Development - SHG Bank Linkage";
unitOfIndicator = "Rupees (in lakhs)";
} else if (subMainIndic == 2) {
citiesTableID = '1QjN7go-OdeLVtKnart_yuwWuKavxEJP_lSy9tyV4';
titleText = "Community Development - Livelihood";
unitOfIndicator = "No.";
}else if (subMainIndic == 3) {
citiesTableID = '1Oua3hYGMx3knhsK7yf36TspEvV_rJbE2lsCEWqLT';
titleText = "Community Development - Skill Training Programmes";
unitOfIndicator = "No.";
}
}
if (mode == "city") {
titleText += " - All ULBs";
} else if (mode == "district") {
titleText += " - " + districtName + " District";
} else if (mode == "region") {
titleText += " - " + regionName + " Region";
} else if (mode == "grade") {
if (gradeName == "G1") {
titleText += " - Special, Selection Grades";
} else if (gradeName == "G2") {
titleText += " - Grade I, II";
} else if (gradeName == "G3") {
titleText += " - Grades III, NP";
} else if (gradeName == "G4") {
titleText += " - Corporations Grade";
}else if (gradeName == "elevenulb") {
titleText += " - 11 ULBs";
}else if (gradeName == "regwise") {
titleText += " - Region-wise";
}
}
multiSeries = false;
reportTitleText = titleText;
if (chartType == "MultiChart") {
if(gradeName === 'regwise'){
titleText += " - <span style='color:#F5861F;font-weight:bold;'>Monthly</span> and <span style='color:#6B4F2C;font-weight:bold;'>Cumulative</span> Marks";
}else{
titleText += " - <span style='color:#F5861F;font-weight:bold;'>Monthly</span> and <span style='color:#6B4F2C;font-weight:bold;'>Cumulative</span> Achievement (%)";
}
multiSeries = true;
reportTitleText += " - Monthly v/s Cumulative Report";
} else {
if(gradeName === 'regwise'){
if (attributeNameX.indexOf("C-") > -1) {
titleText += " - Cumulative Marks";
reportTitleText += " - Cumulative Report";
} else if (attributeNameX.indexOf("UPM-") > -1) {
titleText += " - Upto Previous Month Marks";
reportTitleText += " - Upto previous month Report";
} else if (attributeNameX.indexOf("M-") > -1) {
reportTitleText += " - Monthly Report";
titleText += " - Monthly Marks";
}
}else{
if (attributeNameX.indexOf("C-") > -1) {
titleText += " - Cumulative Achievement (%)";
reportTitleText += " - Cumulative Report";
} else if (attributeNameX.indexOf("UPM-") > -1) {
titleText += " - Upto previous month Achievement (%)";
reportTitleText += " - Upto previous month Report";
} else if (attributeNameX.indexOf("M-") > -1) {
reportTitleText += " - Monthly Report";
titleText += " - Monthly Achievement (%)";
}
}
}
document.getElementById("titleText").innerHTML = "<b>" + titleText + "</b>";
initialize();
}
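// Hedged sketch (not part of the original file): getQueryStrings() is defined elsewhere
// in this project. A minimal equivalent that turns "?X=C-Achievement&viz=split" into
// { X: "C-Achievement", viz: "split" } would look roughly like the helper below
// (the name is hypothetical and it is not wired into the page).
function getQueryStringsSketch() {
    var result = {};
    var pairs = window.location.search.substring(1).split("&");
    for (var i = 0; i < pairs.length; i++) {
        if (!pairs[i]) { continue; }
        var parts = pairs[i].split("=");
        result[decodeURIComponent(parts[0])] = decodeURIComponent((parts[1] || "").replace(/\+/g, " "));
    }
    return result;
}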
function drop(cities, bucket) {
clearMarkers();
for (var i = 0; i < cities.length; i++) {
addMarkerWithTimeout(cities[i], 1, bucket);
}
}
function addMarkerWithTimeout(position, timeout, bucket) {
var mrkr;
var image;
if (position[4] < bucket[0][1]) {
//console.log("red: " + position[4]);
image = 'http://chart.googleapis.com/chart?chst=d_map_pin_letter&chld=|e74c3c';
} else if (position[4] < bucket[1][1]) {
//console.log("amber: " + position[4]);
image = 'http://chart.googleapis.com/chart?chst=d_map_pin_letter&chld=|f1c40f';
} else if (position[4] <= bucket[2][1]) {
//console.log("green: " + position[4]);
image = 'http://chart.googleapis.com/chart?chst=d_map_pin_letter&chld=|2ecc71';
}
if (position[4] == 100) {
image = 'http://chart.googleapis.com/chart?chst=d_map_pin_letter&chld=|2ecc71';
}
//console.log(position[14]);//region-wise
if (gradeName == "regwise") {
if(position[14] == 'RAJAHMUNDRY'){
image = 'http://chart.googleapis.com/chart?chst=d_map_pin_letter&chld=|e74c3c'; //red
}else if(position[14] == 'VISAKHAPATNAM'){
image = 'http://chart.googleapis.com/chart?chst=d_map_pin_letter&chld=|f1c40f';//amber
}else if(position[14] == 'GUNTUR'){
image = 'http://chart.googleapis.com/chart?chst=d_map_pin_letter&chld=|2ecc71';//green
}else if(position[14] == 'ANANTAPUR'){
image = 'http://chart.googleapis.com/chart?chst=d_map_pin_letter&chld=|0099cc';//blue
}
}
var myLatLng = {
lat: parseFloat(position[0]),
lng: parseFloat(position[1])
};
if (unitOfIndicator == "-") {
position[6] = " NA";
}
mrkr = new google.maps.Marker({
position: myLatLng,
map: map,
icon: image,
animation: google.maps.Animation.DROP
});
mrkr.addListener('mouseover', function() {
chartInfoBubble2.setContent("<div ><font size='2'><b>" + position[2] + "</b><br>" + "Grade: <b>" + position[3] + "</b></br>" + attributeNameX + ": <b>" + ((typeof(position[4]) == 'number') ? (position[4].toFixed(2)) : position[4]) + "</b></br>" + attributeNameY + ": <b>" + position[5] + "</b></br><table border='1' class='infoWindowTable' style='width=100%;'><tr><th>-</th><th>Upto previous month</th><th>Monthly</th><th>Cumulative</th></tr><tr><td><b>Marks (" + position[13] + ")</b></td><td><b>" + ((typeof(position[12]) == 'number') ? (position[12].toFixed(2)) : position[12]) + "</b></td><td><b>" + ((typeof(position[11]) == 'number') ? (position[11].toFixed(2)) : position[11]) + "</b></td><td><b>" + ((typeof(position[10]) == 'number') ? (position[10].toFixed(2)) : position[10]) + "</b></td></tr><tr><td><b>Rank</b></td><td><b>" + position[9] + "</b></td><td><b>" + position[8] + "</b></td><td><b>" + position[7] + "</b></td></tr></table></font></div>");
chartInfoBubble2.open(map,mrkr);
});
mrkr.addListener('mouseout', function() {
chartInfoBubble2.close();
});
//mrkr.setZIndex(101);
markers.push(mrkr);
}
function clearMarkers() {
for (var i = 0; i < markers.length; i++) {
markers[i].setMap(null);
}
markers = [];
}
function selectChosenItemSelected() {
var selectBox = document.getElementById("chosenNames");
var selectedValue = selectBox.options[selectBox.selectedIndex].value;
tooltipValue = selectedValue;
ulbsummary('https://www.googleapis.com/fusiontables/v2/query?sql=', tooltipValue);
}
function generateReport() {
// alert(generateReportQuery);
var query = new google.visualization.Query('http://www.google.com/fusiontables/gvizdata?tq=');
//console.log('generateReportQuery:'+generateReportQuery);
query.setQuery(generateReportQuery);
query.send(openTableModal);
$('#loadingIndicator').show();
$('#table_div').hide();
}
function openTableModal(response) {
var data = response.getDataTable();
var myTableDiv = document.getElementById("table_div");
var table = document.getElementById('print_data_table');
var tableBody = document.getElementById('table_body');
while (tableBody.firstChild) {
tableBody.removeChild(tableBody.firstChild);
}
var heading = [];
if (!overallSelected) {
if(gradeName == 'regwise'){
if (!multiSeries) {
heading = ["Sr No.", "Region", "Annual Target", "Target", "Achievement", "Achievement %", "Marks", "Max Marks"];
} else {
heading = ["Sr No.", "Region", "Annual Target", "C-Target", "C-Achievement", "C-Achievement %", "C-Marks", "Max Marks", "M-Target", "M-Achievement", "M-Achievement %", "M-Marks"];
}
}else{
if (!multiSeries) {
heading = ["Sr No.", "ULB Name", "Annual Target", "Target", "Achievement", "Achievement %", "Marks", "Max Marks", "Rank"];
} else {
heading = ["Sr No.", "ULB Name", "Annual Target", "C-Target", "C-Achievement", "C-Achievement %", "C-Marks", "Max Marks", "C-Rank", "M-Target", "M-Achievement", "M-Achievement %", "M-Marks", "M-Rank"];
}
}
} else {
if(gradeName == 'regwise'){
if (!multiSeries) {
heading = ["Sr No.", "Region", "Marks", "Max Marks"];
} else {
heading = ["Sr No.", "Region", "C-Marks", "M-Marks", "Max Marks"];
}
}else{
if (!multiSeries) {
heading = ["Sr No.", "ULB Name", "Marks", "Max Marks", "Rank"];
} else {
heading = ["Sr No.", "ULB Name", "C-Marks", "C-Rank", "M-Marks", "M-Rank", "Max Marks"];
}
}
}
var tr = document.createElement('TR');
tableBody.appendChild(tr);
for (var i = 0; i < heading.length; i++) {
var th = document.createElement('TH');
//th.width = '200';
th.appendChild(document.createTextNode(heading[i]));
tr.appendChild(th);
}
var rows = [],
columns = [];
for (var i = 0; i < data.getNumberOfRows(); i++) {
var tr = document.createElement('TR');
var td = document.createElement('TD');
td.appendChild(document.createTextNode(i + 1));
tr.appendChild(td);
for (var j = 0; j < data.getNumberOfColumns(); j++) {
var td = document.createElement('TD');
var val = data.getValue(i, j);
//td.appendChild(document.createTextNode(val));
//console.log(val+'<--->'+typeof(val));
if(j == (data.getNumberOfColumns()-1)){
td.appendChild(document.createTextNode(val));
}else{
if(typeof(val) == 'string'){
td.appendChild(document.createTextNode(val));
}else if(typeof(val) == 'number'){
td.appendChild(document.createTextNode(val.toFixed(2)));
}
}
tr.appendChild(td);
}
tableBody.appendChild(tr);
}
myTableDiv.appendChild(table);
$('#loadingIndicator').hide();
$('#table_div').show();
$('#m-title').text(reportTitleText);
$('#m-subtitle').text('UOM : '+unitOfIndicator);
}
function paperPrint() {
$(".modal-content").printElement({
printBodyOptions: {
styleToAdd: 'height:auto;overflow:auto;margin-left:-25px;'
}
});
}<|fim▁end|> | if (attributeNameX.indexOf("C-") > -1) {
generateReportQuery = "SELECT '" + attributeNameY + "','Annual Target','C-Target','C-Achievement','C-Achievement (percentage)','C-Marks','Max Marks','C-Rank' " + generateReportQuery + " ORDER BY 'C-Rank' ASC";
} else if (attributeNameX.indexOf("UPM-") > -1) {
|
<|file_name|>matrix.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The CGMath Developers. For a full listing of the authors,
// refer to the Cargo.toml file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Column major, square matrix types and traits.
use std::fmt;
use std::mem;
use std::ops::*;
use rand::{Rand, Rng};
use rust_num::{Zero, One};
use rust_num::traits::cast;
use angle::{Rad, sin, cos, sin_cos};
use approx::ApproxEq;
use array::{Array1, Array2};
use num::{BaseFloat, BaseNum};
use point::{Point, Point3};
use quaternion::Quaternion;
use vector::{Vector, EuclideanVector};
use vector::{Vector2, Vector3, Vector4};
/// A 2 x 2, column major matrix
#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable)]
pub struct Matrix2<S> { pub x: Vector2<S>, pub y: Vector2<S> }
/// A 3 x 3, column major matrix
#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable)]
pub struct Matrix3<S> { pub x: Vector3<S>, pub y: Vector3<S>, pub z: Vector3<S> }
/// A 4 x 4, column major matrix
#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable)]
pub struct Matrix4<S> { pub x: Vector4<S>, pub y: Vector4<S>, pub z: Vector4<S>, pub w: Vector4<S> }
impl<S> Matrix2<S> {
/// Create a new matrix, providing values for each index.
#[inline]
pub fn new(c0r0: S, c0r1: S,
c1r0: S, c1r1: S) -> Matrix2<S> {
Matrix2::from_cols(Vector2::new(c0r0, c0r1),
Vector2::new(c1r0, c1r1))
}
/// Create a new matrix, providing columns.
#[inline]
pub fn from_cols(c0: Vector2<S>, c1: Vector2<S>) -> Matrix2<S> {
Matrix2 { x: c0, y: c1 }
}
}
impl<S: BaseFloat> Matrix2<S> {
/// Create a transformation matrix that will cause a vector to point at
/// `dir`, using `up` for orientation.
pub fn look_at(dir: &Vector2<S>, up: &Vector2<S>) -> Matrix2<S> {
//TODO: verify look_at 2D
Matrix2::from_cols(up.clone(), dir.clone()).transpose()
}
#[inline]
pub fn from_angle(theta: Rad<S>) -> Matrix2<S> {
let cos_theta = cos(theta.clone());
let sin_theta = sin(theta.clone());
Matrix2::new(cos_theta.clone(), sin_theta.clone(),
-sin_theta.clone(), cos_theta.clone())
}
}
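// Hedged sketch (not part of the original cgmath source): `Matrix2::new` takes its
// arguments in column-major order, so the first two values form the `x` column.
// The assertions follow directly from the `new`, `transpose` and `determinant`
// implementations in this file; the function is only compiled for tests and never called.
#[cfg(test)]
#[allow(dead_code)]
fn _matrix2_column_major_sketch() {
    let m = Matrix2::new(1.0f64, 2.0,
                         3.0, 4.0);
    // Indexing yields columns: m[0] is (1, 2) and m[1] is (3, 4).
    assert!(m[0][0] == 1.0 && m[0][1] == 2.0);
    assert!(m[1][0] == 3.0 && m[1][1] == 4.0);
    // transpose() swaps rows and columns; the determinant of [[1, 3], [2, 4]] is -2.
    assert!(m.transpose() == Matrix2::new(1.0, 3.0, 2.0, 4.0));
    assert!(m.determinant() == -2.0);
}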
impl<S: Copy + Neg<Output = S>> Matrix2<S> {
/// Negate this `Matrix2` in-place.
#[inline]
pub fn neg_self(&mut self) {
self[0].neg_self();
self[1].neg_self();
}
}
impl<S> Matrix3<S> {
/// Create a new matrix, providing values for each index.
#[inline]
pub fn new(c0r0:S, c0r1:S, c0r2:S,
c1r0:S, c1r1:S, c1r2:S,
c2r0:S, c2r1:S, c2r2:S) -> Matrix3<S> {
Matrix3::from_cols(Vector3::new(c0r0, c0r1, c0r2),
Vector3::new(c1r0, c1r1, c1r2),
Vector3::new(c2r0, c2r1, c2r2))
}
/// Create a new matrix, providing columns.
#[inline]
pub fn from_cols(c0: Vector3<S>, c1: Vector3<S>, c2: Vector3<S>) -> Matrix3<S> {
Matrix3 { x: c0, y: c1, z: c2 }
}
}
impl<S: BaseFloat> Matrix3<S> {
/// Create a transformation matrix that will cause a vector to point at
/// `dir`, using `up` for orientation.
pub fn look_at(dir: &Vector3<S>, up: &Vector3<S>) -> Matrix3<S> {
let dir = dir.normalize();
let side = up.cross(&dir).normalize();
let up = dir.cross(&side).normalize();
Matrix3::from_cols(side, up, dir).transpose()
}
/// Create a matrix from a rotation around the `x` axis (pitch).
pub fn from_angle_x(theta: Rad<S>) -> Matrix3<S> {
// http://en.wikipedia.org/wiki/Rotation_matrix#Basic_rotations
let (s, c) = sin_cos(theta);
Matrix3::new(S::one(), S::zero(), S::zero(),
S::zero(), c.clone(), s.clone(),
S::zero(), -s.clone(), c.clone())
}
/// Create a matrix from a rotation around the `y` axis (yaw).
pub fn from_angle_y(theta: Rad<S>) -> Matrix3<S> {
// http://en.wikipedia.org/wiki/Rotation_matrix#Basic_rotations
let (s, c) = sin_cos(theta);
Matrix3::new(c.clone(), S::zero(), -s.clone(),
S::zero(), S::one(), S::zero(),
s.clone(), S::zero(), c.clone())
}
/// Create a matrix from a rotation around the `z` axis (roll).
pub fn from_angle_z(theta: Rad<S>) -> Matrix3<S> {
// http://en.wikipedia.org/wiki/Rotation_matrix#Basic_rotations
let (s, c) = sin_cos(theta);
Matrix3::new( c.clone(), s.clone(), S::zero(),
-s.clone(), c.clone(), S::zero(),
S::zero(), S::zero(), S::one())
}
/// Create a matrix from a set of euler angles.
///
/// # Parameters
///
/// - `x`: the angular rotation around the `x` axis (pitch).
/// - `y`: the angular rotation around the `y` axis (yaw).
/// - `z`: the angular rotation around the `z` axis (roll).
pub fn from_euler(x: Rad<S>, y: Rad<S>, z: Rad<S>) -> Matrix3<S> {
// http://en.wikipedia.org/wiki/Rotation_matrix#General_rotations
let (sx, cx) = sin_cos(x);
let (sy, cy) = sin_cos(y);
let (sz, cz) = sin_cos(z);
Matrix3::new( cy * cz, cy * sz, -sy,
-cx * sz + sx * sy * cz, cx * cz + sx * sy * sz, sx * cy,
sx * sz + cx * sy * cz, -sx * cz + cx * sy * sz, cx * cy)
}
/// Create a matrix from a rotation around an arbitrary axis
pub fn from_axis_angle(axis: &Vector3<S>, angle: Rad<S>) -> Matrix3<S> {
let (s, c) = sin_cos(angle);
let _1subc = S::one() - c;
Matrix3::new(_1subc * axis.x * axis.x + c,
_1subc * axis.x * axis.y + s * axis.z,
_1subc * axis.x * axis.z - s * axis.y,
_1subc * axis.x * axis.y - s * axis.z,
_1subc * axis.y * axis.y + c,
_1subc * axis.y * axis.z + s * axis.x,
_1subc * axis.x * axis.z + s * axis.y,
_1subc * axis.y * axis.z - s * axis.x,
_1subc * axis.z * axis.z + c)
}
}
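// Hedged sketch (not part of the original cgmath source): rotating about the unit `z`
// axis with `from_axis_angle` should agree with `from_angle_z` up to floating point
// error. The angle is taken as a parameter so the sketch does not assume any
// particular `Rad` constructor; it is only compiled for tests and never called.
#[cfg(test)]
#[allow(dead_code)]
fn _axis_angle_matches_angle_z_sketch(theta: Rad<f64>) {
    let about_z = Matrix3::from_axis_angle(&Vector3::new(0.0, 0.0, 1.0), theta.clone());
    assert!(about_z.approx_eq(&Matrix3::from_angle_z(theta)));
}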
impl<S: Copy + Neg<Output = S>> Matrix3<S> {
/// Negate this `Matrix3` in-place.
#[inline]
pub fn neg_self(&mut self) {
self[0].neg_self();
self[1].neg_self();
self[2].neg_self();
}
}
impl<S> Matrix4<S> {
/// Create a new matrix, providing values for each index.
#[inline]
pub fn new(c0r0: S, c0r1: S, c0r2: S, c0r3: S,
c1r0: S, c1r1: S, c1r2: S, c1r3: S,
c2r0: S, c2r1: S, c2r2: S, c2r3: S,
c3r0: S, c3r1: S, c3r2: S, c3r3: S) -> Matrix4<S> {
Matrix4::from_cols(Vector4::new(c0r0, c0r1, c0r2, c0r3),
Vector4::new(c1r0, c1r1, c1r2, c1r3),
Vector4::new(c2r0, c2r1, c2r2, c2r3),
Vector4::new(c3r0, c3r1, c3r2, c3r3))
}
/// Create a new matrix, providing columns.
#[inline]
pub fn from_cols(c0: Vector4<S>, c1: Vector4<S>, c2: Vector4<S>, c3: Vector4<S>) -> Matrix4<S> {
Matrix4 { x: c0, y: c1, z: c2, w: c3 }
}
}
impl<S: BaseNum> Matrix4<S> {
/// Create a translation matrix from a Vector3
#[inline]
pub fn from_translation(v: &Vector3<S>) -> Matrix4<S> {
Matrix4::new(S::one(), S::zero(), S::zero(), S::zero(),
S::zero(), S::one(), S::zero(), S::zero(),
S::zero(), S::zero(), S::one(), S::zero(),
v.x, v.y, v.z, S::one())
}
}
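// Hedged sketch (not part of the original cgmath source): a translation matrix offsets
// homogeneous points (w = 1) by the translation vector, which follows from the column
// layout used in `from_translation` above. Only compiled for tests, never called.
#[cfg(test)]
#[allow(dead_code)]
fn _translation_sketch() {
    let t = Matrix4::from_translation(&Vector3::new(1.0f64, 2.0, 3.0));
    let moved = t.mul_v(&Vector4::new(0.0, 0.0, 0.0, 1.0));
    assert!(moved.x == 1.0 && moved.y == 2.0 && moved.z == 3.0 && moved.w == 1.0);
}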
impl<S: BaseFloat> Matrix4<S> {
/// Create a transformation matrix that will cause a vector to point at
/// `dir`, using `up` for orientation.
pub fn look_at(eye: &Point3<S>, center: &Point3<S>, up: &Vector3<S>) -> Matrix4<S> {
let f = (center - eye).normalize();
let s = f.cross(up).normalize();
let u = s.cross(&f);
Matrix4::new(s.x.clone(), u.x.clone(), -f.x.clone(), S::zero(),
s.y.clone(), u.y.clone(), -f.y.clone(), S::zero(),
s.z.clone(), u.z.clone(), -f.z.clone(), S::zero(),
-eye.dot(&s), -eye.dot(&u), eye.dot(&f), S::one())
}
}
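// Hedged sketch (not part of the original cgmath source): a typical view-matrix call,
// looking from (0, 0, 5) toward the origin with +y as the up direction.
// Only compiled for tests, never called.
#[cfg(test)]
#[allow(dead_code)]
fn _look_at_sketch() {
    let _view: Matrix4<f64> = Matrix4::look_at(&Point3::new(0.0, 0.0, 5.0),
                                               &Point3::new(0.0, 0.0, 0.0),
                                               &Vector3::new(0.0, 1.0, 0.0));
}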
impl<S: Copy + Neg<Output = S>> Matrix4<S> {
/// Negate this `Matrix4` in-place.
#[inline]
pub fn neg_self(&mut self) {
self[0].neg_self();
self[1].neg_self();
self[2].neg_self();
self[3].neg_self();
}
}
pub trait Matrix<S: BaseFloat, V: Vector<S> + 'static>: Array2<V, V, S> + ApproxEq<S> + Sized // where
// FIXME: blocked by rust-lang/rust#20671
//
// for<'a, 'b> &'a Self: Add<&'b Self, Output = Self>,
// for<'a, 'b> &'a Self: Sub<&'b Self, Output = Self>,
// for<'a, 'b> &'a Self: Mul<&'b Self, Output = Self>,
// for<'a, 'b> &'a Self: Mul<&'b V, Output = V>,
//
// for<'a> &'a Self: Mul<S, Output = Self>,
// for<'a> &'a Self: Div<S, Output = Self>,
// for<'a> &'a Self: Rem<S, Output = Self>,
{
/// Create a new diagonal matrix using the supplied value.
fn from_value(value: S) -> Self;
/// Create a matrix from a non-uniform scale
fn from_diagonal(value: &V) -> Self;
/// Create a matrix with all elements equal to zero.
#[inline]
fn zero() -> Self { Self::from_value(S::zero()) }
/// Create a matrix where each element of the diagonal is equal to one.
#[inline]
fn one() -> Self { Self::from_value(S::one()) }
/// Multiply this matrix by a scalar, returning the new matrix.
#[must_use]
fn mul_s(&self, s: S) -> Self;
/// Divide this matrix by a scalar, returning the new matrix.
#[must_use]
fn div_s(&self, s: S) -> Self;
/// Take the remainder of this matrix by a scalar, returning the new
/// matrix.
#[must_use]
fn rem_s(&self, s: S) -> Self;
/// Add this matrix with another matrix, returning the new matrix.
#[must_use]
fn add_m(&self, m: &Self) -> Self;
/// Subtract another matrix from this matrix, returning the new matrix.
#[must_use]
fn sub_m(&self, m: &Self) -> Self;
/// Multiply a vector by this matrix, returning a new vector.
fn mul_v(&self, v: &V) -> V;
/// Multiply this matrix by another matrix, returning the new matrix.
#[must_use]
fn mul_m(&self, m: &Self) -> Self;
/// Multiply this matrix by a scalar, in-place.
fn mul_self_s(&mut self, s: S);
/// Divide this matrix by a scalar, in-place.
fn div_self_s(&mut self, s: S);
/// Take the remainder of this matrix by a scalar, in-place.
fn rem_self_s(&mut self, s: S);
/// Add this matrix with another matrix, in-place.
fn add_self_m(&mut self, m: &Self);
/// Subtract another matrix from this matrix, in-place.
fn sub_self_m(&mut self, m: &Self);
/// Multiply this matrix by another matrix, in-place.
#[inline]
fn mul_self_m(&mut self, m: &Self) { *self = self.mul_m(m); }
/// Transpose this matrix, returning a new matrix.
#[must_use]
fn transpose(&self) -> Self;
/// Transpose this matrix in-place.
fn transpose_self(&mut self);
/// Take the determinant of this matrix.
fn determinant(&self) -> S;
/// Return a vector containing the diagonal of this matrix.
fn diagonal(&self) -> V;
/// Return the trace of this matrix. That is, the sum of the diagonal.
#[inline]
fn trace(&self) -> S { self.diagonal().sum() }
/// Invert this matrix, returning a new matrix. `m.mul_m(&m.invert().unwrap())` is
/// the identity matrix. Returns `None` if this matrix is not invertible
/// (has a determinant of zero).
#[must_use]
fn invert(&self) -> Option<Self>;
/// Identical to `invert`, except it does not check whether the matrix is invertible.
/// Thus, you are responsible for ensuring as much.
#[must_use]
unsafe fn invert_unsafe(&self) -> Self {
self.invert_with_det(self.determinant())
}
unsafe fn invert_with_det(&self, det: S) -> Self;
/// Invert this matrix in-place.
#[inline]
fn invert_self(&mut self) {
*self = self.invert().expect("Attempted to invert a matrix with zero determinant.");
}
/// Identical to `invert_self`, except it does not check whether the matrix is invertible.
/// Thus, you are responsible for ensuring as much.
#[inline]
unsafe fn invert_self_unsafe(&mut self) {
*self = self.invert_unsafe();
}
/// Test if this matrix is invertible.
#[inline]
fn is_invertible(&self) -> bool { !self.determinant().approx_eq(&S::zero()) }
/// Test if this matrix is the identity matrix. That is, it is diagonal
/// and every element in the diagonal is one.
#[inline]
fn is_one(&self) -> bool { self.approx_eq(&Self::one()) }
/// Test if this is a diagonal matrix. That is, every element outside of
/// the diagonal is 0.
fn is_diagonal(&self) -> bool;
/// Test if this matrix is symmetric. That is, it is equal to its
/// transpose.
fn is_symmetric(&self) -> bool;
}
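// Hedged sketch (not part of the original cgmath source): the `invert` contract stated
// above, checked on a small example. `from_value(1.0)` builds the identity, and a
// matrix with linearly dependent columns has determinant zero and no inverse.
// Only compiled for tests, never called.
#[cfg(test)]
#[allow(dead_code)]
fn _invert_contract_sketch() {
    let m = Matrix2::new(1.0f64, 2.0, 3.0, 4.0);
    let inv = m.invert().expect("determinant is -2, so the matrix is invertible");
    assert!(m.mul_m(&inv).approx_eq(&Matrix2::from_value(1.0)));
    assert!(Matrix2::new(1.0f64, 2.0, 2.0, 4.0).invert().is_none());
}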
impl<S: Copy + 'static> Array2<Vector2<S>, Vector2<S>, S> for Matrix2<S> {
#[inline]
fn row(&self, r: usize) -> Vector2<S> {
Vector2::new(self[0][r],
self[1][r])
}
#[inline]
fn swap_rows(&mut self, a: usize, b: usize) {
self[0].swap_elems(a, b);
self[1].swap_elems(a, b);
}
}
impl<S: Copy + 'static> Array2<Vector3<S>, Vector3<S>, S> for Matrix3<S> {
#[inline]
fn row(&self, r: usize) -> Vector3<S> {
Vector3::new(self[0][r],
self[1][r],
self[2][r])
}
#[inline]
fn swap_rows(&mut self, a: usize, b: usize) {
self[0].swap_elems(a, b);
self[1].swap_elems(a, b);
self[2].swap_elems(a, b);
}
}
impl<S: Copy + 'static> Array2<Vector4<S>, Vector4<S>, S> for Matrix4<S> {
#[inline]
fn row(&self, r: usize) -> Vector4<S> {
Vector4::new(self[0][r],
self[1][r],
self[2][r],
self[3][r])
}
#[inline]
fn swap_rows(&mut self, a: usize, b: usize) {
self[0].swap_elems(a, b);
self[1].swap_elems(a, b);
self[2].swap_elems(a, b);
self[3].swap_elems(a, b);
}
}
impl<S: BaseFloat> Matrix<S, Vector2<S>> for Matrix2<S> {
#[inline]
fn from_value(value: S) -> Matrix2<S> {
Matrix2::new(value, S::zero(),
S::zero(), value)
}
#[inline]
fn from_diagonal(value: &Vector2<S>) -> Matrix2<S> {
Matrix2::new(value.x, S::zero(),
S::zero(), value.y)
}
#[inline] fn mul_s(&self, s: S) -> Matrix2<S> { self * s }
#[inline] fn div_s(&self, s: S) -> Matrix2<S> { self / s }
#[inline] fn rem_s(&self, s: S) -> Matrix2<S> { self % s }
#[inline] fn add_m(&self, m: &Matrix2<S>) -> Matrix2<S> { self + m }
#[inline] fn sub_m(&self, m: &Matrix2<S>) -> Matrix2<S> { self - m }
fn mul_m(&self, other: &Matrix2<S>) -> Matrix2<S> { self * other }
#[inline] fn mul_v(&self, v: &Vector2<S>) -> Vector2<S> { self * v }
#[inline]
fn mul_self_s(&mut self, s: S) {
self[0].mul_self_s(s);
self[1].mul_self_s(s);
}
#[inline]
fn div_self_s(&mut self, s: S) {
self[0].div_self_s(s);
self[1].div_self_s(s);
}
#[inline]
fn rem_self_s(&mut self, s: S) {
self[0].rem_self_s(s);
self[1].rem_self_s(s);
}
#[inline]
fn add_self_m(&mut self, m: &Matrix2<S>) {
self[0].add_self_v(&m[0]);
self[1].add_self_v(&m[1]);
}
#[inline]
fn sub_self_m(&mut self, m: &Matrix2<S>) {
self[0].sub_self_v(&m[0]);
self[1].sub_self_v(&m[1]);
}
fn transpose(&self) -> Matrix2<S> {
Matrix2::new(self[0][0], self[1][0],
self[0][1], self[1][1])
}
#[inline]
fn transpose_self(&mut self) {
self.swap_elems((0, 1), (1, 0));
}
#[inline]
fn determinant(&self) -> S {
self[0][0] * self[1][1] - self[1][0] * self[0][1]
}
#[inline]
fn diagonal(&self) -> Vector2<S> {
Vector2::new(self[0][0],
self[1][1])
}
#[inline]
unsafe fn invert_with_det(&self, det: S) -> Matrix2<S> {
Matrix2::new( self[1][1] / det, -self[0][1] / det,
-self[1][0] / det, self[0][0] / det)
}
fn invert(&self) -> Option<Matrix2<S>> {
let det = self.determinant();
if det.approx_eq(&S::zero()) {
None
} else {
Some(Matrix2::new( self[1][1] / det, -self[0][1] / det,
-self[1][0] / det, self[0][0] / det))
}
}
#[inline]
fn is_diagonal(&self) -> bool {
self[0][1].approx_eq(&S::zero()) &&
self[1][0].approx_eq(&S::zero())
}
#[inline]
fn is_symmetric(&self) -> bool {
self[0][1].approx_eq(&self[1][0]) &&
self[1][0].approx_eq(&self[0][1])
}
}
impl<S: BaseFloat> Matrix<S, Vector3<S>> for Matrix3<S> {
#[inline]
fn from_value(value: S) -> Matrix3<S> {
Matrix3::new(value, S::zero(), S::zero(),
S::zero(), value, S::zero(),
S::zero(), S::zero(), value)
}
#[inline]
fn from_diagonal(value: &Vector3<S>) -> Matrix3<S> {
Matrix3::new(value.x, S::zero(), S::zero(),
S::zero(), value.y, S::zero(),
S::zero(), S::zero(), value.z)
}
#[inline] fn mul_s(&self, s: S) -> Matrix3<S> { self * s }
#[inline] fn div_s(&self, s: S) -> Matrix3<S> { self / s }
#[inline] fn rem_s(&self, s: S) -> Matrix3<S> { self % s }
#[inline] fn add_m(&self, m: &Matrix3<S>) -> Matrix3<S> { self + m }
#[inline] fn sub_m(&self, m: &Matrix3<S>) -> Matrix3<S> { self - m }
fn mul_m(&self, other: &Matrix3<S>) -> Matrix3<S> { self * other }
#[inline] fn mul_v(&self, v: &Vector3<S>) -> Vector3<S> { self * v}
#[inline]
fn mul_self_s(&mut self, s: S) {
self[0].mul_self_s(s);
self[1].mul_self_s(s);
self[2].mul_self_s(s);
}
#[inline]
fn div_self_s(&mut self, s: S) {
self[0].div_self_s(s);
self[1].div_self_s(s);
self[2].div_self_s(s);
}
#[inline]
fn rem_self_s(&mut self, s: S) {
self[0].rem_self_s(s);
self[1].rem_self_s(s);
self[2].rem_self_s(s);
}
#[inline]
fn add_self_m(&mut self, m: &Matrix3<S>) {
self[0].add_self_v(&m[0]);
self[1].add_self_v(&m[1]);
self[2].add_self_v(&m[2]);
}
#[inline]
fn sub_self_m(&mut self, m: &Matrix3<S>) {
self[0].sub_self_v(&m[0]);
self[1].sub_self_v(&m[1]);
self[2].sub_self_v(&m[2]);
}
fn transpose(&self) -> Matrix3<S> {
Matrix3::new(self[0][0], self[1][0], self[2][0],
self[0][1], self[1][1], self[2][1],
self[0][2], self[1][2], self[2][2])
}
#[inline]
fn transpose_self(&mut self) {
self.swap_elems((0, 1), (1, 0));
self.swap_elems((0, 2), (2, 0));
self.swap_elems((1, 2), (2, 1));
}
fn determinant(&self) -> S {
self[0][0] * (self[1][1] * self[2][2] - self[2][1] * self[1][2]) -
self[1][0] * (self[0][1] * self[2][2] - self[2][1] * self[0][2]) +
self[2][0] * (self[0][1] * self[1][2] - self[1][1] * self[0][2])
}
#[inline]
fn diagonal(&self) -> Vector3<S> {
Vector3::new(self[0][0],
self[1][1],
self[2][2])
}
unsafe fn invert_with_det(&self, det: S) -> Matrix3<S> {
Matrix3::from_cols(self[1].cross(&self[2]).div_s(det),
self[2].cross(&self[0]).div_s(det),
self[0].cross(&self[1]).div_s(det)).transpose()
}
fn invert(&self) -> Option<Matrix3<S>> {
let det = self.determinant();
if det.approx_eq(&S::zero()) { None } else {
Some(Matrix3::from_cols(&self[1].cross(&self[2]) / det,
&self[2].cross(&self[0]) / det,
&self[0].cross(&self[1]) / det).transpose())
}
}
fn is_diagonal(&self) -> bool {
self[0][1].approx_eq(&S::zero()) &&
self[0][2].approx_eq(&S::zero()) &&
self[1][0].approx_eq(&S::zero()) &&
self[1][2].approx_eq(&S::zero()) &&
self[2][0].approx_eq(&S::zero()) &&
self[2][1].approx_eq(&S::zero())
}
fn is_symmetric(&self) -> bool {
self[0][1].approx_eq(&self[1][0]) &&
self[0][2].approx_eq(&self[2][0]) &&
self[1][0].approx_eq(&self[0][1]) &&
self[1][2].approx_eq(&self[2][1]) &&
self[2][0].approx_eq(&self[0][2]) &&
self[2][1].approx_eq(&self[1][2])
}
}
impl<S: BaseFloat> Matrix<S, Vector4<S>> for Matrix4<S> {
#[inline]
fn from_value(value: S) -> Matrix4<S> {
Matrix4::new(value, S::zero(), S::zero(), S::zero(),
S::zero(), value, S::zero(), S::zero(),
S::zero(), S::zero(), value, S::zero(),
S::zero(), S::zero(), S::zero(), value)
}
#[inline]
fn from_diagonal(value: &Vector4<S>) -> Matrix4<S> {
Matrix4::new(value.x, S::zero(), S::zero(), S::zero(),
S::zero(), value.y, S::zero(), S::zero(),
S::zero(), S::zero(), value.z, S::zero(),
S::zero(), S::zero(), S::zero(), value.w)
}
#[inline] fn mul_s(&self, s: S) -> Matrix4<S> { self * s }
#[inline] fn div_s(&self, s: S) -> Matrix4<S> { self / s }
#[inline] fn rem_s(&self, s: S) -> Matrix4<S> { self % s }
#[inline] fn add_m(&self, m: &Matrix4<S>) -> Matrix4<S> { self + m }
#[inline] fn sub_m(&self, m: &Matrix4<S>) -> Matrix4<S> { self - m }
fn mul_m(&self, other: &Matrix4<S>) -> Matrix4<S> { self * other }
#[inline] fn mul_v(&self, v: &Vector4<S>) -> Vector4<S> { self * v }
#[inline]
fn mul_self_s(&mut self, s: S) {
self[0].mul_self_s(s);
self[1].mul_self_s(s);
self[2].mul_self_s(s);
self[3].mul_self_s(s);
}
#[inline]
fn div_self_s(&mut self, s: S) {
self[0].div_self_s(s);
self[1].div_self_s(s);
self[2].div_self_s(s);
self[3].div_self_s(s);
}
#[inline]
fn rem_self_s(&mut self, s: S) {
self[0].rem_self_s(s);
self[1].rem_self_s(s);
self[2].rem_self_s(s);
self[3].rem_self_s(s);
}
#[inline]
fn add_self_m(&mut self, m: &Matrix4<S>) {
self[0].add_self_v(&m[0]);
self[1].add_self_v(&m[1]);
self[2].add_self_v(&m[2]);
self[3].add_self_v(&m[3]);
}
#[inline]
fn sub_self_m(&mut self, m: &Matrix4<S>) {
self[0].sub_self_v(&m[0]);
self[1].sub_self_v(&m[1]);
self[2].sub_self_v(&m[2]);
self[3].sub_self_v(&m[3]);
}
fn transpose(&self) -> Matrix4<S> {
Matrix4::new(self[0][0], self[1][0], self[2][0], self[3][0],
self[0][1], self[1][1], self[2][1], self[3][1],
self[0][2], self[1][2], self[2][2], self[3][2],
self[0][3], self[1][3], self[2][3], self[3][3])
}
fn transpose_self(&mut self) {
self.swap_elems((0, 1), (1, 0));
self.swap_elems((0, 2), (2, 0));
self.swap_elems((0, 3), (3, 0));
self.swap_elems((1, 2), (2, 1));
self.swap_elems((1, 3), (3, 1));
self.swap_elems((2, 3), (3, 2));
}
fn determinant(&self) -> S {
let m0 = Matrix3::new(self[1][1], self[2][1], self[3][1],
self[1][2], self[2][2], self[3][2],
self[1][3], self[2][3], self[3][3]);
let m1 = Matrix3::new(self[0][1], self[2][1], self[3][1],
self[0][2], self[2][2], self[3][2],
self[0][3], self[2][3], self[3][3]);
let m2 = Matrix3::new(self[0][1], self[1][1], self[3][1],
self[0][2], self[1][2], self[3][2],
self[0][3], self[1][3], self[3][3]);
let m3 = Matrix3::new(self[0][1], self[1][1], self[2][1],
self[0][2], self[1][2], self[2][2],
self[0][3], self[1][3], self[2][3]);<|fim▁hole|> self[1][0] * m1.determinant() +
self[2][0] * m2.determinant() -
self[3][0] * m3.determinant()
}
#[inline]
fn diagonal(&self) -> Vector4<S> {
Vector4::new(self[0][0],
self[1][1],
self[2][2],
self[3][3])
}
unsafe fn invert_with_det(&self, det: S) -> Matrix4<S> {
let one: S = S::one();
let inv_det = one / det;
let t = self.transpose();
let cf = |i, j| {
let mat = match i {
0 => Matrix3::from_cols(t.y.truncate_n(j),
t.z.truncate_n(j),
t.w.truncate_n(j)),
1 => Matrix3::from_cols(t.x.truncate_n(j),
t.z.truncate_n(j),
t.w.truncate_n(j)),
2 => Matrix3::from_cols(t.x.truncate_n(j),
t.y.truncate_n(j),
t.w.truncate_n(j)),
3 => Matrix3::from_cols(t.x.truncate_n(j),
t.y.truncate_n(j),
t.z.truncate_n(j)),
_ => panic!("out of range")
};
let sign = if (i+j) & 1 == 1 {-one} else {one};
mat.determinant() * sign * inv_det
};
Matrix4::new(cf(0, 0), cf(0, 1), cf(0, 2), cf(0, 3),
cf(1, 0), cf(1, 1), cf(1, 2), cf(1, 3),
cf(2, 0), cf(2, 1), cf(2, 2), cf(2, 3),
cf(3, 0), cf(3, 1), cf(3, 2), cf(3, 3))
}
fn invert(&self) -> Option<Matrix4<S>> {
let det = self.determinant();
if det.approx_eq(&S::zero()) { None } else {
let inv_det = S::one() / det;
let t = self.transpose();
let cf = |i, j| {
let mat = match i {
0 => Matrix3::from_cols(t.y.truncate_n(j), t.z.truncate_n(j), t.w.truncate_n(j)),
1 => Matrix3::from_cols(t.x.truncate_n(j), t.z.truncate_n(j), t.w.truncate_n(j)),
2 => Matrix3::from_cols(t.x.truncate_n(j), t.y.truncate_n(j), t.w.truncate_n(j)),
3 => Matrix3::from_cols(t.x.truncate_n(j), t.y.truncate_n(j), t.z.truncate_n(j)),
_ => panic!("out of range"),
};
let sign = if (i + j) & 1 == 1 { -S::one() } else { S::one() };
mat.determinant() * sign * inv_det
};
Some(Matrix4::new(cf(0, 0), cf(0, 1), cf(0, 2), cf(0, 3),
cf(1, 0), cf(1, 1), cf(1, 2), cf(1, 3),
cf(2, 0), cf(2, 1), cf(2, 2), cf(2, 3),
cf(3, 0), cf(3, 1), cf(3, 2), cf(3, 3)))
}
}
fn is_diagonal(&self) -> bool {
self[0][1].approx_eq(&S::zero()) &&
self[0][2].approx_eq(&S::zero()) &&
self[0][3].approx_eq(&S::zero()) &&
self[1][0].approx_eq(&S::zero()) &&
self[1][2].approx_eq(&S::zero()) &&
self[1][3].approx_eq(&S::zero()) &&
self[2][0].approx_eq(&S::zero()) &&
self[2][1].approx_eq(&S::zero()) &&
self[2][3].approx_eq(&S::zero()) &&
self[3][0].approx_eq(&S::zero()) &&
self[3][1].approx_eq(&S::zero()) &&
self[3][2].approx_eq(&S::zero())
}
fn is_symmetric(&self) -> bool {
self[0][1].approx_eq(&self[1][0]) &&
self[0][2].approx_eq(&self[2][0]) &&
self[0][3].approx_eq(&self[3][0]) &&
self[1][0].approx_eq(&self[0][1]) &&
self[1][2].approx_eq(&self[2][1]) &&
self[1][3].approx_eq(&self[3][1]) &&
self[2][0].approx_eq(&self[0][2]) &&
self[2][1].approx_eq(&self[1][2]) &&
self[2][3].approx_eq(&self[3][2]) &&
self[3][0].approx_eq(&self[0][3]) &&
self[3][1].approx_eq(&self[1][3]) &&
self[3][2].approx_eq(&self[2][3])
}
}
impl<S: BaseFloat> ApproxEq<S> for Matrix2<S> {
#[inline]
fn approx_eq_eps(&self, other: &Matrix2<S>, epsilon: &S) -> bool {
self[0].approx_eq_eps(&other[0], epsilon) &&
self[1].approx_eq_eps(&other[1], epsilon)
}
}
impl<S: BaseFloat> ApproxEq<S> for Matrix3<S> {
#[inline]
fn approx_eq_eps(&self, other: &Matrix3<S>, epsilon: &S) -> bool {
self[0].approx_eq_eps(&other[0], epsilon) &&
self[1].approx_eq_eps(&other[1], epsilon) &&
self[2].approx_eq_eps(&other[2], epsilon)
}
}
impl<S: BaseFloat> ApproxEq<S> for Matrix4<S> {
#[inline]
fn approx_eq_eps(&self, other: &Matrix4<S>, epsilon: &S) -> bool {
self[0].approx_eq_eps(&other[0], epsilon) &&
self[1].approx_eq_eps(&other[1], epsilon) &&
self[2].approx_eq_eps(&other[2], epsilon) &&
self[3].approx_eq_eps(&other[3], epsilon)
}
}
impl<S: Neg<Output = S>> Neg for Matrix2<S> {
type Output = Matrix2<S>;
#[inline]
fn neg(self) -> Matrix2<S> {
Matrix2::from_cols(-self.x, -self.y)
}
}
impl<S: Neg<Output = S>> Neg for Matrix3<S> {
type Output = Matrix3<S>;
#[inline]
fn neg(self) -> Matrix3<S> {
Matrix3::from_cols(-self.x, -self.y, -self.z)
}
}
impl<S: Neg<Output = S>> Neg for Matrix4<S> {
type Output = Matrix4<S>;
#[inline]
fn neg(self) -> Matrix4<S> {
Matrix4::from_cols(-self.x, -self.y, -self.z, -self.w)
}
}
macro_rules! impl_scalar_binary_operator {
($Binop:ident :: $binop:ident, $MatrixN:ident { $($field:ident),+ }) => {
impl<'a, S: BaseNum> $Binop<S> for &'a $MatrixN<S> {
type Output = $MatrixN<S>;
#[inline]
fn $binop(self, s: S) -> $MatrixN<S> {
$MatrixN { $($field: self.$field.$binop(s)),+ }
}
}
}
}
impl_scalar_binary_operator!(Mul::mul, Matrix2 { x, y });
impl_scalar_binary_operator!(Mul::mul, Matrix3 { x, y, z });
impl_scalar_binary_operator!(Mul::mul, Matrix4 { x, y, z, w });
impl_scalar_binary_operator!(Div::div, Matrix2 { x, y });
impl_scalar_binary_operator!(Div::div, Matrix3 { x, y, z });
impl_scalar_binary_operator!(Div::div, Matrix4 { x, y, z, w });
impl_scalar_binary_operator!(Rem::rem, Matrix2 { x, y });
impl_scalar_binary_operator!(Rem::rem, Matrix3 { x, y, z });
impl_scalar_binary_operator!(Rem::rem, Matrix4 { x, y, z, w });
macro_rules! impl_binary_operator {
($Binop:ident :: $binop:ident, $MatrixN:ident { $($field:ident),+ }) => {
impl<'a, 'b, S: BaseNum> $Binop<&'a $MatrixN<S>> for &'b $MatrixN<S> {
type Output = $MatrixN<S>;
#[inline]
fn $binop(self, other: &'a $MatrixN<S>) -> $MatrixN<S> {
$MatrixN { $($field: self.$field.$binop(&other.$field)),+ }
}
}
}
}
impl_binary_operator!(Add::add, Matrix2 { x, y });
impl_binary_operator!(Add::add, Matrix3 { x, y, z });
impl_binary_operator!(Add::add, Matrix4 { x, y, z, w });
impl_binary_operator!(Sub::sub, Matrix2 { x, y });
impl_binary_operator!(Sub::sub, Matrix3 { x, y, z });
impl_binary_operator!(Sub::sub, Matrix4 { x, y, z, w });
impl<'a, 'b, S: BaseNum> Mul<&'a Vector2<S>> for &'b Matrix2<S> {
type Output = Vector2<S>;
fn mul(self, v: &'a Vector2<S>) -> Vector2<S> {
Vector2::new(self.row(0).dot(v),
self.row(1).dot(v))
}
}
impl<'a, 'b, S: BaseNum> Mul<&'a Vector3<S>> for &'b Matrix3<S> {
type Output = Vector3<S>;
fn mul(self, v: &'a Vector3<S>) -> Vector3<S> {
Vector3::new(self.row(0).dot(v),
self.row(1).dot(v),
self.row(2).dot(v))
}
}
impl<'a, 'b, S: BaseNum> Mul<&'a Vector4<S>> for &'b Matrix4<S> {
type Output = Vector4<S>;
fn mul(self, v: &'a Vector4<S>) -> Vector4<S> {
Vector4::new(self.row(0).dot(v),
self.row(1).dot(v),
self.row(2).dot(v),
self.row(3).dot(v))
}
}
impl<'a, 'b, S: BaseNum> Mul<&'a Matrix2<S>> for &'b Matrix2<S> {
type Output = Matrix2<S>;
fn mul(self, other: &'a Matrix2<S>) -> Matrix2<S> {
Matrix2::new(self.row(0).dot(&other[0]), self.row(1).dot(&other[0]),
self.row(0).dot(&other[1]), self.row(1).dot(&other[1]))
}
}
impl<'a, 'b, S: BaseNum> Mul<&'a Matrix3<S>> for &'b Matrix3<S> {
type Output = Matrix3<S>;
fn mul(self, other: &'a Matrix3<S>) -> Matrix3<S> {
Matrix3::new(self.row(0).dot(&other[0]),self.row(1).dot(&other[0]),self.row(2).dot(&other[0]),
self.row(0).dot(&other[1]),self.row(1).dot(&other[1]),self.row(2).dot(&other[1]),
self.row(0).dot(&other[2]),self.row(1).dot(&other[2]),self.row(2).dot(&other[2]))
}
}
impl<'a, 'b, S: BaseNum> Mul<&'a Matrix4<S>> for &'b Matrix4<S> {
type Output = Matrix4<S>;
fn mul(self, other: &'a Matrix4<S>) -> Matrix4<S> {
// Using self.row(0).dot(other[0]) like the other matrix multiplies
// causes LLVM to miss identical loads and multiplies. This formulation
// lets the code auto-vectorize properly, increasing performance roughly
// four-fold.
macro_rules! dot_matrix4 {
($A:expr, $B:expr, $I:expr, $J:expr) => {
($A[0][$I]) * ($B[$J][0]) +
($A[1][$I]) * ($B[$J][1]) +
($A[2][$I]) * ($B[$J][2]) +
($A[3][$I]) * ($B[$J][3])
};
};
Matrix4::new(dot_matrix4!(self, other, 0, 0), dot_matrix4!(self, other, 1, 0), dot_matrix4!(self, other, 2, 0), dot_matrix4!(self, other, 3, 0),
dot_matrix4!(self, other, 0, 1), dot_matrix4!(self, other, 1, 1), dot_matrix4!(self, other, 2, 1), dot_matrix4!(self, other, 3, 1),
dot_matrix4!(self, other, 0, 2), dot_matrix4!(self, other, 1, 2), dot_matrix4!(self, other, 2, 2), dot_matrix4!(self, other, 3, 2),
dot_matrix4!(self, other, 0, 3), dot_matrix4!(self, other, 1, 3), dot_matrix4!(self, other, 2, 3), dot_matrix4!(self, other, 3, 3))
}
}
macro_rules! index_operators {
($MatrixN:ident<$S:ident>, $n:expr, $Output:ty, $I:ty) => {
impl<$S> Index<$I> for $MatrixN<$S> {
type Output = $Output;
#[inline]
fn index<'a>(&'a self, i: $I) -> &'a $Output {
let v: &[[$S; $n]; $n] = self.as_ref();
From::from(&v[i])
}
}
impl<$S> IndexMut<$I> for $MatrixN<$S> {
#[inline]
fn index_mut<'a>(&'a mut self, i: $I) -> &'a mut $Output {
let v: &mut [[$S; $n]; $n] = self.as_mut();
From::from(&mut v[i])
}
}
}
}
index_operators!(Matrix2<S>, 2, Vector2<S>, usize);
index_operators!(Matrix3<S>, 3, Vector3<S>, usize);
index_operators!(Matrix4<S>, 4, Vector4<S>, usize);
// index_operators!(Matrix2<S>, 2, [Vector2<S>], Range<usize>);
// index_operators!(Matrix3<S>, 3, [Vector3<S>], Range<usize>);
// index_operators!(Matrix4<S>, 4, [Vector4<S>], Range<usize>);
// index_operators!(Matrix2<S>, 2, [Vector2<S>], RangeTo<usize>);
// index_operators!(Matrix3<S>, 3, [Vector3<S>], RangeTo<usize>);
// index_operators!(Matrix4<S>, 4, [Vector4<S>], RangeTo<usize>);
// index_operators!(Matrix2<S>, 2, [Vector2<S>], RangeFrom<usize>);
// index_operators!(Matrix3<S>, 3, [Vector3<S>], RangeFrom<usize>);
// index_operators!(Matrix4<S>, 4, [Vector4<S>], RangeFrom<usize>);
// index_operators!(Matrix2<S>, 2, [Vector2<S>], RangeFull);
// index_operators!(Matrix3<S>, 3, [Vector3<S>], RangeFull);
// index_operators!(Matrix4<S>, 4, [Vector4<S>], RangeFull);
macro_rules! fixed_array_conversions {
($MatrixN:ident <$S:ident> { $($field:ident : $index:expr),+ }, $n:expr) => {
impl<$S> Into<[[$S; $n]; $n]> for $MatrixN<$S> {
#[inline]
fn into(self) -> [[$S; $n]; $n] {
match self { $MatrixN { $($field),+ } => [$($field.into()),+] }
}
}
impl<$S> AsRef<[[$S; $n]; $n]> for $MatrixN<$S> {
#[inline]
fn as_ref(&self) -> &[[$S; $n]; $n] {
unsafe { mem::transmute(self) }
}
}
impl<$S> AsMut<[[$S; $n]; $n]> for $MatrixN<$S> {
#[inline]
fn as_mut(&mut self) -> &mut [[$S; $n]; $n] {
unsafe { mem::transmute(self) }
}
}
impl<$S: Copy> From<[[$S; $n]; $n]> for $MatrixN<$S> {
#[inline]
fn from(m: [[$S; $n]; $n]) -> $MatrixN<$S> {
// We need to use a copy here because we can't pattern match on arrays yet
$MatrixN { $($field: From::from(m[$index])),+ }
}
}
impl<'a, $S> From<&'a [[$S; $n]; $n]> for &'a $MatrixN<$S> {
#[inline]
fn from(m: &'a [[$S; $n]; $n]) -> &'a $MatrixN<$S> {
unsafe { mem::transmute(m) }
}
}
impl<'a, $S> From<&'a mut [[$S; $n]; $n]> for &'a mut $MatrixN<$S> {
#[inline]
fn from(m: &'a mut [[$S; $n]; $n]) -> &'a mut $MatrixN<$S> {
unsafe { mem::transmute(m) }
}
}
// impl<$S> Into<[$S; ($n * $n)]> for $MatrixN<$S> {
// #[inline]
// fn into(self) -> [[$S; $n]; $n] {
// // TODO: Not sure how to implement this...
// unimplemented!()
// }
// }
impl<$S> AsRef<[$S; ($n * $n)]> for $MatrixN<$S> {
#[inline]
fn as_ref(&self) -> &[$S; ($n * $n)] {
unsafe { mem::transmute(self) }
}
}
impl<$S> AsMut<[$S; ($n * $n)]> for $MatrixN<$S> {
#[inline]
fn as_mut(&mut self) -> &mut [$S; ($n * $n)] {
unsafe { mem::transmute(self) }
}
}
// impl<$S> From<[$S; ($n * $n)]> for $MatrixN<$S> {
// #[inline]
// fn from(m: [$S; ($n * $n)]) -> $MatrixN<$S> {
// // TODO: Not sure how to implement this...
// unimplemented!()
// }
// }
impl<'a, $S> From<&'a [$S; ($n * $n)]> for &'a $MatrixN<$S> {
#[inline]
fn from(m: &'a [$S; ($n * $n)]) -> &'a $MatrixN<$S> {
unsafe { mem::transmute(m) }
}
}
impl<'a, $S> From<&'a mut [$S; ($n * $n)]> for &'a mut $MatrixN<$S> {
#[inline]
fn from(m: &'a mut [$S; ($n * $n)]) -> &'a mut $MatrixN<$S> {
unsafe { mem::transmute(m) }
}
}
}
}
fixed_array_conversions!(Matrix2<S> { x:0, y:1 }, 2);
fixed_array_conversions!(Matrix3<S> { x:0, y:1, z:2 }, 3);
fixed_array_conversions!(Matrix4<S> { x:0, y:1, z:2, w:3 }, 4);
impl<S: BaseFloat> From<Matrix2<S>> for Matrix3<S> {
/// Clone the elements of a 2-dimensional matrix into the top-left corner
/// of a 3-dimensional identity matrix.
fn from(m: Matrix2<S>) -> Matrix3<S> {
Matrix3::new(m[0][0], m[0][1], S::zero(),
m[1][0], m[1][1], S::zero(),
S::zero(), S::zero(), S::one())
}
}
impl<S: BaseFloat> From<Matrix2<S>> for Matrix4<S> {
/// Clone the elements of a 2-dimensional matrix into the top-left corner
/// of a 4-dimensional identity matrix.
fn from(m: Matrix2<S>) -> Matrix4<S> {
Matrix4::new(m[0][0], m[0][1], S::zero(), S::zero(),
m[1][0], m[1][1], S::zero(), S::zero(),
S::zero(), S::zero(), S::one(), S::zero(),
S::zero(), S::zero(), S::zero(), S::one())
}
}
impl<S: BaseFloat> From<Matrix3<S>> for Matrix4<S> {
/// Clone the elements of a 3-dimensional matrix into the top-left corner
/// of a 4-dimensional identity matrix.
fn from(m: Matrix3<S>) -> Matrix4<S> {
Matrix4::new(m[0][0], m[0][1], m[0][2], S::zero(),
m[1][0], m[1][1], m[1][2], S::zero(),
m[2][0], m[2][1], m[2][2], S::zero(),
S::zero(), S::zero(), S::zero(), S::one())
}
}
impl<S: BaseFloat> From<Matrix3<S>> for Quaternion<S> {
/// Convert the matrix to a quaternion
fn from(mat: Matrix3<S>) -> Quaternion<S> {
// http://www.cs.ucr.edu/~vbz/resources/quatut.pdf
let trace = mat.trace();
let half: S = cast(0.5f64).unwrap();
if trace >= S::zero() {
let s = (S::one() + trace).sqrt();
let w = half * s;
let s = half / s;
let x = (mat[1][2] - mat[2][1]) * s;
let y = (mat[2][0] - mat[0][2]) * s;
let z = (mat[0][1] - mat[1][0]) * s;
Quaternion::new(w, x, y, z)
} else if (mat[0][0] > mat[1][1]) && (mat[0][0] > mat[2][2]) {
let s = (half + (mat[0][0] - mat[1][1] - mat[2][2])).sqrt();
let w = half * s;
let s = half / s;
let x = (mat[0][1] - mat[1][0]) * s;
let y = (mat[2][0] - mat[0][2]) * s;
let z = (mat[1][2] - mat[2][1]) * s;
Quaternion::new(w, x, y, z)
} else if mat[1][1] > mat[2][2] {
let s = (half + (mat[1][1] - mat[0][0] - mat[2][2])).sqrt();
let w = half * s;
let s = half / s;
let x = (mat[0][1] - mat[1][0]) * s;
let y = (mat[1][2] - mat[2][1]) * s;
let z = (mat[2][0] - mat[0][2]) * s;
Quaternion::new(w, x, y, z)
} else {
let s = (half + (mat[2][2] - mat[0][0] - mat[1][1])).sqrt();
let w = half * s;
let s = half / s;
let x = (mat[2][0] - mat[0][2]) * s;
let y = (mat[1][2] - mat[2][1]) * s;
let z = (mat[0][1] - mat[1][0]) * s;
Quaternion::new(w, x, y, z)
}
}
}
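// Hedged sketch (not part of the original cgmath source): converting the identity
// rotation takes the `trace >= 0` branch above with trace = 3, yielding the identity
// quaternion (w = 1, zero vector part). Only compiled for tests, never called.
#[cfg(test)]
#[allow(dead_code)]
fn _quaternion_from_identity_sketch() {
    let _q: Quaternion<f64> = Quaternion::from(Matrix3::from_value(1.0));
}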
impl<S: BaseNum> fmt::Debug for Matrix2<S> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "[[{:?}, {:?}], [{:?}, {:?}]]",
self[0][0], self[0][1],
self[1][0], self[1][1])
}
}
impl<S: BaseNum> fmt::Debug for Matrix3<S> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "[[{:?}, {:?}, {:?}], [{:?}, {:?}, {:?}], [{:?}, {:?}, {:?}]]",
self[0][0], self[0][1], self[0][2],
self[1][0], self[1][1], self[1][2],
self[2][0], self[2][1], self[2][2])
}
}
impl<S: BaseNum> fmt::Debug for Matrix4<S> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "[[{:?}, {:?}, {:?}, {:?}], [{:?}, {:?}, {:?}, {:?}], [{:?}, {:?}, {:?}, {:?}], [{:?}, {:?}, {:?}, {:?}]]",
self[0][0], self[0][1], self[0][2], self[0][3],
self[1][0], self[1][1], self[1][2], self[1][3],
self[2][0], self[2][1], self[2][2], self[2][3],
self[3][0], self[3][1], self[3][2], self[3][3])
}
}
impl<S: BaseFloat + Rand> Rand for Matrix2<S> {
#[inline]
fn rand<R: Rng>(rng: &mut R) -> Matrix2<S> {
Matrix2{ x: rng.gen(), y: rng.gen() }
}
}
impl<S: BaseFloat + Rand> Rand for Matrix3<S> {
#[inline]
fn rand<R: Rng>(rng: &mut R) -> Matrix3<S> {
Matrix3{ x: rng.gen(), y: rng.gen(), z: rng.gen() }
}
}
impl<S: BaseFloat + Rand> Rand for Matrix4<S> {
#[inline]
fn rand<R: Rng>(rng: &mut R) -> Matrix4<S> {
Matrix4{ x: rng.gen(), y: rng.gen(), z: rng.gen(), w: rng.gen() }
}
}<|fim▁end|> |
self[0][0] * m0.determinant() - |
<|file_name|>dns_string_io.py<|end_file_name|><|fim▁begin|>import struct
OK, EFORMAT, ESERVER, ENAME, ENOTIMP, EREFUSED = range(6)
IXFR, AXFR, MAILB, MAILA, ALL_RECORDS = range(251, 256)
IN, CS, CH, HS = range(1, 5)
from io import BytesIO
class Message:
"""
L{Message} contains all the information represented by a single
DNS request or response.
@ivar id: See L{__init__}
@ivar answer: See L{__init__}
@ivar opCode: See L{__init__}
@ivar recDes: See L{__init__}
@ivar recAv: See L{__init__}
@ivar auth: See L{__init__}
@ivar rCode: See L{__init__}
@ivar trunc: See L{__init__}
@ivar maxSize: See L{__init__}
@ivar authenticData: See L{__init__}
@ivar checkingDisabled: See L{__init__}
@ivar queries: The queries which are being asked of or answered by
DNS server.
@type queries: L{list} of L{Query}
@ivar answers: Records containing the answers to C{queries} if
this is a response message.
@type answers: L{list} of L{RRHeader}
@ivar authority: Records containing information about the
authoritative DNS servers for the names in C{queries}.
@type authority: L{list} of L{RRHeader}
@ivar additional: Records containing IP addresses of host names
in C{answers} and C{authority}.
@type additional: L{list} of L{RRHeader}
"""
headerFmt = "!H2B4H"
headerSize = struct.calcsize(headerFmt)
# Question, answer, additional, and nameserver lists
queries = answers = add = ns = None
def __init__(self, id=0, answer=0, opCode=0, recDes=0, recAv=0,
auth=0, rCode=OK, trunc=0, maxSize=512,
authenticData=0, checkingDisabled=0):
"""
@param id: A 16 bit identifier assigned by the program that
generates any kind of query. This identifier is copied to
the corresponding reply and can be used by the requester
to match up replies to outstanding queries.
@type id: L{int}
@param answer: A one bit field that specifies whether this
message is a query (0), or a response (1).
@type answer: L{int}
@param opCode: A four bit field that specifies kind of query in
this message. This value is set by the originator of a query
and copied into the response.
@type opCode: L{int}
@param recDes: Recursion Desired - this bit may be set in a
query and is copied into the response. If RD is set, it
directs the name server to pursue the query recursively.
Recursive query support is optional.
@type recDes: L{int}
@param recAv: Recursion Available - this bit is set or cleared
in a response and denotes whether recursive query support
is available in the name server.
@type recAv: L{int}
@param auth: Authoritative Answer - this bit is valid in
responses and specifies that the responding name server
is an authority for the domain name in question section.
@type auth: L{int}
@param rCode: A response code, used to indicate success or failure in a
message which is a response from a server to a client request.
@type rCode: C{0 <= int < 16}
@param trunc: A flag indicating that this message was
truncated due to length greater than that permitted on the
transmission channel.
@type trunc: L{int}
@param maxSize: The requestor's UDP payload size is the number
of octets of the largest UDP payload that can be
reassembled and delivered in the requestor's network
stack.
@type maxSize: L{int}
@param authenticData: A flag indicating in a response that all
the data included in the answer and authority portion of
the response has been authenticated by the server
according to the policies of that server.
See U{RFC2535 section-6.1<https://tools.ietf.org/html/rfc2535#section-6.1>}.
@type authenticData: L{int}
@param checkingDisabled: A flag indicating in a query that
pending (non-authenticated) data is acceptable to the
resolver sending the query.
See U{RFC2535 section-6.1<https://tools.ietf.org/html/rfc2535#section-6.1>}.
@type checkingDisabled: L{int}
"""
self.maxSize = maxSize
self.id = id
self.answer = answer
self.opCode = opCode
self.auth = auth
self.trunc = trunc
self.recDes = recDes
self.recAv = recAv
self.rCode = rCode
self.authenticData = authenticData
self.checkingDisabled = checkingDisabled
self.queries = []
self.answers = []
self.authority = []
self.additional = []
def addQuery(self, name, type=ALL_RECORDS, cls=IN):
"""
Add another query to this Message.
@type name: C{bytes}
@param name: The name to query.
@type type: C{int}
@param type: Query type
@type cls: C{int}
@param cls: Query class
"""
self.queries.append(Query(name, type, cls))
def encode(self, strio):
compDict = {}
body_tmp = BytesIO()
for q in self.queries:
q.encode(body_tmp, compDict)
for q in self.answers:
q.encode(body_tmp, compDict)
for q in self.authority:
q.encode(body_tmp, compDict)
for q in self.additional:
q.encode(body_tmp, compDict)
body = body_tmp.getvalue()
size = len(body) + self.headerSize
if self.maxSize and size > self.maxSize:
self.trunc = 1<|fim▁hole|> | ((self.auth & 1 ) << 2 )
| ((self.trunc & 1 ) << 1 )
| ( self.recDes & 1 ) )
byte4 = ( ( (self.recAv & 1 ) << 7 )
| ((self.authenticData & 1) << 5)
| ((self.checkingDisabled & 1) << 4)
| (self.rCode & 0xf ) )
strio.write(struct.pack(self.headerFmt, self.id, byte3, byte4,
len(self.queries), len(self.answers),
len(self.authority), len(self.additional)))
strio.write(body)
def decode(self, strio, length=None):
self.maxSize = 0
header = readPrecisely(strio, self.headerSize)
r = struct.unpack(self.headerFmt, header)
self.id, byte3, byte4, nqueries, nans, nns, nadd = r
self.answer = ( byte3 >> 7 ) & 1
self.opCode = ( byte3 >> 3 ) & 0xf
self.auth = ( byte3 >> 2 ) & 1
self.trunc = ( byte3 >> 1 ) & 1
self.recDes = byte3 & 1
self.recAv = ( byte4 >> 7 ) & 1
self.authenticData = ( byte4 >> 5 ) & 1
self.checkingDisabled = ( byte4 >> 4 ) & 1
self.rCode = byte4 & 0xf
self.queries = []
for i in range(nqueries):
q = Query()
try:
q.decode(strio)
except EOFError:
return
self.queries.append(q)
items = (
(self.answers, nans),
(self.authority, nns),
(self.additional, nadd))
for (l, n) in items:
self.parseRecords(l, n, strio)
def parseRecords(self, list, num, strio):
for i in range(num):
header = RRHeader(auth=self.auth)
try:
header.decode(strio)
except EOFError:
return
t = self.lookupRecordType(header.type)
if not t:
continue
header.payload = t(ttl=header.ttl)
try:
header.payload.decode(strio, header.rdlength)
except EOFError:
return
list.append(header)
# Create a mapping from record types to their corresponding Record_*
# classes. This relies on the global state which has been created so
# far in initializing this module (so don't define Record classes after
# this).
_recordTypes = {}
for name in globals():
if name.startswith('Record_'):
_recordTypes[globals()[name].TYPE] = globals()[name]
# Clear the iteration variable out of the class namespace so it
# doesn't become an attribute.
del name
def lookupRecordType(self, type):
"""
Retrieve the L{IRecord} implementation for the given record type.
@param type: A record type, such as L{A} or L{NS}.
@type type: C{int}
@return: An object which implements L{IRecord} or C{None} if none
can be found for the given type.
@rtype: L{types.ClassType}
"""
return self._recordTypes.get(type, UnknownRecord)
def toStr(self):
"""
Encode this L{Message} into a byte string in the format described by RFC
1035.
@rtype: C{bytes}
"""
strio = BytesIO()
self.encode(strio)
return strio.getvalue()
def fromStr(self, str):
"""
Decode a byte string in the format described by RFC 1035 into this
L{Message}.
@param str: L{bytes}
"""
strio = BytesIO(str)
self.decode(strio)
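# Hedged sketch (not part of the original module): a bare header round-trip through
# toStr()/fromStr(). With no questions or answers only the 12-byte header is packed,
# so the flag bits can be checked directly after re-parsing. This assumes the part of
# encode() elided by the fill-in-the-middle hole above (the byte3/byte4 packing).
def _header_roundtrip_sketch():
    msg = Message(id=0x1234, answer=1, opCode=0, recDes=1, rCode=OK)
    wire = msg.toStr()
    parsed = Message()
    parsed.fromStr(wire)
    assert (parsed.id, parsed.answer, parsed.recDes, parsed.rCode) == (0x1234, 1, 1, OK)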
def readPrecisely(file, l):
buff = file.read(l)
if len(buff) < l:
raise EOFError
return buff
# DNS Protocol Version Query Request
verPayload = b'\x02\xec' # Transaction ID 748
verPayload += b'\x01\x00' # Standard query flag (1, 0)
verPayload += b'\x00\x01' # Questions 1
verPayload += b'\x00\x00' # Number of Answers 0
verPayload += b'\x00\x00' # Number of Authoritative Records 0
verPayload += b'\x00\x00' # Number of Additional Records 0
verPayload += b'\x07\x76\x65\x72\x73\x69\x6f\x6e\x04\x62\x69\x6e\x64\x00\x00\x10\x00\x03' # version.bind Request
headerFmt = "!H2B4H"
headerSize = struct.calcsize(headerFmt)
strio = BytesIO(verPayload)
print(strio)
header = readPrecisely(strio, headerSize)
print(header)
print(struct.unpack(headerFmt, header))
m = Message()
m.fromStr(verPayload)<|fim▁end|> | body = body[:self.maxSize - self.headerSize]
byte3 = (( ( self.answer & 1 ) << 7 )
| ((self.opCode & 0xf ) << 3 ) |
<|file_name|>protocol.go<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package thrift
import (
"context"
"errors"
"fmt"
)
const (
VERSION_MASK = 0xffff0000
VERSION_1 = 0x80010000
)
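// VERSION_MASK/VERSION_1 are the strict-mode version bits carried in the binary protocol's message-begin header.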
type TProtocol interface {
WriteMessageBegin(ctx context.Context, name string, typeId TMessageType, seqid int32) error
WriteMessageEnd(ctx context.Context) error
WriteStructBegin(ctx context.Context, name string) error
WriteStructEnd(ctx context.Context) error
WriteFieldBegin(ctx context.Context, name string, typeId TType, id int16) error
WriteFieldEnd(ctx context.Context) error<|fim▁hole|> WriteListEnd(ctx context.Context) error
WriteSetBegin(ctx context.Context, elemType TType, size int) error
WriteSetEnd(ctx context.Context) error
WriteBool(ctx context.Context, value bool) error
WriteByte(ctx context.Context, value int8) error
WriteI16(ctx context.Context, value int16) error
WriteI32(ctx context.Context, value int32) error
WriteI64(ctx context.Context, value int64) error
WriteDouble(ctx context.Context, value float64) error
WriteString(ctx context.Context, value string) error
WriteBinary(ctx context.Context, value []byte) error
ReadMessageBegin(ctx context.Context) (name string, typeId TMessageType, seqid int32, err error)
ReadMessageEnd(ctx context.Context) error
ReadStructBegin(ctx context.Context) (name string, err error)
ReadStructEnd(ctx context.Context) error
ReadFieldBegin(ctx context.Context) (name string, typeId TType, id int16, err error)
ReadFieldEnd(ctx context.Context) error
ReadMapBegin(ctx context.Context) (keyType TType, valueType TType, size int, err error)
ReadMapEnd(ctx context.Context) error
ReadListBegin(ctx context.Context) (elemType TType, size int, err error)
ReadListEnd(ctx context.Context) error
ReadSetBegin(ctx context.Context) (elemType TType, size int, err error)
ReadSetEnd(ctx context.Context) error
ReadBool(ctx context.Context) (value bool, err error)
ReadByte(ctx context.Context) (value int8, err error)
ReadI16(ctx context.Context) (value int16, err error)
ReadI32(ctx context.Context) (value int32, err error)
ReadI64(ctx context.Context) (value int64, err error)
ReadDouble(ctx context.Context) (value float64, err error)
ReadString(ctx context.Context) (value string, err error)
ReadBinary(ctx context.Context) (value []byte, err error)
Skip(ctx context.Context, fieldType TType) (err error)
Flush(ctx context.Context) (err error)
Transport() TTransport
}
// The maximum recursive depth the skip() function will traverse
const DEFAULT_RECURSION_DEPTH = 64
// Skips over the next data element from the provided input TProtocol object.
func SkipDefaultDepth(ctx context.Context, prot TProtocol, typeId TType) (err error) {
return Skip(ctx, prot, typeId, DEFAULT_RECURSION_DEPTH)
}
// Skips over the next data element from the provided input TProtocol object.
func Skip(ctx context.Context, self TProtocol, fieldType TType, maxDepth int) (err error) {
if maxDepth <= 0 {
return NewTProtocolExceptionWithType(DEPTH_LIMIT, errors.New("Depth limit exceeded"))
}
switch fieldType {
case BOOL:
_, err = self.ReadBool(ctx)
return
case BYTE:
_, err = self.ReadByte(ctx)
return
case I16:
_, err = self.ReadI16(ctx)
return
case I32:
_, err = self.ReadI32(ctx)
return
case I64:
_, err = self.ReadI64(ctx)
return
case DOUBLE:
_, err = self.ReadDouble(ctx)
return
case STRING:
_, err = self.ReadString(ctx)
return
case STRUCT:
if _, err = self.ReadStructBegin(ctx); err != nil {
return err
}
for {
_, typeId, _, err := self.ReadFieldBegin(ctx)
if err != nil {
return err
}
if typeId == STOP {
break
}
err = Skip(ctx, self, typeId, maxDepth-1)
if err != nil {
return err
}
self.ReadFieldEnd(ctx)
}
return self.ReadStructEnd(ctx)
case MAP:
keyType, valueType, size, err := self.ReadMapBegin(ctx)
if err != nil {
return err
}
for i := 0; i < size; i++ {
err := Skip(ctx, self, keyType, maxDepth-1)
if err != nil {
return err
}
err = Skip(ctx, self, valueType, maxDepth-1)
if err != nil {
return err
}
}
return self.ReadMapEnd(ctx)
case SET:
elemType, size, err := self.ReadSetBegin(ctx)
if err != nil {
return err
}
for i := 0; i < size; i++ {
err := Skip(ctx, self, elemType, maxDepth-1)
if err != nil {
return err
}
}
return self.ReadSetEnd(ctx)
case LIST:
elemType, size, err := self.ReadListBegin(ctx)
if err != nil {
return err
}
for i := 0; i < size; i++ {
err := Skip(ctx, self, elemType, maxDepth-1)
if err != nil {
return err
}
}
return self.ReadListEnd(ctx)
default:
return NewTProtocolExceptionWithType(INVALID_DATA, fmt.Errorf("Unknown data type %d", fieldType))
}
return nil
}<|fim▁end|> | WriteFieldStop(ctx context.Context) error
WriteMapBegin(ctx context.Context, keyType TType, valueType TType, size int) error
WriteMapEnd(ctx context.Context) error
WriteListBegin(ctx context.Context, elemType TType, size int) error |
<|file_name|>instr_table.rs<|end_file_name|><|fim▁begin|>use crate::arch::cpu::Cpu;
use crate::arch::instrs::*;
use crate::utils::tls::Syncify;
use lazy_static::*;
use log::debug;
lazy_static! {
pub static ref INSTR_TABLE: Syncify<[Instr; 256]> = {
unsafe { Syncify::new ([
Instr::new(Box::new(others::brk), "brk::implied", 0 ), // 00
Instr::new(Box::new(ora::indirect_x), "ora::indirect_x", 2 ), // 01
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 02
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 03
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 04
Instr::new(Box::new(ora::zeropage), "ora::zeropage", 2 ), // 05
Instr::new(Box::new(asl::zeropage), "asl::zeropage", 2 ), // 06
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 07
Instr::new(Box::new(pushpop::php), "php::implied", 1 ), // 08
Instr::new(Box::new(ora::immediate), "ora::immediate", 2 ), // 09
Instr::new(Box::new(asl::accumulator), "asl::accumulator", 1 ), // 0a
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 0b
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 0c
Instr::new(Box::new(ora::absolute), "ora::absolute", 3 ), // 0d
Instr::new(Box::new(asl::absolute), "asl::absolute", 3 ), // 0e
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 0f
Instr::new(Box::new(branches::bpl), "bpl", 2 ), // 10
Instr::new(Box::new(ora::indirect_y), "ora::indirect_y", 2 ), // 11
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 12
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 13
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 14
Instr::new(Box::new(ora::zeropage_x), "ora::zeropage_x", 2 ), // 15
Instr::new(Box::new(asl::zeropage_x), "asl::zeropage_x", 2 ), // 16
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 17
Instr::new(Box::new(flags::clc), "clc::implied", 1 ), // 18
Instr::new(Box::new(ora::absolute_y), "ora::absolute_y", 3 ), // 19
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 1a
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 1b
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 1c
Instr::new(Box::new(ora::absolute_x), "ora::absolute_x", 3 ), // 1d
Instr::new(Box::new(asl::absolute_x), "asl::absolute_x", 3 ), // 1e
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 1f
Instr::new(Box::new(subroutines::jsr), "jsr::absolute", 0 ), // 20
Instr::new(Box::new(and::indirect_x), "and::indirect_x", 2 ), // 21
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 22
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 23
Instr::new(Box::new(bit::zeropage), "bit::zeropage", 2 ), // 24
Instr::new(Box::new(and::zeropage), "and::zeropage", 2 ), // 25
Instr::new(Box::new(rol::zeropage), "rol::zeropage", 2 ), // 26
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 27
Instr::new(Box::new(pushpop::plp), "plp::implied", 1 ), // 28
Instr::new(Box::new(and::immediate), "and::immediate", 2 ), // 29
Instr::new(Box::new(rol::accumulator), "rol::accumulator", 1 ), // 2a
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 2b
Instr::new(Box::new(bit::absolute), "bit::absolute", 3 ), // 2c
Instr::new(Box::new(and::absolute), "and::absolute", 3 ), // 2d
Instr::new(Box::new(rol::absolute), "rol::absolute", 3 ), // 2e
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 2f
Instr::new(Box::new(branches::bmi), "bmi", 0 ), // 30
Instr::new(Box::new(and::indirect_y), "and::indirect_y", 2 ), // 31
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 32
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 33
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 34
Instr::new(Box::new(and::zeropage_x), "and::zeropage_x", 2 ), // 35
Instr::new(Box::new(rol::zeropage_x), "rol::zeropage_x", 2 ), // 36
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 37
Instr::new(Box::new(flags::sec), "sec::implied", 1 ), // 38
Instr::new(Box::new(and::absolute_y), "and::absolute_y", 3 ), // 39
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 3a
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 3b
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 3c
Instr::new(Box::new(and::absolute_x), "and::absolute_x", 3 ), // 3d
Instr::new(Box::new(rol::absolute_x), "rol::absolute_x", 3 ), // 3e
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 3f
Instr::new(Box::new(subroutines::rti), "rti::absolute", 0 ), // 40
Instr::new(Box::new(eor::indirect_x), "eor::indirect_x", 2 ), // 41
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 42
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 43
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 44
Instr::new(Box::new(eor::zeropage), "eor::zeropage", 2 ), // 45
Instr::new(Box::new(lsr::zeropage), "lsr::zeropage", 2 ), // 46
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 47
Instr::new(Box::new(pushpop::pha), "pha::implied", 1 ), // 48
Instr::new(Box::new(eor::immediate), "eor::immediate", 2 ), // 49
Instr::new(Box::new(lsr::accumulator), "lsr::accumulator", 1 ), // 4a
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 4b
Instr::new(Box::new(jmp::absolute), "jmp::absolute", 0 ), // 4c
Instr::new(Box::new(eor::absolute), "eor::absolute", 3 ), // 4d
Instr::new(Box::new(lsr::absolute), "lsr::absolute", 3 ), // 4e
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 4f
Instr::new(Box::new(branches::bvc), "bvc", 2 ), // 50
Instr::new(Box::new(eor::indirect_y), "eor::indirect_y", 2 ), // 51
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 52
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 53
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 54
Instr::new(Box::new(eor::zeropage_x), "eor::zeropage_x", 2 ), // 55
Instr::new(Box::new(lsr::zeropage_x), "lsr::zeropage_x", 2 ), // 56
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 57
Instr::new(Box::new(flags::cli), "cli::implied", 1 ), // 58
Instr::new(Box::new(eor::absolute_y), "eor::absolute_y", 3 ), // 59
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 5a
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 5b
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 5c
Instr::new(Box::new(eor::absolute_x), "eor::absolute_x", 3 ), // 5d
Instr::new(Box::new(lsr::absolute_x), "lsr::absolute_x", 3 ), // 5e
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 5f
Instr::new(Box::new(subroutines::rts), "rts::absolute", 1 ), // 60
Instr::new(Box::new(adc::indirect_x), "adc::indirect_x", 2 ), // 61
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 62
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 63
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 64
Instr::new(Box::new(adc::zeropage), "adc::zeropage", 2 ), // 65
Instr::new(Box::new(ror::zeropage), "ror::zeropage", 2 ), // 66
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 67
Instr::new(Box::new(pushpop::pla), "pla::implied", 1 ), // 68
Instr::new(Box::new(adc::immediate), "adc::immediate", 2 ), // 69
Instr::new(Box::new(ror::accumulator), "ror::accumulator", 1 ), // 6a
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 6b
Instr::new(Box::new(jmp::indirect_absolute), "jmp::indirect_absolute", 0 ), // 6c
Instr::new(Box::new(adc::absolute), "adc::absolute", 3 ), // 6d
Instr::new(Box::new(ror::absolute), "ror::absolute", 3 ), // 6e
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 6f
Instr::new(Box::new(branches::bvs), "bvs", 0 ), // 70
Instr::new(Box::new(adc::indirect_y), "adc::indirect_y", 2 ), // 71
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 72
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 73
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 74
Instr::new(Box::new(adc::zeropage_x), "adc::zeropage_x", 2 ), // 75
Instr::new(Box::new(ror::zeropage_x), "ror::zeropage_x", 2 ), // 76
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 77
Instr::new(Box::new(flags::sei), "sei::implied", 1 ), // 78
Instr::new(Box::new(adc::absolute_y), "adc::absolute_y", 3 ), // 79
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 7a
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 7b
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 7c
Instr::new(Box::new(adc::absolute_x), "adc::absolute_x", 3 ), // 7d
Instr::new(Box::new(ror::absolute_x), "ror::absolute_x", 3 ), // 7e
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 7f
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 80
Instr::new(Box::new(sta::indirect_x), "sta::indirect_x", 2 ), // 81
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 82
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 83
Instr::new(Box::new(sty::zeropage), "sty::zeropage", 2 ), // 84
Instr::new(Box::new(sta::zeropage), "sta::zeropage", 2 ), // 85
Instr::new(Box::new(stx::zeropage), "stx::zeropage", 2 ), // 86
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 87
Instr::new(Box::new(dey::implied), "dey::implied", 1 ), // 88
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 89
Instr::new(Box::new(transfers::txa), "txa::implied", 1 ), // 8a
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 8b
Instr::new(Box::new(sty::absolute), "sty::absolute", 3 ), // 8c
Instr::new(Box::new(sta::absolute), "sta::absolute", 3 ), // 8d
Instr::new(Box::new(stx::absolute), "stx::absolute", 3 ), // 8e
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 8f
Instr::new(Box::new(branches::bcc), "bcc", 0 ), // 90
Instr::new(Box::new(sta::indirect_y), "sta::indirect_y", 2 ), // 91
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 92
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 93
Instr::new(Box::new(sty::zeropage_x), "sty::zeropage_x", 2 ), // 94
Instr::new(Box::new(sta::zeropage_x), "sta::zeropage_x", 2 ), // 95
Instr::new(Box::new(stx::zeropage_y), "stx::zeropage_y", 2 ), // 96
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 97
Instr::new(Box::new(transfers::tya), "tya::implied", 1 ), // 98
Instr::new(Box::new(sta::absolute_y), "sta::absolute_y", 3 ), // 99
Instr::new(Box::new(transfers::txs), "txs::implied", 1 ), // 9a
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 9b
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 9c
Instr::new(Box::new(sta::absolute_x), "sta::absolute_x", 3 ), // 9d
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 9e
Instr::new(Box::new(error_fn), "error_fn", 255 ), // 9f
Instr::new(Box::new(ldy::immediate), "ldy::immediate", 2 ), // a0
Instr::new(Box::new(lda::indirect_x), "lda::indirect_x", 2 ), // a1
Instr::new(Box::new(ldx::immediate), "ldx::immediate", 2 ), // a2
Instr::new(Box::new(error_fn), "error_fn", 255 ), // a3
Instr::new(Box::new(ldy::zeropage), "ldy::zeropage", 2 ), // a4
Instr::new(Box::new(lda::zeropage), "lda::zeropage", 2 ), // a5
Instr::new(Box::new(ldx::zeropage), "ldx::zeropage", 2 ), // a6
Instr::new(Box::new(error_fn), "error_fn", 255 ), // a7
Instr::new(Box::new(transfers::tay), "tay::implied", 1 ), // a8
Instr::new(Box::new(lda::immediate), "lda::immediate", 2 ), // a9
Instr::new(Box::new(transfers::tax), "tax::implied", 1 ), // aa
Instr::new(Box::new(error_fn), "error_fn", 255 ), // ab
Instr::new(Box::new(ldy::absolute), "ldy::absolute", 3 ), // ac
Instr::new(Box::new(lda::absolute), "lda::absolute", 3 ), // ad
Instr::new(Box::new(ldx::absolute), "ldx::absolute", 3 ), // ae
Instr::new(Box::new(error_fn), "error_fn", 255 ), // af
Instr::new(Box::new(branches::bcs), "bcs", 2 ), // b0
Instr::new(Box::new(lda::indirect_y), "lda::indirect_y", 2 ), // b1
Instr::new(Box::new(error_fn), "error_fn", 255 ), // b2
Instr::new(Box::new(error_fn), "error_fn", 255 ), // b3
Instr::new(Box::new(ldy::zeropage_x), "ldy::zeropage_x", 2 ), // b4
Instr::new(Box::new(lda::zeropage_x), "lda::zeropage_x", 2 ), // b5
Instr::new(Box::new(ldx::zeropage_y), "ldx::zeropage_y", 2 ), // b6
Instr::new(Box::new(error_fn), "error_fn", 255 ), // b7
Instr::new(Box::new(flags::clv), "clv::implied", 1 ), // b8
Instr::new(Box::new(lda::absolute_y), "lda::absolute_y", 3 ), // b9
Instr::new(Box::new(transfers::tsx), "tsx::implied", 1 ), // ba
Instr::new(Box::new(error_fn), "error_fn", 255 ), // bb
Instr::new(Box::new(ldy::absolute_x), "ldy::absolute_x", 3 ), // bc
Instr::new(Box::new(lda::absolute_x), "lda::absolute_x", 3 ), // bd
Instr::new(Box::new(ldx::absolute_y), "ldx::absolute_y", 3 ), // be
Instr::new(Box::new(error_fn), "error_fn", 255 ), // bf
Instr::new(Box::new(cpy::immediate), "cpy::immediate", 2 ), // c0
Instr::new(Box::new(cmp::indirect_x), "cmp::indirect_x", 2 ), // c1
Instr::new(Box::new(error_fn), "error_fn", 255 ), // c2
Instr::new(Box::new(error_fn), "error_fn", 255 ), // c3
Instr::new(Box::new(cpy::zeropage), "cpy::zeropage", 2 ), // c4
Instr::new(Box::new(cmp::zeropage), "cmp::zeropage", 2 ), // c5
Instr::new(Box::new(dec::zeropage), "dec::zeropage", 2 ), // c6
Instr::new(Box::new(error_fn), "error_fn", 255 ), // c7
Instr::new(Box::new(iny::implied), "iny::implied", 1 ), // c8
Instr::new(Box::new(cmp::immediate), "cmp::immediate", 2 ), // c9
Instr::new(Box::new(dex::implied), "dex::implied", 1 ), // ca
Instr::new(Box::new(error_fn), "error_fn", 255 ), // cb
Instr::new(Box::new(cpy::absolute), "cpy::absolute", 3 ), // cc
Instr::new(Box::new(cmp::absolute), "cmp::absolute", 3 ), // cd
Instr::new(Box::new(dec::absolute), "dec::absolute", 3 ), // ce
Instr::new(Box::new(error_fn), "error_fn", 255 ), // cf
Instr::new(Box::new(branches::bne), "bne", 2 ), // d0
Instr::new(Box::new(cmp::indirect_y), "cmp::indirect_y", 2 ), // d1
Instr::new(Box::new(error_fn), "error_fn", 255 ), // d2
Instr::new(Box::new(error_fn), "error_fn", 255 ), // d3
Instr::new(Box::new(error_fn), "error_fn", 255 ), // d4
Instr::new(Box::new(cmp::zeropage_x), "cmp::zeropage_x", 2 ), // d5
Instr::new(Box::new(dec::zeropage_x), "dec::zeropage_x", 2 ), // d6
Instr::new(Box::new(error_fn), "error_fn", 255 ), // d7
Instr::new(Box::new(flags::cld), "cld::implied", 1 ), // d8<|fim▁hole|> Instr::new(Box::new(cmp::absolute_x), "cmp::absolute_x", 3 ), // dd
Instr::new(Box::new(dec::absolute_x), "dec::absolute_x", 3 ), // de
Instr::new(Box::new(error_fn), "error_fn", 255 ), // df
Instr::new(Box::new(cpx::immediate), "cpx::immediate", 2 ), // e0
Instr::new(Box::new(sbc::indirect_x), "sbc::indirect_x", 2 ), // e1
Instr::new(Box::new(error_fn), "error_fn", 255 ), // e2
Instr::new(Box::new(error_fn), "error_fn", 255 ), // e3
Instr::new(Box::new(cpx::zeropage), "cpx::zeropage", 2 ), // e4
Instr::new(Box::new(sbc::zeropage), "sbc::zeropage", 2 ), // e5
Instr::new(Box::new(inc::zeropage), "inc::zeropage", 2 ), // e6
Instr::new(Box::new(error_fn), "error_fn", 255 ), // e7
Instr::new(Box::new(inx::implied), "inx::implied", 1 ), // e8
Instr::new(Box::new(sbc::immediate), "sbc::immediate", 2 ), // e9
Instr::new(Box::new(others::nop), "nop::implied", 1 ), // ea
Instr::new(Box::new(error_fn), "error_fn", 255 ), // eb
Instr::new(Box::new(cpx::absolute), "cpx::absolute", 3 ), // ec
Instr::new(Box::new(sbc::absolute), "sbc::absolute", 3 ), // ed
Instr::new(Box::new(inc::absolute), "inc::absolute", 3 ), // ee
Instr::new(Box::new(error_fn), "error_fn", 255 ), // ef
Instr::new(Box::new(branches::beq), "beq", 0 ), // f0
Instr::new(Box::new(sbc::indirect_y), "sbc::indirect_y", 2 ), // f1
Instr::new(Box::new(error_fn), "error_fn", 255 ), // f2
Instr::new(Box::new(error_fn), "error_fn", 255 ), // f3
Instr::new(Box::new(error_fn), "error_fn", 255 ), // f4
Instr::new(Box::new(sbc::zeropage_x), "sbc::zeropage_x", 2 ), // f5
Instr::new(Box::new(inc::zeropage_x), "inc::zeropage_x", 2 ), // f6
Instr::new(Box::new(error_fn), "error_fn", 255 ), // f7
Instr::new(Box::new(flags::sed), "sed::implied", 1 ), // f8
Instr::new(Box::new(sbc::absolute_y), "sbc::absolute_y", 3 ), // f9
Instr::new(Box::new(error_fn), "error_fn", 255 ), // fa
Instr::new(Box::new(error_fn), "error_fn", 255 ), // fb
Instr::new(Box::new(error_fn), "error_fn", 255 ), // fc
Instr::new(Box::new(sbc::absolute_x), "sbc::absolute_x", 3 ), // fd
Instr::new(Box::new(inc::absolute_x), "inc::absolute_x", 3 ), // fe
Instr::new(Box::new(error_fn), "error_fn", 255) /* ff */,
])}};
}
pub struct Instr {
pub fun: Box<dyn Fn(&mut Cpu) -> (u8, u8)>,
pub fname: String,
pub ilen: usize,
}
impl Instr {
fn new<S: Into<String>>(fun: Box<dyn Fn(&mut Cpu) -> (u8, u8)>, fname: S, ilen: usize) -> Self {
Instr {
fun,
fname: fname.into(),
ilen,
}
}
}
pub fn error_fn(_cpu: &mut Cpu) -> (u8, u8) {
// panic!("Invalid opcode!");
(0xFF, 0xFF)
}
fn format_hex(data: &[u8]) -> String {
let hexes: Vec<_> = data.iter().map(|v| format!("{:02X}", v)).collect();
hexes.join("")
}
fn get_fname_for_print(fname: &str, arg: &str) -> String {
let pieces: Vec<&str> = fname.split("::").collect();
let instr_name = pieces.get(0).unwrap();
let address = pieces.get(1);
match address {
Some(&"implied") => instr_name.to_string(),
Some(&"zeropage_x") => format!("{} {}+x", instr_name, arg),
Some(&"zeropage") => format!("{} {}", instr_name, arg),
Some(&"immediate") => format!("{} #{}", instr_name, arg),
Some(&"absolute_x") => format!("{} [{}+x]", instr_name, arg),
Some(&"absolute_y") => format!("{} [{}+y]", instr_name, arg),
Some(&"absolute") => format!("{} [{}]", instr_name, arg),
Some(&"indirect_x") => format!("{} x({})", instr_name, arg),
Some(&"indirect_y") => format!("{} y({})", instr_name, arg),
_ => instr_name.to_string(),
}
}
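// Disassembles the opcode at `current` in `prg`, returning the printable text and the offset of the next instruction.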
pub fn disassemble_instr(prg: &[u8], current: usize) -> (String, usize) {
let opcode: u8 = prg[current];
let Instr {
ref fname,
mut ilen,
..
} = INSTR_TABLE[opcode as usize];
let is_error = ilen == 0xFF;
if ilen == 0 || ilen == 0xFF {
// branches or error
ilen = 1;
}
let a = if is_error {
format!("{} ({:02X})", fname, opcode)
} else {
let codes = &format_hex(&prg[current + 1..current + ilen]);
debug!(
"{:02X}> Found function {}, opcode: {:02X}, {}, bytes: {:?}",
current + 16,
fname,
opcode,
ilen,
codes
);
get_fname_for_print(&fname, codes)
};
(a.to_owned(), current + ilen)
}
// decode functions
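// Each decode_* macro resolves one 6502 addressing mode, returning (operand or effective address, instruction length in bytes).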
#[macro_export]
macro_rules! decode_absolute {
( $cpu:expr ) => {{
let low = $cpu.memory.fetch($cpu.registers.pc + 1);
let high = $cpu.memory.fetch($cpu.registers.pc + 2);
(to_u16(low, high), 3)
}};
}
#[macro_export]
macro_rules! decode_immediate {
( $cpu:expr ) => {{
($cpu.memory.fetch($cpu.registers.pc + 1), 2)
}};
}
#[macro_export]
macro_rules! decode_zeropage {
( $cpu:expr ) => {{
($cpu.memory.fetch($cpu.registers.pc + 1), 2)
}};
}
#[macro_export]
macro_rules! decode_absolute_indexed {
( $cpu:expr, $offset:expr ) => {{
let low = $cpu.memory.fetch($cpu.registers.pc + 1);
let high = $cpu.memory.fetch($cpu.registers.pc + 2);
(to_u16(low, high).wrapping_add($offset as u16), 3)
}};
}
#[macro_export]
macro_rules! decode_zeropage_indexed {
( $cpu:expr, $offset:expr ) => {{
let addr = $cpu.memory.fetch($cpu.registers.pc + 1);
(addr.wrapping_add($offset), 2)
}};
}
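// Indexed indirect "(zp,X)": the zero-page operand is offset by X before the 16-bit pointer is read (with zero-page wraparound).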
#[macro_export]
macro_rules! decode_indexed_indirect {
( $cpu:expr ) => {{
let op = ($cpu
.memory
.fetch($cpu.registers.pc + 1)
.wrapping_add($cpu.registers.x_reg)) as u16
& 0xFF;
let low = $cpu.memory.fetch(op);
let high = $cpu.memory.fetch((op + 1) & 0xFF);
(to_u16(low, high), 2)
}};
}
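// Indirect indexed "(zp),Y": the 16-bit pointer is read from the zero-page operand, then Y is added to form the effective address.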
#[macro_export]
macro_rules! decode_indirect_indexed {
( $cpu:expr ) => {{
let op = $cpu.memory.fetch($cpu.registers.pc + 1) as u16;
let low = $cpu.memory.fetch(op);
let high = $cpu.memory.fetch((op + 1) & 0xFF);
(
to_u16(low, high).wrapping_add($cpu.registers.y_reg as u16),
2,
)
}};
}<|fim▁end|> | Instr::new(Box::new(cmp::absolute_y), "cmp::absolute_y", 3 ), // d9
Instr::new(Box::new(error_fn), "error_fn", 255 ), // da
Instr::new(Box::new(error_fn), "error_fn", 255 ), // db
Instr::new(Box::new(error_fn), "error_fn", 255 ), // dc |
<|file_name|>signals.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
<|fim▁hole|>from blinker import Namespace
namespace = Namespace()
#: Triggered when a dataset is published
on_dataset_published = namespace.signal('on-dataset-published')<|fim▁end|> | |
<|file_name|>http_client.py<|end_file_name|><|fim▁begin|>import os
import sys
import textwrap
import warnings
from stripe import error, util
# - Requests is the preferred HTTP library
# - Google App Engine has urlfetch
# - Use Pycurl if it's there (at least it verifies SSL certs)
# - Fall back to urllib2 with a warning if needed
try:
import urllib2
except ImportError:
pass
try:
import pycurl
except ImportError:
pycurl = None
try:
import requests
except ImportError:
requests = None
else:
try:
# Require version 0.8.8, but don't want to depend on distutils
version = requests.__version__
major, minor, patch = [int(i) for i in version.split('.')]
except Exception:
# Probably some new-fangled version, so it should support verify
pass
else:
if (major, minor, patch) < (0, 8, 8):
sys.stderr.write(
'Warning: the Stripe library requires that your Python '
'"requests" library be newer than version 0.8.8, but your '
'"requests" library is version %s. Stripe will fall back to '
'an alternate HTTP library so everything should work. We '
'recommend upgrading your "requests" library. If you have any '
'questions, please contact [email protected]. (HINT: running '
'"pip install -U requests" should upgrade your requests '
'library to the latest version.)' % (version,))
requests = None
try:
from google.appengine.api import urlfetch
except ImportError:
urlfetch = None
def new_default_http_client(*args, **kwargs):
if urlfetch:
impl = UrlFetchClient
elif requests:
impl = RequestsClient
elif pycurl:
impl = PycurlClient
else:
impl = Urllib2Client
warnings.warn(
"Warning: the Stripe library is falling back to urllib2/urllib "
"because neither requests nor pycurl are installed. "
"urllib2's SSL implementation doesn't verify server "
"certificates. For improved security, we suggest installing "
"requests.")
return impl(*args, **kwargs)
class HTTPClient(object):
def __init__(self, verify_ssl_certs=True):
self._verify_ssl_certs = verify_ssl_certs
def request(self, method, url, headers, post_data=None):
raise NotImplementedError(
'HTTPClient subclasses must implement `request`')
class RequestsClient(HTTPClient):
name = 'requests'
def request(self, method, url, headers, post_data=None):
kwargs = {}
if self._verify_ssl_certs:
kwargs['verify'] = os.path.join(
os.path.dirname(__file__), 'data/ca-certificates.crt')
else:
kwargs['verify'] = False
try:
try:
result = requests.request(method,
url,
headers=headers,
data=post_data,
timeout=80,
**kwargs)
except TypeError, e:
raise TypeError(
'Warning: It looks like your installed version of the '
'"requests" library is not compatible with Stripe\'s '
'usage thereof. (HINT: The most likely cause is that '
'your "requests" library is out of date. You can fix '
'that by running "pip install -U requests".) The '
'underlying error was: %s' % (e,))
# This causes the content to actually be read, which could cause
# e.g. a socket timeout. TODO: The other fetch methods probably
# are susceptible to the same and should be updated.
content = result.content
status_code = result.status_code
except Exception, e:
# Would catch just requests.exceptions.RequestException, but can
# also raise ValueError, RuntimeError, etc.
self._handle_request_error(e)
return content, status_code
def _handle_request_error(self, e):
if isinstance(e, requests.exceptions.RequestException):
msg = ("Unexpected error communicating with Stripe. "
"If this problem persists, let us know at "
"[email protected].")
err = "%s: %s" % (type(e).__name__, str(e))
else:
msg = ("Unexpected error communicating with Stripe. "
"It looks like there's probably a configuration "
"issue locally. If this problem persists, let us "
"know at [email protected].")
err = "A %s was raised" % (type(e).__name__,)
if str(e):
err += " with error message %s" % (str(e),)
else:
err += " with no error message"
msg = textwrap.fill(msg) + "\n\n(Network error: %s)" % (err,)
raise error.APIConnectionError(msg)
class UrlFetchClient(HTTPClient):
name = 'urlfetch'
def request(self, method, url, headers, post_data=None):<|fim▁hole|> headers=headers,
# Google App Engine doesn't let us specify our own cert bundle.
# However, that's ok because the CA bundle they use recognizes
# api.stripe.com.
validate_certificate=self._verify_ssl_certs,
# GAE requests time out after 60 seconds, so make sure we leave
# some time for the application to handle a slow Stripe
deadline=55,
payload=post_data
)
except urlfetch.Error, e:
self._handle_request_error(e, url)
return result.content, result.status_code
def _handle_request_error(self, e, url):
if isinstance(e, urlfetch.InvalidURLError):
msg = ("The Stripe library attempted to fetch an "
"invalid URL (%r). This is likely due to a bug "
"in the Stripe Python bindings. Please let us know "
"at [email protected]." % (url,))
elif isinstance(e, urlfetch.DownloadError):
msg = "There was a problem retrieving data from Stripe."
elif isinstance(e, urlfetch.ResponseTooLargeError):
msg = ("There was a problem receiving all of your data from "
"Stripe. This is likely due to a bug in Stripe. "
"Please let us know at [email protected].")
else:
msg = ("Unexpected error communicating with Stripe. If this "
"problem persists, let us know at [email protected].")
msg = textwrap.fill(msg) + "\n\n(Network error: " + str(e) + ")"
raise error.APIConnectionError(msg)
class PycurlClient(HTTPClient):
name = 'pycurl'
def request(self, method, url, headers, post_data=None):
s = util.StringIO.StringIO()
curl = pycurl.Curl()
if method == 'get':
curl.setopt(pycurl.HTTPGET, 1)
elif method == 'post':
curl.setopt(pycurl.POST, 1)
curl.setopt(pycurl.POSTFIELDS, post_data)
else:
curl.setopt(pycurl.CUSTOMREQUEST, method.upper())
# pycurl doesn't like unicode URLs
curl.setopt(pycurl.URL, util.utf8(url))
curl.setopt(pycurl.WRITEFUNCTION, s.write)
curl.setopt(pycurl.NOSIGNAL, 1)
curl.setopt(pycurl.CONNECTTIMEOUT, 30)
curl.setopt(pycurl.TIMEOUT, 80)
curl.setopt(pycurl.HTTPHEADER, ['%s: %s' % (k, v)
for k, v in headers.iteritems()])
if self._verify_ssl_certs:
curl.setopt(pycurl.CAINFO, os.path.join(
os.path.dirname(__file__), 'data/ca-certificates.crt'))
else:
curl.setopt(pycurl.SSL_VERIFYHOST, False)
try:
curl.perform()
except pycurl.error, e:
self._handle_request_error(e)
rbody = s.getvalue()
rcode = curl.getinfo(pycurl.RESPONSE_CODE)
return rbody, rcode
def _handle_request_error(self, e):
if e[0] in [pycurl.E_COULDNT_CONNECT,
pycurl.E_COULDNT_RESOLVE_HOST,
pycurl.E_OPERATION_TIMEOUTED]:
msg = ("Could not connect to Stripe. Please check your "
"internet connection and try again. If this problem "
"persists, you should check Stripe's service status at "
"https://twitter.com/stripestatus, or let us know at "
"[email protected].")
elif (e[0] in [pycurl.E_SSL_CACERT,
pycurl.E_SSL_PEER_CERTIFICATE]):
msg = ("Could not verify Stripe's SSL certificate. Please make "
"sure that your network is not intercepting certificates. "
"If this problem persists, let us know at "
"[email protected].")
else:
msg = ("Unexpected error communicating with Stripe. If this "
"problem persists, let us know at [email protected].")
msg = textwrap.fill(msg) + "\n\n(Network error: " + e[1] + ")"
raise error.APIConnectionError(msg)
class Urllib2Client(HTTPClient):
if sys.version_info >= (3, 0):
name = 'urllib.request'
else:
name = 'urllib2'
def request(self, method, url, headers, post_data=None):
if sys.version_info >= (3, 0) and isinstance(post_data, basestring):
post_data = post_data.encode('utf-8')
req = urllib2.Request(url, post_data, headers)
if method not in ('get', 'post'):
req.get_method = lambda: method.upper()
try:
response = urllib2.urlopen(req)
rbody = response.read()
rcode = response.code
except urllib2.HTTPError, e:
rcode = e.code
rbody = e.read()
except (urllib2.URLError, ValueError), e:
self._handle_request_error(e)
return rbody, rcode
def _handle_request_error(self, e):
msg = ("Unexpected error communicating with Stripe. "
"If this problem persists, let us know at [email protected].")
msg = textwrap.fill(msg) + "\n\n(Network error: " + str(e) + ")"
raise error.APIConnectionError(msg)<|fim▁end|> | try:
result = urlfetch.fetch(
url=url,
method=method, |
<|file_name|>graft.js<|end_file_name|><|fim▁begin|>'use strict';
<|fim▁hole|>var Transform = require('readable-stream').Transform;
var inherits = require('inherits');
var deepMatch = require('./lib/deepMatch');
var wrap = require('./lib/wrapMessage');
var jschan = require('jschan');
function noop() {}
function Graft() {
if (!(this instanceof Graft)) {
return new Graft();
}
Transform.call(this, { objectMode: true, highWaterMark: 16 });
var that = this;
function readFirst() {
/*jshint validthis:true */
this.removeListener('readable', readFirst);
that._transform(wrap(this.read(), this), null, noop);
}
this._session = jschan.memorySession();
this._session.on('channel', function(channel) {
channel.on('readable', readFirst);
});
this._nextChannel = this._session.WriteChannel();
this.on('pipe', function(source) {
source.on('ready', that.emit.bind(that, 'ready'));
this.on('end', function() {
source.end();
});
});
this._patterns = [];
}
inherits(Graft, Transform);
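// Route incoming messages: plain objects start a new jschan channel; channel messages are matched against registered branch patterns or pushed downstream.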
Graft.prototype._transform = function flowing(obj, enc, done) {
if (!obj._session && !obj._channel) {
// it quacks like a duck, so it's a duck - s/duck/request/g
var channel = this._nextChannel;
this._nextChannel = this._session.WriteChannel();
channel.write(obj);
return done();
}
var i;
for (i = 0; i < this._patterns.length; i++) {
if (this._patterns[i].pattern(obj)) {
this._patterns[i].stream.write(obj, done);
return;
}
}
this.push(obj);
done();
};
Graft.prototype.branch = function(pattern, stream) {
if (!pattern) {
throw new Error('missing pattern');
}
if (!stream) {
throw new Error('missing destination');
}
this._patterns.push({
pattern: pattern,
stream: stream
});
return this;
};
Graft.prototype.where = function(pattern, stream) {
return this.branch(function(req) {
return deepMatch(pattern, req);
}, stream);
};
Graft.prototype.close = function(cb) {
function complete() {
/*jshint validthis:true */
this._session.close(function(err) {
if (err) {
return cb(err);
}
return cb();
});
}
if (this._readableState.endEmitted) {
complete.call(this);
} else {
this.on('end', complete);
}
if (!this._readableState.flowing) {
this.resume();
}
this.end();
};
Graft.prototype.ReadChannel = function() {
return this._nextChannel.ReadChannel();
};
Graft.prototype.WriteChannel = function() {
return this._nextChannel.WriteChannel();
};
module.exports = Graft;<|fim▁end|> | |
<|file_name|>JythonAbacus.py<|end_file_name|><|fim▁begin|>#
# Copyright 2006 The Apache Software Foundation<|fim▁hole|>#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from org.apache.hadoop.fs import Path
from org.apache.hadoop.io import *
from org.apache.hadoop.mapred import *
from org.apache.hadoop.abacus import *
from java.util import *;
import sys
class AbacusMapper(ValueAggregatorMapper):
def map(self, key, value, output, reporter):
ValueAggregatorMapper.map(self, key, value, output, reporter);
class AbacusReducer(ValueAggregatorReducer):
def reduce(self, key, values, output, reporter):
ValueAggregatorReducer.reduce(self, key, values, output, reporter);
class AbacusCombiner(ValueAggregatorCombiner):
def reduce(self, key, values, output, reporter):
ValueAggregatorCombiner.reduce(self, key, values, output, reporter);
def printUsage(code):
print "Abacus <input> <output> <numOfReducers> <inputformat> <specfile>"
sys.exit(code)
def main(args):
if len(args) < 6:
printUsage(1);
inDir = args[1];
outDir = args[2];
numOfReducers = int(args[3]);
theInputFormat = args[4];
specFile = args[5];
print "numOfReducers: ", numOfReducers, "theInputFormat: ", theInputFormat, "specFile: ", specFile
conf = JobConf(AbacusMapper);
conf.setJobName("recordcount");
conf.addDefaultResource(Path(specFile));
if theInputFormat=="textinputformat":
conf.setInputFormat(TextInputFormat);
else:
conf.setInputFormat(SequenceFileInputFormat);
conf.setOutputFormat(TextOutputFormat);
conf.setMapOutputKeyClass(Text);
conf.setMapOutputValueClass(Text);
conf.setOutputKeyClass(Text);
conf.setOutputValueClass(Text);
conf.setNumMapTasks(1);
conf.setNumReduceTasks(numOfReducers);
conf.setMapperClass(AbacusMapper);
conf.setCombinerClass(AbacusCombiner);
conf.setReducerClass(AbacusReducer);
conf.setInputPath(Path(args[1]))
conf.setOutputPath(Path(args[2]))
JobClient.runJob(conf);
if __name__ == "__main__":
main(sys.argv)<|fim▁end|> | |
<|file_name|>redmine_backend.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from redmine import Redmine
from feedbacks import settings
from base import IBackend
<|fim▁hole|>class RedmineBackend(IBackend):
def __init__(self):
self.redmine = Redmine(settings.DJFEEDBACK_REDMINE_URL,
key=settings.DJFEEDBACK_REDMINE_KEY)
self.project_id = settings.DJFEEDBACK_REDMINE_PROJECT_ID
def post(self, message):
tracker = message.ftype.ftype
if tracker is None:
tracker = 'bug'
email_field_id = settings.DJFEEDBACK_REDMINE_EMAIL_FIELD_ID
custom_fields = [
{
'id': email_field_id,
'value': message.email
}
]
self.redmine.issue.create(
project_id=self.project_id,
tracker_id=settings.DJFEEDBACK_REDMINE_TRACKERS[tracker],
subject=message.subj,
description=message.text,
assigned_to_id=settings.DJFEEDBACK_REDMINE_ASSIGN_TO_ID,
custom_fields=custom_fields
)<|fim▁end|> | |
<|file_name|>step_defs.js<|end_file_name|><|fim▁begin|>/* eslint-disable no-undef */
const cukeBtnSubmit = '//button[@data-cuke="save-item"]';
const cukeInpSize = '//input[@data-cuke="size"]';
const cukeInpTitle = '//input[@data-cuke="title"]';
const cukeInpContent = '//textarea[@data-cuke="content"]';
const cukeSize = '//x-cuke[@id="size"]';
const cukeTitle = '//x-cuke[@id="title"]';
const cukeContent = '//x-cuke[@id="content"]';
/*
const cukeHrefEdit = '//a[@data-cuke="edit-item"]';
const cukeHrefDelete = '//a[@data-cuke="delete-item"]';
*/
const cukeInvalidSize = '//span[@class="help-block error-block"]';
let size = '';
let title = '';
let content = '';
module.exports = function () {
// Scenario: Create a new widget
// ------------------------------------------------------------------------
this.Given(/^I have opened the 'add widgets' page : "([^"]*)"$/, function (_url) {
browser.setViewportSize({ width: 1024, height: 480 });
browser.timeouts('implicit', 60000);
browser.timeouts('page load', 60000);
browser.url(_url);
server.call('_widgets.wipe');
});
this.When(/^I create a "([^"]*)" millimetre "([^"]*)" item with text "([^"]*)",$/,
function (_size, _title, _content) {
size = _size;
title = _title;
content = _content;
browser.waitForEnabled( cukeBtnSubmit );
browser.setValue(cukeInpTitle, title);
browser.setValue(cukeInpSize, size);
browser.setValue(cukeInpContent, content);
browser.click(cukeBtnSubmit);
});
this.Then(/^I see a new record with the same title, size and contents\.$/, function () {
expect(browser.getText(cukeSize)).toEqual(size + ' millimetres.');
expect(browser.getText(cukeTitle)).toEqual(title);
expect(browser.getText(cukeContent)).toEqual(content);
});
// =======================================================================
// Scenario: Verify field validation
// ------------------------------------------------------------------------
this.Given(/^I have opened the widgets list page : "([^"]*)"$/, function (_url) {
browser.setViewportSize({ width: 1024, height: 480 });
browser.timeoutsImplicitWait(1000);
browser.url(_url);
});
/*
let link = '';
this.Given(/^I choose to edit the "([^"]*)" item,$/, function (_widget) {
link = '//a[@data-cuke="' + _widget + '"]';
browser.waitForExist( link );
browser.click(link);
browser.waitForEnabled( cukeHrefEdit );
browser.click(cukeHrefEdit);
});
*/
this.When(/^I set 'Size' to "([^"]*)"$/, function (_size) {
browser.setValue(cukeInpSize, _size);
});
this.Then(/^I see the size validation hint "([^"]*)"\.$/, function (_message) {
expect(browser.getText(cukeInvalidSize)).toEqual(_message);<|fim▁hole|> });
// =======================================================================
// Scenario: Fail to delete widget
// ------------------------------------------------------------------------
let widget = '';
this.Given(/^I choose to view the "([^"]*)" item,$/, function (_widget) {
widget = _widget;
const cukeHrefWidget = `//a[@data-cuke="${widget}"]`;
browser.waitForEnabled( cukeHrefWidget );
browser.click( cukeHrefWidget );
});
/*
let href = null;
this.When(/^I decide to delete the item,$/, function () {
href = cukeHrefDelete;
browser.waitForExist( href );
});
this.Then(/^I see it is disabled\.$/, function () {
expect(browser.isEnabled( href )).toBe(true);
});
*/
// =======================================================================
// Scenario: Unable to update widget
// ------------------------------------------------------------------------
/*
this.When(/^I attempt to edit the item,$/, function () {
href = cukeHrefEdit;
browser.waitForExist( href );
});
*/
// =======================================================================
// Scenario: Prohibited from add and from update
// ------------------------------------------------------------------------
this.Given(/^I have opened the widgets editor page : "([^"]*)"$/, function (_url) {
browser.setViewportSize({ width: 1024, height: 480 });
browser.timeouts('implicit', 60000);
browser.timeouts('page load', 60000);
browser.url(_url);
});
/*
this.Then(/^I see the warning "([^"]*)"$/, function (_warning) {
expect(_warning).toEqual(browser.getText(cukeWarning));
});
*/
// =======================================================================
// Scenario: Hide widget
// ------------------------------------------------------------------------
/*
this.Given(/^I have elected to "([^"]*)" the "([^"]*)" item\.$/, function (_cmd, _widget) {
link = '//a[@data-cuke="' + _widget + '"]';
browser.waitForEnabled( link );
browser.click(link);
let cukeHrefCmd = '//a[@data-cuke="' + _cmd + '-widget"]';
browser.waitForEnabled( cukeHrefCmd );
browser.click( cukeHrefCmd );
});
*/
/*
this.Then(/^I no longer see that widget record\.$/, function () {
browser.waitForEnabled( cukeWidgetsList );
let item = browser.elements(link);
expect(item.value.length).toEqual(0);
});
*/
};<|fim▁end|> | |
<|file_name|>click_report.test.js<|end_file_name|><|fim▁begin|>import * as chai from 'chai';
const expect = chai.expect;
const chaiAsPromised = require('chai-as-promised');
import * as sinon from 'sinon';
import * as sinonAsPromised from 'sinon-as-promised';
import { ClickReport } from '../src/models/click_report';
chai.use(chaiAsPromised);
describe('ClickReport', () => {
const tableName = 'ClickReports-table';
const campaignId = 'thatCampaignId';
let tNameStub;
const clickReportHashKey = 'campaignId';
const clickReportRangeKey = 'timestamp';
before(() => {
sinon.stub(ClickReport, '_client').resolves(true);
tNameStub = sinon.stub(ClickReport, 'tableName', { get: () => tableName});
});
describe('#get', () => {
it('calls the DynamoDB get method with correct params', (done) => {
ClickReport.get(campaignId, clickReportRangeKey).then(() => {
const args = ClickReport._client.lastCall.args;
expect(args[0]).to.equal('get');
expect(args[1]).to.have.deep.property(`Key.${clickReportHashKey}`, campaignId);
expect(args[1]).to.have.deep.property(`Key.${clickReportRangeKey}`, clickReportRangeKey);<|fim▁hole|> done();
});
});
});
describe('#hashKey', () => {
it('returns the hash key name', () => {
expect(ClickReport.hashKey).to.equal(clickReportHashKey);
});
});
describe('#rangeKey', () => {
it('returns the range key name', () => {
expect(ClickReport.rangeKey).to.equal(clickReportRangeKey);
});
});
after(() => {
ClickReport._client.restore();
tNameStub.restore();
});
});<|fim▁end|> | expect(args[1]).to.have.property('TableName', tableName); |
<|file_name|>UnitAuraProcHandler.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2005-2011 MaNGOS <http://getmangos.com/>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "Common.h"
#include "Log.h"
#include "ObjectMgr.h"
#include "SpellMgr.h"
#include "Player.h"
#include "Unit.h"
#include "Spell.h"
#include "SpellAuras.h"
#include "Totem.h"
#include "Creature.h"
#include "Formulas.h"
#include "CreatureAI.h"
#include "Util.h"
pAuraProcHandler AuraProcHandler[TOTAL_AURAS]=
{
&Unit::HandleNULLProc, // 0 SPELL_AURA_NONE
&Unit::HandleNULLProc, // 1 SPELL_AURA_BIND_SIGHT
&Unit::HandleNULLProc, // 2 SPELL_AURA_MOD_POSSESS
&Unit::HandleNULLProc, // 3 SPELL_AURA_PERIODIC_DAMAGE
&Unit::HandleDummyAuraProc, // 4 SPELL_AURA_DUMMY
&Unit::HandleRemoveByDamageProc, // 5 SPELL_AURA_MOD_CONFUSE
&Unit::HandleNULLProc, // 6 SPELL_AURA_MOD_CHARM
&Unit::HandleRemoveByDamageChanceProc, // 7 SPELL_AURA_MOD_FEAR
&Unit::HandleNULLProc, // 8 SPELL_AURA_PERIODIC_HEAL
&Unit::HandleNULLProc, // 9 SPELL_AURA_MOD_ATTACKSPEED
&Unit::HandleNULLProc, // 10 SPELL_AURA_MOD_THREAT
&Unit::HandleNULLProc, // 11 SPELL_AURA_MOD_TAUNT
&Unit::HandleNULLProc, // 12 SPELL_AURA_MOD_STUN
&Unit::HandleNULLProc, // 13 SPELL_AURA_MOD_DAMAGE_DONE
&Unit::HandleNULLProc, // 14 SPELL_AURA_MOD_DAMAGE_TAKEN
&Unit::HandleNULLProc, // 15 SPELL_AURA_DAMAGE_SHIELD
&Unit::HandleRemoveByDamageProc, // 16 SPELL_AURA_MOD_STEALTH
&Unit::HandleNULLProc, // 17 SPELL_AURA_MOD_STEALTH_DETECT
&Unit::HandleRemoveByDamageProc, // 18 SPELL_AURA_MOD_INVISIBILITY
&Unit::HandleNULLProc, // 19 SPELL_AURA_MOD_INVISIBILITY_DETECTION
&Unit::HandleNULLProc, // 20 SPELL_AURA_OBS_MOD_HEALTH
&Unit::HandleNULLProc, // 21 SPELL_AURA_OBS_MOD_MANA
&Unit::HandleNULLProc, // 22 SPELL_AURA_MOD_RESISTANCE
&Unit::HandleNULLProc, // 23 SPELL_AURA_PERIODIC_TRIGGER_SPELL
&Unit::HandleNULLProc, // 24 SPELL_AURA_PERIODIC_ENERGIZE
&Unit::HandleNULLProc, // 25 SPELL_AURA_MOD_PACIFY
&Unit::HandleRemoveByDamageChanceProc, // 26 SPELL_AURA_MOD_ROOT
&Unit::HandleNULLProc, // 27 SPELL_AURA_MOD_SILENCE
&Unit::HandleNULLProc, // 28 SPELL_AURA_REFLECT_SPELLS
&Unit::HandleNULLProc, // 29 SPELL_AURA_MOD_STAT
&Unit::HandleNULLProc, // 30 SPELL_AURA_MOD_SKILL
&Unit::HandleNULLProc, // 31 SPELL_AURA_MOD_INCREASE_SPEED
&Unit::HandleNULLProc, // 32 SPELL_AURA_MOD_INCREASE_MOUNTED_SPEED
&Unit::HandleNULLProc, // 33 SPELL_AURA_MOD_DECREASE_SPEED
&Unit::HandleNULLProc, // 34 SPELL_AURA_MOD_INCREASE_HEALTH
&Unit::HandleNULLProc, // 35 SPELL_AURA_MOD_INCREASE_ENERGY
&Unit::HandleNULLProc, // 36 SPELL_AURA_MOD_SHAPESHIFT
&Unit::HandleNULLProc, // 37 SPELL_AURA_EFFECT_IMMUNITY
&Unit::HandleNULLProc, // 38 SPELL_AURA_STATE_IMMUNITY
&Unit::HandleNULLProc, // 39 SPELL_AURA_SCHOOL_IMMUNITY
&Unit::HandleNULLProc, // 40 SPELL_AURA_DAMAGE_IMMUNITY
&Unit::HandleNULLProc, // 41 SPELL_AURA_DISPEL_IMMUNITY
&Unit::HandleProcTriggerSpellAuraProc, // 42 SPELL_AURA_PROC_TRIGGER_SPELL
&Unit::HandleProcTriggerDamageAuraProc, // 43 SPELL_AURA_PROC_TRIGGER_DAMAGE
&Unit::HandleNULLProc, // 44 SPELL_AURA_TRACK_CREATURES
&Unit::HandleNULLProc, // 45 SPELL_AURA_TRACK_RESOURCES
&Unit::HandleNULLProc, // 46 SPELL_AURA_46 (used in test spells 54054 and 54058, and spell 48050) (3.0.8a-3.2.2a)
&Unit::HandleNULLProc, // 47 SPELL_AURA_MOD_PARRY_PERCENT
&Unit::HandleNULLProc, // 48 SPELL_AURA_48 spell Napalm (area damage spell with additional delayed damage effect)
&Unit::HandleNULLProc, // 49 SPELL_AURA_MOD_DODGE_PERCENT
&Unit::HandleNULLProc, // 50 SPELL_AURA_MOD_CRITICAL_HEALING_AMOUNT
&Unit::HandleNULLProc, // 51 SPELL_AURA_MOD_BLOCK_PERCENT
&Unit::HandleNULLProc, // 52 SPELL_AURA_MOD_CRIT_PERCENT
&Unit::HandleNULLProc, // 53 SPELL_AURA_PERIODIC_LEECH
&Unit::HandleNULLProc, // 54 SPELL_AURA_MOD_HIT_CHANCE
&Unit::HandleNULLProc, // 55 SPELL_AURA_MOD_SPELL_HIT_CHANCE
&Unit::HandleNULLProc, // 56 SPELL_AURA_TRANSFORM
&Unit::HandleSpellCritChanceAuraProc, // 57 SPELL_AURA_MOD_SPELL_CRIT_CHANCE
&Unit::HandleNULLProc, // 58 SPELL_AURA_MOD_INCREASE_SWIM_SPEED
&Unit::HandleNULLProc, // 59 SPELL_AURA_MOD_DAMAGE_DONE_CREATURE
&Unit::HandleRemoveByDamageChanceProc, // 60 SPELL_AURA_MOD_PACIFY_SILENCE
&Unit::HandleNULLProc, // 61 SPELL_AURA_MOD_SCALE
&Unit::HandleNULLProc, // 62 SPELL_AURA_PERIODIC_HEALTH_FUNNEL
&Unit::HandleNULLProc, // 63 unused (3.0.8a-3.2.2a) old SPELL_AURA_PERIODIC_MANA_FUNNEL
&Unit::HandleNULLProc, // 64 SPELL_AURA_PERIODIC_MANA_LEECH
&Unit::HandleModCastingSpeedNotStackAuraProc, // 65 SPELL_AURA_MOD_CASTING_SPEED_NOT_STACK
&Unit::HandleNULLProc, // 66 SPELL_AURA_FEIGN_DEATH
&Unit::HandleNULLProc, // 67 SPELL_AURA_MOD_DISARM
&Unit::HandleNULLProc, // 68 SPELL_AURA_MOD_STALKED
&Unit::HandleNULLProc, // 69 SPELL_AURA_SCHOOL_ABSORB
&Unit::HandleNULLProc, // 70 SPELL_AURA_EXTRA_ATTACKS Useless, used by only one spell 41560 that has only visual effect (3.2.2a)
&Unit::HandleNULLProc, // 71 SPELL_AURA_MOD_SPELL_CRIT_CHANCE_SCHOOL
&Unit::HandleModPowerCostSchoolAuraProc, // 72 SPELL_AURA_MOD_POWER_COST_SCHOOL_PCT
&Unit::HandleModPowerCostSchoolAuraProc, // 73 SPELL_AURA_MOD_POWER_COST_SCHOOL
&Unit::HandleReflectSpellsSchoolAuraProc, // 74 SPELL_AURA_REFLECT_SPELLS_SCHOOL
&Unit::HandleNULLProc, // 75 SPELL_AURA_MOD_LANGUAGE
&Unit::HandleNULLProc, // 76 SPELL_AURA_FAR_SIGHT
&Unit::HandleMechanicImmuneResistanceAuraProc, // 77 SPELL_AURA_MECHANIC_IMMUNITY
&Unit::HandleNULLProc, // 78 SPELL_AURA_MOUNTED
&Unit::HandleModDamagePercentDoneAuraProc, // 79 SPELL_AURA_MOD_DAMAGE_PERCENT_DONE
&Unit::HandleNULLProc, // 80 SPELL_AURA_MOD_PERCENT_STAT
&Unit::HandleNULLProc, // 81 SPELL_AURA_SPLIT_DAMAGE_PCT
&Unit::HandleNULLProc, // 82 SPELL_AURA_WATER_BREATHING
&Unit::HandleNULLProc, // 83 SPELL_AURA_MOD_BASE_RESISTANCE
&Unit::HandleNULLProc, // 84 SPELL_AURA_MOD_REGEN
&Unit::HandleCantTrigger, // 85 SPELL_AURA_MOD_POWER_REGEN
&Unit::HandleNULLProc, // 86 SPELL_AURA_CHANNEL_DEATH_ITEM
&Unit::HandleNULLProc, // 87 SPELL_AURA_MOD_DAMAGE_PERCENT_TAKEN
&Unit::HandleNULLProc, // 88 SPELL_AURA_MOD_HEALTH_REGEN_PERCENT
&Unit::HandleNULLProc, // 89 SPELL_AURA_PERIODIC_DAMAGE_PERCENT
&Unit::HandleNULLProc, // 90 unused (3.0.8a-3.2.2a) old SPELL_AURA_MOD_RESIST_CHANCE
&Unit::HandleNULLProc, // 91 SPELL_AURA_MOD_DETECT_RANGE
&Unit::HandleNULLProc, // 92 SPELL_AURA_PREVENTS_FLEEING
&Unit::HandleNULLProc, // 93 SPELL_AURA_MOD_UNATTACKABLE
&Unit::HandleNULLProc, // 94 SPELL_AURA_INTERRUPT_REGEN
&Unit::HandleNULLProc, // 95 SPELL_AURA_GHOST
&Unit::HandleNULLProc, // 96 SPELL_AURA_SPELL_MAGNET
&Unit::HandleNULLProc, // 97 SPELL_AURA_MANA_SHIELD
&Unit::HandleNULLProc, // 98 SPELL_AURA_MOD_SKILL_TALENT
&Unit::HandleNULLProc, // 99 SPELL_AURA_MOD_ATTACK_POWER
&Unit::HandleNULLProc, //100 SPELL_AURA_AURAS_VISIBLE obsolete 3.x? all player can see all auras now, but still have 2 spells including GM-spell (1852,2855)
&Unit::HandleNULLProc, //101 SPELL_AURA_MOD_RESISTANCE_PCT
&Unit::HandleNULLProc, //102 SPELL_AURA_MOD_MELEE_ATTACK_POWER_VERSUS
&Unit::HandleNULLProc, //103 SPELL_AURA_MOD_TOTAL_THREAT
&Unit::HandleNULLProc, //104 SPELL_AURA_WATER_WALK
&Unit::HandleNULLProc, //105 SPELL_AURA_FEATHER_FALL
&Unit::HandleNULLProc, //106 SPELL_AURA_HOVER
&Unit::HandleAddFlatModifierAuraProc, //107 SPELL_AURA_ADD_FLAT_MODIFIER
&Unit::HandleAddPctModifierAuraProc, //108 SPELL_AURA_ADD_PCT_MODIFIER
&Unit::HandleNULLProc, //109 SPELL_AURA_ADD_TARGET_TRIGGER
&Unit::HandleNULLProc, //110 SPELL_AURA_MOD_POWER_REGEN_PERCENT
&Unit::HandleNULLProc, //111 SPELL_AURA_ADD_CASTER_HIT_TRIGGER
&Unit::HandleOverrideClassScriptAuraProc, //112 SPELL_AURA_OVERRIDE_CLASS_SCRIPTS
&Unit::HandleNULLProc, //113 SPELL_AURA_MOD_RANGED_DAMAGE_TAKEN
&Unit::HandleNULLProc, //114 SPELL_AURA_MOD_RANGED_DAMAGE_TAKEN_PCT
&Unit::HandleNULLProc, //115 SPELL_AURA_MOD_HEALING
&Unit::HandleNULLProc, //116 SPELL_AURA_MOD_REGEN_DURING_COMBAT
&Unit::HandleMechanicImmuneResistanceAuraProc, //117 SPELL_AURA_MOD_MECHANIC_RESISTANCE
&Unit::HandleNULLProc, //118 SPELL_AURA_MOD_HEALING_PCT
&Unit::HandleNULLProc, //119 unused (3.0.8a-3.2.2a) old SPELL_AURA_SHARE_PET_TRACKING
&Unit::HandleNULLProc, //120 SPELL_AURA_UNTRACKABLE
&Unit::HandleNULLProc, //121 SPELL_AURA_EMPATHY
&Unit::HandleNULLProc, //122 SPELL_AURA_MOD_OFFHAND_DAMAGE_PCT
&Unit::HandleNULLProc, //123 SPELL_AURA_MOD_TARGET_RESISTANCE
&Unit::HandleNULLProc, //124 SPELL_AURA_MOD_RANGED_ATTACK_POWER
&Unit::HandleNULLProc, //125 SPELL_AURA_MOD_MELEE_DAMAGE_TAKEN
&Unit::HandleNULLProc, //126 SPELL_AURA_MOD_MELEE_DAMAGE_TAKEN_PCT
&Unit::HandleNULLProc, //127 SPELL_AURA_RANGED_ATTACK_POWER_ATTACKER_BONUS
&Unit::HandleNULLProc, //128 SPELL_AURA_MOD_POSSESS_PET
&Unit::HandleNULLProc, //129 SPELL_AURA_MOD_SPEED_ALWAYS
&Unit::HandleNULLProc, //130 SPELL_AURA_MOD_MOUNTED_SPEED_ALWAYS
&Unit::HandleNULLProc, //131 SPELL_AURA_MOD_RANGED_ATTACK_POWER_VERSUS
&Unit::HandleNULLProc, //132 SPELL_AURA_MOD_INCREASE_ENERGY_PERCENT
&Unit::HandleNULLProc, //133 SPELL_AURA_MOD_INCREASE_HEALTH_PERCENT
&Unit::HandleNULLProc, //134 SPELL_AURA_MOD_MANA_REGEN_INTERRUPT
&Unit::HandleNULLProc, //135 SPELL_AURA_MOD_HEALING_DONE
&Unit::HandleNULLProc, //136 SPELL_AURA_MOD_HEALING_DONE_PERCENT
&Unit::HandleNULLProc, //137 SPELL_AURA_MOD_TOTAL_STAT_PERCENTAGE
&Unit::HandleHasteAuraProc, //138 SPELL_AURA_MOD_MELEE_HASTE
&Unit::HandleNULLProc, //139 SPELL_AURA_FORCE_REACTION
&Unit::HandleNULLProc, //140 SPELL_AURA_MOD_RANGED_HASTE
&Unit::HandleNULLProc, //141 SPELL_AURA_MOD_RANGED_AMMO_HASTE
&Unit::HandleNULLProc, //142 SPELL_AURA_MOD_BASE_RESISTANCE_PCT
&Unit::HandleNULLProc, //143 SPELL_AURA_MOD_RESISTANCE_EXCLUSIVE
&Unit::HandleNULLProc, //144 SPELL_AURA_SAFE_FALL
&Unit::HandleNULLProc, //145 SPELL_AURA_MOD_PET_TALENT_POINTS
&Unit::HandleNULLProc, //146 SPELL_AURA_ALLOW_TAME_PET_TYPE
&Unit::HandleNULLProc, //147 SPELL_AURA_MECHANIC_IMMUNITY_MASK
&Unit::HandleNULLProc, //148 SPELL_AURA_RETAIN_COMBO_POINTS
&Unit::HandleCantTrigger, //149 SPELL_AURA_REDUCE_PUSHBACK
&Unit::HandleNULLProc, //150 SPELL_AURA_MOD_SHIELD_BLOCKVALUE_PCT
&Unit::HandleNULLProc, //151 SPELL_AURA_TRACK_STEALTHED
&Unit::HandleNULLProc, //152 SPELL_AURA_MOD_DETECTED_RANGE
&Unit::HandleNULLProc, //153 SPELL_AURA_SPLIT_DAMAGE_FLAT
&Unit::HandleNULLProc, //154 SPELL_AURA_MOD_STEALTH_LEVEL
&Unit::HandleNULLProc, //155 SPELL_AURA_MOD_WATER_BREATHING
&Unit::HandleNULLProc, //156 SPELL_AURA_MOD_REPUTATION_GAIN
&Unit::HandleNULLProc, //157 SPELL_AURA_PET_DAMAGE_MULTI (single test like spell 20782, also single for 214 aura)
&Unit::HandleNULLProc, //158 SPELL_AURA_MOD_SHIELD_BLOCKVALUE
&Unit::HandleNULLProc, //159 SPELL_AURA_NO_PVP_CREDIT
&Unit::HandleNULLProc, //160 SPELL_AURA_MOD_AOE_AVOIDANCE
&Unit::HandleNULLProc, //161 SPELL_AURA_MOD_HEALTH_REGEN_IN_COMBAT
&Unit::HandleNULLProc, //162 SPELL_AURA_POWER_BURN_MANA
&Unit::HandleNULLProc, //163 SPELL_AURA_MOD_CRIT_DAMAGE_BONUS
&Unit::HandleNULLProc, //164 unused (3.0.8a-3.2.2a), only one test spell 10654
&Unit::HandleNULLProc, //165 SPELL_AURA_MELEE_ATTACK_POWER_ATTACKER_BONUS
&Unit::HandleNULLProc, //166 SPELL_AURA_MOD_ATTACK_POWER_PCT
&Unit::HandleNULLProc, //167 SPELL_AURA_MOD_RANGED_ATTACK_POWER_PCT
&Unit::HandleNULLProc, //168 SPELL_AURA_MOD_DAMAGE_DONE_VERSUS
&Unit::HandleNULLProc, //169 SPELL_AURA_MOD_CRIT_PERCENT_VERSUS
&Unit::HandleNULLProc, //170 SPELL_AURA_DETECT_AMORE different spells that ignore transformation effects
&Unit::HandleNULLProc, //171 SPELL_AURA_MOD_SPEED_NOT_STACK
&Unit::HandleNULLProc, //172 SPELL_AURA_MOD_MOUNTED_SPEED_NOT_STACK
&Unit::HandleNULLProc, //173 unused (3.0.8a-3.2.2a) no spells, old SPELL_AURA_ALLOW_CHAMPION_SPELLS only for Proclaim Champion spell
&Unit::HandleNULLProc, //174 SPELL_AURA_MOD_SPELL_DAMAGE_OF_STAT_PERCENT
&Unit::HandleNULLProc, //175 SPELL_AURA_MOD_SPELL_HEALING_OF_STAT_PERCENT
&Unit::HandleNULLProc, //176 SPELL_AURA_SPIRIT_OF_REDEMPTION only for Spirit of Redemption spell, die at aura end
&Unit::HandleNULLProc, //177 SPELL_AURA_AOE_CHARM (22 spells)
&Unit::HandleNULLProc, //178 SPELL_AURA_MOD_DEBUFF_RESISTANCE
&Unit::HandleNULLProc, //179 SPELL_AURA_MOD_ATTACKER_SPELL_CRIT_CHANCE
&Unit::HandleNULLProc, //180 SPELL_AURA_MOD_FLAT_SPELL_DAMAGE_VERSUS
&Unit::HandleNULLProc, //181 unused (3.0.8a-3.2.2a) old SPELL_AURA_MOD_FLAT_SPELL_CRIT_DAMAGE_VERSUS
&Unit::HandleNULLProc, //182 SPELL_AURA_MOD_RESISTANCE_OF_STAT_PERCENT
&Unit::HandleNULLProc, //183 SPELL_AURA_MOD_CRITICAL_THREAT only used in 28746
&Unit::HandleNULLProc, //184 SPELL_AURA_MOD_ATTACKER_MELEE_HIT_CHANCE
&Unit::HandleNULLProc, //185 SPELL_AURA_MOD_ATTACKER_RANGED_HIT_CHANCE
&Unit::HandleNULLProc, //186 SPELL_AURA_MOD_ATTACKER_SPELL_HIT_CHANCE
&Unit::HandleNULLProc, //187 SPELL_AURA_MOD_ATTACKER_MELEE_CRIT_CHANCE
&Unit::HandleNULLProc, //188 SPELL_AURA_MOD_ATTACKER_RANGED_CRIT_CHANCE
&Unit::HandleModRating, //189 SPELL_AURA_MOD_RATING
&Unit::HandleNULLProc, //190 SPELL_AURA_MOD_FACTION_REPUTATION_GAIN
&Unit::HandleNULLProc, //191 SPELL_AURA_USE_NORMAL_MOVEMENT_SPEED
&Unit::HandleNULLProc, //192 SPELL_AURA_HASTE_MELEE
&Unit::HandleNULLProc, //193 SPELL_AURA_HASTE_ALL (in fact combat (any type attack) speed pct)
&Unit::HandleNULLProc, //194 SPELL_AURA_MOD_IGNORE_ABSORB_SCHOOL
&Unit::HandleNULLProc, //195 SPELL_AURA_MOD_IGNORE_ABSORB_FOR_SPELL
&Unit::HandleNULLProc, //196 SPELL_AURA_MOD_COOLDOWN (single spell 24818 in 3.2.2a)
    &Unit::HandleNULLProc, //197 SPELL_AURA_MOD_ATTACKER_SPELL_AND_WEAPON_CRIT_CHANCE
&Unit::HandleNULLProc, //198 unused (3.0.8a-3.2.2a) old SPELL_AURA_MOD_ALL_WEAPON_SKILLS
&Unit::HandleNULLProc, //199 SPELL_AURA_MOD_INCREASES_SPELL_PCT_TO_HIT
&Unit::HandleNULLProc, //200 SPELL_AURA_MOD_KILL_XP_PCT
    &Unit::HandleNULLProc, //201 SPELL_AURA_FLY this aura enables flight mode...
&Unit::HandleNULLProc, //202 SPELL_AURA_CANNOT_BE_DODGED
&Unit::HandleNULLProc, //203 SPELL_AURA_MOD_ATTACKER_MELEE_CRIT_DAMAGE
&Unit::HandleNULLProc, //204 SPELL_AURA_MOD_ATTACKER_RANGED_CRIT_DAMAGE
&Unit::HandleNULLProc, //205 SPELL_AURA_MOD_ATTACKER_SPELL_CRIT_DAMAGE
&Unit::HandleNULLProc, //206 SPELL_AURA_MOD_FLIGHT_SPEED
&Unit::HandleNULLProc, //207 SPELL_AURA_MOD_FLIGHT_SPEED_MOUNTED
&Unit::HandleNULLProc, //208 SPELL_AURA_MOD_FLIGHT_SPEED_STACKING
&Unit::HandleNULLProc, //209 SPELL_AURA_MOD_FLIGHT_SPEED_MOUNTED_STACKING
&Unit::HandleNULLProc, //210 SPELL_AURA_MOD_FLIGHT_SPEED_NOT_STACKING
&Unit::HandleNULLProc, //211 SPELL_AURA_MOD_FLIGHT_SPEED_MOUNTED_NOT_STACKING
&Unit::HandleNULLProc, //212 SPELL_AURA_MOD_RANGED_ATTACK_POWER_OF_STAT_PERCENT
&Unit::HandleNULLProc, //213 SPELL_AURA_MOD_RAGE_FROM_DAMAGE_DEALT implemented in Player::RewardRage
&Unit::HandleNULLProc, //214 Tamed Pet Passive (single test like spell 20782, also single for 157 aura)
&Unit::HandleNULLProc, //215 SPELL_AURA_ARENA_PREPARATION
&Unit::HandleNULLProc, //216 SPELL_AURA_HASTE_SPELLS
&Unit::HandleNULLProc, //217 unused (3.0.8a-3.2.2a)
&Unit::HandleNULLProc, //218 SPELL_AURA_HASTE_RANGED
&Unit::HandleNULLProc, //219 SPELL_AURA_MOD_MANA_REGEN_FROM_STAT
&Unit::HandleNULLProc, //220 SPELL_AURA_MOD_RATING_FROM_STAT
&Unit::HandleNULLProc, //221 ignored
&Unit::HandleNULLProc, //222 unused (3.0.8a-3.2.2a) only for spell 44586 that not used in real spell cast
    &Unit::HandleNULLProc, //223 dummy code (cast damage spell to attacker) and another dummy (jump to another nearby raid member)
&Unit::HandleNULLProc, //224 unused (3.0.8a-3.2.2a)
&Unit::HandleMendingAuraProc, //225 SPELL_AURA_PRAYER_OF_MENDING
&Unit::HandlePeriodicDummyAuraProc, //226 SPELL_AURA_PERIODIC_DUMMY
&Unit::HandleNULLProc, //227 SPELL_AURA_PERIODIC_TRIGGER_SPELL_WITH_VALUE
&Unit::HandleNULLProc, //228 SPELL_AURA_DETECT_STEALTH
&Unit::HandleNULLProc, //229 SPELL_AURA_MOD_AOE_DAMAGE_AVOIDANCE
&Unit::HandleNULLProc, //230 Commanding Shout
&Unit::HandleProcTriggerSpellAuraProc, //231 SPELL_AURA_PROC_TRIGGER_SPELL_WITH_VALUE
&Unit::HandleNULLProc, //232 SPELL_AURA_MECHANIC_DURATION_MOD
&Unit::HandleNULLProc, //233 set model id to the one of the creature with id m_modifier.m_miscvalue
&Unit::HandleNULLProc, //234 SPELL_AURA_MECHANIC_DURATION_MOD_NOT_STACK
&Unit::HandleNULLProc, //235 SPELL_AURA_MOD_DISPEL_RESIST
&Unit::HandleNULLProc, //236 SPELL_AURA_CONTROL_VEHICLE
&Unit::HandleNULLProc, //237 SPELL_AURA_MOD_SPELL_DAMAGE_OF_ATTACK_POWER
&Unit::HandleNULLProc, //238 SPELL_AURA_MOD_SPELL_HEALING_OF_ATTACK_POWER
&Unit::HandleNULLProc, //239 SPELL_AURA_MOD_SCALE_2 only in Noggenfogger Elixir (16595) before 2.3.0 aura 61
&Unit::HandleNULLProc, //240 SPELL_AURA_MOD_EXPERTISE
&Unit::HandleNULLProc, //241 Forces the player to move forward
    &Unit::HandleNULLProc, //242 SPELL_AURA_MOD_SPELL_DAMAGE_FROM_HEALING (only 2 test spells in 3.2.2a)
&Unit::HandleNULLProc, //243 faction reaction override spells
&Unit::HandleNULLProc, //244 SPELL_AURA_COMPREHEND_LANGUAGE
&Unit::HandleNULLProc, //245 SPELL_AURA_MOD_DURATION_OF_MAGIC_EFFECTS
&Unit::HandleNULLProc, //246 SPELL_AURA_MOD_DURATION_OF_EFFECTS_BY_DISPEL
&Unit::HandleNULLProc, //247 target to become a clone of the caster
&Unit::HandleNULLProc, //248 SPELL_AURA_MOD_COMBAT_RESULT_CHANCE
&Unit::HandleNULLProc, //249 SPELL_AURA_CONVERT_RUNE
&Unit::HandleNULLProc, //250 SPELL_AURA_MOD_INCREASE_HEALTH_2
&Unit::HandleNULLProc, //251 SPELL_AURA_MOD_ENEMY_DODGE
&Unit::HandleNULLProc, //252 SPELL_AURA_SLOW_ALL
&Unit::HandleNULLProc, //253 SPELL_AURA_MOD_BLOCK_CRIT_CHANCE
&Unit::HandleNULLProc, //254 SPELL_AURA_MOD_DISARM_SHIELD disarm Shield
&Unit::HandleNULLProc, //255 SPELL_AURA_MOD_MECHANIC_DAMAGE_TAKEN_PERCENT
&Unit::HandleNULLProc, //256 SPELL_AURA_NO_REAGENT_USE Use SpellClassMask for spell select
&Unit::HandleNULLProc, //257 SPELL_AURA_MOD_TARGET_RESIST_BY_SPELL_CLASS Use SpellClassMask for spell select
&Unit::HandleNULLProc, //258 SPELL_AURA_MOD_SPELL_VISUAL
&Unit::HandleNULLProc, //259 corrupt healing over time spell
&Unit::HandleNULLProc, //260 SPELL_AURA_SCREEN_EFFECT (miscvalue = id in ScreenEffect.dbc) not required any code
&Unit::HandleNULLProc, //261 SPELL_AURA_PHASE undetectable invisibility?
&Unit::HandleNULLProc, //262 SPELL_AURA_IGNORE_UNIT_STATE
&Unit::HandleNULLProc, //263 SPELL_AURA_ALLOW_ONLY_ABILITY player can use only abilities set in SpellClassMask
&Unit::HandleNULLProc, //264 unused (3.0.8a-3.2.2a)
&Unit::HandleNULLProc, //265 unused (3.0.8a-3.2.2a)
&Unit::HandleNULLProc, //266 unused (3.0.8a-3.2.2a)
&Unit::HandleNULLProc, //267 SPELL_AURA_MOD_IMMUNE_AURA_APPLY_SCHOOL
&Unit::HandleNULLProc, //268 SPELL_AURA_MOD_ATTACK_POWER_OF_STAT_PERCENT
&Unit::HandleNULLProc, //269 SPELL_AURA_MOD_IGNORE_DAMAGE_REDUCTION_SCHOOL
&Unit::HandleNULLProc, //270 SPELL_AURA_MOD_IGNORE_TARGET_RESIST (unused in 3.2.2a)
&Unit::HandleModDamageFromCasterAuraProc, //271 SPELL_AURA_MOD_DAMAGE_FROM_CASTER
    &Unit::HandleNULLProc, //272 SPELL_AURA_MAELSTROM_WEAPON (unclear use for aura; in 3.2.2a-3.3.0 used only by spell 53817, which is spellmod-stacked, and the charged spell is expected to drop as a stack)
&Unit::HandleNULLProc, //273 SPELL_AURA_X_RAY (client side implementation)
&Unit::HandleNULLProc, //274 proc free shot?
&Unit::HandleNULLProc, //275 SPELL_AURA_MOD_IGNORE_SHAPESHIFT Use SpellClassMask for spell select
&Unit::HandleNULLProc, //276 mod damage % mechanic?
&Unit::HandleNULLProc, //277 SPELL_AURA_MOD_MAX_AFFECTED_TARGETS Use SpellClassMask for spell select
&Unit::HandleNULLProc, //278 SPELL_AURA_MOD_DISARM_RANGED disarm ranged weapon
&Unit::HandleNULLProc, //279 visual effects? 58836 and 57507
&Unit::HandleNULLProc, //280 SPELL_AURA_MOD_TARGET_ARMOR_PCT
&Unit::HandleNULLProc, //281 SPELL_AURA_MOD_HONOR_GAIN
&Unit::HandleNULLProc, //282 SPELL_AURA_INCREASE_BASE_HEALTH_PERCENT
&Unit::HandleNULLProc, //283 SPELL_AURA_MOD_HEALING_RECEIVED
&Unit::HandleNULLProc, //284 51 spells
&Unit::HandleNULLProc, //285 SPELL_AURA_MOD_ATTACK_POWER_OF_ARMOR
&Unit::HandleNULLProc, //286 SPELL_AURA_ABILITY_PERIODIC_CRIT
&Unit::HandleNULLProc, //287 SPELL_AURA_DEFLECT_SPELLS
&Unit::HandleNULLProc, //288 increase parry/deflect, prevent attack (single spell used 67801)
&Unit::HandleNULLProc, //289 unused (3.2.2a)
&Unit::HandleNULLProc, //290 SPELL_AURA_MOD_ALL_CRIT_CHANCE
&Unit::HandleNULLProc, //291 SPELL_AURA_MOD_QUEST_XP_PCT
&Unit::HandleNULLProc, //292 call stabled pet
&Unit::HandleNULLProc, //293 3 spells
&Unit::HandleNULLProc, //294 2 spells, possible prevent mana regen
&Unit::HandleNULLProc, //295 unused (3.2.2a)
&Unit::HandleNULLProc, //296 2 spells
&Unit::HandleNULLProc, //297 1 spell (counter spell school?)
&Unit::HandleNULLProc, //298 unused (3.2.2a)
&Unit::HandleNULLProc, //299 unused (3.2.2a)
&Unit::HandleNULLProc, //300 3 spells (share damage?)
&Unit::HandleNULLProc, //301 5 spells
&Unit::HandleNULLProc, //302 unused (3.2.2a)
&Unit::HandleNULLProc, //303 17 spells
&Unit::HandleNULLProc, //304 2 spells (alcohol effect?)
&Unit::HandleNULLProc, //305 SPELL_AURA_MOD_MINIMUM_SPEED
&Unit::HandleNULLProc, //306 1 spell
&Unit::HandleNULLProc, //307 absorb healing?
&Unit::HandleNULLProc, //308 new aura for hunter traps
&Unit::HandleNULLProc, //309 absorb healing?
&Unit::HandleNULLProc, //310 pet avoidance passive?
&Unit::HandleNULLProc, //311 0 spells in 3.3
&Unit::HandleNULLProc, //312 0 spells in 3.3
&Unit::HandleNULLProc, //313 0 spells in 3.3
    &Unit::HandleNULLProc, //314 1 test spell (reduce duration of silence/magic)
&Unit::HandleNULLProc, //315 underwater walking
&Unit::HandleNULLProc //316 makes haste affect HOT/DOT ticks
};
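// Decide whether the aura in 'holder' may proc for the current event: resolve the
// effective proc flags (custom spell_proc_event entry if present, otherwise the
// SpellEntry), validate the event against them, apply kill-credit and equipped-item
// requirements, then roll the final proc chance (custom chance, PPM rate or the
// SpellEntry procChance, adjusted by SPELLMOD_CHANCE_OF_SUCCESS / SPELLMOD_FREQUENCY_OF_SUCCESS).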
bool Unit::IsTriggeredAtSpellProcEvent(Unit *pVictim, SpellAuraHolder* holder, SpellEntry const* procSpell, uint32 procFlag, uint32 procExtra, WeaponAttackType attType, bool isVictim, SpellProcEventEntry const*& spellProcEvent )
{
SpellEntry const* spellProto = holder->GetSpellProto ();
// Get proc Event Entry
spellProcEvent = sSpellMgr.GetSpellProcEvent(spellProto->Id);
// Get EventProcFlag
uint32 EventProcFlag;
if (spellProcEvent && spellProcEvent->procFlags) // if exist get custom spellProcEvent->procFlags
EventProcFlag = spellProcEvent->procFlags;
else
EventProcFlag = spellProto->procFlags; // else get from spell proto
// Continue if no trigger exist
if (!EventProcFlag)
return false;
// Check spellProcEvent data requirements
if(!SpellMgr::IsSpellProcEventCanTriggeredBy(spellProcEvent, EventProcFlag, procSpell, procFlag, procExtra))
return false;
    // Kill procs in most cases require that the victim grants honor or XP
if (EventProcFlag & PROC_FLAG_KILL && GetTypeId() == TYPEID_PLAYER)
{
bool allow = ((Player*)this)->isHonorOrXPTarget(pVictim);
// Shadow Word: Death - can trigger from every kill
if (holder->GetId() == 32409)
allow = true;
if (!allow)
return false;
}
    // An aura added by a spell can't be triggered by that same spell (prevents dropping charges / re-triggering),
    // except for periodic triggers (which can be triggered from self)
if(procSpell && procSpell->Id == spellProto->Id && !(spellProto->procFlags & PROC_FLAG_ON_TAKE_PERIODIC))
return false;
// Check if current equipment allows aura to proc
if(!isVictim && GetTypeId() == TYPEID_PLAYER)
{
if(spellProto->EquippedItemClass == ITEM_CLASS_WEAPON)
{
Item *item = NULL;
if(attType == BASE_ATTACK)
item = ((Player*)this)->GetItemByPos(INVENTORY_SLOT_BAG_0, EQUIPMENT_SLOT_MAINHAND);
else if (attType == OFF_ATTACK)
item = ((Player*)this)->GetItemByPos(INVENTORY_SLOT_BAG_0, EQUIPMENT_SLOT_OFFHAND);
else
item = ((Player*)this)->GetItemByPos(INVENTORY_SLOT_BAG_0, EQUIPMENT_SLOT_RANGED);
if(!item || item->IsBroken() || item->GetProto()->Class != ITEM_CLASS_WEAPON || !((1<<item->GetProto()->SubClass) & spellProto->EquippedItemSubClassMask))
return false;
}
else if(spellProto->EquippedItemClass == ITEM_CLASS_ARMOR)
{
// Check if player is wearing shield
Item *item = ((Player*)this)->GetItemByPos(INVENTORY_SLOT_BAG_0, EQUIPMENT_SLOT_OFFHAND);
if(!item || item->IsBroken() || !CanUseEquippedWeapon(OFF_ATTACK) || item->GetProto()->Class != ITEM_CLASS_ARMOR || !((1<<item->GetProto()->SubClass) & spellProto->EquippedItemSubClassMask))
return false;
}
}
// Get chance from spell
float chance = (float)spellProto->procChance;
    // If spellProcEvent defines a custom chance, use it instead
if(spellProcEvent && spellProcEvent->customChance)
chance = spellProcEvent->customChance;
// If PPM exist calculate chance from PPM
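    // GetPPMProcChance presumably converts the procs-per-minute rate into a per-attack
    // percentage based on weapon speed (roughly WeaponSpeed(ms) * ppmRate / 600)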
if(!isVictim && spellProcEvent && spellProcEvent->ppmRate != 0)
{
uint32 WeaponSpeed = GetAttackTime(attType);
chance = GetPPMProcChance(WeaponSpeed, spellProcEvent->ppmRate);
}
// Apply chance modifier aura
if(Player* modOwner = GetSpellModOwner())
{
modOwner->ApplySpellMod(spellProto->Id,SPELLMOD_CHANCE_OF_SUCCESS,chance);
modOwner->ApplySpellMod(spellProto->Id,SPELLMOD_FREQUENCY_OF_SUCCESS,chance);
}
return roll_chance_f(chance);
}
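// Proc handler for melee haste dummy procs. Only Blade Flurry (13877/33735) does real
// work here: it copies the triggering damage onto a random nearby unfriendly target
// via the triggered spell 22482; all other cases just consume the charge.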
SpellAuraProcResult Unit::HandleHasteAuraProc(Unit *pVictim, uint32 damage, Aura* triggeredByAura, SpellEntry const * /*procSpell*/, uint32 /*procFlag*/, uint32 /*procEx*/, uint32 cooldown)
{
SpellEntry const *hasteSpell = triggeredByAura->GetSpellProto();
Item* castItem = !triggeredByAura->GetCastItemGuid().IsEmpty() && GetTypeId()==TYPEID_PLAYER
? ((Player*)this)->GetItemByGuid(triggeredByAura->GetCastItemGuid()) : NULL;
uint32 triggered_spell_id = 0;
Unit* target = pVictim;
int32 basepoints0 = 0;
switch(hasteSpell->SpellFamilyName)
{
case SPELLFAMILY_ROGUE:
{
switch(hasteSpell->Id)
{
// Blade Flurry
case 13877:
case 33735:
{
target = SelectRandomUnfriendlyTarget(pVictim);
if(!target)
return SPELL_AURA_PROC_FAILED;
basepoints0 = damage;
triggered_spell_id = 22482;
break;
}
}
break;
}
}
// processed charge only counting case
if(!triggered_spell_id)
return SPELL_AURA_PROC_OK;
SpellEntry const* triggerEntry = sSpellStore.LookupEntry(triggered_spell_id);
if(!triggerEntry)
{
sLog.outError("Unit::HandleHasteAuraProc: Spell %u have nonexistent triggered spell %u",hasteSpell->Id,triggered_spell_id);
return SPELL_AURA_PROC_FAILED;
}
// default case
if (!target || (target != this && !target->isAlive()))
return SPELL_AURA_PROC_FAILED;
if (cooldown && GetTypeId()==TYPEID_PLAYER && ((Player*)this)->HasSpellCooldown(triggered_spell_id))
return SPELL_AURA_PROC_FAILED;
if (basepoints0)
CastCustomSpell(target,triggered_spell_id,&basepoints0,NULL,NULL,true,castItem,triggeredByAura);
else
CastSpell(target,triggered_spell_id,true,castItem,triggeredByAura);
if (cooldown && GetTypeId()==TYPEID_PLAYER)
((Player*)this)->AddSpellCooldown(triggered_spell_id,0,time(NULL) + cooldown);
return SPELL_AURA_PROC_OK;
}
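// Proc handler for spell crit chance auras. Only Focus Magic (54646) is handled: when
// it procs, the caster of the aura receives the crit buff 54648.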
SpellAuraProcResult Unit::HandleSpellCritChanceAuraProc(Unit *pVictim, uint32 /*damage*/, Aura* triggeredByAura, SpellEntry const * procSpell, uint32 /*procFlag*/, uint32 /*procEx*/, uint32 cooldown)
{
if (!procSpell)
return SPELL_AURA_PROC_FAILED;
SpellEntry const *triggeredByAuraSpell = triggeredByAura->GetSpellProto();
Item* castItem = !triggeredByAura->GetCastItemGuid().IsEmpty() && GetTypeId()==TYPEID_PLAYER
? ((Player*)this)->GetItemByGuid(triggeredByAura->GetCastItemGuid()) : NULL;
uint32 triggered_spell_id = 0;
Unit* target = pVictim;
int32 basepoints0 = 0;
switch(triggeredByAuraSpell->SpellFamilyName)
{
case SPELLFAMILY_MAGE:
{
switch(triggeredByAuraSpell->Id)
{
// Focus Magic
case 54646:
{
Unit* caster = triggeredByAura->GetCaster();
if (!caster)
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 54648;
target = caster;
break;
}
}
}
}
// processed charge only counting case
if (!triggered_spell_id)
return SPELL_AURA_PROC_OK;
SpellEntry const* triggerEntry = sSpellStore.LookupEntry(triggered_spell_id);
if(!triggerEntry)
{
sLog.outError("Unit::HandleHasteAuraProc: Spell %u have nonexistent triggered spell %u",triggeredByAuraSpell->Id,triggered_spell_id);
return SPELL_AURA_PROC_FAILED;
}
// default case
if (!target || (target != this && !target->isAlive()))
return SPELL_AURA_PROC_FAILED;
if (cooldown && GetTypeId()==TYPEID_PLAYER && ((Player*)this)->HasSpellCooldown(triggered_spell_id))
return SPELL_AURA_PROC_FAILED;
if (basepoints0)
CastCustomSpell(target,triggered_spell_id,&basepoints0,NULL,NULL,true,castItem,triggeredByAura);
else
CastSpell(target,triggered_spell_id,true,castItem,triggeredByAura);
if (cooldown && GetTypeId()==TYPEID_PLAYER)
((Player*)this)->AddSpellCooldown(triggered_spell_id,0,time(NULL) + cooldown);
return SPELL_AURA_PROC_OK;
}
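// Proc handler for dummy auras. Each SpellFamily/spell-id case below selects a triggered
// spell id, optional base points and a target for the shared cast logic that follows the
// switch (same pattern as the handlers above); cases that do all their work inline return
// a proc result directly.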
SpellAuraProcResult Unit::HandleDummyAuraProc(Unit *pVictim, uint32 damage, Aura* triggeredByAura, SpellEntry const * procSpell, uint32 procFlag, uint32 procEx, uint32 cooldown)
{
SpellEntry const *dummySpell = triggeredByAura->GetSpellProto ();
SpellEffectIndex effIndex = triggeredByAura->GetEffIndex();
int32 triggerAmount = triggeredByAura->GetModifier()->m_amount;
Item* castItem = !triggeredByAura->GetCastItemGuid().IsEmpty() && GetTypeId()==TYPEID_PLAYER
? ((Player*)this)->GetItemByGuid(triggeredByAura->GetCastItemGuid()) : NULL;
// some dummy spells have trigger spell in spell data already (from 3.0.3)
uint32 triggered_spell_id = dummySpell->EffectApplyAuraName[effIndex] == SPELL_AURA_DUMMY ? dummySpell->EffectTriggerSpell[effIndex] : 0;
Unit* target = pVictim;
int32 basepoints[MAX_EFFECT_INDEX] = {0, 0, 0};
ObjectGuid originalCaster = ObjectGuid();
switch(dummySpell->SpellFamilyName)
{
case SPELLFAMILY_GENERIC:
{
switch (dummySpell->Id)
{
// Eye for an Eye
case 9799:
case 25988:
{
// return damage % to attacker but < 50% own total health
basepoints[0] = triggerAmount*int32(damage)/100;
if (basepoints[0] > (int32)GetMaxHealth()/2)
basepoints[0] = (int32)GetMaxHealth()/2;
triggered_spell_id = 25997;
break;
}
                // Sweeping Strikes (possibly also used by NPC spells)
case 18765:
case 35429:
{
// prevent chain of triggered spell from same triggered spell
if (procSpell && procSpell->Id == 26654)
return SPELL_AURA_PROC_FAILED;
target = SelectRandomUnfriendlyTarget(pVictim);
if(!target)
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 26654;
break;
}
// Twisted Reflection (boss spell)
case 21063:
triggered_spell_id = 21064;
break;
// Unstable Power
case 24658:
{
if (!procSpell || procSpell->Id == 24659)
return SPELL_AURA_PROC_FAILED;
// Need remove one 24659 aura
RemoveAuraHolderFromStack(24659);
return SPELL_AURA_PROC_OK;
}
// Restless Strength
case 24661:
{
// Need remove one 24662 aura
RemoveAuraHolderFromStack(24662);
return SPELL_AURA_PROC_OK;
}
// Adaptive Warding (Frostfire Regalia set)
case 28764:
{
if(!procSpell)
return SPELL_AURA_PROC_FAILED;
// find Mage Armor
bool found = false;
AuraList const& mRegenInterupt = GetAurasByType(SPELL_AURA_MOD_MANA_REGEN_INTERRUPT);
for(AuraList::const_iterator iter = mRegenInterupt.begin(); iter != mRegenInterupt.end(); ++iter)
{
if(SpellEntry const* iterSpellProto = (*iter)->GetSpellProto())
{
if(iterSpellProto->SpellFamilyName==SPELLFAMILY_MAGE && (iterSpellProto->SpellFamilyFlags & UI64LIT(0x10000000)))
{
found=true;
break;
}
}
}
if(!found)
return SPELL_AURA_PROC_FAILED;
switch(GetFirstSchoolInMask(GetSpellSchoolMask(procSpell)))
{
case SPELL_SCHOOL_NORMAL:
case SPELL_SCHOOL_HOLY:
return SPELL_AURA_PROC_FAILED; // ignored
case SPELL_SCHOOL_FIRE: triggered_spell_id = 28765; break;
case SPELL_SCHOOL_NATURE: triggered_spell_id = 28768; break;
case SPELL_SCHOOL_FROST: triggered_spell_id = 28766; break;
case SPELL_SCHOOL_SHADOW: triggered_spell_id = 28769; break;
case SPELL_SCHOOL_ARCANE: triggered_spell_id = 28770; break;
default:
return SPELL_AURA_PROC_FAILED;
}
target = this;
break;
}
// Obsidian Armor (Justice Bearer`s Pauldrons shoulder)
case 27539:
{
if(!procSpell)
return SPELL_AURA_PROC_FAILED;
switch(GetFirstSchoolInMask(GetSpellSchoolMask(procSpell)))
{
case SPELL_SCHOOL_NORMAL:
return SPELL_AURA_PROC_FAILED; // ignore
case SPELL_SCHOOL_HOLY: triggered_spell_id = 27536; break;
case SPELL_SCHOOL_FIRE: triggered_spell_id = 27533; break;
case SPELL_SCHOOL_NATURE: triggered_spell_id = 27538; break;
case SPELL_SCHOOL_FROST: triggered_spell_id = 27534; break;
case SPELL_SCHOOL_SHADOW: triggered_spell_id = 27535; break;
case SPELL_SCHOOL_ARCANE: triggered_spell_id = 27540; break;
default:
return SPELL_AURA_PROC_FAILED;
}
target = this;
break;
}
// Mana Leech (Passive) (Priest Pet Aura)
case 28305:
{
// Cast on owner
target = GetOwner();
if(!target)
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 34650;
break;
}
// Divine purpose
case 31871:
case 31872:
{
// Roll chance
if (!roll_chance_i(triggerAmount))
return SPELL_AURA_PROC_FAILED;
// Remove any stun effect on target
SpellAuraHolderMap& Auras = pVictim->GetSpellAuraHolderMap();
for(SpellAuraHolderMap::const_iterator iter = Auras.begin(); iter != Auras.end();)
{
SpellEntry const *spell = iter->second->GetSpellProto();
if( spell->Mechanic == MECHANIC_STUN ||
iter->second->HasMechanic(MECHANIC_STUN))
{
pVictim->RemoveAurasDueToSpell(spell->Id);
iter = Auras.begin();
}
else
++iter;
}
return SPELL_AURA_PROC_OK;
}
// Mark of Malice
case 33493:
{
// Cast finish spell at last charge
if (triggeredByAura->GetHolder()->GetAuraCharges() > 1)
return SPELL_AURA_PROC_FAILED;
target = this;
triggered_spell_id = 33494;
break;
}
// Vampiric Aura (boss spell)
case 38196:
{
basepoints[0] = 3 * damage; // 300%
if (basepoints[0] < 0)
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 31285;
target = this;
break;
}
// Aura of Madness (Darkmoon Card: Madness trinket)
//=====================================================
// 39511 Sociopath: +35 strength (Paladin, Rogue, Druid, Warrior)
// 40997 Delusional: +70 attack power (Rogue, Hunter, Paladin, Warrior, Druid)
// 40998 Kleptomania: +35 agility (Warrior, Rogue, Paladin, Hunter, Druid)
// 40999 Megalomania: +41 damage/healing (Druid, Shaman, Priest, Warlock, Mage, Paladin)
// 41002 Paranoia: +35 spell/melee/ranged crit strike rating (All classes)
// 41005 Manic: +35 haste (spell, melee and ranged) (All classes)
// 41009 Narcissism: +35 intellect (Druid, Shaman, Priest, Warlock, Mage, Paladin, Hunter)
// 41011 Martyr Complex: +35 stamina (All classes)
// 41406 Dementia: Every 5 seconds either gives you +5% damage/healing. (Druid, Shaman, Priest, Warlock, Mage, Paladin)
// 41409 Dementia: Every 5 seconds either gives you -5% damage/healing. (Druid, Shaman, Priest, Warlock, Mage, Paladin)
case 39446:
{
if(GetTypeId() != TYPEID_PLAYER)
return SPELL_AURA_PROC_FAILED;
// Select class defined buff
switch (getClass())
{
case CLASS_PALADIN: // 39511,40997,40998,40999,41002,41005,41009,41011,41409
case CLASS_DRUID: // 39511,40997,40998,40999,41002,41005,41009,41011,41409
{
uint32 RandomSpell[]={39511,40997,40998,40999,41002,41005,41009,41011,41409};
triggered_spell_id = RandomSpell[ irand(0, sizeof(RandomSpell)/sizeof(uint32) - 1) ];
break;
}
case CLASS_ROGUE: // 39511,40997,40998,41002,41005,41011
case CLASS_WARRIOR: // 39511,40997,40998,41002,41005,41011
{
uint32 RandomSpell[]={39511,40997,40998,41002,41005,41011};
triggered_spell_id = RandomSpell[ irand(0, sizeof(RandomSpell)/sizeof(uint32) - 1) ];
break;
}
case CLASS_PRIEST: // 40999,41002,41005,41009,41011,41406,41409
case CLASS_SHAMAN: // 40999,41002,41005,41009,41011,41406,41409
case CLASS_MAGE: // 40999,41002,41005,41009,41011,41406,41409
case CLASS_WARLOCK: // 40999,41002,41005,41009,41011,41406,41409
{
uint32 RandomSpell[]={40999,41002,41005,41009,41011,41406,41409};
triggered_spell_id = RandomSpell[ irand(0, sizeof(RandomSpell)/sizeof(uint32) - 1) ];
break;
}
case CLASS_HUNTER: // 40997,40999,41002,41005,41009,41011,41406,41409
{
uint32 RandomSpell[]={40997,40999,41002,41005,41009,41011,41406,41409};
triggered_spell_id = RandomSpell[ irand(0, sizeof(RandomSpell)/sizeof(uint32) - 1) ];
break;
}
default:
return SPELL_AURA_PROC_FAILED;
}
target = this;
if (roll_chance_i(10))
((Player*)this)->Say("This is Madness!", LANG_UNIVERSAL);
break;
}
// Sunwell Exalted Caster Neck (Shattered Sun Pendant of Acumen neck)
// cast 45479 Light's Wrath if Exalted by Aldor
// cast 45429 Arcane Bolt if Exalted by Scryers
case 45481:
{
if(GetTypeId() != TYPEID_PLAYER)
return SPELL_AURA_PROC_FAILED;
// Get Aldor reputation rank
if (((Player *)this)->GetReputationRank(932) == REP_EXALTED)
{
target = this;
triggered_spell_id = 45479;
break;
}
// Get Scryers reputation rank
if (((Player *)this)->GetReputationRank(934) == REP_EXALTED)
{
                        // also triggered by positive/self casts; in that case use the current attack target
if(IsFriendlyTo(target))
{
target = getVictim();
if(!target)
{
target = ObjectAccessor::GetUnit(*this,((Player *)this)->GetSelectionGuid());
if(!target)
return SPELL_AURA_PROC_FAILED;
}
if(IsFriendlyTo(target))
return SPELL_AURA_PROC_FAILED;
}
triggered_spell_id = 45429;
break;
}
return SPELL_AURA_PROC_FAILED;
}
// Sunwell Exalted Melee Neck (Shattered Sun Pendant of Might neck)
// cast 45480 Light's Strength if Exalted by Aldor
// cast 45428 Arcane Strike if Exalted by Scryers
case 45482:
{
if(GetTypeId() != TYPEID_PLAYER)
return SPELL_AURA_PROC_FAILED;
// Get Aldor reputation rank
if (((Player *)this)->GetReputationRank(932) == REP_EXALTED)
{
target = this;
triggered_spell_id = 45480;
break;
}
// Get Scryers reputation rank
if (((Player *)this)->GetReputationRank(934) == REP_EXALTED)
{
triggered_spell_id = 45428;
break;
}
return SPELL_AURA_PROC_FAILED;
}
// Sunwell Exalted Tank Neck (Shattered Sun Pendant of Resolve neck)
// cast 45431 Arcane Insight if Exalted by Aldor
// cast 45432 Light's Ward if Exalted by Scryers
case 45483:
{
if(GetTypeId() != TYPEID_PLAYER)
return SPELL_AURA_PROC_FAILED;
// Get Aldor reputation rank
if (((Player *)this)->GetReputationRank(932) == REP_EXALTED)
{
target = this;
triggered_spell_id = 45432;
break;
}
// Get Scryers reputation rank
if (((Player *)this)->GetReputationRank(934) == REP_EXALTED)
{
target = this;
triggered_spell_id = 45431;
break;
}
return SPELL_AURA_PROC_FAILED;
}
// Sunwell Exalted Healer Neck (Shattered Sun Pendant of Restoration neck)
// cast 45478 Light's Salvation if Exalted by Aldor
// cast 45430 Arcane Surge if Exalted by Scryers
case 45484:
{
if(GetTypeId() != TYPEID_PLAYER)
return SPELL_AURA_PROC_FAILED;
// Get Aldor reputation rank
if (((Player *)this)->GetReputationRank(932) == REP_EXALTED)
{
target = this;
triggered_spell_id = 45478;
break;
}
// Get Scryers reputation rank
if (((Player *)this)->GetReputationRank(934) == REP_EXALTED)
{
triggered_spell_id = 45430;
break;
}
return SPELL_AURA_PROC_FAILED;
}
/*
// Sunwell Exalted Caster Neck (??? neck)
// cast ??? Light's Wrath if Exalted by Aldor
// cast ??? Arcane Bolt if Exalted by Scryers*/
case 46569:
return SPELL_AURA_PROC_FAILED; // old unused version
// Living Seed
case 48504:
{
triggered_spell_id = 48503;
basepoints[0] = triggerAmount;
target = this;
break;
}
// Health Leech (used by Bloodworms)
case 50453:
{
Unit *owner = GetOwner();
if (!owner)
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 50454;
basepoints[0] = int32(damage*1.69);
target = owner;
break;
}
// Vampiric Touch (generic, used by some boss)
case 52723:
case 60501:
{
triggered_spell_id = 52724;
basepoints[0] = damage / 2;
target = this;
break;
}
// Shadowfiend Death (Gain mana if pet dies with Glyph of Shadowfiend)
case 57989:
{
Unit *owner = GetOwner();
if (!owner || owner->GetTypeId() != TYPEID_PLAYER)
return SPELL_AURA_PROC_FAILED;
// Glyph of Shadowfiend (need cast as self cast for owner, no hidden cooldown)
owner->CastSpell(owner,58227,true,castItem,triggeredByAura);
return SPELL_AURA_PROC_OK;
}
// Kill Command, pet aura
case 58914:
{
// also decrease owner buff stack
if (Unit* owner = GetOwner())
owner->RemoveAuraHolderFromStack(34027);
// Remove only single aura from stack
if (triggeredByAura->GetStackAmount() > 1 && !triggeredByAura->GetHolder()->ModStackAmount(-1))
return SPELL_AURA_PROC_CANT_TRIGGER;
                    break;
                }
// Glyph of Life Tap
case 63320:
triggered_spell_id = 63321;
break;
// Shiny Shard of the Scale - Equip Effect
case 69739:
// Cauterizing Heal or Searing Flame
triggered_spell_id = (procFlag & PROC_FLAG_SUCCESSFUL_POSITIVE_SPELL) ? 69734 : 69730;
break;
// Purified Shard of the Scale - Equip Effect
case 69755:
// Cauterizing Heal or Searing Flame
triggered_spell_id = (procFlag & PROC_FLAG_SUCCESSFUL_POSITIVE_SPELL) ? 69733 : 69729;
break;
case 70871:
                    // Soul of the Blood Queen
triggered_spell_id = 70872;
basepoints[0] = int32(triggerAmount* damage /100);
if (basepoints[0] < 0)
return SPELL_AURA_PROC_FAILED;
break;
// Glyph of Shadowflame
case 63310:
{
triggered_spell_id = 63311;
break;
}
// Item - Shadowmourne Legendary
case 71903:
{
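                    // each proc adds a Soul Fragment (71905) stack: the low visual is applied with the
                    // first stack, the high visual at six stacks, and at the full stack count the
                    // fragments are consumed to cast Chaos Bane (71904)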
if (!roll_chance_i(triggerAmount))
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 71905; // Soul Fragment
SpellAuraHolder *aurHolder = GetSpellAuraHolder(triggered_spell_id);
                    // about to be added as the first stack
if (!aurHolder)
CastSpell(this, 72521, true); // Shadowmourne Visual Low
// half stack
else if (aurHolder->GetStackAmount() + 1 == 6)
CastSpell(this, 72523, true); // Shadowmourne Visual High
// full stack
else if (aurHolder->GetStackAmount() + 1 >= aurHolder->GetSpellProto()->StackAmount)
{
RemoveAurasDueToSpell(triggered_spell_id);
CastSpell(this, 71904, true); // Chaos Bane
return SPELL_AURA_PROC_OK;
}
break;
}
// Deathbringer's Will (Item - Icecrown 25 Normal Melee Trinket)
//=====================================================
// 71492 Speed of the Vrykul: +600 haste rating (Death Knight, Druid, Paladin, Rogue, Warrior, Shaman)
// 71485 Agility of the Vrykul: +600 agility (Druid, Hunter, Rogue, Shaman)
// 71486 Power of the Taunka: +1200 attack power (Hunter, Rogue, Shaman)
// 71484 Strength of the Taunka: +600 strength (Death Knight, Druid, Paladin, Warrior)
// 71491 Aim of the Iron Dwarves: +600 critical strike rating (Death Knight, Hunter, Paladin, Warrior)
case 71519:
{
if(GetTypeId() != TYPEID_PLAYER)
return SPELL_AURA_PROC_FAILED;
if(HasAura(71491) || HasAura(71484) || HasAura(71492) || HasAura(71486) || HasAura(71485))
return SPELL_AURA_PROC_FAILED;
// Select class defined buff
switch (getClass())
{
case CLASS_PALADIN:
{
uint32 RandomSpell[]={71492,71484,71491};
triggered_spell_id = RandomSpell[ irand(0, sizeof(RandomSpell)/sizeof(uint32) - 1) ];
break;
}
case CLASS_DRUID:
{
uint32 RandomSpell[]={71492,71485,71484};
triggered_spell_id = RandomSpell[ irand(0, sizeof(RandomSpell)/sizeof(uint32) - 1) ];
break;
}
case CLASS_ROGUE:
{
uint32 RandomSpell[]={71492,71485,71486};
triggered_spell_id = RandomSpell[ irand(0, sizeof(RandomSpell)/sizeof(uint32) - 1) ];
break;
}
case CLASS_WARRIOR:
{
uint32 RandomSpell[]={71492,71484,71491};
triggered_spell_id = RandomSpell[ irand(0, sizeof(RandomSpell)/sizeof(uint32) - 1) ];
break;
}
case CLASS_SHAMAN:
{
uint32 RandomSpell[]={71485,71486,71492};
triggered_spell_id = RandomSpell[ irand(0, sizeof(RandomSpell)/sizeof(uint32) - 1) ];
break;
}
case CLASS_HUNTER:
{
uint32 RandomSpell[]={71485,71486,71491};
triggered_spell_id = RandomSpell[ irand(0, sizeof(RandomSpell)/sizeof(uint32) - 1) ];
break;
}
case CLASS_DEATH_KNIGHT:
{
uint32 RandomSpell[]={71484,71492,71491};
triggered_spell_id = RandomSpell[ irand(0, sizeof(RandomSpell)/sizeof(uint32) - 1) ];
break;
}
default:
return SPELL_AURA_PROC_FAILED;
}
break;
}
// Deathbringer's Will (Item - Icecrown 25 Heroic Melee Trinket)
//=====================================================
// 71560 Speed of the Vrykul: +700 haste rating (Death Knight, Druid, Paladin, Rogue, Warrior, Shaman)
// 71556 Agility of the Vrykul: +700 agility (Druid, Hunter, Rogue, Shaman)
// 71558 Power of the Taunka: +1400 attack power (Hunter, Rogue, Shaman)
// 71561 Strength of the Taunka: +700 strength (Death Knight, Druid, Paladin, Warrior)
// 71559 Aim of the Iron Dwarves: +700 critical strike rating (Death Knight, Hunter, Paladin, Warrior)
case 71562:
{
if(GetTypeId() != TYPEID_PLAYER)
return SPELL_AURA_PROC_FAILED;
if(HasAura(71559) || HasAura(71561) || HasAura(71560) || HasAura(71556) || HasAura(71558))
return SPELL_AURA_PROC_FAILED;
// Select class defined buff
switch (getClass())
{
case CLASS_PALADIN:
{
uint32 RandomSpell[]={71560,71561,71559};
triggered_spell_id = RandomSpell[ irand(0, sizeof(RandomSpell)/sizeof(uint32) - 1) ];
break;
}
case CLASS_DRUID:
{
uint32 RandomSpell[]={71560,71556,71561};
triggered_spell_id = RandomSpell[ irand(0, sizeof(RandomSpell)/sizeof(uint32) - 1) ];
break;
}
case CLASS_ROGUE:
{
uint32 RandomSpell[]={71560,71556,71558,};
triggered_spell_id = RandomSpell[ irand(0, sizeof(RandomSpell)/sizeof(uint32) - 1) ];
break;
}
case CLASS_WARRIOR:
{
uint32 RandomSpell[]={71560,71561,71559,};
triggered_spell_id = RandomSpell[ irand(0, sizeof(RandomSpell)/sizeof(uint32) - 1) ];
break;
}
case CLASS_SHAMAN:
{
uint32 RandomSpell[]={71556,71558,71560};
triggered_spell_id = RandomSpell[ irand(0, sizeof(RandomSpell)/sizeof(uint32) - 1) ];
break;
}
case CLASS_HUNTER:
{
uint32 RandomSpell[]={71556,71558,71559};
triggered_spell_id = RandomSpell[ irand(0, sizeof(RandomSpell)/sizeof(uint32) - 1) ];
break;
}
case CLASS_DEATH_KNIGHT:
{
uint32 RandomSpell[]={71561,71560,71559};
triggered_spell_id = RandomSpell[ irand(0, sizeof(RandomSpell)/sizeof(uint32) - 1) ];
break;
}
default:
return SPELL_AURA_PROC_FAILED;
}
break;
}
// Necrotic Touch item 50692
case 71875:
case 71877:
{
basepoints[0] = damage * triggerAmount / 100;
target = pVictim;
triggered_spell_id = 71879;
break;
}
}
break;
}
case SPELLFAMILY_MAGE:
{
// Magic Absorption
            if (dummySpell->SpellIconID == 459) // only this spell has SpellIconID == 459 with a dummy aura
{
if (getPowerType() != POWER_MANA)
return SPELL_AURA_PROC_FAILED;
// mana reward
basepoints[0] = (triggerAmount * GetMaxPower(POWER_MANA) / 100);
target = this;
triggered_spell_id = 29442;
break;
}
// Master of Elements
if (dummySpell->SpellIconID == 1920)
{
if(!procSpell)
return SPELL_AURA_PROC_FAILED;
// mana cost save
int32 cost = procSpell->manaCost + procSpell->ManaCostPercentage * GetCreateMana() / 100;
basepoints[0] = cost * triggerAmount/100;
if (basepoints[0] <=0)
return SPELL_AURA_PROC_FAILED;
target = this;
triggered_spell_id = 29077;
break;
}
// Arcane Potency
if (dummySpell->SpellIconID == 2120)
{
if(!procSpell || procSpell->Id == 44401)
return SPELL_AURA_PROC_FAILED;
target = this;
switch (dummySpell->Id)
{
case 31571: triggered_spell_id = 57529; break;
case 31572: triggered_spell_id = 57531; break;
default:
sLog.outError("Unit::HandleDummyAuraProc: non handled spell id: %u",dummySpell->Id);
return SPELL_AURA_PROC_FAILED;
}
break;
}
// Hot Streak
if (dummySpell->SpellIconID == 2999)
{
if (effIndex != EFFECT_INDEX_0)
return SPELL_AURA_PROC_OK;
Aura *counter = GetAura(triggeredByAura->GetId(), EFFECT_INDEX_1);
if (!counter)
return SPELL_AURA_PROC_OK;
// Count spell criticals in a row in second aura
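                // the counter starts at 25, doubles on each spell crit and is reset to 25 after
                // any non-crit (or after a successful trigger), so reaching 100 means two crits in a row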
Modifier *mod = counter->GetModifier();
if (procEx & PROC_EX_CRITICAL_HIT)
{
mod->m_amount *=2;
if (mod->m_amount < 100) // not enough
return SPELL_AURA_PROC_OK;
// Critical counted -> roll chance
if (roll_chance_i(triggerAmount))
CastSpell(this, 48108, true, castItem, triggeredByAura);
}
mod->m_amount = 25;
return SPELL_AURA_PROC_OK;
}
// Burnout
if (dummySpell->SpellIconID == 2998)
{
if(!procSpell)
return SPELL_AURA_PROC_FAILED;
int32 cost = procSpell->manaCost + procSpell->ManaCostPercentage * GetCreateMana() / 100;
basepoints[0] = cost * triggerAmount/100;
if (basepoints[0] <=0)
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 44450;
target = this;
break;
}
// Incanter's Regalia set (add trigger chance to Mana Shield)
if (dummySpell->SpellFamilyFlags & UI64LIT(0x0000000000008000))
{
if (GetTypeId() != TYPEID_PLAYER)
return SPELL_AURA_PROC_FAILED;
target = this;
triggered_spell_id = 37436;
break;
}
switch(dummySpell->Id)
{
// Ignite
case 11119:
case 11120:
case 12846:
case 12847:
case 12848:
{
switch (dummySpell->Id)
{
case 11119: basepoints[0] = int32(0.04f*damage); break;
case 11120: basepoints[0] = int32(0.08f*damage); break;
case 12846: basepoints[0] = int32(0.12f*damage); break;
case 12847: basepoints[0] = int32(0.16f*damage); break;
case 12848: basepoints[0] = int32(0.20f*damage); break;
default:
sLog.outError("Unit::HandleDummyAuraProc: non handled spell id: %u (IG)",dummySpell->Id);
return SPELL_AURA_PROC_FAILED;
}
triggered_spell_id = 12654;
break;
}
// Empowered Fire (mana regen)
case 12654:
{
Unit* caster = triggeredByAura->GetCaster();
// it should not be triggered from other ignites
if (caster && pVictim && caster->GetGUID() == pVictim->GetGUID())
{
Unit::AuraList const& auras = caster->GetAurasByType(SPELL_AURA_ADD_FLAT_MODIFIER);
for (Unit::AuraList::const_iterator i = auras.begin(); i != auras.end(); i++)
{
switch((*i)->GetId())
{
case 31656:
case 31657:
case 31658:
{
if(roll_chance_i(int32((*i)->GetSpellProto()->procChance)))
{
caster->CastSpell(caster, 67545, true);
return SPELL_AURA_PROC_OK;
}
else
return SPELL_AURA_PROC_FAILED;
}
}
}
}
return SPELL_AURA_PROC_FAILED;
}
// Arcane Blast proc-off only from arcane school and not from self
case 36032:
{
                    if(!procSpell || procSpell->EffectTriggerSpell[1] == 36032 || GetSpellSchoolMask(procSpell) != SPELL_SCHOOL_MASK_ARCANE)
return SPELL_AURA_PROC_FAILED;
}
// Glyph of Ice Block
case 56372:
{
if (GetTypeId() != TYPEID_PLAYER)
return SPELL_AURA_PROC_FAILED;
                    // not 100% safe across client version switches, but for 3.1.3 Frost Nova is the only spell with a cooldown in this category that a mage player can have
((Player*)this)->RemoveSpellCategoryCooldown(35, true);
return SPELL_AURA_PROC_OK;
}
// Glyph of Icy Veins
case 56374:
{
Unit::AuraList const& hasteAuras = GetAurasByType(SPELL_AURA_MOD_CASTING_SPEED_NOT_STACK);
for(Unit::AuraList::const_iterator i = hasteAuras.begin(); i != hasteAuras.end();)
{
if (!IsPositiveSpell((*i)->GetId()))
{
RemoveAurasDueToSpell((*i)->GetId());
i = hasteAuras.begin();
}
else
++i;
}
RemoveSpellsCausingAura(SPELL_AURA_HASTE_SPELLS);
RemoveSpellsCausingAura(SPELL_AURA_MOD_DECREASE_SPEED);
return SPELL_AURA_PROC_OK;
}
// Glyph of Polymorph
case 56375:
{
if (!pVictim || !pVictim->isAlive())
return SPELL_AURA_PROC_FAILED;
pVictim->RemoveSpellsCausingAura(SPELL_AURA_PERIODIC_DAMAGE);
pVictim->RemoveSpellsCausingAura(SPELL_AURA_PERIODIC_DAMAGE_PERCENT);
pVictim->RemoveSpellsCausingAura(SPELL_AURA_PERIODIC_LEECH);
return SPELL_AURA_PROC_OK;
}
// Blessing of Ancient Kings
case 64411:
{
// for DOT procs
if (!IsPositiveSpell(procSpell->Id))
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 64413;
basepoints[0] = damage * 15 / 100;
break;
}
}
break;
}
case SPELLFAMILY_WARRIOR:
{
// Retaliation
if (dummySpell->SpellFamilyFlags == UI64LIT(0x0000000800000000))
{
                // check that the attack does not come from behind
if (!HasInArc(M_PI_F, pVictim))
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 22858;
break;
}
// Second Wind
if (dummySpell->SpellIconID == 1697)
{
                // only for spells and hit/crit (trigger always starts), and not from self-cast spells (e.g. 5530 Mace Stun Effect)
if (procSpell == 0 || !(procEx & (PROC_EX_NORMAL_HIT|PROC_EX_CRITICAL_HIT)) || this == pVictim)
return SPELL_AURA_PROC_FAILED;
// Need stun or root mechanic
if (!(GetAllSpellMechanicMask(procSpell) & IMMUNE_TO_ROOT_AND_STUN_MASK))
return SPELL_AURA_PROC_FAILED;
switch (dummySpell->Id)
{
case 29838: triggered_spell_id=29842; break;
case 29834: triggered_spell_id=29841; break;
case 42770: triggered_spell_id=42771; break;
default:
sLog.outError("Unit::HandleDummyAuraProc: non handled spell id: %u (SW)",dummySpell->Id);
return SPELL_AURA_PROC_FAILED;
}
target = this;
break;
}
// Damage Shield
if (dummySpell->SpellIconID == 3214)
{
triggered_spell_id = 59653;
basepoints[0] = GetShieldBlockValue() * triggerAmount / 100;
break;
}
// Sweeping Strikes
if (dummySpell->Id == 12328)
{
// prevent chain of triggered spell from same triggered spell
if(procSpell && procSpell->Id == 26654)
return SPELL_AURA_PROC_FAILED;
target = SelectRandomUnfriendlyTarget(pVictim);
if(!target)
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 26654;
break;
}
// Glyph of Sunder Armor
if (dummySpell->Id == 58387)
{
if (!procSpell)
return SPELL_AURA_PROC_FAILED;
target = SelectRandomUnfriendlyTarget(pVictim);
if (!target)
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 58567;
break;
}
break;
}
case SPELLFAMILY_WARLOCK:
{
// Seed of Corruption
if (dummySpell->SpellFamilyFlags & UI64LIT(0x0000001000000000))
{
Modifier* mod = triggeredByAura->GetModifier();
                // if the accumulated damage threshold is reached, or the target would die from this damage, cast the finish spell
if( mod->m_amount <= (int32)damage || GetHealth() <= damage )
{
// remember guid before aura delete
ObjectGuid casterGuid = triggeredByAura->GetCasterGuid();
// Remove aura (before cast for prevent infinite loop handlers)
RemoveAurasDueToSpell(triggeredByAura->GetId());
// Cast finish spell (triggeredByAura already not exist!)
CastSpell(this, 27285, true, castItem, NULL, casterGuid);
return SPELL_AURA_PROC_OK; // no hidden cooldown
}
// Damage counting
mod->m_amount-=damage;
return SPELL_AURA_PROC_OK;
}
// Seed of Corruption (Mobs cast) - no die req
if (dummySpell->SpellFamilyFlags == UI64LIT(0x0) && dummySpell->SpellIconID == 1932)
{
Modifier* mod = triggeredByAura->GetModifier();
                // if the accumulated damage threshold is reached, cast the finish spell
if( mod->m_amount <= (int32)damage )
{
// remember guid before aura delete
ObjectGuid casterGuid = triggeredByAura->GetCasterGuid();
// Remove aura (before cast for prevent infinite loop handlers)
RemoveAurasDueToSpell(triggeredByAura->GetId());
// Cast finish spell (triggeredByAura already not exist!)
CastSpell(this, 32865, true, castItem, NULL, casterGuid);
return SPELL_AURA_PROC_OK; // no hidden cooldown
}
// Damage counting
mod->m_amount-=damage;
return SPELL_AURA_PROC_OK;
}
// Fel Synergy
if (dummySpell->SpellIconID == 3222)
{
target = GetPet();
if (!target)
return SPELL_AURA_PROC_FAILED;
basepoints[0] = damage * triggerAmount / 100;
triggered_spell_id = 54181;
break;
}
switch(dummySpell->Id)
{
// Nightfall & Glyph of Corruption
case 18094:
case 18095:
case 56218:
{
target = this;
triggered_spell_id = 17941;
break;
}
//Soul Leech
case 30293:
case 30295:
case 30296:
{
// health
basepoints[0] = int32(damage*triggerAmount/100);
target = this;
triggered_spell_id = 30294;
// check for Improved Soul Leech
AuraList const& pDummyAuras = GetAurasByType(SPELL_AURA_DUMMY);
for (AuraList::const_iterator itr = pDummyAuras.begin(); itr != pDummyAuras.end(); ++itr)
{
SpellEntry const* spellInfo = (*itr)->GetSpellProto();
if (spellInfo->SpellFamilyName != SPELLFAMILY_WARLOCK || (*itr)->GetSpellProto()->SpellIconID != 3176)
continue;
if ((*itr)->GetEffIndex() == SpellEffectIndex(0))
{
// energize Proc pet (implicit target is pet)
CastCustomSpell(this, 59118, &((*itr)->GetModifier()->m_amount), NULL, NULL, true, NULL, (*itr));
// energize Proc master
CastCustomSpell(this, 59117, &((*itr)->GetModifier()->m_amount), NULL, NULL, true, NULL, (*itr));
}
else if (roll_chance_i((*itr)->GetModifier()->m_amount))
{
// Replenishment proc
CastSpell(this, 57669, true, NULL, (*itr));
}
}
break;
}
// Shadowflame (Voidheart Raiment set bonus)
case 37377:
{
triggered_spell_id = 37379;
break;
}
// Pet Healing (Corruptor Raiment or Rift Stalker Armor)
case 37381:
{
target = GetPet();
if (!target)
return SPELL_AURA_PROC_FAILED;
// heal amount
basepoints[0] = damage * triggerAmount/100;
triggered_spell_id = 37382;
break;
}
// Shadowflame Hellfire (Voidheart Raiment set bonus)
case 39437:
{
triggered_spell_id = 37378;
break;
}
// Siphon Life
case 63108:
{
// Glyph of Siphon Life
if (Aura *aur = GetAura(56216, EFFECT_INDEX_0))
triggerAmount += triggerAmount * aur->GetModifier()->m_amount / 100;
basepoints[0] = int32(damage * triggerAmount / 100);
triggered_spell_id = 63106;
break;
}
}
break;
}<|fim▁hole|> case SPELLFAMILY_PRIEST:
{
// Vampiric Touch
if (dummySpell->SpellFamilyFlags & UI64LIT(0x0000040000000000))
{
if (!pVictim || !pVictim->isAlive())
return SPELL_AURA_PROC_FAILED;
// pVictim is caster of aura
if (triggeredByAura->GetCasterGuid() != pVictim->GetObjectGuid())
return SPELL_AURA_PROC_FAILED;
// Energize 0.25% of max. mana
pVictim->CastSpell(pVictim, 57669, true, castItem, triggeredByAura);
return SPELL_AURA_PROC_OK; // no hidden cooldown
}
switch(dummySpell->SpellIconID)
{
// Improved Shadowform
case 217:
{
if(!roll_chance_i(triggerAmount))
return SPELL_AURA_PROC_FAILED;
RemoveSpellsCausingAura(SPELL_AURA_MOD_ROOT);
RemoveSpellsCausingAura(SPELL_AURA_MOD_DECREASE_SPEED);
break;
}
// Divine Aegis
case 2820:
{
if(!pVictim || !pVictim->isAlive())
return SPELL_AURA_PROC_FAILED;
// find Divine Aegis on the target and get absorb amount
Aura* DivineAegis = pVictim->GetAura(47753,EFFECT_INDEX_0);
if (DivineAegis)
basepoints[0] = DivineAegis->GetModifier()->m_amount;
basepoints[0] += damage * triggerAmount/100;
// limit absorb amount
int32 levelbonus = pVictim->getLevel()*125;
if (basepoints[0] > levelbonus)
basepoints[0] = levelbonus;
triggered_spell_id = 47753;
break;
}
// Empowered Renew
case 3021:
{
if (!procSpell)
return SPELL_AURA_PROC_FAILED;
// Renew
Aura* healingAura = pVictim->GetAura(SPELL_AURA_PERIODIC_HEAL, SPELLFAMILY_PRIEST, UI64LIT(0x40), 0, GetGUID());
if (!healingAura)
return SPELL_AURA_PROC_FAILED;
int32 healingfromticks = healingAura->GetModifier()->m_amount * GetSpellAuraMaxTicks(procSpell);
basepoints[0] = healingfromticks * triggerAmount / 100;
triggered_spell_id = 63544;
break;
}
// Improved Devouring Plague
case 3790:
{
if (!procSpell)
return SPELL_AURA_PROC_FAILED;
Aura* leachAura = pVictim->GetAura(SPELL_AURA_PERIODIC_LEECH, SPELLFAMILY_PRIEST, UI64LIT(0x02000000), 0, GetGUID());
if (!leachAura)
return SPELL_AURA_PROC_FAILED;
int32 damagefromticks = leachAura->GetModifier()->m_amount * GetSpellAuraMaxTicks(procSpell);
basepoints[0] = damagefromticks * triggerAmount / 100;
triggered_spell_id = 63675;
break;
}
}
switch(dummySpell->Id)
{
// Vampiric Embrace
case 15286:
{
// Return if self damage
if (this == pVictim)
return SPELL_AURA_PROC_FAILED;
// Heal amount - Self/Team
int32 team = triggerAmount*damage/500;
int32 self = triggerAmount*damage/100 - team;
CastCustomSpell(this,15290,&team,&self,NULL,true,castItem,triggeredByAura);
return SPELL_AURA_PROC_OK; // no hidden cooldown
}
// Priest Tier 6 Trinket (Ashtongue Talisman of Acumen)
case 40438:
{
// Shadow Word: Pain
if (procSpell->SpellFamilyFlags & UI64LIT(0x0000000000008000))
triggered_spell_id = 40441;
// Renew
else if (procSpell->SpellFamilyFlags & UI64LIT(0x0000000000000010))
triggered_spell_id = 40440;
else
return SPELL_AURA_PROC_FAILED;
target = this;
break;
}
// Oracle Healing Bonus ("Garments of the Oracle" set)
case 26169:
{
// heal amount
basepoints[0] = int32(damage * 10/100);
target = this;
triggered_spell_id = 26170;
break;
}
                // Frozen Shadoweave (Shadow's Embrace set) warning! it is not a priest-only set
case 39372:
{
if(!procSpell || (GetSpellSchoolMask(procSpell) & (SPELL_SCHOOL_MASK_FROST | SPELL_SCHOOL_MASK_SHADOW))==0 )
return SPELL_AURA_PROC_FAILED;
// heal amount
basepoints[0] = damage * triggerAmount/100;
target = this;
triggered_spell_id = 39373;
break;
}
// Greater Heal (Vestments of Faith (Priest Tier 3) - 4 pieces bonus)
case 28809:
{
triggered_spell_id = 28810;
break;
}
// Glyph of Dispel Magic
case 55677:
{
if(!target->IsFriendlyTo(this))
return SPELL_AURA_PROC_FAILED;
if (target->GetTypeId() == TYPEID_PLAYER)
basepoints[0] = int32(target->GetMaxHealth() * triggerAmount / 100);
else if (Unit* caster = triggeredByAura->GetCaster())
basepoints[0] = int32(caster->GetMaxHealth() * triggerAmount / 100);
// triggered_spell_id in spell data
break;
}
// Item - Priest T10 Healer 4P Bonus
case 70799:
{
if (GetTypeId() != TYPEID_PLAYER)
return SPELL_AURA_PROC_FAILED;
// Circle of Healing
((Player*)this)->RemoveSpellCategoryCooldown(1204, true);
// Penance
((Player*)this)->RemoveSpellCategoryCooldown(1230, true);
return SPELL_AURA_PROC_OK;
}
// Glyph of Prayer of Healing
case 55680:
{
basepoints[0] = int32(damage * triggerAmount / 200); // 10% each tick
triggered_spell_id = 56161;
break;
}
}
break;
}
case SPELLFAMILY_DRUID:
{
switch(dummySpell->Id)
{
// Leader of the Pack
case 24932:
{
                    // dummy m_amount stores the heal percent (!=0 if Improved Leader of the Pack is applied)
int32 heal_percent = triggeredByAura->GetModifier()->m_amount;
if (!heal_percent)
return SPELL_AURA_PROC_FAILED;
                    // check explicitly to prevent the mana cast while the health cast is on cooldown
if (cooldown && ((Player*)this)->HasSpellCooldown(34299))
return SPELL_AURA_PROC_FAILED;
// health
triggered_spell_id = 34299;
basepoints[0] = GetMaxHealth() * heal_percent / 100;
target = this;
// mana to caster
if (triggeredByAura->GetCasterGuid() == GetObjectGuid())
{
if (SpellEntry const* manaCastEntry = sSpellStore.LookupEntry(60889))
{
int32 mana_percent = manaCastEntry->CalculateSimpleValue(EFFECT_INDEX_0) * heal_percent;
CastCustomSpell(this, manaCastEntry, &mana_percent, NULL, NULL, true, castItem, triggeredByAura);
}
}
break;
}
// Healing Touch (Dreamwalker Raiment set)
case 28719:
{
// mana back
basepoints[0] = int32(procSpell->manaCost * 30 / 100);
target = this;
triggered_spell_id = 28742;
break;
}
// Healing Touch Refund (Idol of Longevity trinket)
case 28847:
{
target = this;
triggered_spell_id = 28848;
break;
}
// Mana Restore (Malorne Raiment set / Malorne Regalia set)
case 37288:
case 37295:
{
target = this;
triggered_spell_id = 37238;
break;
}
// Druid Tier 6 Trinket
case 40442:
{
float chance;
// Starfire
if (procSpell->SpellFamilyFlags & UI64LIT(0x0000000000000004))
{
triggered_spell_id = 40445;
chance = 25.0f;
}
// Rejuvenation
else if (procSpell->SpellFamilyFlags & UI64LIT(0x0000000000000010))
{
triggered_spell_id = 40446;
chance = 25.0f;
}
// Mangle (Bear) and Mangle (Cat)
else if (procSpell->SpellFamilyFlags & UI64LIT(0x0000044000000000))
{
triggered_spell_id = 40452;
chance = 40.0f;
}
else
return SPELL_AURA_PROC_FAILED;
if (!roll_chance_f(chance))
return SPELL_AURA_PROC_FAILED;
target = this;
break;
}
// Maim Interrupt
case 44835:
{
// Deadly Interrupt Effect
triggered_spell_id = 32747;
break;
}
// Glyph of Starfire
case 54845:
{
triggered_spell_id = 54846;
break;
}
// Glyph of Shred
case 54815:
{
triggered_spell_id = 63974;
break;
}
// Glyph of Rejuvenation
case 54754:
{
// less 50% health
if (pVictim->GetMaxHealth() < 2 * pVictim->GetHealth())
return SPELL_AURA_PROC_FAILED;
basepoints[0] = triggerAmount * damage / 100;
triggered_spell_id = 54755;
break;
}
// Glyph of Rake
case 54821:
{
triggered_spell_id = 54820;
break;
}
// Item - Druid T10 Restoration 4P Bonus (Rejuvenation)
case 70664:
{
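                    // re-cast the Rejuvenation that just ticked onto a second random friendly
                    // target within the (spell-mod adjusted) radius of the spell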
if (!procSpell || GetTypeId() != TYPEID_PLAYER)
return SPELL_AURA_PROC_FAILED;
float radius;
if (procSpell->EffectRadiusIndex[EFFECT_INDEX_0])
radius = GetSpellRadius(sSpellRadiusStore.LookupEntry(procSpell->EffectRadiusIndex[EFFECT_INDEX_0]));
else
radius = GetSpellMaxRange(sSpellRangeStore.LookupEntry(procSpell->rangeIndex));
((Player*)this)->ApplySpellMod(procSpell->Id, SPELLMOD_RADIUS, radius,NULL);
Unit *second = pVictim->SelectRandomFriendlyTarget(pVictim, radius);
if (!second)
return SPELL_AURA_PROC_FAILED;
pVictim->CastSpell(second, procSpell, true, NULL, triggeredByAura, GetGUID());
return SPELL_AURA_PROC_OK;
}
// Item - Druid T10 Balance 4P Bonus
case 70723:
{
basepoints[0] = int32( triggerAmount * damage / 100 );
basepoints[0] = int32( basepoints[0] / 2);
triggered_spell_id = 71023;
break;
}
}
// King of the Jungle
if (dummySpell->SpellIconID == 2850)
{
switch (effIndex)
{
case EFFECT_INDEX_0: // Enrage (bear)
{
// note : aura removal is done in SpellAuraHolder::HandleSpellSpecificBoosts
basepoints[0] = triggerAmount;
triggered_spell_id = 51185;
break;
}
case EFFECT_INDEX_1: // Tiger's Fury (cat)
{
basepoints[0] = triggerAmount;
triggered_spell_id = 51178;
break;
}
default:
return SPELL_AURA_PROC_FAILED;
}
}
// Eclipse
else if (dummySpell->SpellIconID == 2856)
{
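                // Eclipse: a Wrath crit has a 60% chance to trigger the Starfire bonus 48518 (blocked
                // while 48517 is active); a Starfire crit triggers the Wrath bonus 48517 (blocked while 48518 is active)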
if (!procSpell)
return SPELL_AURA_PROC_FAILED;
// Wrath crit
if (procSpell->SpellFamilyFlags & UI64LIT(0x0000000000000001))
{
if (HasAura(48517))
return SPELL_AURA_PROC_FAILED;
if (!roll_chance_i(60))
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 48518;
target = this;
break;
}
// Starfire crit
if (procSpell->SpellFamilyFlags & UI64LIT(0x0000000000000004))
{
if (HasAura(48518))
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 48517;
target = this;
break;
}
return SPELL_AURA_PROC_FAILED;
}
// Living Seed
else if (dummySpell->SpellIconID == 2860)
{
triggered_spell_id = 48504;
basepoints[0] = triggerAmount * damage / 100;
break;
}
break;
}
case SPELLFAMILY_ROGUE:
{
switch(dummySpell->Id)
{
// Clean Escape
case 23582:
                    // the triggered spell has the same masks etc. as the main Vanish spell
if (!procSpell || procSpell->Effect[EFFECT_INDEX_0] == SPELL_EFFECT_NONE)
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 23583;
break;
// Deadly Throw Interrupt
case 32748:
{
// Prevent cast Deadly Throw Interrupt on self from last effect (apply dummy) of Deadly Throw
if (this == pVictim)
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 32747;
break;
}
// Glyph of Backstab
case 56800:
{
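                    // extend the caster's Rupture (rogue periodic damage, family flag 0x00100000) on the
                    // target by triggerAmount seconds, capped at the base maximum duration plus three
                    // extensions (plus 4 more seconds if aura 56801 is present)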
if (Aura* aura = target->GetAura(SPELL_AURA_PERIODIC_DAMAGE, SPELLFAMILY_ROGUE, UI64LIT(0x00100000), 0, GetGUID()))
{
uint32 countMin = aura->GetAuraMaxDuration();
uint32 countMax = GetSpellMaxDuration(aura->GetSpellProto());
countMax += 3 * triggerAmount * 1000;
countMax += HasAura(56801) ? 4000 : 0;
if (countMin < countMax)
{
aura->SetAuraDuration(aura->GetAuraDuration() + triggerAmount * 1000);
aura->SetAuraMaxDuration(countMin + triggerAmount * 1000);
aura->GetHolder()->SendAuraUpdate(false);
return SPELL_AURA_PROC_OK;
}
}
return SPELL_AURA_PROC_FAILED;
}
// Tricks of the trade
case 57934:
{
triggered_spell_id = 57933; // Tricks of the Trade, increased damage buff
target = getHostileRefManager().GetThreatRedirectionTarget();
if (!target)
return SPELL_AURA_PROC_FAILED;
CastSpell(this, 59628, true); // Tricks of the Trade (caster timer)
break;
}
}
// Cut to the Chase
if (dummySpell->SpellIconID == 2909)
{
// "refresh your Slice and Dice duration to its 5 combo point maximum"
// lookup Slice and Dice
AuraList const& sd = GetAurasByType(SPELL_AURA_MOD_MELEE_HASTE);
for(AuraList::const_iterator itr = sd.begin(); itr != sd.end(); ++itr)
{
SpellEntry const *spellProto = (*itr)->GetSpellProto();
if (spellProto->SpellFamilyName == SPELLFAMILY_ROGUE &&
(spellProto->SpellFamilyFlags & UI64LIT(0x0000000000040000)))
{
int32 duration = GetSpellMaxDuration(spellProto);
if(GetTypeId() == TYPEID_PLAYER)
static_cast<Player*>(this)->ApplySpellMod(spellProto->Id, SPELLMOD_DURATION, duration);
(*itr)->SetAuraMaxDuration(duration);
(*itr)->GetHolder()->RefreshHolder();
return SPELL_AURA_PROC_OK;
}
}
return SPELL_AURA_PROC_FAILED;
}
// Deadly Brew
if (dummySpell->SpellIconID == 2963)
{
triggered_spell_id = 44289;
break;
}
// Quick Recovery
if (dummySpell->SpellIconID == 2116)
{
if(!procSpell)
return SPELL_AURA_PROC_FAILED;
//do not proc from spells that do not need combo points
if(!NeedsComboPoints(procSpell))
return SPELL_AURA_PROC_FAILED;
// energy cost save
basepoints[0] = procSpell->manaCost * triggerAmount/100;
if (basepoints[0] <= 0)
return SPELL_AURA_PROC_FAILED;
target = this;
triggered_spell_id = 31663;
break;
}
break;
}
case SPELLFAMILY_HUNTER:
{
// Thrill of the Hunt
if (dummySpell->SpellIconID == 2236)
{
if (!procSpell)
return SPELL_AURA_PROC_FAILED;
// mana cost save
int32 mana = procSpell->manaCost + procSpell->ManaCostPercentage * GetCreateMana() / 100;
basepoints[0] = mana * 40/100;
if (basepoints[0] <= 0)
return SPELL_AURA_PROC_FAILED;
target = this;
triggered_spell_id = 34720;
break;
}
// Hunting Party
if (dummySpell->SpellIconID == 3406)
{
triggered_spell_id = 57669;
target = this;
break;
}
// Lock and Load
if ( dummySpell->SpellIconID == 3579 )
{
// Proc only from periodic (from trap activation proc another aura of this spell)
if (!(procFlag & PROC_FLAG_ON_DO_PERIODIC) || !roll_chance_i(triggerAmount))
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 56453;
target = this;
break;
}
// Rapid Recuperation
if ( dummySpell->SpellIconID == 3560 )
{
// This effect only from Rapid Killing (mana regen)
if (!(procSpell->SpellFamilyFlags & UI64LIT(0x0100000000000000)))
return SPELL_AURA_PROC_FAILED;
target = this;
switch(dummySpell->Id)
{
case 53228: // Rank 1
triggered_spell_id = 56654;
break;
case 53232: // Rank 2
triggered_spell_id = 58882;
break;
}
break;
}
// Glyph of Mend Pet
if(dummySpell->Id == 57870)
{
pVictim->CastSpell(pVictim, 57894, true, NULL, NULL, GetGUID());
return SPELL_AURA_PROC_OK;
}
// Misdirection
else if(dummySpell->Id == 34477)
{
triggered_spell_id = 35079; // 4 sec buff on self
target = this;
break;
}
// Guard Dog
else if (dummySpell->SpellIconID == 201 && procSpell->SpellIconID == 201)
{
triggered_spell_id = 54445;
target = this;
if (pVictim)
pVictim->AddThreat(this,procSpell->EffectBasePoints[0] * triggerAmount / 100.0f);
break;
}
break;
}
case SPELLFAMILY_PALADIN:
{
// Seal of Righteousness - melee proc dummy (addition ${$MWS*(0.022*$AP+0.044*$SPH)} damage)
if ((dummySpell->SpellFamilyFlags & UI64LIT(0x000000008000000)) && effIndex == EFFECT_INDEX_0)
{
triggered_spell_id = 25742;
float ap = GetTotalAttackPowerValue(BASE_ATTACK);
int32 holy = SpellBaseDamageBonusDone(SPELL_SCHOOL_MASK_HOLY);
if (holy < 0)
holy = 0;
basepoints[0] = GetAttackTime(BASE_ATTACK) * int32(ap*0.022f + 0.044f * holy) / 1000;
break;
}
// Righteous Vengeance
if (dummySpell->SpellIconID == 3025)
{
// damage is split over 4 ticks
basepoints[0] = triggerAmount*damage/400;
triggered_spell_id = 61840;
break;
}
// Sheath of Light
if (dummySpell->SpellIconID == 3030)
{
// healing is split over 4 ticks
basepoints[0] = triggerAmount*damage/400;
triggered_spell_id = 54203;
break;
}
switch(dummySpell->Id)
{
// Judgement of Light
case 20185:
{
if (pVictim == this)
return SPELL_AURA_PROC_FAILED;
basepoints[0] = int32( pVictim->GetMaxHealth() * triggeredByAura->GetModifier()->m_amount / 100 );
pVictim->CastCustomSpell(pVictim, 20267, &basepoints[0], NULL, NULL, true, NULL, triggeredByAura);
return SPELL_AURA_PROC_OK;
}
// Judgement of Wisdom
case 20186:
{
if (pVictim->getPowerType() == POWER_MANA)
{
// 2% of maximum base mana
basepoints[0] = int32(pVictim->GetCreateMana() * 2 / 100);
pVictim->CastCustomSpell(pVictim, 20268, &basepoints[0], NULL, NULL, true, NULL, triggeredByAura);
}
return SPELL_AURA_PROC_OK;
}
// Heart of the Crusader (Rank 1)
case 20335:
triggered_spell_id = 21183;
break;
// Heart of the Crusader (Rank 2)
case 20336:
triggered_spell_id = 54498;
break;
// Heart of the Crusader (Rank 3)
case 20337:
triggered_spell_id = 54499;
break;
case 20911: // Blessing of Sanctuary
case 25899: // Greater Blessing of Sanctuary
{
target = this;
switch (target->getPowerType())
{
case POWER_MANA:
triggered_spell_id = 57319;
break;
default:
return SPELL_AURA_PROC_FAILED;
}
break;
}
// Holy Power (Redemption Armor set)
case 28789:
{
if(!pVictim)
return SPELL_AURA_PROC_FAILED;
// Set class defined buff
switch (pVictim->getClass())
{
case CLASS_PALADIN:
case CLASS_PRIEST:
case CLASS_SHAMAN:
case CLASS_DRUID:
triggered_spell_id = 28795; // Increases the friendly target's mana regeneration by $s1 per 5 sec. for $d.
break;
case CLASS_MAGE:
case CLASS_WARLOCK:
triggered_spell_id = 28793; // Increases the friendly target's spell damage and healing by up to $s1 for $d.
break;
case CLASS_HUNTER:
case CLASS_ROGUE:
triggered_spell_id = 28791; // Increases the friendly target's attack power by $s1 for $d.
break;
case CLASS_WARRIOR:
triggered_spell_id = 28790; // Increases the friendly target's armor
break;
default:
return SPELL_AURA_PROC_FAILED;
}
break;
}
// Spiritual Attunement
case 31785:
case 33776:
{
// if healed by another unit (pVictim)
if (this == pVictim)
return SPELL_AURA_PROC_FAILED;
// don't count overhealing
uint32 diff = GetMaxHealth()-GetHealth();
if (!diff)
return SPELL_AURA_PROC_FAILED;
if (damage > diff)
basepoints[0] = triggerAmount*diff/100;
else
basepoints[0] = triggerAmount*damage/100;
target = this;
triggered_spell_id = 31786;
break;
}
// Seal of Vengeance (damage calc on apply aura)
case 31801:
{
if (effIndex != EFFECT_INDEX_0) // effect 1,2 used by seal unleashing code
return SPELL_AURA_PROC_FAILED;
// Triggered by melee attacks, or by Hammer of the Righteous (its spell damage counts as a melee attack)
if ((procFlag & PROC_FLAG_SUCCESSFUL_MELEE_HIT) || (procSpell && procSpell->Id == 53595) )
triggered_spell_id = 31803; // Holy Vengeance
// Once Holy Vengeance reaches 5 stacks on the target, also trigger the bonus damage
uint32 stacks = 0;
AuraList const& auras = target->GetAurasByType(SPELL_AURA_PERIODIC_DAMAGE);
for(AuraList::const_iterator itr = auras.begin(); itr!=auras.end(); ++itr)
{
if (((*itr)->GetId() == 31803) && (*itr)->GetCasterGuid() == GetObjectGuid())
{
stacks = (*itr)->GetStackAmount();
break;
}
}
if (stacks >= 5)
CastSpell(target,42463,true,NULL,triggeredByAura);
break;
}
// Judgements of the Wise
case 31876:
case 31877:
case 31878:
// triggered only by directly cast Judgement spells, not by additional Judgement effects
if(!procSpell || procSpell->Category != 1210)
return SPELL_AURA_PROC_FAILED;
target = this;
triggered_spell_id = 31930;
// Replenishment
CastSpell(this, 57669, true, NULL, triggeredByAura);
break;
// Paladin Tier 6 Trinket (Ashtongue Talisman of Zeal)
case 40470:
{
if (!procSpell)
return SPELL_AURA_PROC_FAILED;
float chance;
// Flash of light/Holy light
if (procSpell->SpellFamilyFlags & UI64LIT(0x00000000C0000000))
{
triggered_spell_id = 40471;
chance = 15.0f;
}
// Judgement (any)
else if (GetSpellSpecific(procSpell->Id)==SPELL_JUDGEMENT)
{
triggered_spell_id = 40472;
chance = 50.0f;
}
else
return SPELL_AURA_PROC_FAILED;
if (!roll_chance_f(chance))
return SPELL_AURA_PROC_FAILED;
break;
}
// Light's Beacon (heal target area aura)
case 53651:
{
// no bonus heal when the beacon target itself is healed directly
if (GetObjectGuid() == triggeredByAura->GetCasterGuid())
return SPELL_AURA_PROC_FAILED;
// beacon
Unit* beacon = triggeredByAura->GetCaster();
if (!beacon)
return SPELL_AURA_PROC_FAILED;
if (procSpell->Id == 20267)
return SPELL_AURA_PROC_FAILED;
// find caster main aura at beacon
Aura* dummy = NULL;
Unit::AuraList const& baa = beacon->GetAurasByType(SPELL_AURA_PERIODIC_TRIGGER_SPELL);
for(Unit::AuraList::const_iterator i = baa.begin(); i != baa.end(); ++i)
{
if ((*i)->GetId() == 53563 && (*i)->GetCasterGuid() == pVictim->GetObjectGuid())
{
dummy = (*i);
break;
}
}
// original heal must be from the beacon caster
if (!dummy)
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 53652; // Beacon of Light
basepoints[0] = triggeredByAura->GetModifier()->m_amount*damage/100;
// cast beacon-to-beacon with the original caster set, so caster mods apply and the LoS check is skipped
beacon->CastCustomSpell(beacon,triggered_spell_id,&basepoints[0],NULL,NULL,true,castItem,triggeredByAura,pVictim->GetGUID());
return SPELL_AURA_PROC_OK;
}
// Seal of Corruption (damage calc on apply aura)
case 53736:
{
if (effIndex != EFFECT_INDEX_0) // effect 1,2 used by seal unleashing code
return SPELL_AURA_PROC_FAILED;
// Triggered by melee attacks, or by Hammer of the Righteous (its spell damage counts as a melee attack)
if ((procFlag & PROC_FLAG_SUCCESSFUL_MELEE_HIT) || (procSpell && procSpell->Id == 53595))
triggered_spell_id = 53742; // Blood Corruption
// Once Blood Corruption reaches 5 stacks on the target, also trigger the bonus damage
uint32 stacks = 0;
AuraList const& auras = target->GetAurasByType(SPELL_AURA_PERIODIC_DAMAGE);
for(AuraList::const_iterator itr = auras.begin(); itr!=auras.end(); ++itr)
{
if (((*itr)->GetId() == 53742) && (*itr)->GetCasterGuid() == GetObjectGuid())
{
stacks = (*itr)->GetStackAmount();
break;
}
}
if (stacks >= 5)
CastSpell(target,53739,true,NULL,triggeredByAura);
break;
}
// Glyph of Holy Light
case 54937:
{
triggered_spell_id = 54968;
basepoints[0] = triggerAmount*damage/100;
break;
}
// Sacred Shield (buff)
case 58597:
{
triggered_spell_id = 66922;
SpellEntry const* triggeredEntry = sSpellStore.LookupEntry(triggered_spell_id);
if (!triggeredEntry)
return SPELL_AURA_PROC_FAILED;
if(pVictim)
if(!pVictim->HasAura(53569, EFFECT_INDEX_0) && !pVictim->HasAura(53576, EFFECT_INDEX_0))
return SPELL_AURA_PROC_FAILED;
basepoints[0] = int32(damage / (GetSpellDuration(triggeredEntry) / triggeredEntry->EffectAmplitude[EFFECT_INDEX_0]));
target = this;
break;
}
// Sacred Shield (talent rank)
case 53601:
{
// triggered_spell_id in spell data
target = this;
break;
}
// Item - Paladin T10 Holy 2P Bonus
case 70755:
{
triggered_spell_id = 71166;
break;
}
// Item - Paladin T10 Retribution 2P Bonus
case 70765:
{
if (GetTypeId() != TYPEID_PLAYER)
return SPELL_AURA_PROC_FAILED;
((Player*)this)->RemoveSpellCooldown(53385, true);
return SPELL_AURA_PROC_OK;
}
// Anger Capacitor
case 71406: // normal
case 71545: // heroic
{
if (!pVictim)
return SPELL_AURA_PROC_FAILED;
SpellEntry const* mote = sSpellStore.LookupEntry(71432);
if (!mote)
return SPELL_AURA_PROC_FAILED;
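// the heroic version (71545) manifests at one fewer Mote of Anger stack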
uint32 maxStack = mote->StackAmount - (dummySpell->Id == 71545 ? 1 : 0);
SpellAuraHolder *aurHolder = GetSpellAuraHolder(71432);
if (aurHolder && uint32(aurHolder->GetStackAmount() +1) >= maxStack)
{
RemoveAurasDueToSpell(71432); // Mote of Anger
// Manifest Anger (main hand/off hand)
CastSpell(pVictim, !haveOffhandWeapon() || roll_chance_i(50) ? 71433 : 71434, true);
return SPELL_AURA_PROC_OK;
}
else
triggered_spell_id = 71432;
break;
}
// Heartpierce, Item - Icecrown 25 Normal Dagger Proc
case 71880:
{
if(GetTypeId() != TYPEID_PLAYER)
return SPELL_AURA_PROC_FAILED;
switch (this->getPowerType())
{
case POWER_ENERGY: triggered_spell_id = 71882; break;
case POWER_RAGE: triggered_spell_id = 71883; break;
case POWER_MANA: triggered_spell_id = 71881; break;
default:
return SPELL_AURA_PROC_FAILED;
}
break;
}
// Heartpierce, Item - Icecrown 25 Heroic Dagger Proc
case 71892:
{
if(GetTypeId() != TYPEID_PLAYER)
return SPELL_AURA_PROC_FAILED;
switch (this->getPowerType())
{
case POWER_ENERGY: triggered_spell_id = 71887; break;
case POWER_RAGE: triggered_spell_id = 71886; break;
case POWER_MANA: triggered_spell_id = 71888; break;
default:
return SPELL_AURA_PROC_FAILED;
}
break;
}
}
break;
}
case SPELLFAMILY_SHAMAN:
{
switch(dummySpell->Id)
{
// Totemic Power (The Earthshatterer set)
case 28823:
{
if (!pVictim)
return SPELL_AURA_PROC_FAILED;
// Set class defined buff
switch (pVictim->getClass())
{
case CLASS_PALADIN:
case CLASS_PRIEST:
case CLASS_SHAMAN:
case CLASS_DRUID:
triggered_spell_id = 28824; // Increases the friendly target's mana regeneration by $s1 per 5 sec. for $d.
break;
case CLASS_MAGE:
case CLASS_WARLOCK:
triggered_spell_id = 28825; // Increases the friendly target's spell damage and healing by up to $s1 for $d.
break;
case CLASS_HUNTER:
case CLASS_ROGUE:
triggered_spell_id = 28826; // Increases the friendly target's attack power by $s1 for $d.
break;
case CLASS_WARRIOR:
triggered_spell_id = 28827; // Increases the friendly target's armor
break;
default:
return SPELL_AURA_PROC_FAILED;
}
break;
}
// Lesser Healing Wave (Totem of Flowing Water Relic)
case 28849:
{
target = this;
triggered_spell_id = 28850;
break;
}
// Windfury Weapon (Passive) 1-5 Ranks
case 33757:
{
if(GetTypeId()!=TYPEID_PLAYER)
return SPELL_AURA_PROC_FAILED;
if(!castItem || !castItem->IsEquipped())
return SPELL_AURA_PROC_FAILED;
// custom cooldown processing case
if (cooldown && ((Player*)this)->HasSpellCooldown(dummySpell->Id))
return SPELL_AURA_PROC_FAILED;
// Now amount of extra power stored in 1 effect of Enchant spell
// Get it by item enchant id
uint32 spellId;
switch (castItem->GetEnchantmentId(EnchantmentSlot(TEMP_ENCHANTMENT_SLOT)))
{
case 283: spellId = 8232; break; // 1 Rank
case 284: spellId = 8235; break; // 2 Rank
case 525: spellId = 10486; break; // 3 Rank
case 1669:spellId = 16362; break; // 4 Rank
case 2636:spellId = 25505; break; // 5 Rank
case 3785:spellId = 58801; break; // 6 Rank
case 3786:spellId = 58803; break; // 7 Rank
case 3787:spellId = 58804; break; // 8 Rank
default:
{
sLog.outError("Unit::HandleDummyAuraProc: non handled item enchantment (rank?) %u for spell id: %u (Windfury)",
castItem->GetEnchantmentId(EnchantmentSlot(TEMP_ENCHANTMENT_SLOT)),dummySpell->Id);
return SPELL_AURA_PROC_FAILED;
}
}
SpellEntry const* windfurySpellEntry = sSpellStore.LookupEntry(spellId);
if(!windfurySpellEntry)
{
sLog.outError("Unit::HandleDummyAuraProc: nonexistent spell id: %u (Windfury)",spellId);
return SPELL_AURA_PROC_FAILED;
}
int32 extra_attack_power = CalculateSpellDamage(pVictim, windfurySpellEntry, EFFECT_INDEX_1);
// Totem of Splintering
if (Aura* aura = GetAura(60764, EFFECT_INDEX_0))
extra_attack_power += aura->GetModifier()->m_amount;
// Off-Hand case
if (castItem->GetSlot() == EQUIPMENT_SLOT_OFFHAND)
{
// Value gained from additional AP
basepoints[0] = int32(extra_attack_power/14.0f * GetAttackTime(OFF_ATTACK)/1000/2);
triggered_spell_id = 33750;
}
// Main-Hand case
else
{
// Value gained from additional AP
basepoints[0] = int32(extra_attack_power/14.0f * GetAttackTime(BASE_ATTACK)/1000);
triggered_spell_id = 25504;
}
// apply cooldown before cast to prevent processing itself
if( cooldown )
((Player*)this)->AddSpellCooldown(dummySpell->Id,0,time(NULL) + cooldown);
// Attack Twice
for ( uint32 i = 0; i<2; ++i )
CastCustomSpell(pVictim,triggered_spell_id,&basepoints[0],NULL,NULL,true,castItem,triggeredByAura);
return SPELL_AURA_PROC_OK;
}
// Shaman Tier 6 Trinket
case 40463:
{
if (!procSpell)
return SPELL_AURA_PROC_FAILED;
float chance;
// proc from Lightning Bolt
if (procSpell->SpellFamilyFlags & UI64LIT(0x0000000000000001))
{
triggered_spell_id = 40465;
chance = 15.0f;
}
// proc from Lesser Healing Wave
else if (procSpell->SpellFamilyFlags & UI64LIT(0x0000000000000080))
{
triggered_spell_id = 40465;
chance = 10.0f;
}
// proc from Stormstrike
else if (procSpell->SpellFamilyFlags & UI64LIT(0x0000001000000000))
{
triggered_spell_id = 40466;
chance = 50.0f;
}
else
return SPELL_AURA_PROC_FAILED;
if (!roll_chance_f(chance))
return SPELL_AURA_PROC_FAILED;
target = this;
break;
}
// Earthen Power
case 51523:
case 51524:
{
triggered_spell_id = 63532;
break;
}
// Glyph of Healing Wave
case 55440:
{
// Not proc from self heals
if (this==pVictim)
return SPELL_AURA_PROC_FAILED;
basepoints[0] = triggerAmount * damage / 100;
target = this;
triggered_spell_id = 55533;
break;
}
// Spirit Hunt
case 58877:
{
// Cast on owner
target = GetOwner();
if (!target)
return SPELL_AURA_PROC_FAILED;
basepoints[0] = triggerAmount * damage / 100;
triggered_spell_id = 58879;
break;
}
// Glyph of Totem of Wrath
case 63280:
{
Totem* totem = GetTotem(TOTEM_SLOT_FIRE);
if (!totem)
return SPELL_AURA_PROC_FAILED;
// find totem aura bonus
AuraList const& spellPower = totem->GetAurasByType(SPELL_AURA_NONE);
for(AuraList::const_iterator i = spellPower.begin();i != spellPower.end(); ++i)
{
// select proper aura for format aura type in spell proto
if ((*i)->GetTarget()==totem && (*i)->GetSpellProto()->EffectApplyAuraName[(*i)->GetEffIndex()] == SPELL_AURA_MOD_HEALING_DONE &&
(*i)->GetSpellProto()->SpellFamilyName == SPELLFAMILY_SHAMAN && (*i)->GetSpellProto()->SpellFamilyFlags & UI64LIT(0x0000000004000000))
{
basepoints[0] = triggerAmount * (*i)->GetModifier()->m_amount / 100;
break;
}
}
if (!basepoints[0])
return SPELL_AURA_PROC_FAILED;
basepoints[1] = basepoints[0];
triggered_spell_id = 63283; // Totem of Wrath, caster bonus
target = this;
break;
}
// Shaman T8 Elemental 4P Bonus
case 64928:
{
basepoints[0] = int32( triggerAmount * damage / 100 );
basepoints[0] = int32( basepoints[0] / 2); // basepoints is for 1 tick, not all DoT amount
triggered_spell_id = 64930; // Electrified
break;
}
// Shaman T9 Elemental 4P Bonus
case 67228:
{
basepoints[0] = int32( triggerAmount * damage / 100 );
basepoints[0] = int32( basepoints[0] / 3); // basepoints is for 1 tick, not all DoT amount
triggered_spell_id = 71824;
break;
}
// Item - Shaman T10 Restoration 4P Bonus
case 70808:
{
basepoints[0] = int32( triggerAmount * damage / 100 );
basepoints[0] = int32( basepoints[0] / 3); // basepoints is for 1 tick, not all DoT amount
triggered_spell_id = 70809;
break;
}
// Item - Shaman T10 Elemental 4P Bonus
case 70817:
{
if (Aura *aur = pVictim->GetAura(SPELL_AURA_PERIODIC_DAMAGE, SPELLFAMILY_SHAMAN, UI64LIT(0x0000000010000000), 0, GetGUID()))
{
int32 amount = aur->GetAuraDuration() + triggerAmount * IN_MILLISECONDS;
aur->SetAuraDuration(amount);
aur->UpdateAura(false);
return SPELL_AURA_PROC_OK;
}
return SPELL_AURA_PROC_FAILED;
}
}
// Storm, Earth and Fire
if (dummySpell->SpellIconID == 3063)
{
// Earthbind Totem summon only
if(procSpell->Id != 2484)
return SPELL_AURA_PROC_FAILED;
if (!roll_chance_i(triggerAmount))
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 64695;
break;
}
// Ancestral Awakening
if (dummySpell->SpellIconID == 3065)
{
triggered_spell_id = 52759;
basepoints[0] = triggerAmount * damage / 100;
target = this;
break;
}
// Flametongue Weapon (Passive), Ranks
if (dummySpell->SpellFamilyFlags & UI64LIT(0x0000000000200000))
{
if (GetTypeId()!=TYPEID_PLAYER || !castItem)
return SPELL_AURA_PROC_FAILED;
// Only proc for enchanted weapon
Item *usedWeapon = ((Player *)this)->GetWeaponForAttack(procFlag & PROC_FLAG_SUCCESSFUL_OFFHAND_HIT ? OFF_ATTACK : BASE_ATTACK, true, true);
if (usedWeapon != castItem)
return SPELL_AURA_PROC_FAILED;
switch (dummySpell->Id)
{
case 10400: triggered_spell_id = 8026; break; // Rank 1
case 15567: triggered_spell_id = 8028; break; // Rank 2
case 15568: triggered_spell_id = 8029; break; // Rank 3
case 15569: triggered_spell_id = 10445; break; // Rank 4
case 16311: triggered_spell_id = 16343; break; // Rank 5
case 16312: triggered_spell_id = 16344; break; // Rank 6
case 16313: triggered_spell_id = 25488; break; // Rank 7
case 58784: triggered_spell_id = 58786; break; // Rank 8
case 58791: triggered_spell_id = 58787; break; // Rank 9
case 58792: triggered_spell_id = 58788; break; // Rank 10
default:
return SPELL_AURA_PROC_FAILED;
}
break;
}
// Earth Shield
if (dummySpell->SpellFamilyFlags & UI64LIT(0x0000040000000000))
{
originalCaster = triggeredByAura->GetCasterGuid();
target = this;
basepoints[0] = triggerAmount;
// Glyph of Earth Shield
if (Aura* aur = GetDummyAura(63279))
{
int32 aur_mod = aur->GetModifier()->m_amount;
basepoints[0] = int32(basepoints[0] * (aur_mod + 100.0f) / 100.0f);
}
triggered_spell_id = 379;
break;
}
// Improved Water Shield
if (dummySpell->SpellIconID == 2287)
{
// Lesser Healing Wave needs an additional 60% roll
if ((procSpell->SpellFamilyFlags & UI64LIT(0x0000000000000080)) && !roll_chance_i(60))
return SPELL_AURA_PROC_FAILED;
// Chain Heal needs additional 30% roll
if ((procSpell->SpellFamilyFlags & UI64LIT(0x0000000000000100)) && !roll_chance_i(30))
return SPELL_AURA_PROC_FAILED;
// lookup water shield
AuraList const& vs = GetAurasByType(SPELL_AURA_PROC_TRIGGER_SPELL);
for(AuraList::const_iterator itr = vs.begin(); itr != vs.end(); ++itr)
{
if ((*itr)->GetSpellProto()->SpellFamilyName == SPELLFAMILY_SHAMAN &&
((*itr)->GetSpellProto()->SpellFamilyFlags & UI64LIT(0x0000002000000000)))
{
uint32 spell = (*itr)->GetSpellProto()->EffectTriggerSpell[(*itr)->GetEffIndex()];
CastSpell(this, spell, true, castItem, triggeredByAura);
return SPELL_AURA_PROC_OK;
}
}
return SPELL_AURA_PROC_FAILED;
}
// Lightning Overload
if (dummySpell->SpellIconID == 2018) // only this spell have SpellFamily Shaman SpellIconID == 2018 and dummy aura
{
if(!procSpell || GetTypeId() != TYPEID_PLAYER || !pVictim )
return SPELL_AURA_PROC_FAILED;
// custom cooldown processing case
if( cooldown && GetTypeId()==TYPEID_PLAYER && ((Player*)this)->HasSpellCooldown(dummySpell->Id))
return SPELL_AURA_PROC_FAILED;
uint32 spellId = 0;
// Every Lightning Bolt and Chain Lightning spell has a duplicate with half damage and zero cost
switch (procSpell->Id)
{
// Lightning Bolt
case 403: spellId = 45284; break; // Rank 1
case 529: spellId = 45286; break; // Rank 2
case 548: spellId = 45287; break; // Rank 3
case 915: spellId = 45288; break; // Rank 4
case 943: spellId = 45289; break; // Rank 5
case 6041: spellId = 45290; break; // Rank 6
case 10391: spellId = 45291; break; // Rank 7
case 10392: spellId = 45292; break; // Rank 8
case 15207: spellId = 45293; break; // Rank 9
case 15208: spellId = 45294; break; // Rank 10
case 25448: spellId = 45295; break; // Rank 11
case 25449: spellId = 45296; break; // Rank 12
case 49237: spellId = 49239; break; // Rank 13
case 49238: spellId = 49240; break; // Rank 14
// Chain Lightning
case 421: spellId = 45297; break; // Rank 1
case 930: spellId = 45298; break; // Rank 2
case 2860: spellId = 45299; break; // Rank 3
case 10605: spellId = 45300; break; // Rank 4
case 25439: spellId = 45301; break; // Rank 5
case 25442: spellId = 45302; break; // Rank 6
case 49270: spellId = 49268; break; // Rank 7
case 49271: spellId = 49269; break; // Rank 8
default:
sLog.outError("Unit::HandleDummyAuraProc: non handled spell id: %u (LO)", procSpell->Id);
return SPELL_AURA_PROC_FAILED;
}
// Remove cooldown (Chain Lightning has a Category Recovery time)
if (procSpell->SpellFamilyFlags & UI64LIT(0x0000000000000002))
((Player*)this)->RemoveSpellCooldown(spellId);
CastSpell(pVictim, spellId, true, castItem, triggeredByAura);
if (cooldown && GetTypeId() == TYPEID_PLAYER)
((Player*)this)->AddSpellCooldown(dummySpell->Id, 0, time(NULL) + cooldown);
return SPELL_AURA_PROC_OK;
}
// Static Shock
if(dummySpell->SpellIconID == 3059)
{
// lookup Lightning Shield
AuraList const& vs = GetAurasByType(SPELL_AURA_PROC_TRIGGER_SPELL);
for(AuraList::const_iterator itr = vs.begin(); itr != vs.end(); ++itr)
{
if ((*itr)->GetSpellProto()->SpellFamilyName == SPELLFAMILY_SHAMAN &&
((*itr)->GetSpellProto()->SpellFamilyFlags & UI64LIT(0x0000000000000400)))
{
uint32 spell = 0;
switch ((*itr)->GetId())
{
case 324: spell = 26364; break;
case 325: spell = 26365; break;
case 905: spell = 26366; break;
case 945: spell = 26367; break;
case 8134: spell = 26369; break;
case 10431: spell = 26370; break;
case 10432: spell = 26363; break;
case 25469: spell = 26371; break;
case 25472: spell = 26372; break;
case 49280: spell = 49278; break;
case 49281: spell = 49279; break;
default:
return SPELL_AURA_PROC_FAILED;
}
CastSpell(target, spell, true, castItem, triggeredByAura);
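// consume one Lightning Shield charge; remove the shield once its last charge is spent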
if ((*itr)->GetHolder()->DropAuraCharge())
RemoveAuraHolderFromStack((*itr)->GetId());
return SPELL_AURA_PROC_OK;
}
}
return SPELL_AURA_PROC_FAILED;
}
// Frozen Power
if (dummySpell->SpellIconID == 3780)
{
Unit *caster = triggeredByAura->GetCaster();
if (!procSpell || !caster)
return SPELL_AURA_PROC_FAILED;
float distance = caster->GetDistance(pVictim);
int32 chance = triggerAmount;
if (distance < 15.0f || !roll_chance_i(chance))
return SPELL_AURA_PROC_FAILED;
// make the triggered cast apply after the current damage spell finishes processing, to prevent it being removed by that spell
if(Spell* spell = GetCurrentSpell(CURRENT_GENERIC_SPELL))
spell->AddTriggeredSpell(63685);
return SPELL_AURA_PROC_OK;
}
break;
}
case SPELLFAMILY_DEATHKNIGHT:
{
// Butchery
if (dummySpell->SpellIconID == 2664)
{
basepoints[0] = triggerAmount;
triggered_spell_id = 50163;
target = this;
break;
}
// Dancing Rune Weapon
if (dummySpell->Id == 49028)
{
// 1 dummy aura for dismiss rune blade
if (effIndex != EFFECT_INDEX_1)
return SPELL_AURA_PROC_FAILED;
Pet* runeBlade = FindGuardianWithEntry(27893);
if (runeBlade && pVictim && damage && procSpell)
{
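// the rune weapon mirrors the strike at half damage, and the result is reported via the damage log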
int32 procDmg = int32(damage * 0.5f);
runeBlade->CastCustomSpell(pVictim, procSpell->Id, &procDmg, NULL, NULL, true, NULL, NULL, runeBlade->GetGUID());
SendSpellNonMeleeDamageLog(pVictim, procSpell->Id, procDmg, SPELL_SCHOOL_MASK_NORMAL, 0, 0, false, 0, false);
break;
}
else
return SPELL_AURA_PROC_FAILED;
}
// Mark of Blood
if (dummySpell->Id == 49005)
{
if (target->GetTypeId() != TYPEID_PLAYER)
return SPELL_AURA_PROC_FAILED;
// TODO: need more info (cooldowns/PPM)
target->CastSpell(target, 61607, true, NULL, triggeredByAura);
return SPELL_AURA_PROC_OK;
}
// Unholy Blight
if (dummySpell->Id == 49194)
{
basepoints[0] = damage * triggerAmount / 100;
// Glyph of Unholy Blight
if (Aura *aura = GetDummyAura(63332))
basepoints[0] += basepoints[0] * aura->GetModifier()->m_amount / 100;
// Split between 10 ticks
basepoints[0] /= 10;
triggered_spell_id = 50536;
break;
}
// Vendetta
if (dummySpell->SpellFamilyFlags & UI64LIT(0x0000000000010000))
{
basepoints[0] = triggerAmount * GetMaxHealth() / 100;
triggered_spell_id = 50181;
target = this;
break;
}
// Necrosis
if (dummySpell->SpellIconID == 2709)
{
// only melee auto attacks and Rune Strike are affected
if (procSpell && procSpell->Id != 56815)
return SPELL_AURA_PROC_FAILED;
basepoints[0] = triggerAmount * damage / 100;
triggered_spell_id = 51460;
break;
}
// Threat of Thassarian
if (dummySpell->SpellIconID == 2023)
{
// Must Dual Wield
if (!procSpell || !haveOffhandWeapon())
return SPELL_AURA_PROC_FAILED;
// Chance as basepoints for dummy aura
if (!roll_chance_i(triggerAmount))
return SPELL_AURA_PROC_FAILED;
switch (procSpell->Id)
{
// Obliterate
case 49020: // Rank 1
triggered_spell_id = 66198; break;
case 51423: // Rank 2
triggered_spell_id = 66972; break;
case 51424: // Rank 3
triggered_spell_id = 66973; break;
case 51425: // Rank 4
triggered_spell_id = 66974; break;
// Frost Strike
case 49143: // Rank 1
triggered_spell_id = 66196; break;
case 51416: // Rank 2
triggered_spell_id = 66958; break;
case 51417: // Rank 3
triggered_spell_id = 66959; break;
case 51418: // Rank 4
triggered_spell_id = 66960; break;
case 51419: // Rank 5
triggered_spell_id = 66961; break;
case 55268: // Rank 6
triggered_spell_id = 66962; break;
// Plague Strike
case 45462: // Rank 1
triggered_spell_id = 66216; break;
case 49917: // Rank 2
triggered_spell_id = 66988; break;
case 49918: // Rank 3
triggered_spell_id = 66989; break;
case 49919: // Rank 4
triggered_spell_id = 66990; break;
case 49920: // Rank 5
triggered_spell_id = 66991; break;
case 49921: // Rank 6
triggered_spell_id = 66992; break;
// Death Strike
case 49998: // Rank 1
triggered_spell_id = 66188; break;
case 49999: // Rank 2
triggered_spell_id = 66950; break;
case 45463: // Rank 3
triggered_spell_id = 66951; break;
case 49923: // Rank 4
triggered_spell_id = 66952; break;
case 49924: // Rank 5
triggered_spell_id = 66953; break;
// Rune Strike
case 56815:
triggered_spell_id = 66217; break;
// Blood Strike
case 45902: // Rank 1
triggered_spell_id = 66215; break;
case 49926: // Rank 2
triggered_spell_id = 66975; break;
case 49927: // Rank 3
triggered_spell_id = 66976; break;
case 49928: // Rank 4
triggered_spell_id = 66977; break;
case 49929: // Rank 5
triggered_spell_id = 66978; break;
case 49930: // Rank 6
triggered_spell_id = 66979; break;
default:
return SPELL_AURA_PROC_FAILED;
}
break;
}
// Runic Power Back on Snare/Root
if (dummySpell->Id == 61257)
{
// only for spells that hit/crit (trigger always starts) and not from self-cast spells
if (procSpell == 0 || !(procEx & (PROC_EX_NORMAL_HIT|PROC_EX_CRITICAL_HIT)) || this == pVictim)
return SPELL_AURA_PROC_FAILED;
// Need snare or root mechanic
if (!(GetAllSpellMechanicMask(procSpell) & IMMUNE_TO_ROOT_AND_SNARE_MASK))
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 61258;
target = this;
break;
}
// Sudden Doom
if (dummySpell->SpellIconID == 1939)
{
if (!target || !target->isAlive() || this->GetTypeId() != TYPEID_PLAYER)
return SPELL_AURA_PROC_FAILED;
// get highest rank of Death Coil spell
const PlayerSpellMap& sp_list = ((Player*)this)->GetSpellMap();
for (PlayerSpellMap::const_iterator itr = sp_list.begin(); itr != sp_list.end(); ++itr)
{
if(!itr->second.active || itr->second.disabled || itr->second.state == PLAYERSPELL_REMOVED)
continue;
SpellEntry const *spellInfo = sSpellStore.LookupEntry(itr->first);
if (!spellInfo)
continue;
if (spellInfo->SpellFamilyName == SPELLFAMILY_DEATHKNIGHT && spellInfo->SpellFamilyFlags & UI64LIT(0x2000))
{
triggered_spell_id = spellInfo->Id;
break;
}
}
break;
}
// Wandering Plague
if (dummySpell->SpellIconID == 1614)
{
if (!roll_chance_f(GetUnitCriticalChance(BASE_ATTACK, pVictim)))
return SPELL_AURA_PROC_FAILED;
basepoints[0] = triggerAmount * damage / 100;
triggered_spell_id = 50526;
break;
}
// Blood of the North and Reaping
if (dummySpell->SpellIconID == 3041 || dummySpell->SpellIconID == 22)
{
if(GetTypeId()!=TYPEID_PLAYER)
return SPELL_AURA_PROC_FAILED;
Player *player = (Player*)this;
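// convert the first Blood rune to a Death rune; if it is on cooldown, try another Blood rune or queue the conversion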
for (uint32 i = 0; i < MAX_RUNES; ++i)
{
if (player->GetCurrentRune(i) == RUNE_BLOOD)
{
if(!player->GetRuneCooldown(i))
player->ConvertRune(i, RUNE_DEATH, dummySpell->Id);
else
{
// search for another rune that might be available
for (uint32 iter = i; iter < MAX_RUNES; ++iter)
{
if(player->GetCurrentRune(iter) == RUNE_BLOOD && !player->GetRuneCooldown(iter))
{
player->ConvertRune(iter, RUNE_DEATH, dummySpell->Id);
triggeredByAura->SetAuraPeriodicTimer(0);
return SPELL_AURA_PROC_OK;
}
}
player->SetNeedConvertRune(i, true, dummySpell->Id);
}
triggeredByAura->SetAuraPeriodicTimer(0);
return SPELL_AURA_PROC_OK;
}
}
return SPELL_AURA_PROC_FAILED;
}
// Death Rune Mastery
if (dummySpell->SpellIconID == 2622)
{
if(GetTypeId()!=TYPEID_PLAYER)
return SPELL_AURA_PROC_FAILED;
Player *player = (Player*)this;
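// convert all Unholy and Frost runes to Death runes, queueing any that are currently on cooldown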
for (uint32 i = 0; i < MAX_RUNES; ++i)
{
RuneType currRune = player->GetCurrentRune(i);
if (currRune == RUNE_UNHOLY || currRune == RUNE_FROST)
{
uint16 cd = player->GetRuneCooldown(i);
if(!cd)
player->ConvertRune(i, RUNE_DEATH, dummySpell->Id);
else // there is a cd
player->SetNeedConvertRune(i, true, dummySpell->Id);
// no break because it converts all
}
}
triggeredByAura->SetAuraPeriodicTimer(0);
return SPELL_AURA_PROC_OK;
}
// Hungering Cold - does not break from disease damage
if (dummySpell->SpellIconID == 2797)
{
if (procSpell && procSpell->Dispel == DISPEL_DISEASE)
return SPELL_AURA_PROC_FAILED;
}
// Blood-Caked Blade
if (dummySpell->SpellIconID == 138)
{
// only main hand melee auto attacks and Rune Strike are affected
if ((procFlag & PROC_FLAG_SUCCESSFUL_OFFHAND_HIT) || (procSpell && procSpell->Id != 56815))
return SPELL_AURA_PROC_FAILED;
// triggered_spell_id in spell data
break;
}
break;
}
case SPELLFAMILY_PET:
{
// Improved Cower
if (dummySpell->SpellIconID == 958 && procSpell->SpellIconID == 958)
{
triggered_spell_id = dummySpell->Id == 53180 ? 54200 : 54201;
target = this;
break;
}
// Guard Dog
if (dummySpell->SpellIconID == 201 && procSpell->SpellIconID == 201)
{
triggered_spell_id = 54445;
target = this;
break;
}
// Silverback
if (dummySpell->SpellIconID == 1582 && procSpell->SpellIconID == 201)
{
triggered_spell_id = dummySpell->Id == 62764 ? 62800 : 62801;
target = this;
break;
}
break;
}
default:
break;
}
// processed charge only counting case
if (!triggered_spell_id)
return SPELL_AURA_PROC_OK;
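// common cast path: validate the resolved trigger spell, target and cooldown, then cast it with any custom basepoints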
SpellEntry const* triggerEntry = sSpellStore.LookupEntry(triggered_spell_id);
if (!triggerEntry)
{
sLog.outError("Unit::HandleDummyAuraProc: Spell %u have nonexistent triggered spell %u",dummySpell->Id,triggered_spell_id);
return SPELL_AURA_PROC_FAILED;
}
// default case
if (!target || (target != this && !target->isAlive()))
return SPELL_AURA_PROC_FAILED;
if (cooldown && GetTypeId()==TYPEID_PLAYER && ((Player*)this)->HasSpellCooldown(triggered_spell_id))
return SPELL_AURA_PROC_FAILED;
if (basepoints[EFFECT_INDEX_0] || basepoints[EFFECT_INDEX_1] || basepoints[EFFECT_INDEX_2])
CastCustomSpell(target, triggerEntry,
basepoints[EFFECT_INDEX_0] ? &basepoints[EFFECT_INDEX_0] : NULL,
basepoints[EFFECT_INDEX_1] ? &basepoints[EFFECT_INDEX_1] : NULL,
basepoints[EFFECT_INDEX_2] ? &basepoints[EFFECT_INDEX_2] : NULL,
true, castItem, triggeredByAura, originalCaster);
else
CastSpell(target, triggerEntry, true, castItem, triggeredByAura);
if (cooldown && GetTypeId()==TYPEID_PLAYER)
((Player*)this)->AddSpellCooldown(triggered_spell_id,0,time(NULL) + cooldown);
return SPELL_AURA_PROC_OK;
}
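// Handles SPELL_AURA_PROC_TRIGGER_SPELL(_WITH_VALUE) procs: resolves custom trigger spells, basepoints and targets before casting the trigger spell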
SpellAuraProcResult Unit::HandleProcTriggerSpellAuraProc(Unit *pVictim, uint32 damage, Aura* triggeredByAura, SpellEntry const *procSpell, uint32 procFlags, uint32 procEx, uint32 cooldown)
{
// Get triggered aura spell info
SpellEntry const* auraSpellInfo = triggeredByAura->GetSpellProto();
// Basepoints of trigger aura
int32 triggerAmount = triggeredByAura->GetModifier()->m_amount;
// Set trigger spell id, target, custom basepoints
uint32 trigger_spell_id = auraSpellInfo->EffectTriggerSpell[triggeredByAura->GetEffIndex()];
Unit* target = NULL;
int32 basepoints[MAX_EFFECT_INDEX] = {0, 0, 0};
if(triggeredByAura->GetModifier()->m_auraname == SPELL_AURA_PROC_TRIGGER_SPELL_WITH_VALUE)
basepoints[0] = triggerAmount;
Item* castItem = !triggeredByAura->GetCastItemGuid().IsEmpty() && GetTypeId()==TYPEID_PLAYER
? ((Player*)this)->GetItemByGuid(triggeredByAura->GetCastItemGuid()) : NULL;
// Try handle unknown trigger spells
// Custom requirements (not listed in procEx). Warning: damage is dealt after this point
// Custom triggered spells
switch (auraSpellInfo->SpellFamilyName)
{
case SPELLFAMILY_GENERIC:
switch(auraSpellInfo->Id)
{
//case 191: // Elemental Response
// switch (procSpell->School)
// {
// case SPELL_SCHOOL_FIRE: trigger_spell_id = 34192; break;
// case SPELL_SCHOOL_FROST: trigger_spell_id = 34193; break;
// case SPELL_SCHOOL_ARCANE:trigger_spell_id = 34194; break;
// case SPELL_SCHOOL_NATURE:trigger_spell_id = 34195; break;
// case SPELL_SCHOOL_SHADOW:trigger_spell_id = 34196; break;
// case SPELL_SCHOOL_HOLY: trigger_spell_id = 34197; break;
// case SPELL_SCHOOL_NORMAL:trigger_spell_id = 34198; break;
// }
// break;
//case 5301: break; // Defensive State (DND)
//case 7137: break; // Shadow Charge (Rank 1)
//case 7377: break; // Take Immune Periodic Damage <Not Working>
//case 13358: break; // Defensive State (DND)
//case 16092: break; // Defensive State (DND)
//case 18943: break; // Double Attack
//case 19194: break; // Double Attack
//case 19817: break; // Double Attack
//case 19818: break; // Double Attack
//case 22835: break; // Drunken Rage
// trigger_spell_id = 14822; break;
case 23780: // Aegis of Preservation (Aegis of Preservation trinket)
trigger_spell_id = 23781;
break;
//case 24949: break; // Defensive State 2 (DND)
case 27522: // Mana Drain Trigger
case 40336: // Mana Drain Trigger
// On successful melee or ranged attack gain $29471s1 mana and if possible drain $27526s1 mana from the target.
if (isAlive())
CastSpell(this, 29471, true, castItem, triggeredByAura);
if (pVictim && pVictim->isAlive())
CastSpell(pVictim, 27526, true, castItem, triggeredByAura);
return SPELL_AURA_PROC_OK;
case 31255: // Deadly Swiftness (Rank 1)
// whenever you deal damage to a target who is below 20% health.
if (pVictim->GetHealth() > pVictim->GetMaxHealth() / 5)
return SPELL_AURA_PROC_FAILED;
target = this;
trigger_spell_id = 22588;
break;
//case 33207: break; // Gossip NPC Periodic - Fidget
case 33896: // Desperate Defense (Stonescythe Whelp, Stonescythe Alpha, Stonescythe Ambusher)
trigger_spell_id = 33898;
break;
//case 34082: break; // Advantaged State (DND)
//case 34783: break; // Spell Reflection
//case 35205: break; // Vanish
//case 35321: break; // Gushing Wound
//case 36096: break; // Spell Reflection
//case 36207: break; // Steal Weapon
//case 36576: break; // Shaleskin (Shaleskin Flayer, Shaleskin Ripper) 30023 trigger
//case 37030: break; // Chaotic Temperament
case 38164: // Unyielding Knights
if (pVictim->GetEntry() != 19457)
return SPELL_AURA_PROC_FAILED;
break;
//case 38363: break; // Gushing Wound
//case 39215: break; // Gushing Wound
//case 40250: break; // Improved Duration
//case 40329: break; // Demo Shout Sensor
//case 40364: break; // Entangling Roots Sensor
//case 41054: break; // Copy Weapon
// trigger_spell_id = 41055; break;
//case 41248: break; // Consuming Strikes
// trigger_spell_id = 41249; break;
//case 42730: break; // Woe Strike
//case 43453: break; // Rune Ward
//case 43504: break; // Alterac Valley OnKill Proc Aura
//case 44326: break; // Pure Energy Passive
//case 44526: break; // Hate Monster (Spar) (30 sec)
//case 44527: break; // Hate Monster (Spar Buddy) (30 sec)
//case 44819: break; // Hate Monster (Spar Buddy) (>30% Health)
//case 44820: break; // Hate Monster (Spar) (<30%)
case 45057: // Evasive Maneuvers (Commendation of Kael`thas trinket)
// reduce you below $s1% health
if (GetHealth() - damage > GetMaxHealth() * triggerAmount / 100)
return SPELL_AURA_PROC_FAILED;
break;
//case 45903: break; // Offensive State
//case 46146: break; // [PH] Ahune Spanky Hands
//case 46939: break; // Black Bow of the Betrayer
// trigger_spell_id = 29471; - gain mana
// 27526; - drain mana if possible
case 43820: // Charm of the Witch Doctor (Amani Charm of the Witch Doctor trinket)
// Pct value stored in dummy
basepoints[0] = pVictim->GetCreateHealth() * auraSpellInfo->CalculateSimpleValue(EFFECT_INDEX_1) / 100;
target = pVictim;
break;
//case 45205: break; // Copy Offhand Weapon
//case 45343: break; // Dark Flame Aura
//case 47300: break; // Dark Flame Aura
//case 48876: break; // Beast's Mark
// trigger_spell_id = 48877; break;
//case 49059: break; // Horde, Hate Monster (Spar Buddy) (>30% Health)
//case 50051: break; // Ethereal Pet Aura
//case 50689: break; // Blood Presence (Rank 1)
//case 50844: break; // Blood Mirror
//case 52856: break; // Charge
//case 54072: break; // Knockback Ball Passive
//case 54476: break; // Blood Presence
//case 54775: break; // Abandon Vehicle on Poly
case 56702: //
{
trigger_spell_id = 56701;
break;
}
case 57345: // Darkmoon Card: Greatness
{
float stat = 0.0f;
// strength
if (GetStat(STAT_STRENGTH) > stat) { trigger_spell_id = 60229;stat = GetStat(STAT_STRENGTH); }
// agility
if (GetStat(STAT_AGILITY) > stat) { trigger_spell_id = 60233;stat = GetStat(STAT_AGILITY); }
// intellect
if (GetStat(STAT_INTELLECT)> stat) { trigger_spell_id = 60234;stat = GetStat(STAT_INTELLECT);}
// spirit
if (GetStat(STAT_SPIRIT) > stat) { trigger_spell_id = 60235; }
break;
}
//case 55580: break; // Mana Link
//case 57587: break; // Steal Ranged ()
//case 57594: break; // Copy Ranged Weapon
//case 59237: break; // Beast's Mark
// trigger_spell_id = 59233; break;
//case 59288: break; // Infra-Green Shield
//case 59532: break; // Abandon Passengers on Poly
//case 59735: break; // Woe Strike
case 64415: // Val'anyr Hammer of Ancient Kings - Equip Effect
{
// for DOT procs
if (!IsPositiveSpell(procSpell->Id))
return SPELL_AURA_PROC_FAILED;
break;
}
case 64440: // Blade Warding
{
trigger_spell_id = 64442;
// need scale damage base at stack size
if (SpellEntry const* trigEntry = sSpellStore.LookupEntry(trigger_spell_id))
basepoints[EFFECT_INDEX_0] = trigEntry->CalculateSimpleValue(EFFECT_INDEX_0) * triggeredByAura->GetStackAmount();
break;
}
case 64568: // Blood Reserve
{
// Check health condition - should drop below 35%
if (!(10*(int32(GetHealth() - damage)) < 3.5 * GetMaxHealth()))
return SPELL_AURA_PROC_FAILED;
if (!roll_chance_f(50))
return SPELL_AURA_PROC_FAILED;
trigger_spell_id = 64569;
basepoints[0] = triggerAmount;
break;
}
case 67702: // Death's Choice, Item - Coliseum 25 Normal Melee Trinket
{
float stat = 0.0f;
// strength
if (GetStat(STAT_STRENGTH) > stat) { trigger_spell_id = 67708;stat = GetStat(STAT_STRENGTH); }
// agility
if (GetStat(STAT_AGILITY) > stat) { trigger_spell_id = 67703; }
break;
}
case 67771: // Death's Choice (heroic), Item - Coliseum 25 Heroic Melee Trinket
{
float stat = 0.0f;
// strength
if (GetStat(STAT_STRENGTH) > stat) { trigger_spell_id = 67773;stat = GetStat(STAT_STRENGTH); }
// agility
if (GetStat(STAT_AGILITY) > stat) { trigger_spell_id = 67772; }
break;
}
case 72178: // Blood link Saurfang aura
{
target = this;
trigger_spell_id = 72195;
break;
}
}
break;
case SPELLFAMILY_MAGE:
if (auraSpellInfo->SpellIconID == 2127) // Blazing Speed
{
switch (auraSpellInfo->Id)
{
case 31641: // Rank 1
case 31642: // Rank 2
trigger_spell_id = 31643;
break;
default:
sLog.outError("Unit::HandleProcTriggerSpellAuraProc: Spell %u miss possibly Blazing Speed",auraSpellInfo->Id);
return SPELL_AURA_PROC_FAILED;
}
}
else if(auraSpellInfo->Id == 26467) // Persistent Shield (Scarab Brooch trinket)
{
// This spell originally trigger 13567 - Dummy Trigger (vs dummy effect)
basepoints[0] = damage * 15 / 100;
target = pVictim;
trigger_spell_id = 26470;
}
else if(auraSpellInfo->Id == 71761) // Deep Freeze Immunity State
{
// spell applied only to permanent immunes to stun targets (bosses)
if (pVictim->GetTypeId() != TYPEID_UNIT ||
(((Creature*)pVictim)->GetCreatureInfo()->MechanicImmuneMask & (1 << (MECHANIC_STUN - 1))) == 0)
return SPELL_AURA_PROC_FAILED;
}
break;
case SPELLFAMILY_WARRIOR:
// Deep Wounds (replace triggered spells to directly apply the DoT); the DoT spell has family flags
if (auraSpellInfo->SpellFamilyFlags == UI64LIT(0x0) && auraSpellInfo->SpellIconID == 243)
{
float weaponDamage;
// DW should benefit of attack power, damage percent mods etc.
// TODO: check if using offhand damage is correct and if it should be divided by 2
if (haveOffhandWeapon() && getAttackTimer(BASE_ATTACK) > getAttackTimer(OFF_ATTACK))
weaponDamage = (GetFloatValue(UNIT_FIELD_MINOFFHANDDAMAGE) + GetFloatValue(UNIT_FIELD_MAXOFFHANDDAMAGE))/2;
else
weaponDamage = (GetFloatValue(UNIT_FIELD_MINDAMAGE) + GetFloatValue(UNIT_FIELD_MAXDAMAGE))/2;
switch (auraSpellInfo->Id)
{
case 12834: basepoints[0] = int32(weaponDamage * 16 / 100); break;
case 12849: basepoints[0] = int32(weaponDamage * 32 / 100); break;
case 12867: basepoints[0] = int32(weaponDamage * 48 / 100); break;
// Impossible case
default:
sLog.outError("Unit::HandleProcTriggerSpellAuraProc: DW unknown spell rank %u",auraSpellInfo->Id);
return SPELL_AURA_PROC_FAILED;
}
// 1 tick/sec * 6 sec = 6 ticks
basepoints[0] /= 6;
trigger_spell_id = 12721;
break;
}
if (auraSpellInfo->Id == 50421) // Scent of Blood
trigger_spell_id = 50422;
break;
case SPELLFAMILY_WARLOCK:
{
// Drain Soul
if (auraSpellInfo->SpellFamilyFlags & UI64LIT(0x0000000000004000))
{
// search for "Improved Drain Soul" dummy aura
Unit::AuraList const& mDummyAura = GetAurasByType(SPELL_AURA_DUMMY);
for(Unit::AuraList::const_iterator i = mDummyAura.begin(); i != mDummyAura.end(); ++i)
{
if ((*i)->GetSpellProto()->SpellFamilyName == SPELLFAMILY_WARLOCK && (*i)->GetSpellProto()->SpellIconID == 113)
{
// basepoints of the trigger spell are stored in the dummy effect of spellProto
int32 basepoints = GetMaxPower(POWER_MANA) * (*i)->GetSpellProto()->CalculateSimpleValue(EFFECT_INDEX_2) / 100;
CastCustomSpell(this, 18371, &basepoints, NULL, NULL, true, castItem, triggeredByAura);
break;
}
}
// Do not remove the charge (the aura is removed on death in any case)
// Needed for Drain Soul's SPELL_AURA_CHANNEL_DEATH_ITEM aura to work correctly
return SPELL_AURA_PROC_FAILED;
}
// Nether Protection
else if (auraSpellInfo->SpellIconID == 1985)
{
if (!procSpell)
return SPELL_AURA_PROC_FAILED;
switch(GetFirstSchoolInMask(GetSpellSchoolMask(procSpell)))
{
case SPELL_SCHOOL_NORMAL:
return SPELL_AURA_PROC_FAILED; // ignore
case SPELL_SCHOOL_HOLY: trigger_spell_id = 54370; break;
case SPELL_SCHOOL_FIRE: trigger_spell_id = 54371; break;
case SPELL_SCHOOL_NATURE: trigger_spell_id = 54375; break;
case SPELL_SCHOOL_FROST: trigger_spell_id = 54372; break;
case SPELL_SCHOOL_SHADOW: trigger_spell_id = 54374; break;
case SPELL_SCHOOL_ARCANE: trigger_spell_id = 54373; break;
default:
return SPELL_AURA_PROC_FAILED;
}
}
// Cheat Death
else if (auraSpellInfo->Id == 28845)
{
// When your health drops below 20% ....
if (GetHealth() - damage > GetMaxHealth() / 5 || GetHealth() < GetMaxHealth() / 5)
return SPELL_AURA_PROC_FAILED;
}
// Decimation
else if (auraSpellInfo->Id == 63156 || auraSpellInfo->Id == 63158)
{
// Looking for dummy effect
Aura *aur = GetAura(auraSpellInfo->Id, EFFECT_INDEX_1);
if (!aur)
return SPELL_AURA_PROC_FAILED;
// Do not proc if the target's health is not at or below the stored value (35%)
if (int32(pVictim->GetHealth() * 100 / pVictim->GetMaxHealth()) > aur->GetModifier()->m_amount)
return SPELL_AURA_PROC_FAILED;
}
break;
}
case SPELLFAMILY_PRIEST:
{
// Greater Heal Refund (Avatar Raiment set)
if (auraSpellInfo->Id==37594)
{
// Do not grant the refund if the target already has full health
if (pVictim->GetHealth() == pVictim->GetMaxHealth())
return SPELL_AURA_PROC_FAILED;
// If your Greater Heal brings the target to full health, you gain $37595s1 mana.
if (pVictim->GetHealth() + damage < pVictim->GetMaxHealth())
return SPELL_AURA_PROC_FAILED;
trigger_spell_id = 37595;
}
// Blessed Recovery
else if (auraSpellInfo->SpellIconID == 1875)
{
switch (auraSpellInfo->Id)
{
case 27811: trigger_spell_id = 27813; break;
case 27815: trigger_spell_id = 27817; break;
case 27816: trigger_spell_id = 27818; break;
default:
sLog.outError("Unit::HandleProcTriggerSpellAuraProc: Spell %u not handled in BR", auraSpellInfo->Id);
return SPELL_AURA_PROC_FAILED;
}
basepoints[0] = damage * triggerAmount / 100 / 3;
target = this;
}
// Glyph of Shadow Word: Pain
else if (auraSpellInfo->Id == 55681)
basepoints[0] = triggerAmount * GetCreateMana() / 100;
break;
}
case SPELLFAMILY_DRUID:
{
// Druid Forms Trinket
if (auraSpellInfo->Id==37336)
{
switch(GetShapeshiftForm())
{
case FORM_NONE: trigger_spell_id = 37344;break;
case FORM_CAT: trigger_spell_id = 37341;break;
case FORM_BEAR:
case FORM_DIREBEAR: trigger_spell_id = 37340;break;
case FORM_TREE: trigger_spell_id = 37342;break;
case FORM_MOONKIN: trigger_spell_id = 37343;break;
default:
return SPELL_AURA_PROC_FAILED;
}
}
// Druid T9 Feral Relic (Lacerate, Swipe, Mangle, and Shred)
else if (auraSpellInfo->Id==67353)
{
switch(GetShapeshiftForm())
{
case FORM_CAT: trigger_spell_id = 67355; break;
case FORM_BEAR:
case FORM_DIREBEAR: trigger_spell_id = 67354; break;
default:
return SPELL_AURA_PROC_FAILED;
}
}
break;
}
case SPELLFAMILY_ROGUE:
// Item - Rogue T10 2P Bonus
if (auraSpellInfo->Id == 70805)
{
if (pVictim != this)
return SPELL_AURA_PROC_FAILED;
}
// Item - Rogue T10 4P Bonus
else if (auraSpellInfo->Id == 70803)
{
if (!procSpell)
return SPELL_AURA_PROC_FAILED;
// only allow melee finishing move to proc
if (!(procSpell->AttributesEx & SPELL_ATTR_EX_REQ_TARGET_COMBO_POINTS) || procSpell->Id == 26679)
return SPELL_AURA_PROC_FAILED;
trigger_spell_id = 70802;
target = this;
}
break;
case SPELLFAMILY_HUNTER:
{
// Piercing Shots
if (auraSpellInfo->SpellIconID == 3247 && auraSpellInfo->SpellVisual[0] == 0)
{
basepoints[0] = damage * triggerAmount / 100 / 8;
trigger_spell_id = 63468;
target = pVictim;
}
// Rapid Recuperation
else if (auraSpellInfo->Id == 53228 || auraSpellInfo->Id == 53232)
{
// This effect only from Rapid Fire (ability cast)
if (!(procSpell->SpellFamilyFlags & UI64LIT(0x0000000000000020)))
return SPELL_AURA_PROC_FAILED;
}
// Entrapment correction
else if ((auraSpellInfo->Id == 19184 || auraSpellInfo->Id == 19387 || auraSpellInfo->Id == 19388) &&
!(procSpell->SpellFamilyFlags & UI64LIT(0x200000000000) || procSpell->SpellFamilyFlags2 & UI64LIT(0x40000)))
return SPELL_AURA_PROC_FAILED;
// Lock and Load
else if (auraSpellInfo->SpellIconID == 3579)
{
// Check for Lock and Load Marker
if (HasAura(67544))
return SPELL_AURA_PROC_FAILED;
}
break;
}
case SPELLFAMILY_PALADIN:
{
/*
// Blessed Life
if (auraSpellInfo->SpellIconID == 2137)
{
switch (auraSpellInfo->Id)
{
case 31828: // Rank 1
case 31829: // Rank 2
case 31830: // Rank 3
break;
default:
sLog.outError("Unit::HandleProcTriggerSpellAuraProc: Spell %u miss posibly Blessed Life", auraSpellInfo->Id);
return SPELL_AURA_PROC_FAILED;
}
}
*/
// Healing Discount
if (auraSpellInfo->Id==37705)
{
trigger_spell_id = 37706;
target = this;
}
// Soul Preserver
if (auraSpellInfo->Id==60510)
{
trigger_spell_id = 60515;
target = this;
}
// Illumination
else if (auraSpellInfo->SpellIconID==241)
{
if(!procSpell)
return SPELL_AURA_PROC_FAILED;
// procSpell is the triggered spell, but we need the mana cost of the originally cast spell
uint32 originalSpellId = procSpell->Id;
// Holy Shock heal
if (procSpell->SpellFamilyFlags & UI64LIT(0x0001000000000000))
{
switch(procSpell->Id)
{
case 25914: originalSpellId = 20473; break;
case 25913: originalSpellId = 20929; break;
case 25903: originalSpellId = 20930; break;
case 27175: originalSpellId = 27174; break;
case 33074: originalSpellId = 33072; break;
case 48820: originalSpellId = 48824; break;
case 48821: originalSpellId = 48825; break;
default:
sLog.outError("Unit::HandleProcTriggerSpellAuraProc: Spell %u not handled in HShock",procSpell->Id);
return SPELL_AURA_PROC_FAILED;
}
}
SpellEntry const *originalSpell = sSpellStore.LookupEntry(originalSpellId);
if(!originalSpell)
{
sLog.outError("Unit::HandleProcTriggerSpellAuraProc: Spell %u unknown but selected as original in Illu",originalSpellId);
return SPELL_AURA_PROC_FAILED;
}
// percent stored in effect 1 (class scripts) base points
int32 cost = originalSpell->manaCost + originalSpell->ManaCostPercentage * GetCreateMana() / 100;
basepoints[0] = cost*auraSpellInfo->CalculateSimpleValue(EFFECT_INDEX_1)/100;
trigger_spell_id = 20272;
target = this;
}
// Lightning Capacitor
else if (auraSpellInfo->Id==37657)
{
if(!pVictim || !pVictim->isAlive())
return SPELL_AURA_PROC_FAILED;
// stacking
CastSpell(this, 37658, true, NULL, triggeredByAura);
Aura * dummy = GetDummyAura(37658);
// release at 3 auras in the stack (the count is stored in the basepoints of the trigger aura)
if(!dummy || dummy->GetStackAmount() < uint32(triggerAmount))
return SPELL_AURA_PROC_FAILED;
RemoveAurasDueToSpell(37658);
trigger_spell_id = 37661;
target = pVictim;
}
// Bonus Healing (Crystal Spire of Karabor mace)
else if (auraSpellInfo->Id == 40971)
{
// If your target is below $s1% health
if (pVictim->GetHealth() > pVictim->GetMaxHealth() * triggerAmount / 100)
return SPELL_AURA_PROC_FAILED;
}
// Thunder Capacitor
else if (auraSpellInfo->Id == 54841)
{
if(!pVictim || !pVictim->isAlive())
return SPELL_AURA_PROC_FAILED;
// stacking
CastSpell(this, 54842, true, NULL, triggeredByAura);
// counting
Aura * dummy = GetDummyAura(54842);
// release at 3 auras in the stack (the count is stored in the basepoints of the trigger aura)
if(!dummy || dummy->GetStackAmount() < uint32(triggerAmount))
return SPELL_AURA_PROC_FAILED;
RemoveAurasDueToSpell(54842);
trigger_spell_id = 54843;
target = pVictim;
}
break;
}
case SPELLFAMILY_SHAMAN:
{
// Lightning Shield (overwrite nonexistent triggered spell call in spell.dbc)
if (auraSpellInfo->SpellFamilyFlags & UI64LIT(0x0000000000000400) && auraSpellInfo->SpellVisual[0] == 37)
{
switch(auraSpellInfo->Id)
{
case 324: // Rank 1
trigger_spell_id = 26364; break;
case 325: // Rank 2
trigger_spell_id = 26365; break;
case 905: // Rank 3
trigger_spell_id = 26366; break;
case 945: // Rank 4
trigger_spell_id = 26367; break;
case 8134: // Rank 5
trigger_spell_id = 26369; break;
case 10431: // Rank 6
trigger_spell_id = 26370; break;
case 10432: // Rank 7
trigger_spell_id = 26363; break;
case 25469: // Rank 8
trigger_spell_id = 26371; break;
case 25472: // Rank 9
trigger_spell_id = 26372; break;
case 49280: // Rank 10
trigger_spell_id = 49278; break;
case 49281: // Rank 11
trigger_spell_id = 49279; break;
default:
sLog.outError("Unit::HandleProcTriggerSpellAuraProc: Spell %u not handled in LShield", auraSpellInfo->Id);
return SPELL_AURA_PROC_FAILED;
}
}
// Lightning Shield (The Ten Storms set)
else if (auraSpellInfo->Id == 23551)
{
trigger_spell_id = 23552;
target = pVictim;
}
// Damage from Lightning Shield (The Ten Storms set)
else if (auraSpellInfo->Id == 23552)
trigger_spell_id = 27635;
// Mana Surge (The Earthfury set)
else if (auraSpellInfo->Id == 23572)
{
if(!procSpell)
return SPELL_AURA_PROC_FAILED;
basepoints[0] = procSpell->manaCost * 35 / 100;
trigger_spell_id = 23571;
target = this;
}
// Nature's Guardian
else if (auraSpellInfo->SpellIconID == 2013)
{
// Check health condition - should drop below 30% (damage is dealt after this!)
if (!(10*(int32(GetHealth() - damage)) < int32(3 * GetMaxHealth())))
return SPELL_AURA_PROC_FAILED;
if(pVictim && pVictim->isAlive())
pVictim->getThreatManager().modifyThreatPercent(this,-10);
basepoints[0] = triggerAmount * GetMaxHealth() / 100;
trigger_spell_id = 31616;
target = this;
}
// Item - Shaman T10 Restoration 2P Bonus
else if (auraSpellInfo->Id == 70807)
{
if (!procSpell)
return SPELL_AURA_PROC_FAILED;
// only allow Riptide to proc
switch(procSpell->Id)
{
case 61295: // Rank 1
case 61299: // Rank 2
case 61300: // Rank 3
case 61301: // Rank 4
break;
default:
return SPELL_AURA_PROC_FAILED;
}
trigger_spell_id = 70806;
target = this;
}
break;
}
case SPELLFAMILY_DEATHKNIGHT:
{
// Acclimation
if (auraSpellInfo->SpellIconID == 1930)
{
if (!procSpell)
return SPELL_AURA_PROC_FAILED;
switch(GetFirstSchoolInMask(GetSpellSchoolMask(procSpell)))
{
case SPELL_SCHOOL_NORMAL:
return SPELL_AURA_PROC_FAILED; // ignore
case SPELL_SCHOOL_HOLY: trigger_spell_id = 50490; break;
case SPELL_SCHOOL_FIRE: trigger_spell_id = 50362; break;
case SPELL_SCHOOL_NATURE: trigger_spell_id = 50488; break;
case SPELL_SCHOOL_FROST: trigger_spell_id = 50485; break;
case SPELL_SCHOOL_SHADOW: trigger_spell_id = 50489; break;
case SPELL_SCHOOL_ARCANE: trigger_spell_id = 50486; break;
default:
return SPELL_AURA_PROC_FAILED;
}
}
// Glyph of Death's Embrace
else if (auraSpellInfo->Id == 58677)
{
if (procSpell->Id != 47633)
return SPELL_AURA_PROC_FAILED;
}
//Glyph of Death Grip
if (auraSpellInfo->Id == 62259)
{
//remove cooldown of Death Grip
if (GetTypeId() == TYPEID_PLAYER)
((Player*)this)->RemoveSpellCooldown(49576, true);
return SPELL_AURA_PROC_OK;
}
// Item - Death Knight T10 Melee 4P Bonus
else if (auraSpellInfo->Id == 70656)
{
if (GetTypeId() != TYPEID_PLAYER || getClass() != CLASS_DEATH_KNIGHT)
return SPELL_AURA_PROC_FAILED;
for(uint32 i = 0; i < MAX_RUNES; ++i)
if (((Player*)this)->GetRuneCooldown(i) == 0)
return SPELL_AURA_PROC_FAILED;
}
// Blade Barrier
else if (auraSpellInfo->SpellIconID == 85)
{
if (GetTypeId() != TYPEID_PLAYER || getClass() != CLASS_DEATH_KNIGHT ||
!((Player*)this)->IsBaseRuneSlotsOnCooldown(RUNE_BLOOD))
return SPELL_AURA_PROC_FAILED;
}
// Improved Blood Presence
else if (auraSpellInfo->Id == 63611)
{
if (GetTypeId() != TYPEID_PLAYER || !((Player*)this)->isHonorOrXPTarget(pVictim) || !damage)
return SPELL_AURA_PROC_FAILED;
basepoints[0] = triggerAmount * damage / 100;
trigger_spell_id = 50475;
}
break;
}
default:
break;
}
// All ok. Check current trigger spell
SpellEntry const* triggerEntry = sSpellStore.LookupEntry(trigger_spell_id);
if (!triggerEntry)
{
// Not cast unknown spell
// sLog.outError("Unit::HandleProcTriggerSpellAuraProc: Spell %u have 0 in EffectTriggered[%d], not handled custom case?",auraSpellInfo->Id,triggeredByAura->GetEffIndex());
return SPELL_AURA_PROC_FAILED;
}
// not allow proc extra attack spell at extra attack
if (m_extraAttacks && IsSpellHaveEffect(triggerEntry, SPELL_EFFECT_ADD_EXTRA_ATTACKS))
return SPELL_AURA_PROC_FAILED;
// Custom basepoints/target for exist spell
// dummy basepoints or other customs
switch(trigger_spell_id)
{
// Cast positive spell on enemy target
case 7099: // Curse of Mending
case 39647: // Curse of Mending
case 29494: // Temptation
case 20233: // Improved Lay on Hands (cast on target)
{
target = pVictim;
break;
}
// Combo point adding triggers (need to add the combo point only for the main target, and after a possible combo point reset)
case 15250: // Rogue Setup
{
if(!pVictim || pVictim != getVictim()) // applied only for main target
return SPELL_AURA_PROC_FAILED;
break; // continue normal case
}
// Finishing moves that add combo points
case 14189: // Seal Fate (Netherblade set)
case 14157: // Ruthlessness
case 70802: // Mayhem (Shadowblade sets)
{
// Need to add the combo point AFTER the finishing move (or it gets dropped in the finish phase)
if (Spell* spell = GetCurrentSpell(CURRENT_GENERIC_SPELL))
{
spell->AddTriggeredSpell(trigger_spell_id);
return SPELL_AURA_PROC_OK;
}
return SPELL_AURA_PROC_FAILED;
}
// Bloodthirst (($m/100)% of max health)
case 23880:
{
basepoints[0] = int32(GetMaxHealth() * triggerAmount / 100);
break;
}
// Shamanistic Rage triggered spell
case 30824:
{
basepoints[0] = int32(GetTotalAttackPowerValue(BASE_ATTACK) * triggerAmount / 100);
break;
}
// Enlightenment (trigger only from mana cost spells)
case 35095:
{
if(!procSpell || procSpell->powerType!=POWER_MANA || (procSpell->manaCost==0 && procSpell->ManaCostPercentage==0 && procSpell->manaCostPerlevel==0))
return SPELL_AURA_PROC_FAILED;
break;
}
// Demonic Pact
case 48090:
{
// As the spell is procced from the pet's attack - find the owner
Unit* owner = GetOwner();
if (!owner || owner->GetTypeId() != TYPEID_PLAYER)
return SPELL_AURA_PROC_FAILED;
// This spell doesn't stack, but refreshes duration, so read the current bonus in order to subtract it later.
int32 curBonus = 0;
if (Aura* aur = owner->GetAura(48090, EFFECT_INDEX_0))
curBonus = aur->GetModifier()->m_amount;
int32 spellDamage = owner->SpellBaseDamageBonusDone(SPELL_SCHOOL_MASK_MAGIC) - curBonus;
if(spellDamage <= 0)
return SPELL_AURA_PROC_FAILED;
// percent stored in owner talent dummy
AuraList const& dummyAuras = owner->GetAurasByType(SPELL_AURA_DUMMY);
for (AuraList::const_iterator i = dummyAuras.begin(); i != dummyAuras.end(); ++i)
{
if ((*i)->GetSpellProto()->SpellIconID == 3220)
{
basepoints[0] = basepoints[1] = int32(spellDamage * (*i)->GetModifier()->m_amount / 100);
break;
}
}
break;
}
// Sword and Board
case 50227:
{
// Remove cooldown on Shield Slam
if (GetTypeId() == TYPEID_PLAYER)
((Player*)this)->RemoveSpellCategoryCooldown(1209, true);
break;
}
// Maelstrom Weapon
case 53817:
{
// Item - Shaman T10 Enhancement 4P Bonus
// Calculate before roll_chance of ranks
if (Aura * dummy = GetDummyAura(70832))
{
if (SpellAuraHolder *aurHolder = GetSpellAuraHolder(53817))
if ((aurHolder->GetStackAmount() == aurHolder->GetSpellProto()->StackAmount) && roll_chance_i(dummy->GetBasePoints()))
CastSpell(this,70831,true,castItem,triggeredByAura);
}
// have rank dependent proc chance, ignore too often cases
// PPM = 2.5 * (rank of talent),
uint32 rank = sSpellMgr.GetSpellRank(auraSpellInfo->Id);
// rank 5 -> 100%, rank 4 -> 80%, etc. of the full rate
if(!roll_chance_i(20*rank))
return SPELL_AURA_PROC_FAILED;
// Item - Shaman T10 Enhancement 4P Bonus
if (Aura *aur = GetAura(70832, EFFECT_INDEX_0))
{
Aura *maelBuff = GetAura(trigger_spell_id, EFFECT_INDEX_0);
if (maelBuff && maelBuff->GetStackAmount() + 1 == maelBuff->GetSpellProto()->StackAmount)
if (roll_chance_i(aur->GetModifier()->m_amount))
CastSpell(this, 70831, true, NULL, aur);
}
break;
}
// Brain Freeze
case 57761:
{
if(!procSpell)
return SPELL_AURA_PROC_FAILED;
// A trigger from Blizzard requires Improved Blizzard to be present
if (procSpell->SpellFamilyName==SPELLFAMILY_MAGE && (procSpell->SpellFamilyFlags & UI64LIT(0x0000000000000080)))
{
bool found = false;
AuraList const& mOverrideClassScript = GetAurasByType(SPELL_AURA_OVERRIDE_CLASS_SCRIPTS);
for(AuraList::const_iterator i = mOverrideClassScript.begin(); i != mOverrideClassScript.end(); ++i)
{
int32 script = (*i)->GetModifier()->m_miscvalue;
if(script==836 || script==988 || script==989)
{
found=true;
break;
}
}
if(!found)
return SPELL_AURA_PROC_FAILED;
}
break;
}
// Astral Shift
case 52179:
{
if (procSpell == 0 || !(procEx & (PROC_EX_NORMAL_HIT|PROC_EX_CRITICAL_HIT)) || this == pVictim)
return SPELL_AURA_PROC_FAILED;
// Need stun, fear or silence mechanic
if (!(GetAllSpellMechanicMask(procSpell) & IMMUNE_TO_SILENCE_AND_STUN_AND_FEAR_MASK))
return SPELL_AURA_PROC_FAILED;
break;
}
// Burning Determination
case 54748:
{
if(!procSpell)
return SPELL_AURA_PROC_FAILED;
// Need Interrupt or Silenced mechanic
if (!(GetAllSpellMechanicMask(procSpell) & IMMUNE_TO_INTERRUPT_AND_SILENCE_MASK))
return SPELL_AURA_PROC_FAILED;
break;
}
// Lock and Load
case 56453:
{
// Proc only from trap activation (the periodic proc is handled by another aura of this spell)
// because some spells have both flags (ON_TRAP_ACTIVATION and ON_PERIODIC), but should only proc ON_PERIODIC!!
if (!(procFlags & PROC_FLAG_ON_TRAP_ACTIVATION) || !procSpell ||
!(procSpell->SchoolMask & SPELL_SCHOOL_MASK_FROST) || !roll_chance_i(triggerAmount))
return SPELL_AURA_PROC_FAILED;
break;
}
// Freezing Fog (Rime triggered)
case 59052:
{
// Howling Blast cooldown reset
if (GetTypeId() == TYPEID_PLAYER)
((Player*)this)->RemoveSpellCategoryCooldown(1248, true);
break;
}
// Druid - Savage Defense
case 62606:
{
basepoints[0] = int32(GetTotalAttackPowerValue(BASE_ATTACK) * triggerAmount / 100);
break;
}
// Hack for Blood mark (ICC Saurfang)
case 72255:
case 72444:
case 72445:
case 72446:
{
float radius = GetSpellRadius(sSpellRadiusStore.LookupEntry(auraSpellInfo->EffectRadiusIndex[EFFECT_INDEX_0]));
Map::PlayerList const& pList = GetMap()->GetPlayers();
for (Map::PlayerList::const_iterator itr = pList.begin(); itr != pList.end(); ++itr)
if (itr->getSource() && itr->getSource()->IsWithinDistInMap(this,radius) && itr->getSource()->HasAura(triggerEntry->targetAuraSpell))
{
target = itr->getSource();
break;
}
break;
}
}
if (cooldown && GetTypeId()==TYPEID_PLAYER && ((Player*)this)->HasSpellCooldown(trigger_spell_id))
return SPELL_AURA_PROC_FAILED;
// try detect target manually if not set
if (target == NULL)
target = !(procFlags & PROC_FLAG_SUCCESSFUL_POSITIVE_SPELL) && IsPositiveSpell(trigger_spell_id) ? this : pVictim;
// default case
if (!target || (target != this && !target->isAlive()))
return SPELL_AURA_PROC_FAILED;
if (SpellEntry const* triggeredSpellInfo = sSpellStore.LookupEntry(trigger_spell_id))
{
if (basepoints[EFFECT_INDEX_0] || basepoints[EFFECT_INDEX_1] || basepoints[EFFECT_INDEX_2])
CastCustomSpell(target,triggeredSpellInfo,
basepoints[EFFECT_INDEX_0] ? &basepoints[EFFECT_INDEX_0] : NULL,
basepoints[EFFECT_INDEX_1] ? &basepoints[EFFECT_INDEX_1] : NULL,
basepoints[EFFECT_INDEX_2] ? &basepoints[EFFECT_INDEX_2] : NULL,
true, castItem, triggeredByAura);
else
CastSpell(target,triggeredSpellInfo,true,castItem,triggeredByAura);
}
else
{
sLog.outError("HandleProcTriggerSpellAuraProc: unknown spell id %u by caster: %s triggered by aura %u (eff %u)", trigger_spell_id, GetGuidStr().c_str(), triggeredByAura->GetId(), triggeredByAura->GetEffIndex());
return SPELL_AURA_PROC_FAILED;
}
if (cooldown && GetTypeId()==TYPEID_PLAYER)
((Player*)this)->AddSpellCooldown(trigger_spell_id,0,time(NULL) + cooldown);
return SPELL_AURA_PROC_OK;
}
SpellAuraProcResult Unit::HandleProcTriggerDamageAuraProc(Unit *pVictim, uint32 damage, Aura* triggeredByAura, SpellEntry const *procSpell, uint32 procFlags, uint32 procEx, uint32 cooldown)
{
SpellEntry const *spellInfo = triggeredByAura->GetSpellProto();
DEBUG_FILTER_LOG(LOG_FILTER_SPELL_CAST, "ProcDamageAndSpell: doing %u damage from spell id %u (triggered by auratype %u of spell %u)",
triggeredByAura->GetModifier()->m_amount, spellInfo->Id, triggeredByAura->GetModifier()->m_auraname, triggeredByAura->GetId());
SpellNonMeleeDamage damageInfo(this, pVictim, spellInfo->Id, SpellSchoolMask(spellInfo->SchoolMask));
CalculateSpellDamage(&damageInfo, triggeredByAura->GetModifier()->m_amount, spellInfo);
damageInfo.target->CalculateAbsorbResistBlock(this, &damageInfo, spellInfo);
DealDamageMods(damageInfo.target,damageInfo.damage,&damageInfo.absorb);
SendSpellNonMeleeDamageLog(&damageInfo);
DealSpellDamage(&damageInfo, true);
return SPELL_AURA_PROC_OK;
}
SpellAuraProcResult Unit::HandleOverrideClassScriptAuraProc(Unit *pVictim, uint32 /*damage*/, Aura *triggeredByAura, SpellEntry const *procSpell, uint32 /*procFlag*/, uint32 /*procEx*/ ,uint32 cooldown)
{
int32 scriptId = triggeredByAura->GetModifier()->m_miscvalue;
if(!pVictim || !pVictim->isAlive())
return SPELL_AURA_PROC_FAILED;
Item* castItem = !triggeredByAura->GetCastItemGuid().IsEmpty() && GetTypeId()==TYPEID_PLAYER
? ((Player*)this)->GetItemByGuid(triggeredByAura->GetCastItemGuid()) : NULL;
// Basepoints of trigger aura
int32 triggerAmount = triggeredByAura->GetModifier()->m_amount;
uint32 triggered_spell_id = 0;
switch(scriptId)
{
case 836: // Improved Blizzard (Rank 1)
{
if (!procSpell || procSpell->SpellVisual[0]!=9487)
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 12484;
break;
}
case 988: // Improved Blizzard (Rank 2)
{
if (!procSpell || procSpell->SpellVisual[0]!=9487)
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 12485;
break;
}
case 989: // Improved Blizzard (Rank 3)
{
if (!procSpell || procSpell->SpellVisual[0]!=9487)
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 12486;
break;
}
case 4086: // Improved Mend Pet (Rank 1)
case 4087: // Improved Mend Pet (Rank 2)
{
if(!roll_chance_i(triggerAmount))
return SPELL_AURA_PROC_FAILED;
triggered_spell_id = 24406;
break;
}
case 4533: // Dreamwalker Raiment 2 pieces bonus
{
// Chance 50%
if (!roll_chance_i(50))
return SPELL_AURA_PROC_FAILED;
switch (pVictim->getPowerType())
{
case POWER_MANA: triggered_spell_id = 28722; break;
case POWER_RAGE: triggered_spell_id = 28723; break;
case POWER_ENERGY: triggered_spell_id = 28724; break;
default:
return SPELL_AURA_PROC_FAILED;
}
break;
}
case 4537: // Dreamwalker Raiment 6 pieces bonus
triggered_spell_id = 28750; // Blessing of the Claw
break;
case 5497: // Improved Mana Gems (Serpent-Coil Braid)
triggered_spell_id = 37445; // Mana Surge
break;
case 6953: // Warbringer
RemoveAurasAtMechanicImmunity(IMMUNE_TO_ROOT_AND_SNARE_MASK,0,true);
return SPELL_AURA_PROC_OK;
case 7010: // Revitalize (rank 1)
case 7011: // Revitalize (rank 2)
case 7012: // Revitalize (rank 3)
{
if(!roll_chance_i(triggerAmount))
return SPELL_AURA_PROC_FAILED;
switch( pVictim->getPowerType() )
{
case POWER_MANA: triggered_spell_id = 48542; break;
case POWER_RAGE: triggered_spell_id = 48541; break;
case POWER_ENERGY: triggered_spell_id = 48540; break;
case POWER_RUNIC_POWER: triggered_spell_id = 48543; break;
default: return SPELL_AURA_PROC_FAILED;
}
break;
}
case 7282: // Crypt Fever & Ebon Plaguebringer
{
if (!procSpell || pVictim == this)
return SPELL_AURA_PROC_FAILED;
bool HasEP = false;
Unit::AuraList const& scriptAuras = GetAurasByType(SPELL_AURA_OVERRIDE_CLASS_SCRIPTS);
for(Unit::AuraList::const_iterator i = scriptAuras.begin(); i != scriptAuras.end(); ++i)
{
if ((*i)->GetSpellProto()->SpellIconID == 1766)
{
HasEP = true;
break;
}
}
if (!HasEP)
switch(triggeredByAura->GetId())
{
case 49032: triggered_spell_id = 50508; break;
case 49631: triggered_spell_id = 50509; break;
case 49632: triggered_spell_id = 50510; break;
default: return SPELL_AURA_PROC_FAILED;
}
else
switch(triggeredByAura->GetId())
{
case 51099: triggered_spell_id = 51726; break;
case 51160: triggered_spell_id = 51734; break;
case 51161: triggered_spell_id = 51735; break;
default: return SPELL_AURA_PROC_FAILED;
}
break;
}
}
// not processed
if(!triggered_spell_id)
return SPELL_AURA_PROC_FAILED;
// standard non-dummy case
SpellEntry const* triggerEntry = sSpellStore.LookupEntry(triggered_spell_id);
if(!triggerEntry)
{
sLog.outError("Unit::HandleOverrideClassScriptAuraProc: Spell %u triggering for class script id %u",triggered_spell_id,scriptId);
return SPELL_AURA_PROC_FAILED;
}
if( cooldown && GetTypeId()==TYPEID_PLAYER && ((Player*)this)->HasSpellCooldown(triggered_spell_id))
return SPELL_AURA_PROC_FAILED;
CastSpell(pVictim, triggered_spell_id, true, castItem, triggeredByAura);
if( cooldown && GetTypeId()==TYPEID_PLAYER )
((Player*)this)->AddSpellCooldown(triggered_spell_id,0,time(NULL) + cooldown);
return SPELL_AURA_PROC_OK;
}
SpellAuraProcResult Unit::HandleMendingAuraProc( Unit* /*pVictim*/, uint32 /*damage*/, Aura* triggeredByAura, SpellEntry const* /*procSpell*/, uint32 /*procFlag*/, uint32 /*procEx*/, uint32 /*cooldown*/ )
{
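// Prayer of Mending handler: pass the aura to another random raid member in range while jumps remain, then heal the current holder via the triggered heal spell (33110).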
// the aura can be deleted during casts
SpellEntry const* spellProto = triggeredByAura->GetSpellProto();
SpellEffectIndex effIdx = triggeredByAura->GetEffIndex();
int32 heal = triggeredByAura->GetModifier()->m_amount;
ObjectGuid caster_guid = triggeredByAura->GetCasterGuid();
// jumps
int32 jumps = triggeredByAura->GetHolder()->GetAuraCharges()-1;
// expire the current aura
triggeredByAura->GetHolder()->SetAuraCharges(1); // will be removed at the next charge decrease
// next target selection
if (jumps > 0 && GetTypeId()==TYPEID_PLAYER && caster_guid.IsPlayer())
{
float radius;
if (spellProto->EffectRadiusIndex[effIdx])
radius = GetSpellRadius(sSpellRadiusStore.LookupEntry(spellProto->EffectRadiusIndex[effIdx]));
else
radius = GetSpellMaxRange(sSpellRangeStore.LookupEntry(spellProto->rangeIndex));
if(Player* caster = ((Player*)triggeredByAura->GetCaster()))
{
caster->ApplySpellMod(spellProto->Id, SPELLMOD_RADIUS, radius,NULL);
if(Player* target = ((Player*)this)->GetNextRandomRaidMember(radius))
{
// the aura will be applied by the caster, but the spell is cast from the current aura holder
SpellModifier *mod = new SpellModifier(SPELLMOD_CHARGES,SPELLMOD_FLAT,jumps-5,spellProto->Id,spellProto->SpellFamilyFlags,spellProto->SpellFamilyFlags2);
// remove before applying the next one (locked against deletion)
triggeredByAura->SetInUse(true);
RemoveAurasByCasterSpell(spellProto->Id,caster->GetGUID());
caster->AddSpellMod(mod, true);
CastCustomSpell(target,spellProto->Id,&heal,NULL,NULL,true,NULL,triggeredByAura,caster->GetGUID());
caster->AddSpellMod(mod, false);
triggeredByAura->SetInUse(false);
}
}
}
// heal
CastCustomSpell(this,33110,&heal,NULL,NULL,true,NULL,NULL,caster_guid);
return SPELL_AURA_PROC_OK;
}
SpellAuraProcResult Unit::HandleModCastingSpeedNotStackAuraProc(Unit* /*pVictim*/, uint32 /*damage*/, Aura* /*triggeredByAura*/, SpellEntry const* procSpell, uint32 /*procFlag*/, uint32 /*procEx*/, uint32 /*cooldown*/)
{
// Skip melee hits or instant cast spells
return !(procSpell == NULL || GetSpellCastTime(procSpell) == 0) ? SPELL_AURA_PROC_OK : SPELL_AURA_PROC_FAILED;
}
SpellAuraProcResult Unit::HandleReflectSpellsSchoolAuraProc(Unit* /*pVictim*/, uint32 /*damage*/, Aura* triggeredByAura, SpellEntry const* procSpell, uint32 /*procFlag*/, uint32 /*procEx*/, uint32 /*cooldown*/)
{
// Skip melee hits and spells with the wrong school
return !(procSpell == NULL || (triggeredByAura->GetModifier()->m_miscvalue & procSpell->SchoolMask) == 0) ? SPELL_AURA_PROC_OK : SPELL_AURA_PROC_FAILED;
}
SpellAuraProcResult Unit::HandleModPowerCostSchoolAuraProc(Unit* /*pVictim*/, uint32 /*damage*/, Aura* triggeredByAura, SpellEntry const* procSpell, uint32 /*procFlag*/, uint32 /*procEx*/, uint32 /*cooldown*/)
{
// Skip melee hits and spells with the wrong school or zero cost
return !(procSpell == NULL ||
(procSpell->manaCost == 0 && procSpell->ManaCostPercentage == 0) || // Cost check
(triggeredByAura->GetModifier()->m_miscvalue & procSpell->SchoolMask) == 0) ? SPELL_AURA_PROC_OK : SPELL_AURA_PROC_FAILED; // School check
}
SpellAuraProcResult Unit::HandleMechanicImmuneResistanceAuraProc(Unit* /*pVictim*/, uint32 /*damage*/, Aura* triggeredByAura, SpellEntry const* procSpell, uint32 /*procFlag*/, uint32 /*procEx*/, uint32 /*cooldown*/)
{
// Compare mechanic
return !(procSpell==NULL || int32(procSpell->Mechanic) != triggeredByAura->GetModifier()->m_miscvalue)
? SPELL_AURA_PROC_OK : SPELL_AURA_PROC_FAILED;
}
SpellAuraProcResult Unit::HandleModDamageFromCasterAuraProc(Unit* pVictim, uint32 /*damage*/, Aura* triggeredByAura, SpellEntry const* /*procSpell*/, uint32 /*procFlag*/, uint32 /*procEx*/, uint32 /*cooldown*/)
{
// Compare casters
return triggeredByAura->GetCasterGuid() == pVictim->GetObjectGuid() ? SPELL_AURA_PROC_OK : SPELL_AURA_PROC_FAILED;
}
SpellAuraProcResult Unit::HandleAddFlatModifierAuraProc(Unit* /*pVictim*/, uint32 /*damage*/, Aura* triggeredByAura, SpellEntry const * /*procSpell*/, uint32 /*procFlag*/, uint32 /*procEx*/, uint32 /*cooldown*/)
{
SpellEntry const *spellInfo = triggeredByAura->GetSpellProto();
if (spellInfo->Id == 55166) // Tidal Force
{
// Remove only single aura from stack
if (triggeredByAura->GetStackAmount() > 1 && !triggeredByAura->GetHolder()->ModStackAmount(-1))
return SPELL_AURA_PROC_CANT_TRIGGER;
}
return SPELL_AURA_PROC_OK;
}
SpellAuraProcResult Unit::HandleAddPctModifierAuraProc(Unit* /*pVictim*/, uint32 /*damage*/, Aura* triggeredByAura, SpellEntry const *procSpell, uint32 /*procFlag*/, uint32 procEx, uint32 /*cooldown*/)
{
SpellEntry const *spellInfo = triggeredByAura->GetSpellProto();
Item* castItem = !triggeredByAura->GetCastItemGuid().IsEmpty() && GetTypeId()==TYPEID_PLAYER
? ((Player*)this)->GetItemByGuid(triggeredByAura->GetCastItemGuid()) : NULL;
switch(spellInfo->SpellFamilyName)
{
case SPELLFAMILY_MAGE:
{
// Combustion
if (spellInfo->Id == 11129)
{
//last charge and crit
if (triggeredByAura->GetHolder()->GetAuraCharges() <= 1 && (procEx & PROC_EX_CRITICAL_HIT) )
return SPELL_AURA_PROC_OK; // charge counting (will be removed)
CastSpell(this, 28682, true, castItem, triggeredByAura);
return (procEx & PROC_EX_CRITICAL_HIT) ? SPELL_AURA_PROC_OK : SPELL_AURA_PROC_FAILED; // charge update only at crit hits, no hidden cooldowns
}
break;
}
case SPELLFAMILY_PRIEST:
{
// Serendipity
if (spellInfo->SpellIconID == 2900)
{
RemoveAurasDueToSpell(spellInfo->Id);
return SPELL_AURA_PROC_OK;
}
break;
}
case SPELLFAMILY_PALADIN:
{
// Glyph of Divinity
if (spellInfo->Id == 54939)
{
// Look up the base mana restore amount
for (int i = 0; i < MAX_EFFECT_INDEX; ++i)
{
if (procSpell->Effect[i] == SPELL_EFFECT_ENERGIZE)
{
int32 mana = procSpell->CalculateSimpleValue(SpellEffectIndex(i));
CastCustomSpell(this, 54986, NULL, &mana, NULL, true, castItem, triggeredByAura);
break;
}
}
return SPELL_AURA_PROC_OK;
}
break;
}
}
return SPELL_AURA_PROC_OK;
}
SpellAuraProcResult Unit::HandleModDamagePercentDoneAuraProc(Unit* /*pVictim*/, uint32 /*damage*/, Aura* triggeredByAura, SpellEntry const *procSpell, uint32 /*procFlag*/, uint32 procEx, uint32 cooldown)
{
SpellEntry const *spellInfo = triggeredByAura->GetSpellProto();
Item* castItem = !triggeredByAura->GetCastItemGuid().IsEmpty() && GetTypeId()==TYPEID_PLAYER
? ((Player*)this)->GetItemByGuid(triggeredByAura->GetCastItemGuid()) : NULL;
// Aspect of the Viper
if (spellInfo->SpellFamilyName == SPELLFAMILY_HUNTER && spellInfo->SpellFamilyFlags & UI64LIT(0x4000000000000))
{
uint32 maxmana = GetMaxPower(POWER_MANA);
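// mana returned: 1% of maximum mana per second of ranged weapon speed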
int32 bp = int32(maxmana* GetAttackTime(RANGED_ATTACK)/1000.0f/100.0f);
if(cooldown && GetTypeId()==TYPEID_PLAYER && ((Player*)this)->HasSpellCooldown(34075))
return SPELL_AURA_PROC_FAILED;
CastCustomSpell(this, 34075, &bp, NULL, NULL, true, castItem, triggeredByAura);
}
// Arcane Blast
else if (spellInfo->Id == 36032 && procSpell->SpellFamilyName == SPELLFAMILY_MAGE && procSpell->SpellIconID == 2294)
// prevent proc from self (the spell that triggered this aura)
return SPELL_AURA_PROC_FAILED;
return SPELL_AURA_PROC_OK;
}
SpellAuraProcResult Unit::HandlePeriodicDummyAuraProc(Unit* /*pVictim*/, uint32 /*damage*/, Aura* triggeredByAura, SpellEntry const *procSpell, uint32 /*procFlag*/, uint32 /*procEx*/, uint32 /*cooldown*/)
{
if (!triggeredByAura)
return SPELL_AURA_PROC_FAILED;
SpellEntry const *spellProto = triggeredByAura->GetSpellProto();
if (!spellProto)
return SPELL_AURA_PROC_FAILED;
switch (spellProto->SpellFamilyName)
{
case SPELLFAMILY_DEATHKNIGHT:
{
switch (spellProto->SpellIconID)
{
// Reaping
// Death Rune Mastery
// Blood of the North
case 22:
case 2622:
case 3041:
{
if(!procSpell)
return SPELL_AURA_PROC_FAILED;
if (getClass() != CLASS_DEATH_KNIGHT)
return SPELL_AURA_PROC_FAILED;
Player * plr = GetTypeId() == TYPEID_PLAYER? ((Player*)this) : NULL;
if (!plr)
return SPELL_AURA_PROC_FAILED;
//get spell rune cost
SpellRuneCostEntry const *runeCost = sSpellRuneCostStore.LookupEntry(procSpell->runeCostID);
if (!runeCost)
return SPELL_AURA_PROC_FAILED;
//convert runes to death
for (uint32 i = 0; i < NUM_RUNE_TYPES -1/*don't count death rune*/; ++i)
{
uint32 remainingCost = runeCost->RuneCost[i];
while(remainingCost)
{
int32 convertedRuneCooldown = -1;
uint32 convertedRune = i;
for(uint32 j = 0; j < MAX_RUNES; ++j)
{
// convert only valid runes
if (RuneType(i) != plr->GetCurrentRune(j) &&
RuneType(i) != plr->GetBaseRune(j))
continue;
// select rune with longest cooldown
if (convertedRuneCooldown < plr->GetRuneCooldown(j))
{
convertedRuneCooldown = int32(plr->GetRuneCooldown(j));
convertedRune = j;
}
}
if (convertedRuneCooldown >= 0)
plr->ConvertRune(convertedRune, RUNE_DEATH);
--remainingCost;
}
}
return SPELL_AURA_PROC_OK;
}
default:
break;
}
break;
}
default:
break;
}
return SPELL_AURA_PROC_OK;
}
SpellAuraProcResult Unit::HandleModRating(Unit* /*pVictim*/, uint32 /*damage*/, Aura* triggeredByAura, SpellEntry const * /*procSpell*/, uint32 /*procFlag*/, uint32 /*procEx*/, uint32 /*cooldown*/)
{
SpellEntry const *spellInfo = triggeredByAura->GetSpellProto();
if (spellInfo->Id == 71564) // Deadly Precision
{
// Remove only single aura from stack
if (triggeredByAura->GetStackAmount() > 1 && !triggeredByAura->GetHolder()->ModStackAmount(-1))
return SPELL_AURA_PROC_CANT_TRIGGER;
}
return SPELL_AURA_PROC_OK;
}
SpellAuraProcResult Unit::HandleRemoveByDamageProc(Unit* pVictim, uint32 damage, Aura* triggeredByAura, SpellEntry const *procSpell, uint32 procFlag, uint32 procEx, uint32 cooldown)
{
triggeredByAura->SetInUse(true);
RemoveAurasByCasterSpell(triggeredByAura->GetSpellProto()->Id, triggeredByAura->GetCasterGUID());
triggeredByAura->SetInUse(false);
return SPELL_AURA_PROC_OK;
}
SpellAuraProcResult Unit::HandleRemoveByDamageChanceProc(Unit* pVictim, uint32 damage, Aura* triggeredByAura, SpellEntry const *procSpell, uint32 procFlag, uint32 procEx, uint32 cooldown)
{
// The chance to dispel the aura depends on the damage taken relative to the caster's level.
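// Taking max_dmg damage (or more) guarantees removal; below that the chance scales linearly with damage.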
uint32 max_dmg = getLevel() > 8 ? 25 * getLevel() - 150 : 50;
float chance = float(damage) / max_dmg * 100.0f;
if (roll_chance_f(chance))
return HandleRemoveByDamageProc(pVictim, damage, triggeredByAura, procSpell, procFlag, procEx, cooldown);
return SPELL_AURA_PROC_FAILED;
}<|fim▁end|> | |
<|file_name|>eggfunctions.py<|end_file_name|><|fim▁begin|>"""Some easter eggs, just for fun"""
from utils import Filehandler
class EasterEggFunctions(object):
"""Easter Egg functions"""
def __init__(self):
self.fhandler = Filehandler()
<|fim▁hole|> def darkwing(self, channel, callback, msg=None, nck=None, hq=None, keys=None, pb=None):
"""Post a random line"""
filename = "./mylines/darkwing.txt"
myline = self.fhandler.getrandomline(filename)
callback.say(channel, myline)
def balu(self, channel, callback, msg=None, nck=None, hq=None, keys=None, pb=None):
"""Post a random line"""
filename = "./mylines/balu.txt"
myline = self.fhandler.getrandomline(filename)
callback.say(channel, myline)
def raspel(self, channel, callback, msg=None, nck=None, hq=None, keys=None, pb=None):
"""Post url to raspel"""
filename = "./myurls/raspel.url"
url = self.fhandler.getcontent(filename)
callback.say(channel, url)<|fim▁end|> | |
<|file_name|>TestSystemPropertiesInvariantRule.java<|end_file_name|><|fim▁begin|>package org.apache.lucene.util.junitcompat;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.Properties;
import org.junit.*;
import org.junit.rules.TestRule;
import org.junit.runner.JUnitCore;
import org.junit.runner.Result;
import org.junit.runner.notification.Failure;
import com.carrotsearch.randomizedtesting.rules.SystemPropertiesInvariantRule;
import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;
/**
* @see SystemPropertiesRestoreRule
* @see SystemPropertiesInvariantRule
*/
public class TestSystemPropertiesInvariantRule extends WithNestedTests {
public static final String PROP_KEY1 = "new-property-1";
public static final String VALUE1 = "new-value-1";
public TestSystemPropertiesInvariantRule() {
super(true);
}
public static class Base extends WithNestedTests.AbstractNestedTest {
public void testEmpty() {}
}
public static class InBeforeClass extends Base {
@BeforeClass
public static void beforeClass() {
System.setProperty(PROP_KEY1, VALUE1);
}
}
public static class InAfterClass extends Base {
@AfterClass
public static void afterClass() {
System.setProperty(PROP_KEY1, VALUE1);
}
}
public static class InTestMethod extends Base {
public void testMethod1() {
if (System.getProperty(PROP_KEY1) != null) {
throw new RuntimeException("Shouldn't be here.");
}
System.setProperty(PROP_KEY1, VALUE1);
}
public void testMethod2() {
testMethod1();
}
}
public static class NonStringProperties extends Base {
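    // Stores a non-String value in the system properties to check that the invariant rule copes with it.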
public void testMethod1() {
if (System.getProperties().get(PROP_KEY1) != null) {
throw new RuntimeException("Will pass.");
}
Properties properties = System.getProperties();
properties.put(PROP_KEY1, new Object());
Assert.assertTrue(System.getProperties().get(PROP_KEY1) != null);
}
public void testMethod2() {
testMethod1();
}
@AfterClass
public static void cleanup() {
System.getProperties().remove(PROP_KEY1);
}
}
public static class IgnoredProperty {
@Rule
public TestRule invariant = new SystemPropertiesInvariantRule(PROP_KEY1);
@Test
public void testMethod1() {
System.setProperty(PROP_KEY1, VALUE1);
}
}
@Before
@After
public void cleanup() {
System.clearProperty(PROP_KEY1);
}
@Test
public void testRuleInvariantBeforeClass() {
Result runClasses = JUnitCore.runClasses(InBeforeClass.class);
Assert.assertEquals(1, runClasses.getFailureCount());<|fim▁hole|> }
@Test
public void testRuleInvariantAfterClass() {
Result runClasses = JUnitCore.runClasses(InAfterClass.class);
Assert.assertEquals(1, runClasses.getFailureCount());
Assert.assertTrue(runClasses.getFailures().get(0).getMessage()
.contains(PROP_KEY1));
Assert.assertNull(System.getProperty(PROP_KEY1));
}
@Test
public void testRuleInvariantInTestMethod() {
Result runClasses = JUnitCore.runClasses(InTestMethod.class);
Assert.assertEquals(2, runClasses.getFailureCount());
for (Failure f : runClasses.getFailures()) {
Assert.assertTrue(f.getMessage().contains(PROP_KEY1));
}
Assert.assertNull(System.getProperty(PROP_KEY1));
}
@Test
public void testNonStringProperties() {
Result runClasses = JUnitCore.runClasses(NonStringProperties.class);
Assert.assertEquals(1, runClasses.getFailureCount());
Assert.assertTrue(runClasses.getFailures().get(0).getMessage().contains("Will pass"));
Assert.assertEquals(3, runClasses.getRunCount());
}
@Test
public void testIgnoredProperty() {
System.clearProperty(PROP_KEY1);
try {
Result runClasses = JUnitCore.runClasses(IgnoredProperty.class);
Assert.assertEquals(0, runClasses.getFailureCount());
Assert.assertEquals(VALUE1, System.getProperty(PROP_KEY1));
} finally {
System.clearProperty(PROP_KEY1);
}
}
}<|fim▁end|> | Assert.assertTrue(runClasses.getFailures().get(0).getMessage()
.contains(PROP_KEY1));
Assert.assertNull(System.getProperty(PROP_KEY1)); |
<|file_name|>TitleProvider.java<|end_file_name|><|fim▁begin|>package com.nvapp.android.libs.view;
public interface TitleProvider {
/**
* Returns the title of the view at position<|fim▁hole|> *
* @param position the position of the view
* @return the title for the view at the given position
*/
public String getTitle(int position);
}<|fim▁end|> | |
<|file_name|>MultiValue.cpp<|end_file_name|><|fim▁begin|>/* +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Copyright (c) 2014-2020 The plumed team
(see the PEOPLE file at the root of the distribution for a list of names)
See http://www.plumed.org for more information.
This file is part of plumed, version 2.
plumed is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
plumed is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with plumed. If not, see <http://www.gnu.org/licenses/>.
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ */
#include "MultiValue.h"
namespace PLMD {
MultiValue::MultiValue( const size_t& nvals, const size_t& nder ):
values(nvals),
nderivatives(nder),
derivatives(nvals*nder),
tmpval(0),
tmpder(nder),
atLeastOneSet(false)
{
std::vector<unsigned> myind( nder );
for(unsigned i=0; i<nder; ++i) myind[i]=i;
hasDerivatives.createIndexListFromVector( myind );
}
void MultiValue::resize( const size_t& nvals, const size_t& nder ) {
values.resize(nvals); nderivatives=nder; derivatives.resize( nvals*nder );
tmpder.resize( nder ); hasDerivatives.clear(); std::vector<unsigned> myind( nder );
for(unsigned i=0; i<nder; ++i) myind[i]=i;
hasDerivatives.createIndexListFromVector( myind );
atLeastOneSet=false;
}
void MultiValue::clearAll() {
if( atLeastOneSet && !hasDerivatives.updateComplete() ) hasDerivatives.updateActiveMembers();
for(unsigned i=0; i<values.size(); ++i) clear(i);
clearTemporyDerivatives(); hasDerivatives.deactivateAll(); atLeastOneSet=false;
}
void MultiValue::clear( const unsigned& ival ) {
values[ival]=0;
unsigned base=ival*nderivatives, ndert=hasDerivatives.getNumberActive();
for(unsigned i=0; i<ndert; ++i) derivatives[ base+hasDerivatives[i] ]=0.;
}
void MultiValue::clearTemporyDerivatives() {
unsigned ndert=hasDerivatives.getNumberActive(); tmpval=0.;
for(unsigned i=0; i<ndert; ++i) tmpder[ hasDerivatives[i] ]=0.;
}
void MultiValue::chainRule( const unsigned& ival, const unsigned& iout, const unsigned& stride, const unsigned& off,
const double& df, const unsigned& bufstart, std::vector<double>& buffer ) {
if( !hasDerivatives.updateComplete() ) hasDerivatives.updateActiveMembers();
plumed_dbg_assert( off<stride );
unsigned base=nderivatives*ival, ndert=hasDerivatives.getNumberActive();
unsigned start=bufstart+stride*(nderivatives+1)*iout + stride;
for(unsigned i=0; i<ndert; ++i) {
unsigned jder=hasDerivatives[i];
buffer[start+jder*stride] += df*derivatives[base+jder];
}
}
void MultiValue::copyValues( MultiValue& outvals ) const {
plumed_dbg_assert( values.size()<=outvals.getNumberOfValues() );
for(unsigned i=0; i<values.size(); ++i) outvals.setValue( i, values[i] );
}
void MultiValue::copyDerivatives( MultiValue& outvals ) {
plumed_dbg_assert( values.size()<=outvals.getNumberOfValues() && nderivatives<=outvals.getNumberOfDerivatives() );
if( !hasDerivatives.updateComplete() ) hasDerivatives.updateActiveMembers();
outvals.atLeastOneSet=true; unsigned ndert=hasDerivatives.getNumberActive();
for(unsigned j=0; j<ndert; ++j) {
unsigned jder=hasDerivatives[j]; outvals.hasDerivatives.activate(jder);
}
unsigned base=0, obase=0;
for(unsigned i=0; i<values.size(); ++i) {
for(unsigned j=0; j<ndert; ++j) {
unsigned jder=hasDerivatives[j];
outvals.derivatives[obase+jder] += derivatives[base+jder];
}
obase+=outvals.nderivatives; base+=nderivatives;
}
}
void MultiValue::quotientRule( const unsigned& nder, const unsigned& oder ) {
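// Compute values[nder]/tmpval and its derivatives into slot oder: d(v/w) = dv/w - v*dw/w^2.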
plumed_dbg_assert( nder<values.size() && oder<values.size() );
if( !hasDerivatives.updateComplete() ) hasDerivatives.updateActiveMembers();
unsigned ndert=hasDerivatives.getNumberActive(); double wpref;
unsigned obase=oder*nderivatives, nbase=nder*nderivatives;
if( fabs(tmpval)>epsilon ) { wpref=1.0/tmpval; }
else { wpref=1.0; }
double pref = values[nder]*wpref*wpref;
for(unsigned j=0; j<ndert; ++j) {
unsigned jder=hasDerivatives[j];
derivatives[obase+jder] = wpref*derivatives[nbase+jder] - pref*tmpder[jder];
}
values[oder] = wpref*values[nder];<|fim▁hole|><|fim▁end|> | }
} |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>default_app_config = 'comet.apps.CometIndicatorConfig'<|fim▁end|> | |
<|file_name|>noteGroup.js<|end_file_name|><|fim▁begin|>var pulse = pulse || {};<|fim▁hole|>
model: pulse.Note,
});<|fim▁end|> |
pulse.NoteGroup = Backbone.Collection.extend({ |
<|file_name|>_peer_express_route_circuit_connections_operations.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class PeerExpressRouteCircuitConnectionsOperations(object):
"""PeerExpressRouteCircuitConnectionsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2021_05_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def get(
self,
resource_group_name, # type: str
circuit_name, # type: str
peering_name, # type: str
connection_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.PeerExpressRouteCircuitConnection"
"""Gets the specified Peer Express Route Circuit Connection from the specified express route
circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param connection_name: The name of the peer express route circuit connection.
:type connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PeerExpressRouteCircuitConnection, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2021_05_01.models.PeerExpressRouteCircuitConnection
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PeerExpressRouteCircuitConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-05-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PeerExpressRouteCircuitConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/peerConnections/{connectionName}'} # type: ignore
def list(
self,
resource_group_name, # type: str
circuit_name, # type: str
peering_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.PeerExpressRouteCircuitConnectionListResult"]
"""Gets all global reach peer connections associated with a private peering in an express route
circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PeerExpressRouteCircuitConnectionListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2021_05_01.models.PeerExpressRouteCircuitConnectionListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PeerExpressRouteCircuitConnectionListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-05-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('PeerExpressRouteCircuitConnectionListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
<|fim▁hole|><|fim▁end|> | return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/peerConnections'} # type: ignore |
<|file_name|>read.rs<|end_file_name|><|fim▁begin|>extern crate rss;
use std::collections::HashMap;
use rss::Channel;
use rss::extension::Extension;
use rss::extension::dublincore::DublinCoreExtension;
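// Collect the textual values of all extension elements stored under the given key, if any.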
fn get_extension_values<'a>(
map: &'a HashMap<String, Vec<Extension>>,
key: &str,
) -> Option<Vec<&'a str>> {
map.get(key)
.map(|v| v.iter().filter_map(|ext| ext.value()).collect::<Vec<_>>())
}
#[test]
fn read_rss090() {
let input = include_str!("data/rss090.xml");
let channel = input.parse::<Channel>().expect("failed to parse xml");
assert_eq!(channel.title(), "Mozilla Dot Org");
assert_eq!(channel.link(), "http://www.mozilla.org");
assert_eq!(
channel.description(),
"the Mozilla Organization\n web site"
);
let image = channel.image().unwrap();
assert_eq!(image.title(), "Mozilla");
assert_eq!(image.url(), "http://www.mozilla.org/images/moz.gif");
assert_eq!(image.link(), "http://www.mozilla.org");
assert_eq!(channel.items().len(), 5);
let item = channel.items().get(0).unwrap();
assert_eq!(item.title(), Some("New Status Updates"));
assert_eq!(item.link(), Some("http://www.mozilla.org/status/"));
}
#[test]
fn read_rss091() {
let input = include_str!("data/rss091.xml");
let channel = input.parse::<Channel>().expect("failed to parse xml");
assert_eq!(channel.title(), "WriteTheWeb");
assert_eq!(channel.link(), "http://writetheweb.com");
assert_eq!(channel.description(), "News for web users that write back");
assert_eq!(channel.language(), Some("en-us"));
assert_eq!(
channel.copyright(),
Some("Copyright 2000, WriteTheWeb team.")
);
assert_eq!(channel.managing_editor(), Some("[email protected]"));
assert_eq!(channel.webmaster(), Some("[email protected]"));
let image = channel.image().unwrap();
assert_eq!(image.title(), "WriteTheWeb");
assert_eq!(
image.url(),
"http://writetheweb.com/images/mynetscape88.gif"
);
assert_eq!(image.link(), "http://writetheweb.com");
assert_eq!(image.width(), Some("88"));
assert_eq!(image.height(), Some("31"));
assert_eq!(
image.description(),
Some("News for web users that write back")
);
assert_eq!(channel.items().len(), 6);
let item = channel.items().get(0).unwrap();
assert_eq!(item.title(), Some("Giving the world a pluggable Gnutella"));
assert_eq!(item.link(), Some("http://writetheweb.com/read.php?item=24"));
assert_eq!(
item.description(),
Some(
"WorldOS is a framework on which to build programs that work like Freenet or \
Gnutella -allowing distributed applications using peer-to-peer routing.",
)
);
}
#[test]
fn read_rss092() {
let input = include_str!("data/rss092.xml");
let channel = input.parse::<Channel>().expect("failed to parse xml");
assert_eq!(channel.title(), "Dave Winer: Grateful Dead");
assert_eq!(
channel.link(),
"http://www.scripting.com/blog/categories/gratefulDead.html"
);
assert_eq!(
channel.description(),
"A high-fidelity Grateful Dead song every day. This is where we're experimenting \
with enclosures on RSS news items that download when you're not using your \
computer. If it works (it will) it will be the end of the Click-And-Wait \
multimedia experience on the Internet."
);
assert_eq!(
channel.last_build_date(),
Some("Fri, 13 Apr 2001 19:23:02 GMT")
);
assert_eq!(channel.docs(), Some("http://backend.userland.com/rss092"));
assert_eq!(
channel.managing_editor(),
Some("[email protected] (Dave Winer)")
);
assert_eq!(channel.webmaster(), Some("[email protected] (Dave Winer)"));
let cloud = channel.cloud().unwrap();
assert_eq!(cloud.domain(), "data.ourfavoritesongs.com");
assert_eq!(cloud.port(), "80");
assert_eq!(cloud.path(), "/RPC2");
assert_eq!(
cloud.register_procedure(),
"ourFavoriteSongs.rssPleaseNotify"
);
assert_eq!(cloud.protocol(), "xml-rpc");
assert_eq!(channel.items().len(), 22);
let item = channel.items().get(0).unwrap();
assert_eq!(
item.description(),
Some(
"It's been a few days since I added a song to the Grateful Dead channel. Now \
that there are all these new Radio users, many of whom are tuned into this \
channel (it's #16 on the hotlist of upstreaming Radio users, there's no way \
of knowing how many non-upstreaming users are subscribing, have to \
do something about this..). Anyway, tonight's song is a live \
version of Weather Report Suite from Dick's Picks Volume 7. It's wistful \
music. Of course a beautiful song, oft-quoted here on Scripting News. <i>A \
little change, the wind and rain.</i>",
)
);
let enclosure = item.enclosure().unwrap();
assert_eq!(
enclosure.url(),
"http://www.scripting.com/mp3s/weatherReportDicksPicsVol7.mp3"
);
assert_eq!(enclosure.length(), "6182912");
assert_eq!(enclosure.mime_type(), "audio/mpeg");
}
#[test]
fn read_rss1() {
let input = include_str!("data/rss1.xml");
let channel = input.parse::<Channel>().expect("failed to parse xml");
assert_eq!(channel.title(), "XML.com");
assert_eq!(channel.link(), "http://xml.com/pub");
assert_eq!(
channel.description(),
"XML.com features a rich mix of information and services \n \
for the XML community."
);
let image = channel.image().unwrap();
assert_eq!(image.title(), "XML.com");
assert_eq!(image.url(), "http://xml.com/universal/images/xml_tiny.gif");
assert_eq!(image.link(), "http://www.xml.com");
let text_input = channel.text_input().unwrap();
assert_eq!(text_input.title(), "Search XML.com");
assert_eq!(text_input.description(), "Search XML.com's XML collection");
assert_eq!(text_input.name(), "s");
assert_eq!(text_input.link(), "http://search.xml.com");
assert_eq!(channel.items().len(), 2);
let item = channel.items().get(0).unwrap();
assert_eq!(item.title(), Some("Processing Inclusions with XSLT"));
assert_eq!(
item.link(),
Some("http://xml.com/pub/2000/08/09/xslt/xslt.html")
);
assert_eq!(
item.description(),
Some(
"Processing document inclusions with general XML tools can be \n \
problematic. This article proposes a way of preserving inclusion \n \
information through SAX-based processing.",
)
);
}
#[test]
fn read_channel() {
let input = include_str!("data/channel.xml");
let channel = input.parse::<Channel>().expect("failed to parse xml");
assert_eq!(channel.title(), "Title");
assert_eq!(channel.link(), "http://example.com/");
assert_eq!(channel.description(), "Description");
assert_eq!(channel.language(), Some("en-US"));
assert_eq!(channel.managing_editor(), Some("[email protected]"));
assert_eq!(channel.webmaster(), Some("[email protected]"));
assert_eq!(channel.pub_date(), Some("Sat, 27 Aug 2016 00:00:00 GMT"));
assert_eq!(
channel.last_build_date(),
Some("Sat, 27 Aug 2016 09:00:00 GMT")
);
assert_eq!(channel.generator(), Some("Generator"));
assert_eq!(
channel.docs(),
Some("http://blogs.law.harvard.edu/tech/rss")
);
assert_eq!(channel.ttl(), Some("60"));
assert_eq!(channel.skip_hours().get(0).unwrap().as_str(), "6");
assert_eq!(channel.skip_hours().get(1).unwrap().as_str(), "8");
assert_eq!(channel.skip_days().get(0).unwrap().as_str(), "Tuesday");
assert_eq!(channel.skip_days().get(1).unwrap().as_str(), "Thursday");
}
#[test]
fn read_item() {
let input = include_str!("data/item.xml");
let channel = input.parse::<Channel>().expect("failed to parse xml");
assert_eq!(channel.items().get(0).unwrap().title(), Some("Title"));
assert_eq!(
channel.items().get(0).unwrap().link(),
Some("http://example.com/")
);
assert_eq!(
channel.items().get(0).unwrap().description(),
Some("Description")
);
assert_eq!(
channel.items().get(0).unwrap().author(),
Some("[email protected]")
);
assert_eq!(channel.items().get(0).unwrap().comments(), Some("Comments"));
assert_eq!(
channel.items().get(0).unwrap().pub_date(),
Some("Sat, 27 Aug 2016 00:00:00 GMT")
);
}
#[test]
fn read_content() {
let input = include_str!("data/content.xml");
let channel = input.parse::<Channel>().expect("failed to parse xml");
assert_eq!(
channel.items().get(0).unwrap().content(),
Some("An example <a href=\"http://example.com/\">link</a>.")
);
}
#[test]
fn read_source() {
let input = include_str!("data/source.xml");
let channel = input.parse::<Channel>().expect("failed to parse xml");
assert_eq!(
channel
.items()
.get(0,)
.unwrap()
.source()
.as_ref()
.map(|v| v.url(),),
Some("http://example.com/feed/")
);
assert_eq!(
channel
.items()
.get(0,)
.unwrap()
.source()
.as_ref()
.and_then(|v| v.title(),),
Some("Feed")
);
}
#[test]
fn read_guid() {
let input = include_str!("data/guid.xml");
let channel = input.parse::<Channel>().expect("failed to parse xml");
assert_eq!(
channel
.items()
.get(0)
.unwrap()
.guid()
.as_ref()
.map(|v| v.is_permalink()),
Some(false)
);
assert_eq!(
channel
.items()
.get(0,)
.unwrap()
.guid()
.as_ref()
.map(|v| v.value(),),
Some("abc")
);
assert_eq!(
channel
.items()
.get(1)
.unwrap()
.guid()
.as_ref()
.map(|v| v.is_permalink()),
Some(true)
);
assert_eq!(
channel
.items()
.get(1,)
.unwrap()
.guid()
.as_ref()
.map(|v| v.value(),),
Some("def")
);
}
#[test]
fn read_enclosure() {
let input = include_str!("data/enclosure.xml");
let channel = input.parse::<Channel>().expect("failed to parse xml");
assert_eq!(
channel
.items()
.get(0,)
.unwrap()
.enclosure()
.as_ref()
.map(|v| v.url(),),
Some("http://example.com/media.mp3")
);
assert_eq!(
channel
.items()
.get(0,)
.unwrap()
.enclosure()
.as_ref()
.map(|v| v.length(),),
Some("4992349")
);
assert_eq!(
channel
.items()
.get(0,)
.unwrap()
.enclosure()
.as_ref()
.map(|v| v.mime_type(),),
Some("audio/mpeg")
);
}
#[test]
fn read_category() {
let input = include_str!("data/category.xml");
let channel = input.parse::<Channel>().expect("failed to parse xml");
assert_eq!(channel.categories().get(0).unwrap().domain(), None);
assert_eq!(channel.categories().get(0).unwrap().name(), "Category 1");
assert_eq!(
channel.categories().get(1).unwrap().domain(),
Some("http://example.com/")
);
assert_eq!(channel.categories().get(1).unwrap().name(), "Category 2");
assert_eq!(
channel
.items()
.get(0)
.unwrap()
.categories()
.get(0)
.unwrap()
.domain(),
None
);
assert_eq!(
channel
.items()
.get(0)
.unwrap()
.categories()
.get(0)
.unwrap()
.name(),
"Category 1"
);
assert_eq!(
channel
.items()
.get(0)
.unwrap()
.categories()
.get(1)
.unwrap()
.domain(),
Some("http://example.com/")
);
assert_eq!(
channel
.items()
.get(0)
.unwrap()
.categories()
.get(1)
.unwrap()
.name(),
"Category 2"
);
}
#[test]
fn read_image() {
let input = include_str!("data/image.xml");
let channel = input.parse::<Channel>().expect("failed to parse xml");
assert_eq!(channel.image().unwrap().title(), "Title");
assert_eq!(channel.image().unwrap().url(), "http://example.org/url");
assert_eq!(channel.image().unwrap().link(), "http://example.org/link");
assert_eq!(channel.image().unwrap().width(), Some("100"));
assert_eq!(channel.image().unwrap().height(), Some("200"));
assert_eq!(channel.image().unwrap().description(), Some("Description"));
}
#[test]
fn read_mixed_content() {
let input = include_str!("data/mixed_content.xml");
let channel = input.parse::<Channel>().expect("failed to parse xml");
assert_eq!(channel.title(), "Title");
}
#[test]
fn read_cloud() {
let input = include_str!("data/cloud.xml");
let channel = input.parse::<Channel>().expect("failed to parse xml");
let cloud = channel.cloud().expect("cloud missing");
assert_eq!(cloud.domain(), "example.com");
assert_eq!(cloud.port(), "80");
assert_eq!(cloud.path(), "/rpc");
assert_eq!(cloud.register_procedure(), "notify");
assert_eq!(cloud.protocol(), "xml-rpc");
}
#[test]
fn read_textinput() {
let input = include_str!("data/textinput.xml");
let channel = input.parse::<Channel>().expect("failed to parse xml");
let text_input = channel.text_input().expect("textinput missing");
assert_eq!(text_input.title(), "Title");
assert_eq!(text_input.name(), "Name");
assert_eq!(text_input.link(), "http://example.com/");
assert_eq!(text_input.description(), "Description");
}
#[test]
fn read_extension() {
let input = include_str!("data/extension.xml");
let channel = input.parse::<Channel>().expect("failed to parse xml");
assert_eq!(
channel.namespaces().get("ext").unwrap(),
"http://example.com/"
);
assert_eq!(channel.namespaces().len(), 1);
assert_eq!(
get_extension_values(
channel
.items()
.get(0)
.unwrap()
.extensions()
.get("ext")
.unwrap(),
"creator",
),
Some(vec!["Creator Name"])
);
assert_eq!(
get_extension_values(
channel<|fim▁hole|> .unwrap()
.extensions()
.get("ext")
.unwrap(),
"contributor",
),
Some(vec!["Contributor 1", "Contributor 2"])
);
assert_eq!(
channel
.items()
.get(0)
.unwrap()
.extensions()
.get("ext")
.unwrap()
.get("parent")
.map(|v| v.iter()
.find(|v| v.children().contains_key("child"))
.expect("failed to find child elements")
.children()
.get("child")
.unwrap()
.iter()
.map(|v| v.value())
.collect::<Vec<_>>()),
Some(vec![Some("Child 1"), Some("Child 2")])
);
}
#[test]
fn read_itunes() {
let input = include_str!("data/itunes.xml");
let channel = input.parse::<Channel>().expect("failed to parse xml");
assert_eq!(channel.itunes_ext().unwrap().author(), Some("Author"));
assert_eq!(channel.itunes_ext().unwrap().block(), Some("yes"));
assert_eq!(channel.itunes_ext().unwrap().categories().len(), 2);
assert_eq!(
channel
.itunes_ext()
.unwrap()
.categories()
.get(0)
.unwrap()
.text(),
"Category 1"
);
assert_eq!(
channel
.itunes_ext()
.unwrap()
.categories()
.get(0)
.unwrap()
.subcategory()
.as_ref()
.map(|v| v.text()),
Some("Subcategory")
);
assert_eq!(
channel
.itunes_ext()
.unwrap()
.categories()
.get(1)
.unwrap()
.text(),
"Category 2"
);
assert_eq!(
channel
.itunes_ext()
.unwrap()
.categories()
.get(1)
.unwrap()
.subcategory(),
None
);
assert_eq!(
channel.itunes_ext().unwrap().image(),
Some("http://example.com/image.jpg")
);
assert_eq!(channel.itunes_ext().unwrap().explicit(), Some("no"));
assert_eq!(channel.itunes_ext().unwrap().complete(), Some("yes"));
assert_eq!(
channel.itunes_ext().unwrap().new_feed_url(),
Some("http://example.com/feed/")
);
assert_eq!(
channel
.itunes_ext()
.unwrap()
.owner()
.as_ref()
.and_then(|v| v.name(),),
Some("Name")
);
assert_eq!(
channel
.itunes_ext()
.unwrap()
.owner()
.as_ref()
.and_then(|v| v.email(),),
Some("[email protected]")
);
assert_eq!(channel.itunes_ext().unwrap().subtitle(), Some("Subtitle"));
assert_eq!(channel.itunes_ext().unwrap().summary(), Some("Summary"));
assert_eq!(
channel.itunes_ext().unwrap().keywords(),
Some("key1,key2,key3")
);
assert_eq!(
channel
.items()
.get(0)
.unwrap()
.itunes_ext()
.unwrap()
.author(),
Some("Author")
);
assert_eq!(
channel
.items()
.get(0)
.unwrap()
.itunes_ext()
.unwrap()
.block(),
Some("yes")
);
assert_eq!(
channel
.items()
.get(0)
.unwrap()
.itunes_ext()
.unwrap()
.image(),
Some("http://example.com/image.jpg")
);
assert_eq!(
channel
.items()
.get(0)
.unwrap()
.itunes_ext()
.unwrap()
.duration(),
Some("01:22:33")
);
assert_eq!(
channel
.items()
.get(0)
.unwrap()
.itunes_ext()
.unwrap()
.explicit(),
Some("yes")
);
assert_eq!(
channel
.items()
.get(0)
.unwrap()
.itunes_ext()
.unwrap()
.closed_captioned(),
Some("no")
);
assert_eq!(
channel
.items()
.get(0)
.unwrap()
.itunes_ext()
.unwrap()
.order(),
Some("1")
);
assert_eq!(
channel
.items()
.get(0)
.unwrap()
.itunes_ext()
.unwrap()
.subtitle(),
Some("Subtitle")
);
assert_eq!(
channel
.items()
.get(0)
.unwrap()
.itunes_ext()
.unwrap()
.summary(),
Some("Summary")
);
assert_eq!(
channel
.items()
.get(0)
.unwrap()
.itunes_ext()
.unwrap()
.keywords(),
Some("key1,key2,key3")
);
}
#[test]
fn read_dublincore() {
let input = include_str!("data/dublincore.xml");
let channel = input.parse::<Channel>().expect("failed to parse xml");
fn test_ext(dc: &DublinCoreExtension) {
assert_eq!(
dc.contributors()
.iter()
.map(|s| s.as_str())
.collect::<Vec<_>>(),
vec!["Contributor 1", "Contributor 2"]
);
assert_eq!(
dc.coverages()
.iter()
.map(|s| s.as_str())
.collect::<Vec<_>>(),
vec!["Coverage"]
);
assert_eq!(
dc.creators().iter().map(|s| s.as_str()).collect::<Vec<_>>(),
vec!["Creator"]
);
assert_eq!(
dc.dates().iter().map(|s| s.as_str()).collect::<Vec<_>>(),
vec!["2016-08-27"]
);
assert_eq!(
dc.descriptions()
.iter()
.map(|s| s.as_str())
.collect::<Vec<_>>(),
vec!["Description"]
);
assert_eq!(
dc.formats().iter().map(|s| s.as_str()).collect::<Vec<_>>(),
vec!["text/plain"]
);
assert_eq!(
dc.identifiers()
.iter()
.map(|s| s.as_str())
.collect::<Vec<_>>(),
vec!["Identifier"]
);
assert_eq!(
dc.languages()
.iter()
.map(|s| s.as_str())
.collect::<Vec<_>>(),
vec!["en-US"]
);
assert_eq!(
dc.publishers()
.iter()
.map(|s| s.as_str())
.collect::<Vec<_>>(),
vec!["Publisher"]
);
assert_eq!(
dc.relations()
.iter()
.map(|s| s.as_str())
.collect::<Vec<_>>(),
vec!["Relation"]
);
assert_eq!(
dc.rights().iter().map(|s| s.as_str()).collect::<Vec<_>>(),
vec!["Company"]
);
assert_eq!(
dc.sources().iter().map(|s| s.as_str()).collect::<Vec<_>>(),
vec!["Source"]
);
assert_eq!(
dc.subjects().iter().map(|s| s.as_str()).collect::<Vec<_>>(),
vec!["Subject"]
);
assert_eq!(
dc.titles().iter().map(|s| s.as_str()).collect::<Vec<_>>(),
vec!["Title"]
);
assert_eq!(
dc.types().iter().map(|s| s.as_str()).collect::<Vec<_>>(),
vec!["Type"]
);
}
test_ext(
channel
.dublin_core_ext()
.as_ref()
.expect("dc extension missing"),
);
test_ext(
channel
.items()
.get(0)
.unwrap()
.dublin_core_ext()
.as_ref()
.expect("ds extension missing"),
);
}
#[test]
fn read_escaped() {
let input = r#"
<rss version="2.0">
<channel>
<title>My <feed></title>
</channel>
</rss>
"#;
let channel = input.parse::<Channel>().unwrap();
assert_eq!("My <feed>", channel.title());
let output = channel.to_string();
let parsed_channel = output.parse::<Channel>().unwrap();
assert_eq!(channel, parsed_channel);
}<|fim▁end|> | .items()
.get(0) |
<|file_name|>mspdb.cpp<|end_file_name|><|fim▁begin|>// Convert DMD CodeView debug information to PDB files
// Copyright (c) 2009-2010 by Rainer Schuetze, All Rights Reserved
//
// License for redistribution is given by the Artistic License 2.0
// see file LICENSE for further details
#include "mspdb.h"
#include <windows.h>
#pragma comment(lib, "rpcrt4.lib")
HMODULE modMsPdb;
mspdb::fnPDBOpen2W *pPDBOpen2W;
char* mspdb80_dll = "mspdb80.dll";
<|fim▁hole|>bool getInstallDir(const char* version, char* installDir, DWORD size)
{
char key[260] = "SOFTWARE\\Microsoft\\";
strcat(key, version);
HKEY hkey;
if (RegOpenKeyExA(HKEY_LOCAL_MACHINE, key, 0, KEY_QUERY_VALUE, &hkey) != ERROR_SUCCESS)
return false;
bool rc = RegQueryValueExA(hkey, "InstallDir", 0, 0, (LPBYTE)installDir, &size) == ERROR_SUCCESS;
RegCloseKey(hkey);
return rc;
}
bool tryLoadMsPdb(const char* version, const char* mspdb)
{
char installDir[260];
if (!getInstallDir(version, installDir, sizeof(installDir)))
return false;
char* p = installDir + strlen(installDir);
if (p[-1] != '\\' && p[-1] != '/')
*p++ = '\\';
strcpy(p, mspdb);
modMsPdb = LoadLibraryA(installDir);
return modMsPdb != 0;
}
bool initMsPdb()
{
if (!modMsPdb)
modMsPdb = LoadLibraryA(mspdb80_dll);
if (!modMsPdb)
tryLoadMsPdb("VisualStudio\\9.0", mspdb80_dll);
if (!modMsPdb)
tryLoadMsPdb("VisualStudio\\8.0", mspdb80_dll);
if (!modMsPdb)
tryLoadMsPdb("VCExpress\\9.0", mspdb80_dll);
if (!modMsPdb)
tryLoadMsPdb("VCExpress\\8.0", mspdb80_dll);
#if 1
if (!modMsPdb)
{
modMsPdb = LoadLibraryA(mspdb100_dll);
if (!modMsPdb)
tryLoadMsPdb("VisualStudio\\10.0", mspdb100_dll);
if (!modMsPdb)
tryLoadMsPdb("VCExpress\\10.0", mspdb100_dll);
if (modMsPdb)
mspdb::DBI::isVS10 = true;
}
#endif
if (!modMsPdb)
return false;
if (!pPDBOpen2W)
pPDBOpen2W = (mspdb::fnPDBOpen2W*) GetProcAddress(modMsPdb, "PDBOpen2W");
if (!pPDBOpen2W)
return false;
return true;
}
bool exitMsPdb()
{
pPDBOpen2W = 0;
if (modMsPdb)
FreeLibrary(modMsPdb);
modMsPdb = 0;
return true;
}
mspdb::PDB* CreatePDB(const wchar_t* pdbname)
{
if (!initMsPdb ())
return 0;
mspdb::PDB* pdb = 0;
long data[194] = { 193, 0 };
wchar_t ext[256] = L".exe";
if (!(*pPDBOpen2W) (pdbname, "wf", data, ext, 0x400, &pdb))
return 0;
return pdb;
}<|fim▁end|> | char* mspdb100_dll = "mspdb100.dll";
bool mspdb::DBI::isVS10 = false;
|
<|file_name|>AnsiSqlDomServiceUnitTests.java<|end_file_name|><|fim▁begin|>// Wildebeest Migration Framework
// Copyright © 2013 - 2018, Matheson Ventures Pte Ltd
//
// This file is part of Wildebeest
//
// Wildebeest is free software: you can redistribute it and/or modify it under
// the terms of the GNU General Public License v2 as published by the Free
// Software Foundation.
//
// Wildebeest is distributed in the hope that it will be useful, but WITHOUT ANY
// WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
// A PARTICULAR PURPOSE. See the GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along with
// Wildebeest. If not, see http://www.gnu.org/licenses/gpl-2.0.html
package co.mv.wb.plugin.generaldatabase.dom;
import co.mv.wb.Asserts;
import co.mv.wb.InvalidReferenceException;
import co.mv.wb.LoaderFault;
import co.mv.wb.ModelExtensions;
import co.mv.wb.PluginBuildException;
import co.mv.wb.Resource;
import co.mv.wb.fixture.Fixtures;
import co.mv.wb.impl.ResourceTypeServiceBuilder;
import co.mv.wb.plugin.base.dom.DomPlugins;
import co.mv.wb.plugin.base.dom.DomResourceLoader;
import co.mv.wb.plugin.generaldatabase.AnsiSqlCreateDatabaseMigration;
import co.mv.wb.plugin.generaldatabase.AnsiSqlDropDatabaseMigration;
import co.mv.wb.plugin.generaldatabase.AnsiSqlTableDoesNotExistAssertion;
import co.mv.wb.plugin.generaldatabase.AnsiSqlTableExistsAssertion;
import co.mv.wb.plugin.postgresql.PostgreSqlConstants;
import org.junit.Assert;
import org.junit.Test;
import java.io.File;
import java.util.Optional;
import java.util.UUID;
/**
* Unit tests for the DOM persistence services for ANSI SQL plugins.
*
* @since 4.0
*/
public class AnsiSqlDomServiceUnitTests
{
@Test
public void ansiSqlCreateDatabaseMigrationLoadFromValidDocument() throws
LoaderFault,
PluginBuildException,
InvalidReferenceException<|fim▁hole|> {
// Setup
UUID migrationId = UUID.randomUUID();
UUID toStateId = UUID.randomUUID();
String xml = Fixtures
.resourceXmlBuilder()
.resource(PostgreSqlConstants.PostgreSqlDatabase.getUri(), UUID.randomUUID(), "Foo")
.migration("AnsiSqlCreateDatabase", migrationId, null, toStateId.toString())
.render();
DomResourceLoader loader = DomPlugins.resourceLoader(
ResourceTypeServiceBuilder
.create()
.withFactoryResourceTypes()
.build(),
xml);
// Execute
Resource resource = loader.load(new File("."));
// Verify
Assert.assertNotNull("resource", resource);
Assert.assertEquals("resource.migrations.size", 1, resource.getMigrations().size());
AnsiSqlCreateDatabaseMigration mT = ModelExtensions.as(
resource.getMigrations().get(0),
AnsiSqlCreateDatabaseMigration.class);
Assert.assertNotNull(
"resourceWithPlugin.resource.migrations[0] expected to be of type AnsiSqlCreateDatabaseMigration",
mT);
Assert.assertEquals(
"resourceWithPlugin.resource.migrations[0].id",
migrationId,
mT.getMigrationId());
Assert.assertEquals(
"resourceWithPlugin.resource.migrations[0].fromStateId",
Optional.empty(),
mT.getFromState());
Assert.assertEquals(
"resourceWithPlugin.resource.migrations[0].toStateId",
Optional.of(toStateId.toString()),
mT.getToState());
}
@Test
public void ansiSqlDropDatabaseMigrationLoadFromValidDocument() throws
LoaderFault,
PluginBuildException,
InvalidReferenceException
{
// Setup
UUID migrationId = UUID.randomUUID();
String toState = UUID.randomUUID().toString();
String xml = Fixtures
.resourceXmlBuilder()
.resource(PostgreSqlConstants.PostgreSqlDatabase.getUri(), UUID.randomUUID(), "Foo")
.migration("AnsiSqlDropDatabase", migrationId, null, toState.toString())
.render();
DomResourceLoader loader = DomPlugins.resourceLoader(
ResourceTypeServiceBuilder
.create()
.withFactoryResourceTypes()
.build(),
xml);
// Execute
Resource resource = loader.load(new File("."));
// Verify
Assert.assertNotNull("resource", resource);
Assert.assertEquals("resource.migrations.size", 1, resource.getMigrations().size());
AnsiSqlDropDatabaseMigration mT = ModelExtensions.as(
resource.getMigrations().get(0),
AnsiSqlDropDatabaseMigration.class);
Assert.assertNotNull("resource.migrations[0] expected to be of type AnsiSqlDropDatabaseMigration", mT);
Assert.assertEquals(
"resource.migrations[0].id",
migrationId,
mT.getMigrationId());
Assert.assertEquals(
"resource.migrations[0].fromState",
Optional.empty(),
mT.getFromState());
Assert.assertEquals(
"resource.migrations[0].toState",
Optional.of(toState),
mT.getToState());
}
@Test
public void ansiSqlTableExistsAssertionLoadFromValidDocument() throws
LoaderFault,
PluginBuildException,
InvalidReferenceException
{
// Setup
UUID assertionId = UUID.randomUUID();
String xml = Fixtures
.resourceXmlBuilder()
.resource(PostgreSqlConstants.PostgreSqlDatabase.getUri(), UUID.randomUUID(), "Foo")
.state(UUID.randomUUID(), null)
.assertion("AnsiSqlTableExists", assertionId)
.appendInnerXml("<schemaName>sch</schemaName>")
.appendInnerXml("<tableName>tbl</tableName>")
.build();
DomResourceLoader loader = DomPlugins.resourceLoader(
ResourceTypeServiceBuilder
.create()
.withFactoryResourceTypes()
.build(),
xml);
// Execute
Resource resource = loader.load(new File("."));
// Verify
Assert.assertNotNull("resource", resource);
Assert.assertEquals("resource.states.size", 1, resource.getStates().size());
Assert.assertEquals(
"resource.states[0].assertions.size",
1,
resource.getStates().get(0).getAssertions().size());
AnsiSqlTableExistsAssertion assertionT = ModelExtensions.as(
resource.getStates().get(0).getAssertions().get(0),
AnsiSqlTableExistsAssertion.class);
Assert.assertNotNull("Expected to be an AnsiSqlTableExistsAssertion", assertionT);
Asserts.assertAnsiSqlTableExistsAssertion(
assertionId,
"sch",
"tbl",
assertionT,
"resource.states[0].assertions[0]");
}
@Test
public void ansiSqlTableDoesNotExistAssertionLoadFromValidDocument() throws
LoaderFault,
PluginBuildException,
InvalidReferenceException
{
// Setup
UUID assertionId = UUID.randomUUID();
String xml = Fixtures
.resourceXmlBuilder()
.resource(PostgreSqlConstants.PostgreSqlDatabase.getUri(), UUID.randomUUID(), "Foo")
.state(UUID.randomUUID(), null)
.assertion("AnsiSqlTableDoesNotExist", assertionId)
.appendInnerXml("<schemaName>sch</schemaName>")
.appendInnerXml("<tableName>tbl</tableName>")
.build();
DomResourceLoader loader = DomPlugins.resourceLoader(
ResourceTypeServiceBuilder
.create()
.withFactoryResourceTypes()
.build(),
xml);
// Execute
Resource resource = loader.load(new File("."));
// Verify
Assert.assertNotNull("resource", resource);
Assert.assertEquals("resource.states.size", 1, resource.getStates().size());
Assert.assertEquals(
"resource.states[0].assertions.size",
1,
resource.getStates().get(0).getAssertions().size());
AnsiSqlTableDoesNotExistAssertion assertionT = ModelExtensions.as(
resource.getStates().get(0).getAssertions().get(0),
AnsiSqlTableDoesNotExistAssertion.class);
Assert.assertNotNull("Expected to be an AnsiSqlTableDoesNotExistAssertion", assertionT);
Asserts.assertAnsiSqlTableDoesNotExistAssertion(
assertionId,
"sch",
"tbl",
assertionT,
"resource.states[0].assertions[0]");
}
}<|fim▁end|> | |
<|file_name|>kinesis.rs<|end_file_name|><|fim▁begin|>#![cfg(feature = "kinesis")]
<|fim▁hole|>extern crate rusoto;
use rusoto::kinesis::{KinesisClient, ListStreamsInput};
use rusoto::{DefaultCredentialsProvider, Region};
#[test]
fn should_list_streams() {
let credentials = DefaultCredentialsProvider::new().unwrap();
let client = KinesisClient::new(credentials, Region::UsEast1);
let request = ListStreamsInput::default();
client.list_streams(&request).unwrap();
}<|fim▁end|> | |
<|file_name|>ProvisionerCli.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2008-2015, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.simulator.provisioner;
import com.hazelcast.simulator.common.AgentsFile;
import com.hazelcast.simulator.common.SimulatorProperties;
import com.hazelcast.simulator.utils.Bash;
import com.hazelcast.simulator.utils.jars.HazelcastJARs;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;
import org.jclouds.compute.ComputeService;
import static com.hazelcast.simulator.common.SimulatorProperties.PROPERTIES_FILE_NAME;
import static com.hazelcast.simulator.utils.CliUtils.initOptionsWithHelp;
import static com.hazelcast.simulator.utils.CliUtils.printHelpAndExit;
import static com.hazelcast.simulator.utils.CloudProviderUtils.isStatic;
import static com.hazelcast.simulator.utils.SimulatorUtils.loadSimulatorProperties;
import static com.hazelcast.simulator.utils.jars.HazelcastJARs.isPrepareRequired;
import static com.hazelcast.simulator.utils.jars.HazelcastJARs.newInstance;
import static java.util.Collections.singleton;
final class ProvisionerCli {
private final OptionParser parser = new OptionParser();
private final OptionSpec<Integer> scaleSpec = parser.accepts("scale",
"Number of Simulator machines to scale to. If the number of machines already exists, the call is ignored. If the"
+ " desired number of machines is smaller than the actual number of machines, machines are terminated.")
.withRequiredArg().ofType(Integer.class);
private final OptionSpec installSpec = parser.accepts("install",
"Installs Simulator on all provisioned machines.");
private final OptionSpec uploadHazelcastSpec = parser.accepts("uploadHazelcast",
"If defined --install will upload the Hazelcast JARs as well.");
private final OptionSpec<Boolean> enterpriseEnabledSpec = parser.accepts("enterpriseEnabled",
"Use JARs of Hazelcast Enterprise Edition.")
.withRequiredArg().ofType(Boolean.class).defaultsTo(false);
private final OptionSpec listAgentsSpec = parser.accepts("list",
"Lists the provisioned machines (from " + AgentsFile.NAME + " file).");
private final OptionSpec<String> downloadSpec = parser.accepts("download",
"Download all files from the remote Worker directories. Use --clean to delete all Worker directories.")
.withOptionalArg().ofType(String.class).defaultsTo("workers");
private final OptionSpec cleanSpec = parser.accepts("clean",
"Cleans the remote Worker directories on the provisioned machines.");
private final OptionSpec killSpec = parser.accepts("kill",
"Kills the Java processes on all provisioned machines (via killall -9 java).");
private final OptionSpec terminateSpec = parser.accepts("terminate",
"Terminates all provisioned machines.");
private final OptionSpec<String> propertiesFileSpec = parser.accepts("propertiesFile",
"The file containing the Simulator properties. If no file is explicitly configured, first the local working directory"
+ " is checked for a file '" + PROPERTIES_FILE_NAME + "'. All missing properties are always loaded from"
+ " '$SIMULATOR_HOME/conf/" + PROPERTIES_FILE_NAME + "'.")
.withRequiredArg().ofType(String.class);
private ProvisionerCli() {
}
static Provisioner init(String[] args) {
ProvisionerCli cli = new ProvisionerCli();
OptionSet options = initOptionsWithHelp(cli.parser, args);
SimulatorProperties properties = loadSimulatorProperties(options, cli.propertiesFileSpec);
ComputeService computeService = isStatic(properties) ? null : new ComputeServiceBuilder(properties).build();
Bash bash = new Bash(properties);
HazelcastJARs hazelcastJARs = null;
boolean enterpriseEnabled = options.valueOf(cli.enterpriseEnabledSpec);
if (options.has(cli.uploadHazelcastSpec)) {
String hazelcastVersionSpec = properties.getHazelcastVersionSpec();
if (isPrepareRequired(hazelcastVersionSpec) || !enterpriseEnabled) {
hazelcastJARs = newInstance(bash, properties, singleton(hazelcastVersionSpec));
}
}
return new Provisioner(properties, computeService, bash, hazelcastJARs, enterpriseEnabled);
}
static void run(String[] args, Provisioner provisioner) {
ProvisionerCli cli = new ProvisionerCli();
OptionSet options = initOptionsWithHelp(cli.parser, args);
try {
if (options.has(cli.scaleSpec)) {
int size = options.valueOf(cli.scaleSpec);
provisioner.scale(size);
} else if (options.has(cli.installSpec)) {
provisioner.installSimulator();
} else if (options.has(cli.listAgentsSpec)) {
provisioner.listMachines();
} else if (options.has(cli.downloadSpec)) {
String dir = options.valueOf(cli.downloadSpec);
provisioner.download(dir);
} else if (options.has(cli.cleanSpec)) {
provisioner.clean();
} else if (options.has(cli.killSpec)) {
provisioner.killJavaProcesses();
} else if (options.has(cli.terminateSpec)) {<|fim▁hole|> } finally {
provisioner.shutdown();
}
}
}<|fim▁end|> | provisioner.terminate();
} else {
printHelpAndExit(cli.parser);
} |
<|file_name|>orders.server.routes.js<|end_file_name|><|fim▁begin|>'use strict';
module.exports = function(app) {
var users = require('../../app/controllers/users.server.controller');
var orders = require('../../app/controllers/orders.server.controller');
// Orders Routes
app.route('/orders')
.get(orders.list)
.post(users.requiresLogin, orders.create);
app.route('/orders-analytics')
        .get(orders.analytics);
app.route('/orders/:orderId')<|fim▁hole|> .delete(users.requiresLogin, orders.hasAuthorization, orders.delete);
// Finish by binding the Order middleware
app.param('orderId', orders.orderByID);
};<|fim▁end|> | .get(orders.read)
.put(users.requiresLogin, orders.hasAuthorization, orders.update) |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod workbook;
use std::os::raw::c_char;
use std::ffi::CStr;
use std::str;
#[link(name = "xlsreader")]
extern {<|fim▁hole|> let c_string = unsafe { CStr::from_ptr(xls_getVersion()) };
str::from_utf8(c_string.to_bytes()).unwrap().to_string()
}<|fim▁end|> | fn xls_getVersion() -> *const c_char;
}
pub fn version() -> String { |
<|file_name|>view.rs<|end_file_name|><|fim▁begin|>use std::{
boxed::Box,
collections::VecDeque,
os::raw::*,
slice, str,
sync::{Arc, Mutex, Weak},
};
use cocoa::{
appkit::{NSApp, NSEvent, NSEventModifierFlags, NSEventPhase, NSView, NSWindow},
base::{id, nil},
foundation::{NSInteger, NSPoint, NSRect, NSSize, NSString, NSUInteger},
};
use objc::{
declare::ClassDecl,
runtime::{Class, Object, Protocol, Sel, BOOL, NO, YES},
};
use crate::{
dpi::LogicalPosition,
event::{
DeviceEvent, ElementState, Event, KeyboardInput, ModifiersState, MouseButton,
MouseScrollDelta, TouchPhase, VirtualKeyCode, WindowEvent,
},
platform_impl::platform::{
app_state::AppState,
event::{
char_to_keycode, check_function_keys, event_mods, get_scancode, modifier_event,
scancode_to_keycode, EventWrapper,
},
ffi::*,
util::{self, IdRef},
window::get_window_id,
DEVICE_ID,
},
window::WindowId,
};
pub struct CursorState {
pub visible: bool,
pub cursor: util::Cursor,
}
impl Default for CursorState {
fn default() -> Self {
Self {
visible: true,
cursor: Default::default(),
}
}
}
pub(super) struct ViewState {
ns_window: id,
pub cursor_state: Arc<Mutex<CursorState>>,
ime_spot: Option<(f64, f64)>,
raw_characters: Option<String>,
is_key_down: bool,
pub(super) modifiers: ModifiersState,
tracking_rect: Option<NSInteger>,
}
impl ViewState {
fn get_scale_factor(&self) -> f64 {
(unsafe { NSWindow::backingScaleFactor(self.ns_window) }) as f64
}
}
pub fn new_view(ns_window: id) -> (IdRef, Weak<Mutex<CursorState>>) {
let cursor_state = Default::default();
let cursor_access = Arc::downgrade(&cursor_state);
let state = ViewState {
ns_window,
cursor_state,
ime_spot: None,
raw_characters: None,
is_key_down: false,
modifiers: Default::default(),
tracking_rect: None,
};
unsafe {
// This is free'd in `dealloc`
let state_ptr = Box::into_raw(Box::new(state)) as *mut c_void;
let ns_view: id = msg_send![VIEW_CLASS.0, alloc];
(
IdRef::new(msg_send![ns_view, initWithWinit: state_ptr]),
cursor_access,
)
}
}
pub unsafe fn set_ime_position(ns_view: id, input_context: id, x: f64, y: f64) {
let state_ptr: *mut c_void = *(*ns_view).get_mut_ivar("winitState");
let state = &mut *(state_ptr as *mut ViewState);
let content_rect =
NSWindow::contentRectForFrameRect_(state.ns_window, NSWindow::frame(state.ns_window));
let base_x = content_rect.origin.x as f64;
let base_y = (content_rect.origin.y + content_rect.size.height) as f64;
state.ime_spot = Some((base_x + x, base_y - y));
let _: () = msg_send![input_context, invalidateCharacterCoordinates];
}
struct ViewClass(*const Class);
unsafe impl Send for ViewClass {}
unsafe impl Sync for ViewClass {}
lazy_static! {
static ref VIEW_CLASS: ViewClass = unsafe {
let superclass = class!(NSView);
let mut decl = ClassDecl::new("WinitView", superclass).unwrap();
decl.add_method(sel!(dealloc), dealloc as extern "C" fn(&Object, Sel));
decl.add_method(
sel!(initWithWinit:),
init_with_winit as extern "C" fn(&Object, Sel, *mut c_void) -> id,
);
decl.add_method(
sel!(viewDidMoveToWindow),
view_did_move_to_window as extern "C" fn(&Object, Sel),
);
decl.add_method(
sel!(drawRect:),
draw_rect as extern "C" fn(&Object, Sel, NSRect),
);
decl.add_method(
sel!(acceptsFirstResponder),
accepts_first_responder as extern "C" fn(&Object, Sel) -> BOOL,
);
decl.add_method(
sel!(touchBar),
touch_bar as extern "C" fn(&Object, Sel) -> BOOL,
);
decl.add_method(
sel!(resetCursorRects),
reset_cursor_rects as extern "C" fn(&Object, Sel),
);
decl.add_method(
sel!(hasMarkedText),
has_marked_text as extern "C" fn(&Object, Sel) -> BOOL,
);
decl.add_method(
sel!(markedRange),
marked_range as extern "C" fn(&Object, Sel) -> NSRange,
);
decl.add_method(
sel!(selectedRange),
selected_range as extern "C" fn(&Object, Sel) -> NSRange,
);
decl.add_method(
sel!(setMarkedText:selectedRange:replacementRange:),
set_marked_text as extern "C" fn(&mut Object, Sel, id, NSRange, NSRange),
);
decl.add_method(sel!(unmarkText), unmark_text as extern "C" fn(&Object, Sel));<|fim▁hole|> );
decl.add_method(
sel!(attributedSubstringForProposedRange:actualRange:),
attributed_substring_for_proposed_range
as extern "C" fn(&Object, Sel, NSRange, *mut c_void) -> id,
);
decl.add_method(
sel!(insertText:replacementRange:),
insert_text as extern "C" fn(&Object, Sel, id, NSRange),
);
decl.add_method(
sel!(characterIndexForPoint:),
character_index_for_point as extern "C" fn(&Object, Sel, NSPoint) -> NSUInteger,
);
decl.add_method(
sel!(firstRectForCharacterRange:actualRange:),
first_rect_for_character_range
as extern "C" fn(&Object, Sel, NSRange, *mut c_void) -> NSRect,
);
decl.add_method(
sel!(doCommandBySelector:),
do_command_by_selector as extern "C" fn(&Object, Sel, Sel),
);
decl.add_method(sel!(keyDown:), key_down as extern "C" fn(&Object, Sel, id));
decl.add_method(sel!(keyUp:), key_up as extern "C" fn(&Object, Sel, id));
decl.add_method(
sel!(flagsChanged:),
flags_changed as extern "C" fn(&Object, Sel, id),
);
decl.add_method(
sel!(insertTab:),
insert_tab as extern "C" fn(&Object, Sel, id),
);
decl.add_method(
sel!(insertBackTab:),
insert_back_tab as extern "C" fn(&Object, Sel, id),
);
decl.add_method(
sel!(mouseDown:),
mouse_down as extern "C" fn(&Object, Sel, id),
);
decl.add_method(sel!(mouseUp:), mouse_up as extern "C" fn(&Object, Sel, id));
decl.add_method(
sel!(rightMouseDown:),
right_mouse_down as extern "C" fn(&Object, Sel, id),
);
decl.add_method(
sel!(rightMouseUp:),
right_mouse_up as extern "C" fn(&Object, Sel, id),
);
decl.add_method(
sel!(otherMouseDown:),
other_mouse_down as extern "C" fn(&Object, Sel, id),
);
decl.add_method(
sel!(otherMouseUp:),
other_mouse_up as extern "C" fn(&Object, Sel, id),
);
decl.add_method(
sel!(mouseMoved:),
mouse_moved as extern "C" fn(&Object, Sel, id),
);
decl.add_method(
sel!(mouseDragged:),
mouse_dragged as extern "C" fn(&Object, Sel, id),
);
decl.add_method(
sel!(rightMouseDragged:),
right_mouse_dragged as extern "C" fn(&Object, Sel, id),
);
decl.add_method(
sel!(otherMouseDragged:),
other_mouse_dragged as extern "C" fn(&Object, Sel, id),
);
decl.add_method(
sel!(mouseEntered:),
mouse_entered as extern "C" fn(&Object, Sel, id),
);
decl.add_method(
sel!(mouseExited:),
mouse_exited as extern "C" fn(&Object, Sel, id),
);
decl.add_method(
sel!(scrollWheel:),
scroll_wheel as extern "C" fn(&Object, Sel, id),
);
decl.add_method(
sel!(pressureChangeWithEvent:),
pressure_change_with_event as extern "C" fn(&Object, Sel, id),
);
decl.add_method(
sel!(_wantsKeyDownForEvent:),
wants_key_down_for_event as extern "C" fn(&Object, Sel, id) -> BOOL,
);
decl.add_method(
sel!(cancelOperation:),
cancel_operation as extern "C" fn(&Object, Sel, id),
);
decl.add_method(
sel!(frameDidChange:),
frame_did_change as extern "C" fn(&Object, Sel, id),
);
decl.add_method(
sel!(acceptsFirstMouse:),
accepts_first_mouse as extern "C" fn(&Object, Sel, id) -> BOOL,
);
decl.add_ivar::<*mut c_void>("winitState");
decl.add_ivar::<id>("markedText");
let protocol = Protocol::get("NSTextInputClient").unwrap();
decl.add_protocol(&protocol);
ViewClass(decl.register())
};
}
extern "C" fn dealloc(this: &Object, _sel: Sel) {
unsafe {
let state: *mut c_void = *this.get_ivar("winitState");
let marked_text: id = *this.get_ivar("markedText");
let _: () = msg_send![marked_text, release];
Box::from_raw(state as *mut ViewState);
}
}
extern "C" fn init_with_winit(this: &Object, _sel: Sel, state: *mut c_void) -> id {
unsafe {
let this: id = msg_send![this, init];
if this != nil {
(*this).set_ivar("winitState", state);
let marked_text =
<id as NSMutableAttributedString>::init(NSMutableAttributedString::alloc(nil));
(*this).set_ivar("markedText", marked_text);
let _: () = msg_send![this, setPostsFrameChangedNotifications: YES];
let notification_center: &Object =
msg_send![class!(NSNotificationCenter), defaultCenter];
let notification_name =
IdRef::new(NSString::alloc(nil).init_str("NSViewFrameDidChangeNotification"));
let _: () = msg_send![
notification_center,
addObserver: this
selector: sel!(frameDidChange:)
name: notification_name
object: this
];
}
this
}
}
extern "C" fn view_did_move_to_window(this: &Object, _sel: Sel) {
trace!("Triggered `viewDidMoveToWindow`");
unsafe {
let state_ptr: *mut c_void = *this.get_ivar("winitState");
let state = &mut *(state_ptr as *mut ViewState);
if let Some(tracking_rect) = state.tracking_rect.take() {
let _: () = msg_send![this, removeTrackingRect: tracking_rect];
}
let rect: NSRect = msg_send![this, visibleRect];
let tracking_rect: NSInteger = msg_send![this,
addTrackingRect:rect
owner:this
userData:nil
assumeInside:NO
];
state.tracking_rect = Some(tracking_rect);
}
trace!("Completed `viewDidMoveToWindow`");
}
extern "C" fn frame_did_change(this: &Object, _sel: Sel, _event: id) {
unsafe {
let state_ptr: *mut c_void = *this.get_ivar("winitState");
let state = &mut *(state_ptr as *mut ViewState);
if let Some(tracking_rect) = state.tracking_rect.take() {
let _: () = msg_send![this, removeTrackingRect: tracking_rect];
}
let rect: NSRect = msg_send![this, visibleRect];
let tracking_rect: NSInteger = msg_send![this,
addTrackingRect:rect
owner:this
userData:nil
assumeInside:NO
];
state.tracking_rect = Some(tracking_rect);
}
}
extern "C" fn draw_rect(this: &Object, _sel: Sel, rect: NSRect) {
unsafe {
let state_ptr: *mut c_void = *this.get_ivar("winitState");
let state = &mut *(state_ptr as *mut ViewState);
AppState::handle_redraw(WindowId(get_window_id(state.ns_window)));
let superclass = util::superclass(this);
let () = msg_send![super(this, superclass), drawRect: rect];
}
}
extern "C" fn accepts_first_responder(_this: &Object, _sel: Sel) -> BOOL {
YES
}
// This is necessary to prevent a beefy terminal error on MacBook Pros:
// IMKInputSession [0x7fc573576ff0 presentFunctionRowItemTextInputViewWithEndpoint:completionHandler:] : [self textInputContext]=0x7fc573558e10 *NO* NSRemoteViewController to client, NSError=Error Domain=NSCocoaErrorDomain Code=4099 "The connection from pid 0 was invalidated from this process." UserInfo={NSDebugDescription=The connection from pid 0 was invalidated from this process.}, com.apple.inputmethod.EmojiFunctionRowItem
// TODO: Add an API extension for using `NSTouchBar`
extern "C" fn touch_bar(_this: &Object, _sel: Sel) -> BOOL {
NO
}
extern "C" fn reset_cursor_rects(this: &Object, _sel: Sel) {
unsafe {
let state_ptr: *mut c_void = *this.get_ivar("winitState");
let state = &mut *(state_ptr as *mut ViewState);
let bounds: NSRect = msg_send![this, bounds];
let cursor_state = state.cursor_state.lock().unwrap();
let cursor = if cursor_state.visible {
cursor_state.cursor.load()
} else {
util::invisible_cursor()
};
let _: () = msg_send![this,
addCursorRect:bounds
cursor:cursor
];
}
}
extern "C" fn has_marked_text(this: &Object, _sel: Sel) -> BOOL {
unsafe {
trace!("Triggered `hasMarkedText`");
let marked_text: id = *this.get_ivar("markedText");
trace!("Completed `hasMarkedText`");
(marked_text.length() > 0) as BOOL
}
}
extern "C" fn marked_range(this: &Object, _sel: Sel) -> NSRange {
unsafe {
trace!("Triggered `markedRange`");
let marked_text: id = *this.get_ivar("markedText");
let length = marked_text.length();
trace!("Completed `markedRange`");
if length > 0 {
NSRange::new(0, length - 1)
} else {
util::EMPTY_RANGE
}
}
}
extern "C" fn selected_range(_this: &Object, _sel: Sel) -> NSRange {
trace!("Triggered `selectedRange`");
trace!("Completed `selectedRange`");
util::EMPTY_RANGE
}
extern "C" fn set_marked_text(
this: &mut Object,
_sel: Sel,
string: id,
_selected_range: NSRange,
_replacement_range: NSRange,
) {
trace!("Triggered `setMarkedText`");
unsafe {
let marked_text_ref: &mut id = this.get_mut_ivar("markedText");
let _: () = msg_send![(*marked_text_ref), release];
let marked_text = NSMutableAttributedString::alloc(nil);
let has_attr = msg_send![string, isKindOfClass: class!(NSAttributedString)];
if has_attr {
marked_text.initWithAttributedString(string);
} else {
marked_text.initWithString(string);
};
*marked_text_ref = marked_text;
}
trace!("Completed `setMarkedText`");
}
extern "C" fn unmark_text(this: &Object, _sel: Sel) {
trace!("Triggered `unmarkText`");
unsafe {
let marked_text: id = *this.get_ivar("markedText");
let mutable_string = marked_text.mutableString();
let _: () = msg_send![mutable_string, setString:""];
let input_context: id = msg_send![this, inputContext];
let _: () = msg_send![input_context, discardMarkedText];
}
trace!("Completed `unmarkText`");
}
extern "C" fn valid_attributes_for_marked_text(_this: &Object, _sel: Sel) -> id {
trace!("Triggered `validAttributesForMarkedText`");
trace!("Completed `validAttributesForMarkedText`");
unsafe { msg_send![class!(NSArray), array] }
}
extern "C" fn attributed_substring_for_proposed_range(
_this: &Object,
_sel: Sel,
_range: NSRange,
_actual_range: *mut c_void, // *mut NSRange
) -> id {
trace!("Triggered `attributedSubstringForProposedRange`");
trace!("Completed `attributedSubstringForProposedRange`");
nil
}
extern "C" fn character_index_for_point(_this: &Object, _sel: Sel, _point: NSPoint) -> NSUInteger {
trace!("Triggered `characterIndexForPoint`");
trace!("Completed `characterIndexForPoint`");
0
}
extern "C" fn first_rect_for_character_range(
this: &Object,
_sel: Sel,
_range: NSRange,
_actual_range: *mut c_void, // *mut NSRange
) -> NSRect {
unsafe {
trace!("Triggered `firstRectForCharacterRange`");
let state_ptr: *mut c_void = *this.get_ivar("winitState");
let state = &mut *(state_ptr as *mut ViewState);
let (x, y) = state.ime_spot.unwrap_or_else(|| {
let content_rect = NSWindow::contentRectForFrameRect_(
state.ns_window,
NSWindow::frame(state.ns_window),
);
let x = content_rect.origin.x;
let y = util::bottom_left_to_top_left(content_rect);
(x, y)
});
trace!("Completed `firstRectForCharacterRange`");
NSRect::new(NSPoint::new(x as _, y as _), NSSize::new(0.0, 0.0))
}
}
extern "C" fn insert_text(this: &Object, _sel: Sel, string: id, _replacement_range: NSRange) {
trace!("Triggered `insertText`");
unsafe {
let state_ptr: *mut c_void = *this.get_ivar("winitState");
let state = &mut *(state_ptr as *mut ViewState);
let has_attr = msg_send![string, isKindOfClass: class!(NSAttributedString)];
let characters = if has_attr {
// This is a *mut NSAttributedString
msg_send![string, string]
} else {
// This is already a *mut NSString
string
};
let slice =
slice::from_raw_parts(characters.UTF8String() as *const c_uchar, characters.len());
let string = str::from_utf8_unchecked(slice);
state.is_key_down = true;
// We don't need this now, but it's here if that changes.
//let event: id = msg_send![NSApp(), currentEvent];
let mut events = VecDeque::with_capacity(characters.len());
for character in string.chars().filter(|c| !is_corporate_character(*c)) {
events.push_back(EventWrapper::StaticEvent(Event::WindowEvent {
window_id: WindowId(get_window_id(state.ns_window)),
event: WindowEvent::ReceivedCharacter(character),
}));
}
AppState::queue_events(events);
}
trace!("Completed `insertText`");
}
extern "C" fn do_command_by_selector(this: &Object, _sel: Sel, command: Sel) {
trace!("Triggered `doCommandBySelector`");
// Basically, we're sent this message whenever a keyboard event that doesn't generate a "human readable" character
// happens, i.e. newlines, tabs, and Ctrl+C.
unsafe {
let state_ptr: *mut c_void = *this.get_ivar("winitState");
let state = &mut *(state_ptr as *mut ViewState);
let mut events = VecDeque::with_capacity(1);
if command == sel!(insertNewline:) {
// The `else` condition would emit the same character, but I'm keeping this here both...
// 1) as a reminder for how `doCommandBySelector` works
// 2) to make our use of carriage return explicit
events.push_back(EventWrapper::StaticEvent(Event::WindowEvent {
window_id: WindowId(get_window_id(state.ns_window)),
event: WindowEvent::ReceivedCharacter('\r'),
}));
} else {
let raw_characters = state.raw_characters.take();
if let Some(raw_characters) = raw_characters {
for character in raw_characters
.chars()
.filter(|c| !is_corporate_character(*c))
{
events.push_back(EventWrapper::StaticEvent(Event::WindowEvent {
window_id: WindowId(get_window_id(state.ns_window)),
event: WindowEvent::ReceivedCharacter(character),
}));
}
}
};
AppState::queue_events(events);
}
trace!("Completed `doCommandBySelector`");
}
fn get_characters(event: id, ignore_modifiers: bool) -> String {
unsafe {
let characters: id = if ignore_modifiers {
msg_send![event, charactersIgnoringModifiers]
} else {
msg_send![event, characters]
};
assert_ne!(characters, nil);
let slice =
slice::from_raw_parts(characters.UTF8String() as *const c_uchar, characters.len());
let string = str::from_utf8_unchecked(slice);
string.to_owned()
}
}
// As defined in: https://www.unicode.org/Public/MAPPINGS/VENDORS/APPLE/CORPCHAR.TXT
fn is_corporate_character(c: char) -> bool {
match c {
'\u{F700}'..='\u{F747}'
| '\u{F802}'..='\u{F84F}'
| '\u{F850}'
| '\u{F85C}'
| '\u{F85D}'
| '\u{F85F}'
| '\u{F860}'..='\u{F86B}'
| '\u{F870}'..='\u{F8FF}' => true,
_ => false,
}
}
// Retrieves a layout-independent keycode given an event.
fn retrieve_keycode(event: id) -> Option<VirtualKeyCode> {
#[inline]
fn get_code(ev: id, raw: bool) -> Option<VirtualKeyCode> {
let characters = get_characters(ev, raw);
characters.chars().next().and_then(|c| char_to_keycode(c))
}
// Cmd switches Roman letters for Dvorak-QWERTY layout, so we try modified characters first.
// If we don't get a match, then we fall back to unmodified characters.
let code = get_code(event, false).or_else(|| get_code(event, true));
// We've checked all layout related keys, so fall through to scancode.
// Reaching this code means that the key is layout-independent (e.g. Backspace, Return).
//
// We're additionally checking here for F21-F24 keys, since their keycode
// can vary, but we know that they are encoded
// in characters property.
code.or_else(|| {
let scancode = get_scancode(event);
scancode_to_keycode(scancode).or_else(|| check_function_keys(&get_characters(event, true)))
})
}
// Update `state.modifiers` if `event` has something different
fn update_potentially_stale_modifiers(state: &mut ViewState, event: id) {
let event_modifiers = event_mods(event);
if state.modifiers != event_modifiers {
state.modifiers = event_modifiers;
AppState::queue_event(EventWrapper::StaticEvent(Event::WindowEvent {
window_id: WindowId(get_window_id(state.ns_window)),
event: WindowEvent::ModifiersChanged(state.modifiers),
}));
}
}
extern "C" fn key_down(this: &Object, _sel: Sel, event: id) {
trace!("Triggered `keyDown`");
unsafe {
let state_ptr: *mut c_void = *this.get_ivar("winitState");
let state = &mut *(state_ptr as *mut ViewState);
let window_id = WindowId(get_window_id(state.ns_window));
let characters = get_characters(event, false);
state.raw_characters = Some(characters.clone());
let scancode = get_scancode(event) as u32;
let virtual_keycode = retrieve_keycode(event);
let is_repeat = msg_send![event, isARepeat];
update_potentially_stale_modifiers(state, event);
#[allow(deprecated)]
let window_event = Event::WindowEvent {
window_id,
event: WindowEvent::KeyboardInput {
device_id: DEVICE_ID,
input: KeyboardInput {
state: ElementState::Pressed,
scancode,
virtual_keycode,
modifiers: event_mods(event),
},
is_synthetic: false,
},
};
let pass_along = {
AppState::queue_event(EventWrapper::StaticEvent(window_event));
// Emit `ReceivedCharacter` for key repeats
if is_repeat && state.is_key_down {
for character in characters.chars().filter(|c| !is_corporate_character(*c)) {
AppState::queue_event(EventWrapper::StaticEvent(Event::WindowEvent {
window_id,
event: WindowEvent::ReceivedCharacter(character),
}));
}
false
} else {
true
}
};
if pass_along {
// Some keys (and only *some*, with no known reason) don't trigger `insertText`, while others do...
// So, we don't give repeats the opportunity to trigger that, since otherwise our hack will cause some
// keys to generate twice as many characters.
let array: id = msg_send![class!(NSArray), arrayWithObject: event];
let _: () = msg_send![this, interpretKeyEvents: array];
}
}
trace!("Completed `keyDown`");
}
extern "C" fn key_up(this: &Object, _sel: Sel, event: id) {
trace!("Triggered `keyUp`");
unsafe {
let state_ptr: *mut c_void = *this.get_ivar("winitState");
let state = &mut *(state_ptr as *mut ViewState);
state.is_key_down = false;
let scancode = get_scancode(event) as u32;
let virtual_keycode = retrieve_keycode(event);
update_potentially_stale_modifiers(state, event);
#[allow(deprecated)]
let window_event = Event::WindowEvent {
window_id: WindowId(get_window_id(state.ns_window)),
event: WindowEvent::KeyboardInput {
device_id: DEVICE_ID,
input: KeyboardInput {
state: ElementState::Released,
scancode,
virtual_keycode,
modifiers: event_mods(event),
},
is_synthetic: false,
},
};
AppState::queue_event(EventWrapper::StaticEvent(window_event));
}
trace!("Completed `keyUp`");
}
extern "C" fn flags_changed(this: &Object, _sel: Sel, event: id) {
trace!("Triggered `flagsChanged`");
unsafe {
let state_ptr: *mut c_void = *this.get_ivar("winitState");
let state = &mut *(state_ptr as *mut ViewState);
let mut events = VecDeque::with_capacity(4);
if let Some(window_event) = modifier_event(
event,
NSEventModifierFlags::NSShiftKeyMask,
state.modifiers.shift(),
) {
state.modifiers.toggle(ModifiersState::SHIFT);
events.push_back(window_event);
}
if let Some(window_event) = modifier_event(
event,
NSEventModifierFlags::NSControlKeyMask,
state.modifiers.ctrl(),
) {
state.modifiers.toggle(ModifiersState::CTRL);
events.push_back(window_event);
}
if let Some(window_event) = modifier_event(
event,
NSEventModifierFlags::NSCommandKeyMask,
state.modifiers.logo(),
) {
state.modifiers.toggle(ModifiersState::LOGO);
events.push_back(window_event);
}
if let Some(window_event) = modifier_event(
event,
NSEventModifierFlags::NSAlternateKeyMask,
state.modifiers.alt(),
) {
state.modifiers.toggle(ModifiersState::ALT);
events.push_back(window_event);
}
let window_id = WindowId(get_window_id(state.ns_window));
for event in events {
AppState::queue_event(EventWrapper::StaticEvent(Event::WindowEvent {
window_id,
event,
}));
}
AppState::queue_event(EventWrapper::StaticEvent(Event::WindowEvent {
window_id,
event: WindowEvent::ModifiersChanged(state.modifiers),
}));
}
trace!("Completed `flagsChanged`");
}
extern "C" fn insert_tab(this: &Object, _sel: Sel, _sender: id) {
unsafe {
let window: id = msg_send![this, window];
let first_responder: id = msg_send![window, firstResponder];
let this_ptr = this as *const _ as *mut _;
if first_responder == this_ptr {
let (): _ = msg_send![window, selectNextKeyView: this];
}
}
}
extern "C" fn insert_back_tab(this: &Object, _sel: Sel, _sender: id) {
unsafe {
let window: id = msg_send![this, window];
let first_responder: id = msg_send![window, firstResponder];
let this_ptr = this as *const _ as *mut _;
if first_responder == this_ptr {
let (): _ = msg_send![window, selectPreviousKeyView: this];
}
}
}
// Allows us to receive Cmd-. (the shortcut for closing a dialog)
// https://bugs.eclipse.org/bugs/show_bug.cgi?id=300620#c6
extern "C" fn cancel_operation(this: &Object, _sel: Sel, _sender: id) {
trace!("Triggered `cancelOperation`");
unsafe {
let state_ptr: *mut c_void = *this.get_ivar("winitState");
let state = &mut *(state_ptr as *mut ViewState);
let scancode = 0x2f;
let virtual_keycode = scancode_to_keycode(scancode);
debug_assert_eq!(virtual_keycode, Some(VirtualKeyCode::Period));
let event: id = msg_send![NSApp(), currentEvent];
update_potentially_stale_modifiers(state, event);
#[allow(deprecated)]
let window_event = Event::WindowEvent {
window_id: WindowId(get_window_id(state.ns_window)),
event: WindowEvent::KeyboardInput {
device_id: DEVICE_ID,
input: KeyboardInput {
state: ElementState::Pressed,
scancode: scancode as _,
virtual_keycode,
modifiers: event_mods(event),
},
is_synthetic: false,
},
};
AppState::queue_event(EventWrapper::StaticEvent(window_event));
}
trace!("Completed `cancelOperation`");
}
fn mouse_click(this: &Object, event: id, button: MouseButton, button_state: ElementState) {
unsafe {
let state_ptr: *mut c_void = *this.get_ivar("winitState");
let state = &mut *(state_ptr as *mut ViewState);
update_potentially_stale_modifiers(state, event);
let window_event = Event::WindowEvent {
window_id: WindowId(get_window_id(state.ns_window)),
event: WindowEvent::MouseInput {
device_id: DEVICE_ID,
state: button_state,
button,
modifiers: event_mods(event),
},
};
AppState::queue_event(EventWrapper::StaticEvent(window_event));
}
}
extern "C" fn mouse_down(this: &Object, _sel: Sel, event: id) {
mouse_motion(this, event);
mouse_click(this, event, MouseButton::Left, ElementState::Pressed);
}
extern "C" fn mouse_up(this: &Object, _sel: Sel, event: id) {
mouse_motion(this, event);
mouse_click(this, event, MouseButton::Left, ElementState::Released);
}
extern "C" fn right_mouse_down(this: &Object, _sel: Sel, event: id) {
mouse_motion(this, event);
mouse_click(this, event, MouseButton::Right, ElementState::Pressed);
}
extern "C" fn right_mouse_up(this: &Object, _sel: Sel, event: id) {
mouse_motion(this, event);
mouse_click(this, event, MouseButton::Right, ElementState::Released);
}
extern "C" fn other_mouse_down(this: &Object, _sel: Sel, event: id) {
mouse_motion(this, event);
mouse_click(this, event, MouseButton::Middle, ElementState::Pressed);
}
extern "C" fn other_mouse_up(this: &Object, _sel: Sel, event: id) {
mouse_motion(this, event);
mouse_click(this, event, MouseButton::Middle, ElementState::Released);
}
fn mouse_motion(this: &Object, event: id) {
unsafe {
let state_ptr: *mut c_void = *this.get_ivar("winitState");
let state = &mut *(state_ptr as *mut ViewState);
// We have to do this to have access to the `NSView` trait...
let view: id = this as *const _ as *mut _;
let window_point = event.locationInWindow();
let view_point = view.convertPoint_fromView_(window_point, nil);
let view_rect = NSView::frame(view);
if view_point.x.is_sign_negative()
|| view_point.y.is_sign_negative()
|| view_point.x > view_rect.size.width
|| view_point.y > view_rect.size.height
{
let mouse_buttons_down: NSInteger = msg_send![class!(NSEvent), pressedMouseButtons];
if mouse_buttons_down == 0 {
// Point is outside of the client area (view) and no buttons are pressed
return;
}
}
let x = view_point.x as f64;
let y = view_rect.size.height as f64 - view_point.y as f64;
let logical_position = LogicalPosition::new(x, y);
update_potentially_stale_modifiers(state, event);
let window_event = Event::WindowEvent {
window_id: WindowId(get_window_id(state.ns_window)),
event: WindowEvent::CursorMoved {
device_id: DEVICE_ID,
position: logical_position.to_physical(state.get_scale_factor()),
modifiers: event_mods(event),
},
};
AppState::queue_event(EventWrapper::StaticEvent(window_event));
}
}
extern "C" fn mouse_moved(this: &Object, _sel: Sel, event: id) {
mouse_motion(this, event);
}
extern "C" fn mouse_dragged(this: &Object, _sel: Sel, event: id) {
mouse_motion(this, event);
}
extern "C" fn right_mouse_dragged(this: &Object, _sel: Sel, event: id) {
mouse_motion(this, event);
}
extern "C" fn other_mouse_dragged(this: &Object, _sel: Sel, event: id) {
mouse_motion(this, event);
}
extern "C" fn mouse_entered(this: &Object, _sel: Sel, _event: id) {
trace!("Triggered `mouseEntered`");
unsafe {
let state_ptr: *mut c_void = *this.get_ivar("winitState");
let state = &mut *(state_ptr as *mut ViewState);
let enter_event = Event::WindowEvent {
window_id: WindowId(get_window_id(state.ns_window)),
event: WindowEvent::CursorEntered {
device_id: DEVICE_ID,
},
};
AppState::queue_event(EventWrapper::StaticEvent(enter_event));
}
trace!("Completed `mouseEntered`");
}
extern "C" fn mouse_exited(this: &Object, _sel: Sel, _event: id) {
trace!("Triggered `mouseExited`");
unsafe {
let state_ptr: *mut c_void = *this.get_ivar("winitState");
let state = &mut *(state_ptr as *mut ViewState);
let window_event = Event::WindowEvent {
window_id: WindowId(get_window_id(state.ns_window)),
event: WindowEvent::CursorLeft {
device_id: DEVICE_ID,
},
};
AppState::queue_event(EventWrapper::StaticEvent(window_event));
}
trace!("Completed `mouseExited`");
}
extern "C" fn scroll_wheel(this: &Object, _sel: Sel, event: id) {
trace!("Triggered `scrollWheel`");
mouse_motion(this, event);
unsafe {
let state_ptr: *mut c_void = *this.get_ivar("winitState");
let state = &mut *(state_ptr as *mut ViewState);
let delta = {
// macOS horizontal sign convention is the inverse of winit.
let (x, y) = (event.scrollingDeltaX() * -1.0, event.scrollingDeltaY());
if event.hasPreciseScrollingDeltas() == YES {
let delta = LogicalPosition::new(x, y).to_physical(state.get_scale_factor());
MouseScrollDelta::PixelDelta(delta)
} else {
MouseScrollDelta::LineDelta(x as f32, y as f32)
}
};
let phase = match event.phase() {
NSEventPhase::NSEventPhaseMayBegin | NSEventPhase::NSEventPhaseBegan => {
TouchPhase::Started
}
NSEventPhase::NSEventPhaseEnded => TouchPhase::Ended,
_ => TouchPhase::Moved,
};
let device_event = Event::DeviceEvent {
device_id: DEVICE_ID,
event: DeviceEvent::MouseWheel { delta },
};
let state_ptr: *mut c_void = *this.get_ivar("winitState");
let state = &mut *(state_ptr as *mut ViewState);
update_potentially_stale_modifiers(state, event);
let window_event = Event::WindowEvent {
window_id: WindowId(get_window_id(state.ns_window)),
event: WindowEvent::MouseWheel {
device_id: DEVICE_ID,
delta,
phase,
modifiers: event_mods(event),
},
};
AppState::queue_event(EventWrapper::StaticEvent(device_event));
AppState::queue_event(EventWrapper::StaticEvent(window_event));
}
trace!("Completed `scrollWheel`");
}
extern "C" fn pressure_change_with_event(this: &Object, _sel: Sel, event: id) {
trace!("Triggered `pressureChangeWithEvent`");
mouse_motion(this, event);
unsafe {
let state_ptr: *mut c_void = *this.get_ivar("winitState");
let state = &mut *(state_ptr as *mut ViewState);
let pressure = event.pressure();
let stage = event.stage();
let window_event = Event::WindowEvent {
window_id: WindowId(get_window_id(state.ns_window)),
event: WindowEvent::TouchpadPressure {
device_id: DEVICE_ID,
pressure,
stage,
},
};
AppState::queue_event(EventWrapper::StaticEvent(window_event));
}
trace!("Completed `pressureChangeWithEvent`");
}
// Allows us to receive Ctrl-Tab and Ctrl-Esc.
// Note that this *doesn't* help with any missing Cmd inputs.
// https://github.com/chromium/chromium/blob/a86a8a6bcfa438fa3ac2eba6f02b3ad1f8e0756f/ui/views/cocoa/bridged_content_view.mm#L816
extern "C" fn wants_key_down_for_event(_this: &Object, _sel: Sel, _event: id) -> BOOL {
YES
}
extern "C" fn accepts_first_mouse(_this: &Object, _sel: Sel, _event: id) -> BOOL {
YES
}<|fim▁end|> | decl.add_method(
sel!(validAttributesForMarkedText),
valid_attributes_for_marked_text as extern "C" fn(&Object, Sel) -> id, |
<|file_name|>parse_args.py<|end_file_name|><|fim▁begin|>"""
atomorder/parse_args.py
Parses command line arguments and overwrites setting defaults
"""
from . import settings
import argparse
import sys
description = ""
epilog = ""
parser = argparse.ArgumentParser(
description = description,
formatter_class = argparse.RawDescriptionHelpFormatter,
epilog = epilog)
parser = argparse.ArgumentParser(description='Fit probability density functions to data-files')
parser.add_argument('-r', '--reactants', help='Reactant structures in a coordinate file format.', action='store', type=str, nargs='+')
parser.add_argument('-p', '--products', help='Product structures in a coordinate file format.', action='store', type=str, nargs='+')<|fim▁hole|> rotate: Ignore bond order, align a single reactant and product molecule and match all atoms\n \
no-bond: Atom matching by rotation and atomic similarity\n \
full: Atom matching by rotation and bond similarity\n \
info: Information about molecule sybyl atom types, bond types and conjugated sub systems',
choices = ['rotate', 'full', 'info', 'no-bond'], action='store', default='full')
parser.add_argument('-o', '--output', help='Given a filename, output the reordered product in xyz format instead of printing to stdout', action='store', type=str, default=sys.stdout)
parser.add_argument('--atomic-sybyl-weight', action='store', default=1, type=float)
parser.add_argument('--bond-weight', action='store', default=1, type=float)
# TODO output to folder
# TODO output atom mapping oneline, save reordered products
# TODO allow possibility to give pickle with reaction object
# TODO output sybyl
# TODO batch reactions
# TODO output aromatic/conjugated subgroups
args = parser.parse_args()
# override setting defaults
settings.update(args)<|fim▁end|> | parser.add_argument('--print-level', help='Print-level - 0: quiet, 1: results and errors, 2: +warnings, 3: +progress, 4: excess, 5: EXTREME',
action='store', choices = range(0,6), default=1, type=int)
parser.add_argument('-f', '--format', help='File format', type=str, action='store', default='guess', choices=["guess","xyz","pdb"])
parser.add_argument('-m', '--method', help='Method to use.\n \ |
<|file_name|>tasks_for_notebook.py<|end_file_name|><|fim▁begin|>import os
import json
import pandas
import numpy
from IPython.display import HTML
from datetime import datetime
import pandas_highcharts.core
title_name = 'Tasks'
file_name = 'tasks.csv'
css_dt_name = '//cdn.datatables.net/1.10.12/css/jquery.dataTables.min.css'
js_dt_name = '//cdn.datatables.net/1.10.12/js/jquery.dataTables.min'
js_hc_name_1 = '//code.highcharts.com/highcharts'
js_hc_name_2 = '//code.highcharts.com/modules/exporting'
def read_task():
if os.path.exists(file_name):
return pandas.DataFrame.from_csv(file_name)
else:
return pandas.DataFrame()
def save_task(data):
pandas.DataFrame(data).to_csv(file_name)
def add_task(name, content):
data = read_task()
df = pandas.DataFrame([{
'name':name,
'content':content,
'status':'new',
'created_at':datetime.now().strftime("%Y/%m/%d %H:%M:%S")
}], columns = ['name', 'content', 'status', 'created_at', 'updated_at'])
data = data.append(df, ignore_index=True)
save_task(data)
def render_task(data):
js = '''
<link rel='stylesheet' type='text/css' href='%s'>
<script>
require.config({
paths: {
dataTables: '%s'
}
});
require(['dataTables'], function(){
$('.dataframe').DataTable();
});
</script>
'''%(css_dt_name, js_dt_name)
return HTML('<h2>%s</h2>'%(title_name) + data.to_html(classes="display") + js)
def show_done_task():
data = read_task()
data = data[data['status'] == 'done']
return render_task(data)
def show_task():<|fim▁hole|>def update_task(id, **kwargs):
data = read_task()
if kwargs.get('name'):
data.loc.__setitem__((slice(id, id), 'name'), kwargs.get('name'))
if kwargs.get('content'):
data.loc.__setitem__((slice(id, id), 'content'), kwargs.get('content'))
if kwargs.get('status'):
data.loc.__setitem__((slice(id, id), 'status'), kwargs.get('status'))
data.loc.__setitem__((slice(id, id), 'updated_at'), datetime.now().strftime("%Y/%m/%d %H:%M:%S"))
save_task(data)
def delete_task(id):
data = read_task()
data = data.drop(id)
save_task(data)
def backup_task():
os.system( "mkdir backup" )
os.system( "cp %s backup/%s_%s"%(file_name, datetime.now().strftime("%Y%m%d%H%M%S"), file_name) )
def render_graph(data):
chart = pandas_highcharts.core.serialize(data, title=title_name, zoom="xy", output_type='dict')
chart['subtitle'] = {"text": "created tasks", "x": -20}
html = HTML('''
<div id="chart1" style="min-width: 400px; height: 400px; margin: 0 auto"></div>
<script>
require.config({
paths: {
highcharts: '%s',
exporting: '%s'
}
});
require(['highcharts','exporting'], function(){
$('#chart1').highcharts(%s);
});
</script>
''' %(js_hc_name_1, js_hc_name_2, json.dumps(chart)))
return html
def graph_task():
data = read_task()
data['datetime'] = pandas.to_datetime(data['created_at'])
data['count'] = data['name'].count()
data = data.groupby([data['datetime'].dt.year, data['datetime'].dt.month, data['datetime'].dt.day])['count'].count()
data = pandas.DataFrame(data)
return render_graph(data)<|fim▁end|> | data = read_task()
data = data[data['status'] != 'done']
return render_task(data)
|