repo (string) | commit (string) | message (string) | diff (string)
---|---|---|---|
spencertipping/webtimer
|
00f4d3ce9f3b9caff8a4af592fe925f5309c2943
|
Writing a small timer app for numbered ticket queues
|
diff --git a/timer.html b/timer.html
new file mode 100644
index 0000000..9c8c96a
--- /dev/null
+++ b/timer.html
@@ -0,0 +1,62 @@
+<html>
+ <head>
+ <title>DMV Waiting List Estimator</title>
+ <style>
+ body {font-family: sans-serif}
+ </style>
+ <script src='http://ajax.googleapis.com/ajax/libs/jquery/1.4.2/jquery.min.js'></script>
+
+ <script>
+ var estimate = function () {
+ var hashtag = $('#hashtag').val();
+ var number = Number($('#number').val());
+
+ $('#result').text ('Gathering data and producing estimate...');
+
+ // Use the Twitter API to query for the numbers.
+ $.getJSON ('http://search.twitter.com/search.json?q=' + escape (hashtag), function (results) {
+ var results_within_hour = [];
+ var match = null;
+ for (var i = 0, l = results.results.length; i < l; ++i)
+ if (new Date().getTime() - Date.parse (results.results[i].created_at) < 3600 * 1000 &&
+ (match = /^\s*(\d+)\s+#/.exec (results.results[i].text)))
+ results_within_hour.push ({number: match[1], time: Date.parse (results.results[i].created_at)});
+
+ // Take the minimum and maximum values and find the rate.
+ var first = results_within_hour[0];
+ var middle = results_within_hour[results_within_hour.length >> 1];
+ var last = results_within_hour[results_within_hour.length - 1];
+
+ var dt = (last.time - first.time) / 60000; // In minutes
+ var dn = last.number - first.number;
+
+ var rate = dn / dt;
+ var time = (new Date().getTime() - first.time) / 60000; // Also in minutes
+ var diff = number - first.number;
+
+ var expected = diff / rate - time; // minutes until your number comes up, minus the minutes already elapsed
+
+ $('#result').text ('Your expected wait is ' + (expected >>> 0) + ' minutes.');
+ });
+ };
+ </script>
+ </head>
+
+ <body>
+ <h1>Wait Estimator</h1>
+ <p>This application estimates your wait if you are in a numbered queue. To provide data, tweet the number currently being served, tagged with a shared hashtag.
+ For example, if the current number at the Boulder DMV office is 904, you would tweet <code>904 #boulderdmv</code>.</p>
+
+ <p>To compute your wait, enter the Twitter hashtag and your number. Based on the tweets that you and others have provided, the amount of time left
+ will be estimated in a few seconds.</p>
+
+ <p>Keep in mind that this estimate is not exact; if you are worried about missing your appointment, you should arrive early. It is only an
+ estimate, not a guarantee, so it may be inaccurate.</p>
+
+ <table><tr><td>Twitter hashtag (e.g. <code>#boulderdmv</code>): </td><td><input id='hashtag'></input></td></tr>
+ <tr><td>Your number: </td><td><input id='number'></input></td></tr></table>
+ <button onclick='estimate()'>Estimate</button>
+
+ <div id='result'></div>
+ </body>
+</html>
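The estimate above boils down to simple rate arithmetic. The sketch below is illustrative only — it is not part of the commit, the function and variable names are invented — and it assumes the tweets have already been parsed into (ticket number, unix timestamp) pairs:

```python
# Illustrative sketch, not repository code: estimate the wait in a numbered
# queue from crowd-sourced (ticket_number, unix_seconds) samples.
def estimate_wait_minutes(samples, my_number):
    samples = sorted(samples, key=lambda s: s[1])   # oldest first
    first_number, first_time = samples[0]
    last_number, last_time = samples[-1]
    elapsed = (last_time - first_time) / 60.0       # minutes spanned by the data
    if elapsed == 0 or last_number == first_number:
        return None                                 # not enough data to infer a rate
    rate = (last_number - first_number) / elapsed   # tickets served per minute
    return (my_number - last_number) / rate         # minutes until your ticket

# Example: the counter moved 900 -> 910 over 30 minutes and you hold ticket 925,
# so the rate is 1/3 ticket per minute and the estimated wait is about 45 minutes.
```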
|
gcao/server_extras
|
72931934b2fda6612e7d0ac6ad2f107b6966f77d
|
Make restore.sh executable
|
diff --git a/restore.sh b/restore.sh
old mode 100644
new mode 100755
|
gcao/server_extras
|
1a1b66e10b43aad30d62d7de161738b6361e6bd9
|
backup and restore data script
|
diff --git a/backup.sh b/backup.sh
index e068b23..4562c43 100755
--- a/backup.sh
+++ b/backup.sh
@@ -1,12 +1,13 @@
#!/bin/bash
rm -rf ~/backup
mkdir ~/backup
cd ~/backup
-tar czf mysql-bbs.tgz -C /var/lib/mysql bbs
-tar czf mysql-gocool.tgz -C /var/lib/mysql gocool_production
+
+tar czf mysql-bbs.tgz -C /var/lib/mysql bbs
+tar czf mysql-gocool.tgz -C /var/lib/mysql gocool_production
tar czf mysql-ucenter.tgz -C /var/lib/mysql ucenter
-tar czf bbs-images.tgz -C /data/apps/bbs/current attachments images/avatars/bbsxp
-tar czf gocool-sgfs.tgz -C /data/apps/gocool/shared system
+tar czf bbs-images.tgz -C /data/apps/bbs/current attachments images/avatars/bbsxp
+tar czf gocool-sgfs.tgz -C /data/apps/gocool/shared system
diff --git a/restore.sh b/restore.sh
new file mode 100644
index 0000000..e4269ce
--- /dev/null
+++ b/restore.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+/etc/init.d/apache2 stop
+
+tar xzf ~/backup/mysql-bbs.tgz -C /var/lib/mysql
+tar xzf ~/backup/mysql-ucenter.tgz -C /var/lib/mysql
+tar xzf ~/backup/mysql-gocool.tgz -C /var/lib/mysql
+
+restart mysql
+/etc/init.d/apache2 start
+
+tar xzf ~/backup/bbs-images.tgz -C /data/apps/bbs/current
+tar xzf ~/backup/gocool-sgfs.tgz -C /data/apps/gocool/shared
+
|
gcao/server_extras
|
6a1e56951d80fa368f937f97d82992ae34922516
|
Do not update file timestamp after deployment, change how sgf files are backed up
|
diff --git a/Capfile b/Capfile
index a4a0241..c85a81c 100644
--- a/Capfile
+++ b/Capfile
@@ -1,24 +1,26 @@
# Look here for cleanup/refactoring ideas: http://github.com/leehambley/railsless-deploy/
load 'deploy' if respond_to?(:namespace) # cap2 differentiator
set :application, "server_extras"
set :deploy_to, "/data/apps/extras"
set :scm, :git
set :repository, "git://github.com/gcao/#{application}.git"
+set :normalize_asset_timestamps, false
+
if ENV['DEPLOYMENT_TARGET'] == 'production'
set :user, "root"
set :use_sudo, false
ami_host = `ami_host`.strip
# AMI ami-0d729464: ubuntu 9.04 server base
server ami_host, :all, :primary => true
else
set :user, "vagrant"
set :use_sudo, true
server 'vagrant', :app, :web, :db, :primary => true
end
diff --git a/backup.sh b/backup.sh
index 5a215e7..e068b23 100755
--- a/backup.sh
+++ b/backup.sh
@@ -1,12 +1,12 @@
#!/bin/bash
rm -rf ~/backup
mkdir ~/backup
cd ~/backup
tar czf mysql-bbs.tgz -C /var/lib/mysql bbs
tar czf mysql-gocool.tgz -C /var/lib/mysql gocool_production
tar czf mysql-ucenter.tgz -C /var/lib/mysql ucenter
tar czf bbs-images.tgz -C /data/apps/bbs/current attachments images/avatars/bbsxp
-tar czf gocool-sgfs.tgz -C /data/apps/gocool/current public/system
+tar czf gocool-sgfs.tgz -C /data/apps/gocool/shared system
|
gcao/server_extras
|
89037c92a48c28bd4e07c5ddb8a058fd849ac239
|
Change deployment target dir
|
diff --git a/Capfile b/Capfile
index ed6f8b8..a4a0241 100644
--- a/Capfile
+++ b/Capfile
@@ -1,24 +1,24 @@
# Look here for cleanup/refactoring ideas: http://github.com/leehambley/railsless-deploy/
load 'deploy' if respond_to?(:namespace) # cap2 differentiator
set :application, "server_extras"
-set :deploy_to, "/data/apps/#{application}"
+set :deploy_to, "/data/apps/extras"
set :scm, :git
set :repository, "git://github.com/gcao/#{application}.git"
if ENV['DEPLOYMENT_TARGET'] == 'production'
set :user, "root"
set :use_sudo, false
ami_host = `ami_host`.strip
# AMI ami-0d729464: ubuntu 9.04 server base
server ami_host, :all, :primary => true
else
set :user, "vagrant"
set :use_sudo, true
server 'vagrant', :app, :web, :db, :primary => true
end
|
gcao/server_extras
|
7a18f911c55dd374fd2ad7472abe6a83c864ef7c
|
Add capistrano deployment and backup script
|
diff --git a/Capfile b/Capfile
new file mode 100644
index 0000000..ed6f8b8
--- /dev/null
+++ b/Capfile
@@ -0,0 +1,24 @@
+# Look here for cleanup/refactoring ideas: http://github.com/leehambley/railsless-deploy/
+load 'deploy' if respond_to?(:namespace) # cap2 differentiator
+
+set :application, "server_extras"
+set :deploy_to, "/data/apps/#{application}"
+
+set :scm, :git
+set :repository, "git://github.com/gcao/#{application}.git"
+
+if ENV['DEPLOYMENT_TARGET'] == 'production'
+ set :user, "root"
+ set :use_sudo, false
+
+ ami_host = `ami_host`.strip
+
+ # AMI ami-0d729464: ubuntu 9.04 server base
+ server ami_host, :all, :primary => true
+else
+ set :user, "vagrant"
+ set :use_sudo, true
+
+ server 'vagrant', :app, :web, :db, :primary => true
+end
+
diff --git a/backup.sh b/backup.sh
new file mode 100755
index 0000000..5a215e7
--- /dev/null
+++ b/backup.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+rm -rf ~/backup
+mkdir ~/backup
+cd ~/backup
+tar czf mysql-bbs.tgz -C /var/lib/mysql bbs
+tar czf mysql-gocool.tgz -C /var/lib/mysql gocool_production
+tar czf mysql-ucenter.tgz -C /var/lib/mysql ucenter
+
+tar czf bbs-images.tgz -C /data/apps/bbs/current attachments images/avatars/bbsxp
+tar czf gocool-sgfs.tgz -C /data/apps/gocool/current public/system
+
|
gcao/server_extras
|
3d483d33467818ed4fe61c8f9b4ffc736ecfee1b
|
Notes on restore from amazon s3 backup
|
diff --git a/README.textile b/README.textile
index 61c35dd..5d3f295 100644
--- a/README.textile
+++ b/README.textile
@@ -1,20 +1,34 @@
To restore an instance, follow the instructions here
http://docs.amazonwebservices.com/AWSEC2/latest/GettingStartedGuide/creating-an-image.html#registering-the-ami
Run these commands locally
+Connect to S3 through S3Hub and change primary_server_backup/monday* to be readable by everyone
+ This is optional if I can get the ec2-register command to connect to it with my private key and certificate.
+ See http://developer.amazonwebservices.com/connect/message.jspa?messageID=126461
+ Granting [email protected] read access to primary_server_backup and all its files should be good enough!!
+
$ ec2-register primary_server_backup/monday.manifest.xml
IMAGE ami-xxxxxxxx
-$ ec2-run-instances ami-xxxxxxxx
+
+$ ec2-run-instances -k ec2-keypair ami-xxxxxxxx
+
$ ec2-deregister ami-xxxxxxxx
-Associate the elastic IP to this new instance
+$ ec2-describe-instances
+
+Associate the elastic IP to this new instance once it has been verified thoroughly
+
+$ ec2-associate-address IP -i INSTANCE
+
+Terminate the old instance
+$ ec2-terminate-instances OLD_INSTANCE
IDEA
Create a post in bbs
Cron runs a script every 10 minutes to check content of that post to decide what to do
E.g.: forward a game from TOM, forward a game from another URL
Keep a history to avoid duplicate posts
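For comparison only, the register / launch / deregister / re-point / terminate sequence in the notes above can also be sketched with an SDK instead of the ec2-* command-line tools. The following boto3 snippet is an assumption-laden illustration, not part of the repository; the bucket path, key pair, elastic IP, and instance IDs are placeholders:

```python
# Hedged boto3 sketch of the restore flow described in the notes above.
# All names and IDs are placeholders; waiting and error handling are omitted.
import boto3

ec2 = boto3.client("ec2")

# Register the S3-backed image from its manifest, then launch an instance from it.
image = ec2.register_image(ImageLocation="primary_server_backup/monday.manifest.xml")
run = ec2.run_instances(ImageId=image["ImageId"], MinCount=1, MaxCount=1,
                        KeyName="ec2-keypair")
new_instance = run["Instances"][0]["InstanceId"]

# The registration was only needed to boot the instance, so drop it again.
ec2.deregister_image(ImageId=image["ImageId"])

# After verifying the new instance, move the elastic IP over and retire the old box.
ec2.associate_address(PublicIp="ELASTIC_IP", InstanceId=new_instance)
ec2.terminate_instances(InstanceIds=["i-OLDINSTANCE"])
```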
|
gcao/server_extras
|
cac9d682fee7ce0831ca93760fab6f9259c0a828
|
Misc changes
|
diff --git a/README.textile b/README.textile
index 8e2311d..61c35dd 100644
--- a/README.textile
+++ b/README.textile
@@ -1,12 +1,20 @@
To restore an instance, follow the instructions here
http://docs.amazonwebservices.com/AWSEC2/latest/GettingStartedGuide/creating-an-image.html#registering-the-ami
Run these commands locally
$ ec2-register primary_server_backup/monday.manifest.xml
IMAGE ami-xxxxxxxx
$ ec2-run-instances ami-xxxxxxxx
$ ec2-deregister ami-xxxxxxxx
Associate the elastic IP to this new instance
+
+
+IDEA
+
+Create a post in bbs
+Cron runs a script every 10 minutes to check content of that post to decide what to do
+E.g.: forward a game from TOM, forward a game from another URL
+Keep a history to avoid duplicate posts
diff --git a/backup.py b/backup.py
index ac7ee4a..556710d 100644
--- a/backup.py
+++ b/backup.py
@@ -1,32 +1,39 @@
#!/usr/bin/python
# http://blog.awarelabs.com/2009/painless-amazon-ec2-backup/
+#
+# To restore, you would have to register it using ec2 tools:
+# ec2-register <your-s3-bucket> /image.manifest.xml
+# and then launch it using this command:
+# ec2-run-instances <ami-identifier>
+#
+# or Amazon AWS Console (http://www.softwarepassion.com/?p=164)
import os
from datetime import date
pem_file = '/root/.amazon/pk.pem'
cert_file = '/root/.amazon/cert.pem'
user_id = '111122223333'
platform = 'i386'
bucket = 'primary_server_backup'
access_key = 'XXX'
secret_key = 'YYY'
ec2_path = '/usr/local/bin/' #use trailing slash
# DO NOT EDIT BELOW THIS
days = ('sunday', 'monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday')
manifest = days[date.today().weekday()]
step_1 = 'rm -f /mnt/%s*' % (manifest,)
step_2 = '%sec2-bundle-vol -p %s -d /mnt -k %s -c %s -u %s -r %s' % (ec2_path, manifest, pem_file, cert_file, user_id, platform)
step_3 = '%sec2-upload-bundle --batch -b %s -m /mnt/%s.manifest.xml -a %s -s %s' % (ec2_path, bucket, manifest, access_key, secret_key)
print step_1
os.system(step_1)
print step_2
os.system(step_2)
print step_3
os.system(step_3)
\ No newline at end of file
diff --git a/bbspost b/bbspost
index b42b261..7204e72 100755
--- a/bbspost
+++ b/bbspost
@@ -1,35 +1,33 @@
#!/usr/bin/env ruby
host = `bbsinfo host`.strip
fid = `bbsinfo fid`.strip
formhash = `bbsformhash`.strip
-username = `bbsinfo username`.strip
-password = `bbsinfo password`.strip
url = "http://#{host}/bbs/post.php"
referer = "http://#{host}/bbs/forumdisplay.php?fid=#{fid}"
user_agent = "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.5; en-US; rv:1.9.1.5) Gecko/20091102 Firefox/3.5.5"
post_data = {
"formhash" => formhash,
"action" => "newthread",
"topicsubmit" => "yes",
"fid" => fid,
- "subject" => "Subject",
- "message" => "Message body",
+ "subject" => URI.escape("Subject"),
+ "message" => URI.escape("Message body"),
"extra" => "",
"wysiwyg" => "0",
"iconid" => "",
"sortid" => "0",
"checkbox" => "0",
"tags" => "",
"readperm" => "",
"attention_add" => "1",
"usesig" => "1",
}.map{ |key, value| "#{key}=#{value}" }.join("&")
cmd = %Q(curl -d "#{post_data}" -b /tmp/cookie.txt -o /tmp/cache.html -A "#{user_agent}" -e "#{referer}" "#{url}")
puts cmd
`#{cmd}`
diff --git a/lib/discuz.rb b/lib/discuz.rb
new file mode 100644
index 0000000..d4e81fd
--- /dev/null
+++ b/lib/discuz.rb
@@ -0,0 +1,73 @@
+require 'uri'
+
+def login host, username, password
+ url = "http://#{host}/bbs/logging.php"
+
+ post_data = {
+ "referer" => "index.php",
+ "action" => "login",
+ "loginsubmit" => "yes",
+ "loginfield" => "username",
+ "username" => username,
+ "password" => password,
+ "answer" => "",
+ "cookietime" => "2592000",
+ }.map{ |key, value| "#{key}=#{value}" }.join("&")
+
+ cookie_file = "/tmp/cookie-#{rand(100)}.txt"
+ cmd = "curl -d \"#{post_data}\" -D #{cookie_file} -o /tmp/cache.html #{url}"
+ puts cmd
+
+ `#{cmd}`
+ puts cookie_file
+ cookie_file
+end
+
+def get_form_hash host, fid, cookie_file
+ url = "http://#{host}/bbs/forumdisplay.php?fid=#{fid}"
+ referer = "http://#{host}/bbs/forumdisplay.php?fid=#{fid}"
+ user_agent = "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.5; en-US; rv:1.9.1.5) Gecko/20091102 Firefox/3.5.5"
+
+ cmd = %Q(curl -b #{cookie_file} -o /tmp/cache.html -A "#{user_agent}" -e "#{referer}" "#{url}")
+ `#{cmd}`
+
+ File.open("/tmp/cache.html") do |file|
+ file.each do |line|
+ next unless line.index('name="formhash"')
+ line =~ /value="([\w\d]+)"/
+ return $1
+ end
+ end
+end
+
+def post host, fid, subject, body, cookie_file
+ formhash = get_form_hash host, fid, cookie_file
+
+ url = "http://#{host}/bbs/post.php"
+ referer = "http://#{host}/bbs/forumdisplay.php?fid=#{fid}"
+
+ user_agent = "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.5; en-US; rv:1.9.1.5) Gecko/20091102 Firefox/3.5.5"
+
+ post_data = {
+ "formhash" => formhash,
+ "action" => "newthread",
+ "topicsubmit" => "yes",
+ "fid" => fid,
+ "subject" => URI.escape(subject),
+ "message" => URI.escape(body),
+ "extra" => "",
+ "wysiwyg" => "0",
+ "iconid" => "",
+ "sortid" => "0",
+ "checkbox" => "0",
+ "tags" => "",
+ "readperm" => "",
+ "attention_add" => "1",
+ "usesig" => "1",
+ }.map{ |key, value| "#{key}=#{value}" }.join("&")
+
+ cmd = %Q(curl -d "#{post_data}" -b #{cookie_file} -o /tmp/cache.html -A "#{user_agent}" -e "#{referer}" "#{url}")
+ puts cmd
+
+ `#{cmd}`
+end
|
gcao/server_extras
|
40bd9036e68ffe2bd46d3872d656d40ce3b817ac
|
Scripts to post to forum
|
diff --git a/README.textile b/README.textile
new file mode 100644
index 0000000..8e2311d
--- /dev/null
+++ b/README.textile
@@ -0,0 +1,12 @@
+To restore an instance, follow the instructions here
+
+http://docs.amazonwebservices.com/AWSEC2/latest/GettingStartedGuide/creating-an-image.html#registering-the-ami
+
+Run these commands locally
+
+$ ec2-register primary_server_backup/monday.manifest.xml
+IMAGE ami-xxxxxxxx
+$ ec2-run-instances ami-xxxxxxxx
+$ ec2-deregister ami-xxxxxxxx
+
+Associate the elastic IP to this new instance
diff --git a/bbsformhash b/bbsformhash
new file mode 100755
index 0000000..1a7cefd
--- /dev/null
+++ b/bbsformhash
@@ -0,0 +1,22 @@
+#!/usr/bin/env ruby
+
+host = `bbsinfo host`.strip
+fid = `bbsinfo fid`.strip
+
+url = "http://#{host}/bbs/forumdisplay.php?fid=#{fid}"
+referer = "http://#{host}/bbs/forumdisplay.php?fid=#{fid}"
+user_agent = "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.5; en-US; rv:1.9.1.5) Gecko/20091102 Firefox/3.5.5"
+
+cmd = %Q(curl -b /tmp/cookie.txt -o /tmp/cache.html -A "#{user_agent}" -e "#{referer}" "#{url}")
+`#{cmd}`
+
+File.open("/tmp/cache.html") do |file|
+ file.each do |line|
+ next unless line.index('name="formhash"')
+ line =~ /value="([\w\d]+)"/
+ print $1
+ exit 0
+ end
+end
+
+exit 1
diff --git a/bbsinfo b/bbsinfo
new file mode 100755
index 0000000..6ac65fc
--- /dev/null
+++ b/bbsinfo
@@ -0,0 +1,12 @@
+#!/usr/bin/env ruby
+
+exit 1 if ARGV.length < 1
+
+answer = {
+ "host" => "localhost",
+ "fid" => "18",
+ "username" => "xxx",
+ "password" => "xxx",
+}[ARGV[0]]
+
+print answer
diff --git a/bbslogin b/bbslogin
new file mode 100755
index 0000000..5db7c7a
--- /dev/null
+++ b/bbslogin
@@ -0,0 +1,23 @@
+#!/usr/bin/env ruby
+
+host = `bbsinfo host`.strip
+username = `bbsinfo username`.strip
+password = `bbsinfo password`.strip
+
+url = "http://#{host}/bbs/logging.php"
+
+post_data = {
+ "referer" => "index.php",
+ "action" => "login",
+ "loginsubmit" => "yes",
+ "loginfield" => "username",
+ "username" => username,
+ "password" => password,
+ "answer" => "",
+ "cookietime" => "2592000",
+}.map{ |key, value| "#{key}=#{value}" }.join("&")
+
+cmd = "curl -d \"#{post_data}\" -D /tmp/cookie.txt -o /tmp/cache.html #{url}"
+puts cmd
+
+`#{cmd}`
diff --git a/bbspost b/bbspost
new file mode 100755
index 0000000..b42b261
--- /dev/null
+++ b/bbspost
@@ -0,0 +1,35 @@
+#!/usr/bin/env ruby
+
+host = `bbsinfo host`.strip
+fid = `bbsinfo fid`.strip
+formhash = `bbsformhash`.strip
+username = `bbsinfo username`.strip
+password = `bbsinfo password`.strip
+
+url = "http://#{host}/bbs/post.php"
+referer = "http://#{host}/bbs/forumdisplay.php?fid=#{fid}"
+
+user_agent = "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.5; en-US; rv:1.9.1.5) Gecko/20091102 Firefox/3.5.5"
+
+post_data = {
+ "formhash" => formhash,
+ "action" => "newthread",
+ "topicsubmit" => "yes",
+ "fid" => fid,
+ "subject" => "Subject",
+ "message" => "Message body",
+ "extra" => "",
+ "wysiwyg" => "0",
+ "iconid" => "",
+ "sortid" => "0",
+ "checkbox" => "0",
+ "tags" => "",
+ "readperm" => "",
+ "attention_add" => "1",
+ "usesig" => "1",
+}.map{ |key, value| "#{key}=#{value}" }.join("&")
+
+cmd = %Q(curl -d "#{post_data}" -b /tmp/cookie.txt -o /tmp/cache.html -A "#{user_agent}" -e "#{referer}" "#{url}")
+puts cmd
+
+`#{cmd}`
|
godfat/dm-is-reflective
|
1b0ea98385758e45cf1fc5f4ad1ea2031295cf37
|
Fix license identifier
|
diff --git a/README.md b/README.md
index 0bd0ce3..d3d61b1 100644
--- a/README.md
+++ b/README.md
@@ -1,163 +1,163 @@
# dm-is-reflective [](http://travis-ci.org/godfat/dm-is-reflective)
by Lin Jen-Shin ([godfat](http://godfat.org))
## LINKS:
* [github](https://github.com/godfat/dm-is-reflective)
* [rubygems](https://rubygems.org/gems/dm-is-reflective)
* [rdoc](http://rdoc.info/github/godfat/dm-is-reflective)
## DESCRIPTION:
DataMapper plugin that helps you manipulate an existing database.
It creates mappings between existing columns and model's properties.
## REQUIREMENTS:
* dm-core
* choose one: dm-sqlite-adapter, dm-postgres-adapter, dm-mysql-adapter
## INSTALLATION:
``` shell
gem install dm-is-reflective
```
``` ruby
gem 'dm-is-reflective',
:git => 'git://github.com/godfat/dm-is-reflective.git',
:submodules => true
```
## SYNOPSIS:
### Generating sources from a DATABASE_URI
We also have an executable to generate sources for you.
```
Usage: dm-is-reflective DATABASE_URI
-s, --scope SCOPE SCOPE where the models should go (default: Object)
-o, --output DIRECTORY DIRECTORY where the output goes (default: dm-is-reflective)
-h, --help Print this message
-v, --version Print the version
```
### API
``` ruby
require 'dm-is-reflective' # this would require 'dm-core'
dm = DataMapper.setup :default, 'sqlite::memory:'
class User
include DataMapper::Resource
is :reflective
# map all, returning an array of properties indicating fields it mapped
reflect /.*/ # e.g. => [#<Property:#<Class:0x18f89b8>:id>,
# #<Property:#<Class:0x18f89b8>:title>,
# #<Property:#<Class:0x18f89b8>:body>,
# #<Property:#<Class:0x18f89b8>:user_id>]
# map all (with no argument at all)
reflect
# mapping for field name ended with _at, and started with salt_
reflect /_at$/, /^salt_/
# mapping id and email
reflect :id, :email
# mapping all fields with type String, and id
reflect String, :id
# mapping login, and all fields with type Integer
reflect :login, Integer
end
# there's no guarantee of the order in storages array
dm.storages
# => ['users']
# show all indices
dm.indices('users') # [:id, {:unique_index => :users_pkey}]
# there's no guarantee of the order in fields array
User.fields
# => [[:created_at, DateTime, {:required => false}],
[:email, String, {:required => false, :length => 255,
:default => '[email protected]'}],
[:id, Serial, {:required => true, :serial => true,
:key => true}],
[:salt_first, String, {:required => false, :length => 50}],
[:salt_second, String, {:required => false, :length => 50}]]
dm.fields('users').sort_by{ |field| field.first.to_s } ==
User.fields.sort_by{ |field| field.first.to_s }
# => true
dm.storages_and_fields
# => {'users' => [[:id, Serial, {:required => true,
:serial => true,
:key => true}],
[:email, String, {:required => false,
:default =>
'[email protected]'}],
[:created_at, DateTime, {:required => false}],
[:salt_first, String, {:required => false,
:length => 50}],
[:salt_second, String, {:required => false,
:length => 50}]]}
# there's no guarantee of the order in returned array
dm.auto_genclass!
# => [DataMapper::Is::Reflective::User,
DataMapper::Is::Reflective::SchemaInfo,
DataMapper::Is::Reflective::Session]
# you can change the scope of generated models:
dm.auto_genclass! :scope => Object
# => [User, SchemaInfo, Session]
# you can generate classes for tables you specified only:
dm.auto_genclass! :scope => Object, :storages => /^phpbb_/
# => [PhpbbUser, PhpbbPost, PhpbbConfig]
# you can generate classes with String too:
dm.auto_genclass! :storages => ['users', 'config'], :scope => Object
# => [User, Config]
# you can generate a class only:
dm.auto_genclass! :storages => 'users'
# => [DataMapper::Is::Reflective::User]
# you can also generate the source from models:
puts User.to_source
```
## CONTRIBUTORS:
* Andrew Kreps (@onewheelskyward)
* Lin Jen-Shin (@godfat)
* Mischa Molhoek (@mmolhoek)
* @philfine
* Sebastian Marr (@sebastianmarr)
## LICENSE:
-Apache License 2.0
+Apache License 2.0 (Apache-2.0)
Copyright (c) 2008-2017, Lin Jen-Shin (godfat)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
|
godfat/dm-is-reflective
|
13527752e2603de7f1f996c7d8778ef58afb9863
|
Prepare 1.3.2
|
diff --git a/CHANGES.md b/CHANGES.md
index a40cbc5..e5e2610 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,253 +1,263 @@
# CHANGES
+## dm-is-reflective 1.3.2 -- 2017-12-29
+
+* Prefix an underscore whenever the property begins with a number, which
+ cannot be used as a method name. See:
+ [#9](https://github.com/godfat/dm-is-reflective/pull/9)
+ Thanks Mischa Molhoek (@mmolhoek)
+* Don't create the model for PostgreSQL views. See:
+ [#10](https://github.com/godfat/dm-is-reflective/pull/10)
+ Thanks @philfine
+
## dm-is-reflective 1.3.1, 2013-05-22
* Introduce `indices` method which would return all indices in the storage.
## dm-is-reflective 1.3.0, 2013-05-20
* Warn instead of raising a TypeError if a datatype cannot be found.
We fallback to use String.
* Now it works for multiple composite keys.
* If there's no key defined, it would pick the first unique index as the key.
* If a field name is conflicted, it would try to resolve it by appending a
underscore to the field name.
## dm-is-reflective 1.2.0, 2013-05-14
* We got a bunch of internal renaming.
* Added DataMapper::Resource#to_source.
* Added an executable which generates sources for you.
* Fixed MySQL issues with setting up with a hash rather than URI.
* Fixed SQLite issues without loading dm-migrations.
## dm-is-reflective 1.1.0, 2013-01-11
* The need for dm-migrations is now removed.
* Added a few more datatypes. Thanks @onewheelskyward
* Tested against dm-core 1.2.0.
## dm-is-reflective 1.0.1, 2012-05-16
* allow_nil is more close to db's semantics, not required. Thanks miaout17.
`:allow_nil` allows empty value, but `:required` does not. So here we
always use `:allow_nil` to match db's semantics.
## dm-is-reflective 1.0.0, 2011-06-16
* updated against dm-core 1.1.0
## dm-is-reflective 0.9.0, 2010-07-05
* adapted to dm-core 1.0.0
* renamed AbstractAdapter to DataObjectsAdapter
## dm-is-reflective 0.8.0, 2009-09-16
* require dm-core 0.10.0 and above now
* Serial would map to Serial not Integer now
* no more type_map now
* no more Extlib::Hook to load adapter
## dm-mapping 0.7.1, never released as a gem
don't open module Migration and edit it; instead, use include. For more, see:
* added DataMapper::Mapping::AbstractAdapter
* added DataMapper::Mapping::Sqlite3Adapter
* added DataMapper::Mapping::MysqlAdapter
* added DataMapper::Mapping::PostgresAdapter
* each adapter was included in related adapter in DataMapper.
* Model#fields now accept repository name as argument
there's differences between adapters,
Sqlite3 added default => 'UL' in Boolean type,
Mysql can't tell whether it's a Boolean or Tinyint,
and Postgres is fine. see test/abstract.rb: super_user_fields for detail.
## dm-mapping 0.7.0, 2008-09-01
* feature added
- added postgres support.
* bug fixed
- fixed key mapping in mysql adapter. PRI and MUL are all keys.
- use DM::Text.size as default text size in sqlite3.
## dm-mapping 0.6.2, 2008-08-30
* mapping more data types for mysql.
* don't map TINYINT to TrueClass with mysql, skip it in type_map.
## dm-mapping 0.6.1, 2008-08-22
* gem 'dm-core', '>=0.9.3' instead of '=0.9.3'
## dm-mapping 0.6.0, 2008-08-16
* mapping returns an array of properties indicating fields it mapped.
* performance boosted by refactored mapping implementation.
* changed the way using auto_genclass!, now accepts args like mapping!
* changed fields to return field name with Symbol instead of String.
this would make it be more consistent with DataMapper.
* storage names remain String.
* added more mysql data type to map
* use Extlib::Hook to setup dm-mapping instead of stupid alias_method.
* removed ensure_require in model. always setup DataMapper before define model.
## dm-mapping 0.5.0, 2008-08-14
* feature added
- added mysql support.
- reflect size 65535 in TEXT for sqlite3.
* bug fixed
- reflect VARCHAR(size) instead of default size in sqlite3.
* misc
- renamed sqlite3adapter to sqlite3_adapter.
## dm-mapping 0.4.1, 2008-08-14
* removed type hack, replaced with rejecting special type to lookup.
## dm-mapping 0.4.0, 2008-08-04
* added Migration#auto_genclass!.
* updated README.
* added more rdoc.
## dm-mapping 0.3.0, 2008-08-04
* added support of mapping Integer, DateTime, etc.
* renamed some internals.
* changed the way requiring adapter. no more setup first.
* added Migration#storages_and_fields
* added mapping :serial => true for primary key.
* added mapping :default, and :nullable.
* added support of mapping name. (through passing symbol or string)
* added support of multiple arguments.
* removed Mapping::All, use /.*/ instead.
## dm-mapping 0.2.1, 2008-08-03
* fixed a bug that type map should lookup for parent.
* fixed a bug that sql type could be lower case.
fixed by calling upcase.
## dm-mapping 0.2.0, 2008-08-02
* added Sqlite3Adapter::Migration#fields
* added DataMapper::Model#mapping
* added DataMapper::Model#fields
* added DataMapper::TypeMap#find_primitive for reversed lookup.
mapping SQL type back to Ruby type.
* added corresponded test.
## dm-mapping 0.1.0, 2008-07-27
* birthday!
* added DataMapper.repository.storages for sqlite3.
* please refer:
<http://groups.google.com/group/datamapper/browse_thread/thread/b9ca41120c5c9389>
original message:
from Lin Jen-Shin
to DataMapper
cc godfat
date Sun, Jul 27, 2008 at 5:40 PM
subject Manipulate an existing database.
mailed-by gmail.com
Greetings,
DataMapper looks very promising for me, so I am thinking of
using it in the near future. I hate separate my domain objects into
two parts in Rails, writing migration and switching to ActiveRecord,
vice versa, is very annoying to me.
But there's a very convenient feature to me in ActiveRecord,
that is ActiveRecord automatically mapping all fields in a table.
It makes me easily control an existing database without any domain object.
For example,
require 'active_record'
ActiveRecord::Base.establish_connection(
:adapter => 'sqlite3',
:database => 'db/development.sqlite3'
)
class User < ActiveRecord::Base
end
User.find 1
=> #<User id: 1, account: "admin", created_at: "2008-05-18 20:08:37", etc.>
Some people would use database admin such as phpMyAdmin to
accomplish this kind of task, but I prefer anything in Ruby,
calling Ruby function, manipulating data without SQL and
any domain object. (i.e. I didn't have to load up entire environment.)
In DataMapper, I didn't find an easy way to accomplish this.
I am sorry if there's one but I didn't find it, please point out,
many thanks. In short, I would like to do this in DataMapper:
class User
include DataMapper::Resource
mapping :account, :created_at
end
or
class User
include DataMapper::Resource
mapping All
end
class User
include DataMapper::ResourceAll
end
or
class User
include DataMapper::Resource
mapping *storage_fields
end
The above User.storage_fields should return an Array,
telling all the fields in the table, e.g. [:account, :created_at, :etc]
or a Hash includes data type, e.g. {:account => String,
:created_at => DateTime}
then mapping *storage_fields should change to:
mapping *storage_fields.each_key.to_a
If it's possible, a feature returning the database schema as well:
DataMapper.repository.storages
# => [:users, :posts, :etc]
DataMapper.repository.storages_and_fields
# => {:users => {:account => String},
:posts => {:title => String, :content => Text}}
or returning DataObject::Field, DataObject::Storage, etc.
DataMapper.repository.storage
# => [#<DataObject::Storage @name='users' @fields=
[#<DataObject::Field @name='account' @type=String>]>]
If you feel this kind of feature is indeed needed or not bad for
adding it, I could try to provide a patch for it. Though I didn't
read the source code deeply, not knowing whether it's easy or not.
sincerely,
diff --git a/dm-is-reflective.gemspec b/dm-is-reflective.gemspec
index 8e5db4d..189324e 100644
--- a/dm-is-reflective.gemspec
+++ b/dm-is-reflective.gemspec
@@ -1,79 +1,79 @@
# -*- encoding: utf-8 -*-
-# stub: dm-is-reflective 1.3.1 ruby lib
+# stub: dm-is-reflective 1.3.2 ruby lib
Gem::Specification.new do |s|
s.name = "dm-is-reflective".freeze
- s.version = "1.3.1"
+ s.version = "1.3.2"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Lin Jen-Shin (godfat)".freeze]
s.date = "2017-12-29"
s.description = "DataMapper plugin that helps you manipulate an existing database.\nIt creates mappings between existing columns and model's properties.".freeze
s.email = ["godfat (XD) godfat.org".freeze]
s.executables = ["dm-is-reflective".freeze]
s.files = [
".gitignore".freeze,
".gitmodules".freeze,
".travis.yml".freeze,
"CHANGES.md".freeze,
"Gemfile".freeze,
"LICENSE".freeze,
"README.md".freeze,
"Rakefile".freeze,
"TODO.md".freeze,
"bin/dm-is-reflective".freeze,
"dm-is-reflective.gemspec".freeze,
"lib/dm-is-reflective.rb".freeze,
"lib/dm-is-reflective/adapters/data_objects_adapter.rb".freeze,
"lib/dm-is-reflective/adapters/mysql_adapter.rb".freeze,
"lib/dm-is-reflective/adapters/postgres_adapter.rb".freeze,
"lib/dm-is-reflective/adapters/sqlite_adapter.rb".freeze,
"lib/dm-is-reflective/reflective.rb".freeze,
"lib/dm-is-reflective/runner.rb".freeze,
"lib/dm-is-reflective/test.rb".freeze,
"lib/dm-is-reflective/version.rb".freeze,
"task/README.md".freeze,
"task/gemgem.rb".freeze,
"test/setup_db.sh".freeze,
"test/test_mysql.rb".freeze,
"test/test_postgres.rb".freeze,
"test/test_sqlite.rb".freeze,
"test/test_to_source.rb".freeze]
s.homepage = "https://github.com/godfat/dm-is-reflective".freeze
s.licenses = ["Apache License 2.0".freeze]
s.rubygems_version = "2.7.3".freeze
s.summary = "DataMapper plugin that helps you manipulate an existing database.".freeze
s.test_files = [
"test/test_mysql.rb".freeze,
"test/test_postgres.rb".freeze,
"test/test_sqlite.rb".freeze,
"test/test_to_source.rb".freeze]
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<dm-core>.freeze, [">= 0"])
s.add_runtime_dependency(%q<dm-do-adapter>.freeze, [">= 0"])
s.add_development_dependency(%q<dm-migrations>.freeze, [">= 0"])
s.add_development_dependency(%q<dm-sqlite-adapter>.freeze, [">= 0"])
s.add_development_dependency(%q<dm-mysql-adapter>.freeze, [">= 0"])
s.add_development_dependency(%q<dm-postgres-adapter>.freeze, [">= 0"])
else
s.add_dependency(%q<dm-core>.freeze, [">= 0"])
s.add_dependency(%q<dm-do-adapter>.freeze, [">= 0"])
s.add_dependency(%q<dm-migrations>.freeze, [">= 0"])
s.add_dependency(%q<dm-sqlite-adapter>.freeze, [">= 0"])
s.add_dependency(%q<dm-mysql-adapter>.freeze, [">= 0"])
s.add_dependency(%q<dm-postgres-adapter>.freeze, [">= 0"])
end
else
s.add_dependency(%q<dm-core>.freeze, [">= 0"])
s.add_dependency(%q<dm-do-adapter>.freeze, [">= 0"])
s.add_dependency(%q<dm-migrations>.freeze, [">= 0"])
s.add_dependency(%q<dm-sqlite-adapter>.freeze, [">= 0"])
s.add_dependency(%q<dm-mysql-adapter>.freeze, [">= 0"])
s.add_dependency(%q<dm-postgres-adapter>.freeze, [">= 0"])
end
end
|
godfat/dm-is-reflective
|
c722c63f7570be049eeef3b23c7f1c6c4c19a472
|
Add another contributor
|
diff --git a/README.md b/README.md
index 474ec35..0bd0ce3 100644
--- a/README.md
+++ b/README.md
@@ -1,162 +1,163 @@
# dm-is-reflective [](http://travis-ci.org/godfat/dm-is-reflective)
by Lin Jen-Shin ([godfat](http://godfat.org))
## LINKS:
* [github](https://github.com/godfat/dm-is-reflective)
* [rubygems](https://rubygems.org/gems/dm-is-reflective)
* [rdoc](http://rdoc.info/github/godfat/dm-is-reflective)
## DESCRIPTION:
DataMapper plugin that helps you manipulate an existing database.
It creates mappings between existing columns and model's properties.
## REQUIREMENTS:
* dm-core
* choose one: dm-sqlite-adapter, dm-postgres-adapter, dm-mysql-adapter
## INSTALLATION:
``` shell
gem install dm-is-reflective
```
``` ruby
gem 'dm-is-reflective',
:git => 'git://github.com/godfat/dm-is-reflective.git',
:submodules => true
```
## SYNOPSIS:
### Generating sources from a DATABASE_URI
We also have an executable to generate sources for you.
```
Usage: dm-is-reflective DATABASE_URI
-s, --scope SCOPE SCOPE where the models should go (default: Object)
-o, --output DIRECTORY DIRECTORY where the output goes (default: dm-is-reflective)
-h, --help Print this message
-v, --version Print the version
```
### API
``` ruby
require 'dm-is-reflective' # this would require 'dm-core'
dm = DataMapper.setup :default, 'sqlite::memory:'
class User
include DataMapper::Resource
is :reflective
# map all, returning an array of properties indicating fields it mapped
reflect /.*/ # e.g. => [#<Property:#<Class:0x18f89b8>:id>,
# #<Property:#<Class:0x18f89b8>:title>,
# #<Property:#<Class:0x18f89b8>:body>,
# #<Property:#<Class:0x18f89b8>:user_id>]
# map all (with no argument at all)
reflect
# mapping for field name ended with _at, and started with salt_
reflect /_at$/, /^salt_/
# mapping id and email
reflect :id, :email
# mapping all fields with type String, and id
reflect String, :id
# mapping login, and all fields with type Integer
reflect :login, Integer
end
# there's no guarantee of the order in storages array
dm.storages
# => ['users']
# show all indices
dm.indices('users') # [:id, {:unique_index => :users_pkey}]
# there's no guarantee of the order in fields array
User.fields
# => [[:created_at, DateTime, {:required => false}],
[:email, String, {:required => false, :length => 255,
:default => '[email protected]'}],
[:id, Serial, {:required => true, :serial => true,
:key => true}],
[:salt_first, String, {:required => false, :length => 50}],
[:salt_second, String, {:required => false, :length => 50}]]
dm.fields('users').sort_by{ |field| field.first.to_s } ==
User.fields.sort_by{ |field| field.first.to_s }
# => true
dm.storages_and_fields
# => {'users' => [[:id, Serial, {:required => true,
:serial => true,
:key => true}],
[:email, String, {:required => false,
:default =>
'[email protected]'}],
[:created_at, DateTime, {:required => false}],
[:salt_first, String, {:required => false,
:length => 50}],
[:salt_second, String, {:required => false,
:length => 50}]]}
# there's no guarantee of the order in returned array
dm.auto_genclass!
# => [DataMapper::Is::Reflective::User,
DataMapper::Is::Reflective::SchemaInfo,
DataMapper::Is::Reflective::Session]
# you can change the scope of generated models:
dm.auto_genclass! :scope => Object
# => [User, SchemaInfo, Session]
# you can generate classes for tables you specified only:
dm.auto_genclass! :scope => Object, :storages => /^phpbb_/
# => [PhpbbUser, PhpbbPost, PhpbbConfig]
# you can generate classes with String too:
dm.auto_genclass! :storages => ['users', 'config'], :scope => Object
# => [User, Config]
# you can generate a class only:
dm.auto_genclass! :storages => 'users'
# => [DataMapper::Is::Reflective::User]
# you can also generate the source from models:
puts User.to_source
```
## CONTRIBUTORS:
* Andrew Kreps (@onewheelskyward)
* Lin Jen-Shin (@godfat)
* Mischa Molhoek (@mmolhoek)
* @philfine
+* Sebastian Marr (@sebastianmarr)
## LICENSE:
Apache License 2.0
Copyright (c) 2008-2017, Lin Jen-Shin (godfat)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
|
godfat/dm-is-reflective
|
644d17a95620c24e14b5cdc12d0f56739bb790ee
|
Add simplecov for CI
|
diff --git a/Gemfile b/Gemfile
index a8d7039..bc942c6 100644
--- a/Gemfile
+++ b/Gemfile
@@ -1,7 +1,14 @@
source 'https://rubygems.org'
gemspec
gem 'rake'
gem 'bacon'
+
+gem 'simplecov', :require => false if ENV['COV']
+gem 'coveralls', :require => false if ENV['CI']
+
+platforms :rbx do
+ gem 'rubysl-singleton' # used in rake
+end
|
godfat/dm-is-reflective
|
6a6774252a7bbd451a88e4484ff5a5b62b4b2abe
|
bump year, tasks, gemspec, version, etc
|
diff --git a/.travis.yml b/.travis.yml
index 4ac0c89..636a147 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,25 +1,28 @@
+sudo: false
language: ruby
-rvm:
- - 2.0
- - 2.1
- - 2.2
- - rbx-2
- - jruby
- - jruby-head
-install: 'bundle install --retry=3'
-script: 'ruby -r bundler/setup -S rake test'
+install: 'gem install bundler; bundle install --retry=3'
+script: 'ruby -vr bundler/setup -S rake test'
+
+matrix:
+ include:
+ - rvm: 2.2
+ - rvm: 2.3
+ - rvm: 2.4
+ - rvm: ruby-head
+ - rvm: jruby
+ env: JRUBY_OPTS=--debug
+ - rvm: rbx
+
+ allow_failures:
+ - rvm: rbx
+ - rvm: jruby
addons:
postgresql: 9.3
before_install:
- sudo apt-get update
- sudo apt-get install postgresql-server-dev-9.3 libpq-dev
- mysql -e 'create database myapp_test;'
- psql -c 'create database myapp_test;' -U postgres
-
-matrix:
- allow_failures:
- - rvm: jruby
- - rvm: jruby-head
diff --git a/README.md b/README.md
index c5bbe2e..474ec35 100644
--- a/README.md
+++ b/README.md
@@ -1,161 +1,162 @@
# dm-is-reflective [](http://travis-ci.org/godfat/dm-is-reflective)
by Lin Jen-Shin ([godfat](http://godfat.org))
## LINKS:
* [github](https://github.com/godfat/dm-is-reflective)
* [rubygems](https://rubygems.org/gems/dm-is-reflective)
* [rdoc](http://rdoc.info/github/godfat/dm-is-reflective)
## DESCRIPTION:
DataMapper plugin that helps you manipulate an existing database.
It creates mappings between existing columns and model's properties.
## REQUIREMENTS:
* dm-core
* choose one: dm-sqlite-adapter, dm-postgres-adapter, dm-mysql-adapter
## INSTALLATION:
``` shell
gem install dm-is-reflective
```
``` ruby
gem 'dm-is-reflective',
:git => 'git://github.com/godfat/dm-is-reflective.git',
:submodules => true
```
## SYNOPSIS:
### Generating sources from a DATABASE_URI
We also have an executable to generate sources for you.
```
Usage: dm-is-reflective DATABASE_URI
-s, --scope SCOPE SCOPE where the models should go (default: Object)
-o, --output DIRECTORY DIRECTORY where the output goes (default: dm-is-reflective)
-h, --help Print this message
-v, --version Print the version
```
### API
``` ruby
require 'dm-is-reflective' # this would require 'dm-core'
dm = DataMapper.setup :default, 'sqlite::memory:'
class User
include DataMapper::Resource
is :reflective
# map all, returning an array of properties indicating fields it mapped
reflect /.*/ # e.g. => [#<Property:#<Class:0x18f89b8>:id>,
# #<Property:#<Class:0x18f89b8>:title>,
# #<Property:#<Class:0x18f89b8>:body>,
# #<Property:#<Class:0x18f89b8>:user_id>]
# map all (with no argument at all)
reflect
# mapping for field name ended with _at, and started with salt_
reflect /_at$/, /^salt_/
# mapping id and email
reflect :id, :email
# mapping all fields with type String, and id
reflect String, :id
# mapping login, and all fields with type Integer
reflect :login, Integer
end
# there's no guarantee of the order in storages array
dm.storages
# => ['users']
# show all indices
dm.indices('users') # [:id, {:unique_index => :users_pkey}]
# there's no guarantee of the order in fields array
User.fields
# => [[:created_at, DateTime, {:required => false}],
[:email, String, {:required => false, :length => 255,
:default => '[email protected]'}],
[:id, Serial, {:required => true, :serial => true,
:key => true}],
[:salt_first, String, {:required => false, :length => 50}],
[:salt_second, String, {:required => false, :length => 50}]]
dm.fields('users').sort_by{ |field| field.first.to_s } ==
User.fields.sort_by{ |field| field.first.to_s }
# => true
dm.storages_and_fields
# => {'users' => [[:id, Serial, {:required => true,
:serial => true,
:key => true}],
[:email, String, {:required => false,
:default =>
'[email protected]'}],
[:created_at, DateTime, {:required => false}],
[:salt_first, String, {:required => false,
:length => 50}],
[:salt_second, String, {:required => false,
:length => 50}]]}
# there's no guarantee of the order in returned array
dm.auto_genclass!
# => [DataMapper::Is::Reflective::User,
DataMapper::Is::Reflective::SchemaInfo,
DataMapper::Is::Reflective::Session]
# you can change the scope of generated models:
dm.auto_genclass! :scope => Object
# => [User, SchemaInfo, Session]
# you can generate classes for tables you specified only:
dm.auto_genclass! :scope => Object, :storages => /^phpbb_/
# => [PhpbbUser, PhpbbPost, PhpbbConfig]
# you can generate classes with String too:
dm.auto_genclass! :storages => ['users', 'config'], :scope => Object
# => [User, Config]
# you can generate a class only:
dm.auto_genclass! :storages => 'users'
# => [DataMapper::Is::Reflective::User]
# you can also generate the source from models:
puts User.to_source
```
## CONTRIBUTORS:
* Andrew Kreps (@onewheelskyward)
* Lin Jen-Shin (@godfat)
* Mischa Molhoek (@mmolhoek)
+* @philfine
## LICENSE:
Apache License 2.0
-Copyright (c) 2008-2013, Lin Jen-Shin (godfat)
+Copyright (c) 2008-2017, Lin Jen-Shin (godfat)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
diff --git a/dm-is-reflective.gemspec b/dm-is-reflective.gemspec
index 4f71775..8e5db4d 100644
--- a/dm-is-reflective.gemspec
+++ b/dm-is-reflective.gemspec
@@ -1,79 +1,79 @@
# -*- encoding: utf-8 -*-
# stub: dm-is-reflective 1.3.1 ruby lib
Gem::Specification.new do |s|
- s.name = "dm-is-reflective"
+ s.name = "dm-is-reflective".freeze
s.version = "1.3.1"
- s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
- s.authors = ["Lin Jen-Shin (godfat)"]
- s.date = "2013-09-29"
- s.description = "DataMapper plugin that helps you manipulate an existing database.\nIt creates mappings between existing columns and model's properties."
- s.email = ["godfat (XD) godfat.org"]
- s.executables = ["dm-is-reflective"]
+ s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
+ s.require_paths = ["lib".freeze]
+ s.authors = ["Lin Jen-Shin (godfat)".freeze]
+ s.date = "2017-12-29"
+ s.description = "DataMapper plugin that helps you manipulate an existing database.\nIt creates mappings between existing columns and model's properties.".freeze
+ s.email = ["godfat (XD) godfat.org".freeze]
+ s.executables = ["dm-is-reflective".freeze]
s.files = [
- ".gitignore",
- ".gitmodules",
- ".travis.yml",
- "CHANGES.md",
- "Gemfile",
- "LICENSE",
- "README.md",
- "Rakefile",
- "TODO.md",
- "bin/dm-is-reflective",
- "dm-is-reflective.gemspec",
- "lib/dm-is-reflective.rb",
- "lib/dm-is-reflective/adapters/data_objects_adapter.rb",
- "lib/dm-is-reflective/adapters/mysql_adapter.rb",
- "lib/dm-is-reflective/adapters/postgres_adapter.rb",
- "lib/dm-is-reflective/adapters/sqlite_adapter.rb",
- "lib/dm-is-reflective/reflective.rb",
- "lib/dm-is-reflective/runner.rb",
- "lib/dm-is-reflective/test.rb",
- "lib/dm-is-reflective/version.rb",
- "task/.gitignore",
- "task/gemgem.rb",
- "test/setup_db.sh",
- "test/test_mysql.rb",
- "test/test_postgres.rb",
- "test/test_sqlite.rb",
- "test/test_to_source.rb"]
- s.homepage = "https://github.com/godfat/dm-is-reflective"
- s.licenses = ["Apache License 2.0"]
- s.require_paths = ["lib"]
- s.rubygems_version = "2.1.5"
- s.summary = "DataMapper plugin that helps you manipulate an existing database."
+ ".gitignore".freeze,
+ ".gitmodules".freeze,
+ ".travis.yml".freeze,
+ "CHANGES.md".freeze,
+ "Gemfile".freeze,
+ "LICENSE".freeze,
+ "README.md".freeze,
+ "Rakefile".freeze,
+ "TODO.md".freeze,
+ "bin/dm-is-reflective".freeze,
+ "dm-is-reflective.gemspec".freeze,
+ "lib/dm-is-reflective.rb".freeze,
+ "lib/dm-is-reflective/adapters/data_objects_adapter.rb".freeze,
+ "lib/dm-is-reflective/adapters/mysql_adapter.rb".freeze,
+ "lib/dm-is-reflective/adapters/postgres_adapter.rb".freeze,
+ "lib/dm-is-reflective/adapters/sqlite_adapter.rb".freeze,
+ "lib/dm-is-reflective/reflective.rb".freeze,
+ "lib/dm-is-reflective/runner.rb".freeze,
+ "lib/dm-is-reflective/test.rb".freeze,
+ "lib/dm-is-reflective/version.rb".freeze,
+ "task/README.md".freeze,
+ "task/gemgem.rb".freeze,
+ "test/setup_db.sh".freeze,
+ "test/test_mysql.rb".freeze,
+ "test/test_postgres.rb".freeze,
+ "test/test_sqlite.rb".freeze,
+ "test/test_to_source.rb".freeze]
+ s.homepage = "https://github.com/godfat/dm-is-reflective".freeze
+ s.licenses = ["Apache License 2.0".freeze]
+ s.rubygems_version = "2.7.3".freeze
+ s.summary = "DataMapper plugin that helps you manipulate an existing database.".freeze
s.test_files = [
- "test/test_mysql.rb",
- "test/test_postgres.rb",
- "test/test_sqlite.rb",
- "test/test_to_source.rb"]
+ "test/test_mysql.rb".freeze,
+ "test/test_postgres.rb".freeze,
+ "test/test_sqlite.rb".freeze,
+ "test/test_to_source.rb".freeze]
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
- s.add_runtime_dependency(%q<dm-core>, [">= 0"])
- s.add_runtime_dependency(%q<dm-do-adapter>, [">= 0"])
- s.add_development_dependency(%q<dm-migrations>, [">= 0"])
- s.add_development_dependency(%q<dm-sqlite-adapter>, [">= 0"])
- s.add_development_dependency(%q<dm-mysql-adapter>, [">= 0"])
- s.add_development_dependency(%q<dm-postgres-adapter>, [">= 0"])
+ s.add_runtime_dependency(%q<dm-core>.freeze, [">= 0"])
+ s.add_runtime_dependency(%q<dm-do-adapter>.freeze, [">= 0"])
+ s.add_development_dependency(%q<dm-migrations>.freeze, [">= 0"])
+ s.add_development_dependency(%q<dm-sqlite-adapter>.freeze, [">= 0"])
+ s.add_development_dependency(%q<dm-mysql-adapter>.freeze, [">= 0"])
+ s.add_development_dependency(%q<dm-postgres-adapter>.freeze, [">= 0"])
else
- s.add_dependency(%q<dm-core>, [">= 0"])
- s.add_dependency(%q<dm-do-adapter>, [">= 0"])
- s.add_dependency(%q<dm-migrations>, [">= 0"])
- s.add_dependency(%q<dm-sqlite-adapter>, [">= 0"])
- s.add_dependency(%q<dm-mysql-adapter>, [">= 0"])
- s.add_dependency(%q<dm-postgres-adapter>, [">= 0"])
+ s.add_dependency(%q<dm-core>.freeze, [">= 0"])
+ s.add_dependency(%q<dm-do-adapter>.freeze, [">= 0"])
+ s.add_dependency(%q<dm-migrations>.freeze, [">= 0"])
+ s.add_dependency(%q<dm-sqlite-adapter>.freeze, [">= 0"])
+ s.add_dependency(%q<dm-mysql-adapter>.freeze, [">= 0"])
+ s.add_dependency(%q<dm-postgres-adapter>.freeze, [">= 0"])
end
else
- s.add_dependency(%q<dm-core>, [">= 0"])
- s.add_dependency(%q<dm-do-adapter>, [">= 0"])
- s.add_dependency(%q<dm-migrations>, [">= 0"])
- s.add_dependency(%q<dm-sqlite-adapter>, [">= 0"])
- s.add_dependency(%q<dm-mysql-adapter>, [">= 0"])
- s.add_dependency(%q<dm-postgres-adapter>, [">= 0"])
+ s.add_dependency(%q<dm-core>.freeze, [">= 0"])
+ s.add_dependency(%q<dm-do-adapter>.freeze, [">= 0"])
+ s.add_dependency(%q<dm-migrations>.freeze, [">= 0"])
+ s.add_dependency(%q<dm-sqlite-adapter>.freeze, [">= 0"])
+ s.add_dependency(%q<dm-mysql-adapter>.freeze, [">= 0"])
+ s.add_dependency(%q<dm-postgres-adapter>.freeze, [">= 0"])
end
end
diff --git a/lib/dm-is-reflective/version.rb b/lib/dm-is-reflective/version.rb
index b703330..d93a043 100644
--- a/lib/dm-is-reflective/version.rb
+++ b/lib/dm-is-reflective/version.rb
@@ -1,4 +1,4 @@
module DmIsReflective
- VERSION = '1.3.1'
+ VERSION = '1.3.2'
end
diff --git a/task b/task
index 1532667..e05f9df 160000
--- a/task
+++ b/task
@@ -1 +1 @@
-Subproject commit 15326673330ee3d2b6aa87593336852e39d0b696
+Subproject commit e05f9df7c2c94ee0efdd078d7267c16c8fd9c000
|
godfat/dm-is-reflective
|
631970721b9ccf2e5415ab63e38d8d2d9c4bf50d
|
Allowing the existence of views in the Postgres database (#10)
|
diff --git a/lib/dm-is-reflective/adapters/postgres_adapter.rb b/lib/dm-is-reflective/adapters/postgres_adapter.rb
index 92fd9e8..63def83 100644
--- a/lib/dm-is-reflective/adapters/postgres_adapter.rb
+++ b/lib/dm-is-reflective/adapters/postgres_adapter.rb
@@ -1,103 +1,103 @@
module DmIsReflective::PostgresAdapter
include DataMapper
def storages
sql = <<-SQL
SELECT table_name FROM "information_schema"."tables"
- WHERE table_schema = current_schema()
+ WHERE table_schema = current_schema() AND table_type = 'BASE TABLE'
SQL
select(Ext::String.compress_lines(sql))
end
def indices storage
sql = <<-SQL
SELECT a.attname, i.relname, ix.indisprimary, ix.indisunique
FROM pg_class t, pg_class i, pg_index ix, pg_attribute a
WHERE t.oid = ix.indrelid
AND i.oid = ix.indexrelid
AND a.attrelid = t.oid
AND a.attnum = ANY(ix.indkey)
AND t.relkind = 'r'
AND t.relname = ?
SQL
select(Ext::String.compress_lines(sql), storage).group_by(&:attname).
inject({}) do |r, (column, idxs)|
key = !!idxs.find(&:indisprimary)
idx_uni, idx_com = idxs.partition(&:indisunique).map{ |i|
if i.empty?
nil
elsif i.size == 1
i.first.relname.to_sym
else
i.map{ |ii| ii.relname.to_sym }
end
}
r[column.to_sym] = reflective_indices_hash(key, idx_uni, idx_com)
r
end
end
private
def reflective_query_storage storage
sql = <<-SQL
SELECT column_name, column_default, is_nullable,
character_maximum_length, udt_name
FROM "information_schema"."columns"
WHERE table_schema = current_schema() AND table_name = ?
SQL
idxs = indices(storage)
select(Ext::String.compress_lines(sql), storage).map do |f|
f.define_singleton_method(:indices){ idxs[f.column_name.to_sym] }
f
end
end
def reflective_field_name field
field.column_name
end
def reflective_primitive field
field.udt_name
end
def reflective_attributes field, attrs={}
# strip data type
if field.column_default
field.column_default.gsub!(/(.*?)::[\w\s]*/, '\1')
end
attrs.merge!(field.indices) if field.indices
attrs[:serial] = true if field.column_default =~ /nextval\('\w+'\)/
attrs[:allow_nil] = field.is_nullable == 'YES'
# strip string quotation
attrs[:default] = field.column_default.gsub(/^'(.*?)'$/, '\1') if
field.column_default && !attrs[:serial]
if field.character_maximum_length
attrs[:length] = field.character_maximum_length
elsif field.udt_name.upcase == 'TEXT'
attrs[:length] = Property::Text.length
end
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when /^INT\d+$/ ; Integer
when /^FLOAT\d+$/ ; Float
when 'VARCHAR', 'BPCHAR'; String
when 'TIMESTAMP', 'DATE'; DateTime
when 'TEXT' ; Property::Text
when 'BOOL' ; Property::Boolean
when 'NUMERIC' ; Property::Decimal
end || super(primitive)
end
end
|
godfat/dm-is-reflective
|
9c74b3c512e0c4aff3ddd6b5ebf5038726c98ef8
|
add contributor [ci skip]
|
diff --git a/README.md b/README.md
index 59a8790..c5bbe2e 100644
--- a/README.md
+++ b/README.md
@@ -1,160 +1,161 @@
# dm-is-reflective [](http://travis-ci.org/godfat/dm-is-reflective)
by Lin Jen-Shin ([godfat](http://godfat.org))
## LINKS:
* [github](https://github.com/godfat/dm-is-reflective)
* [rubygems](https://rubygems.org/gems/dm-is-reflective)
* [rdoc](http://rdoc.info/github/godfat/dm-is-reflective)
## DESCRIPTION:
DataMapper plugin that helps you manipulate an existing database.
It creates mappings between existing columns and model's properties.
## REQUIREMENTS:
* dm-core
* choose one: dm-sqlite-adapter, dm-postgres-adapter, dm-mysql-adapter
## INSTALLATION:
``` shell
gem install dm-is-reflective
```
``` ruby
gem 'dm-is-reflective',
:git => 'git://github.com/godfat/dm-is-reflective.git',
:submodules => true
```
## SYNOPSIS:
### Generating sources from a DATABASE_URI
We also have an executable to generate sources for you.
```
Usage: dm-is-reflective DATABASE_URI
-s, --scope SCOPE SCOPE where the models should go (default: Object)
-o, --output DIRECTORY DIRECTORY where the output goes (default: dm-is-reflective)
-h, --help Print this message
-v, --version Print the version
```
### API
``` ruby
require 'dm-is-reflective' # this would require 'dm-core'
dm = DataMapper.setup :default, 'sqlite::memory:'
class User
include DataMapper::Resource
is :reflective
# map all, returning an array of properties indicating fields it mapped
reflect /.*/ # e.g. => [#<Property:#<Class:0x18f89b8>:id>,
# #<Property:#<Class:0x18f89b8>:title>,
# #<Property:#<Class:0x18f89b8>:body>,
# #<Property:#<Class:0x18f89b8>:user_id>]
# map all (with no argument at all)
reflect
# mapping for field name ended with _at, and started with salt_
reflect /_at$/, /^salt_/
# mapping id and email
reflect :id, :email
# mapping all fields with type String, and id
reflect String, :id
# mapping login, and all fields with type Integer
reflect :login, Integer
end
# there's no guarantee of the order in storages array
dm.storages
# => ['users']
# show all indices
dm.indices('users') # [:id, {:unique_index => :users_pkey}]
# there's no guarantee of the order in fields array
User.fields
# => [[:created_at, DateTime, {:required => false}],
[:email, String, {:required => false, :length => 255,
:default => '[email protected]'}],
[:id, Serial, {:required => true, :serial => true,
:key => true}],
[:salt_first, String, {:required => false, :length => 50}],
[:salt_second, String, {:required => false, :length => 50}]]
dm.fields('users').sort_by{ |field| field.first.to_s } ==
User.fields.sort_by{ |field| field.first.to_s }
# => true
dm.storages_and_fields
# => {'users' => [[:id, Serial, {:required => true,
:serial => true,
:key => true}],
[:email, String, {:required => false,
:default =>
'[email protected]'}],
[:created_at, DateTime, {:required => false}],
[:salt_first, String, {:required => false,
:length => 50}],
[:salt_second, String, {:required => false,
:length => 50}]]}
# there's no guarantee of the order in returned array
dm.auto_genclass!
# => [DataMapper::Is::Reflective::User,
DataMapper::Is::Reflective::SchemaInfo,
DataMapper::Is::Reflective::Session]
# you can change the scope of generated models:
dm.auto_genclass! :scope => Object
# => [User, SchemaInfo, Session]
# you can generate classes for tables you specified only:
dm.auto_genclass! :scope => Object, :storages => /^phpbb_/
# => [PhpbbUser, PhpbbPost, PhpbbConfig]
# you can generate classes with String too:
dm.auto_genclass! :storages => ['users', 'config'], :scope => Object
# => [User, Config]
# you can generate a class only:
dm.auto_genclass! :storages => 'users'
# => [DataMapper::Is::Reflective::User]
# you can also generate the source from models:
puts User.to_source
```
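The synopsis above ends with `puts User.to_source` without showing its output. Judging from the `to_source` template in `lib/dm-is-reflective/reflective.rb`, the generated source looks roughly like this (the property options shown are illustrative, not exact):

``` ruby
# Rough sketch of User.to_source output: one `property` line per
# reflected property, wrapped in a plain class definition.
puts User.to_source
# class ::User < Object
#   include DataMapper::Resource
#   property :id, DataMapper::Property::Serial, {:key=>true, :serial=>true}
#   property :email, DataMapper::Property::String, {:required=>false, :length=>255}
# end
```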
## CONTRIBUTORS:
* Andrew Kreps (@onewheelskyward)
* Lin Jen-Shin (@godfat)
+* Mischa Molhoek (@mmolhoek)
## LICENSE:
Apache License 2.0
Copyright (c) 2008-2013, Lin Jen-Shin (godfat)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
|
godfat/dm-is-reflective
|
534b5d60a8da30224a9a2bcaeb9ccc90004bff68
|
comment about missing test [ci skip]
|
diff --git a/lib/dm-is-reflective/reflective.rb b/lib/dm-is-reflective/reflective.rb
index d24b730..58fcb57 100644
--- a/lib/dm-is-reflective/reflective.rb
+++ b/lib/dm-is-reflective/reflective.rb
@@ -1,112 +1,112 @@
module DmIsReflective
autoload :VERSION, 'dm-is-reflective/version'
include DataMapper
def is_reflective
extend ClassMethod
end
module ClassMethod
# it simply calls Migration#fields(self.storage_name)
# e.g.
# DataMapper.repository.adapter.fields storage_name
def fields repo = default_repository_name
DataMapper.repository(repo).adapter.fields(storage_name(repo))
end
# it automatically creates reflection from storage fields to properties.
# i.e. you don't have to specify any property if you are connecting
# to an existing database.
# you can pass it Regexp to map any field it matched, or just
# the field name in Symbol or String, or a Class telling it
# map any field which type equals to the Class.
# returned value is an array of properties indicating fields it mapped
# e.g.
# class User
# include DataMapper::Resource
# # reflect all
# reflect /.*/ # e.g. => [#<Property:#<Class:0x18f89b8>:id>,
# # #<Property:#<Class:0x18f89b8>:title>,
# # #<Property:#<Class:0x18f89b8>:body>,
# # #<Property:#<Class:0x18f89b8>:user_id>]
#
# # reflect all (with no argument at all)
# reflect
#
# # reflect for field name ended with _at, and started with salt_
# reflect /_at$/, /^salt_/
#
# # reflect id and email
# reflect :id, :email
#
# # reflect all fields with type String, and id
# reflect String, :id
#
# # reflect login, and all fields with type Integer
# reflect :login, Integer
# end
def reflect *targets
targets << /.*/ if targets.empty?
result = fields.map{ |field|
name, type, attrs = field
reflected = targets.each{ |target|
case target
when Regexp
if name.to_s =~ target
if name.to_s =~ /^\d/
- break :"_#{name}"
+ break :"_#{name}" # TODO: missing test for this
else
break name
end
end
when Symbol, String
break name if name == target.to_sym
when Class
break name if type == target
else
raise ArgumentError.new("invalid argument: #{target.inspect}")
end
}
reflect_property(reflected, type, attrs) if
reflected.kind_of?(Symbol)
}.compact
if key.empty? && k = properties.find{ |p| p.unique_index }
property k.name, k.primitive, :key => true
end
finalize if respond_to?(:finalize)
result
end
def reflect_property reflected, type, attrs
property reflected, type, attrs
rescue ArgumentError => e
if e.message =~ /cannot be used as a property name/
reflect_property "#{reflected}_", type,
{:field => reflected.to_s}.merge(attrs)
end
end
def to_source scope=nil
<<-RUBY
class #{scope}::#{name} < #{superclass}
include DataMapper::Resource
#{
properties.map do |prop|
"property :#{prop.name}, #{prop.class.name}, #{prop.options}"
end.join("\n")
}
end
RUBY
end
end # of ClassMethod
end # of DmIsReflective
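The branch flagged above as missing a test handles columns whose names start with a digit and therefore cannot be used directly as Ruby method names; `reflect_property` plays a similar role when DataMapper rejects a name outright, retrying with a trailing underscore and a `:field` option pointing back at the real column. A hedged sketch of both renaming paths, against an invented legacy table:

``` ruby
# Hypothetical sketch -- the table and column names are invented; only the
# renaming behaviour is taken from reflect / reflect_property above.
require 'dm-is-reflective'

DataMapper.setup :default, 'sqlite:legacy.db'

class LegacyRow
  include DataMapper::Resource
  is :reflective

  storage_names[:default] = 'legacy_rows'

  # a column named "1st_place" matches the Regexp branch and becomes the
  # property :_1st_place (the underscore-prefixed branch noted as untested);
  # a column named "class" raises inside `property`, so reflect_property
  # retries it as :class_ with :field => 'class'
  reflect /.*/
end
```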
|
godfat/dm-is-reflective
|
ba183e570527976565ac44a84646215824c67111
|
apply underscore prefix as suggested in #9, thanks @mmolhoek
|
diff --git a/lib/dm-is-reflective/reflective.rb b/lib/dm-is-reflective/reflective.rb
index 6972f29..d24b730 100644
--- a/lib/dm-is-reflective/reflective.rb
+++ b/lib/dm-is-reflective/reflective.rb
@@ -1,106 +1,112 @@
module DmIsReflective
autoload :VERSION, 'dm-is-reflective/version'
include DataMapper
def is_reflective
extend ClassMethod
end
module ClassMethod
# it simply calls Migration#fields(self.storage_name)
# e.g.
# DataMapper.repository.adapter.fields storage_name
def fields repo = default_repository_name
DataMapper.repository(repo).adapter.fields(storage_name(repo))
end
# it automatically creates reflection from storage fields to properties.
# i.e. you don't have to specify any property if you are connecting
# to an existing database.
# you can pass it Regexp to map any field it matched, or just
# the field name in Symbol or String, or a Class telling it
# map any field which type equals to the Class.
# returned value is an array of properties indicating fields it mapped
# e.g.
# class User
# include DataMapper::Resource
# # reflect all
# reflect /.*/ # e.g. => [#<Property:#<Class:0x18f89b8>:id>,
# # #<Property:#<Class:0x18f89b8>:title>,
# # #<Property:#<Class:0x18f89b8>:body>,
# # #<Property:#<Class:0x18f89b8>:user_id>]
#
# # reflect all (with no argument at all)
# reflect
#
# # reflect for field name ended with _at, and started with salt_
# reflect /_at$/, /^salt_/
#
# # reflect id and email
# reflect :id, :email
#
# # reflect all fields with type String, and id
# reflect String, :id
#
# # reflect login, and all fields with type Integer
# reflect :login, Integer
# end
def reflect *targets
targets << /.*/ if targets.empty?
result = fields.map{ |field|
name, type, attrs = field
reflected = targets.each{ |target|
case target
- when Regexp;
- break name if name.to_s =~ target
+ when Regexp
+ if name.to_s =~ target
+ if name.to_s =~ /^\d/
+ break :"_#{name}"
+ else
+ break name
+ end
+ end
- when Symbol, String;
+ when Symbol, String
break name if name == target.to_sym
- when Class;
+ when Class
break name if type == target
else
raise ArgumentError.new("invalid argument: #{target.inspect}")
end
}
reflect_property(reflected, type, attrs) if
reflected.kind_of?(Symbol)
}.compact
if key.empty? && k = properties.find{ |p| p.unique_index }
property k.name, k.primitive, :key => true
end
finalize if respond_to?(:finalize)
result
end
def reflect_property reflected, type, attrs
property reflected, type, attrs
rescue ArgumentError => e
if e.message =~ /cannot be used as a property name/
reflect_property "#{reflected}_", type,
{:field => reflected.to_s}.merge(attrs)
end
end
def to_source scope=nil
<<-RUBY
class #{scope}::#{name} < #{superclass}
include DataMapper::Resource
#{
properties.map do |prop|
"property :#{prop.name}, #{prop.class.name}, #{prop.options}"
end.join("\n")
}
end
RUBY
end
end # of ClassMethod
end # of DmIsReflective
|
godfat/dm-is-reflective
|
f6639266745cb2e79fd97e3b2afd02a8b3738bb2
|
Revert "skip properties that start with digit, cannot be mapped to function"
|
diff --git a/lib/dm-is-reflective/adapters/data_objects_adapter.rb b/lib/dm-is-reflective/adapters/data_objects_adapter.rb
index b39d1d6..d176c25 100644
--- a/lib/dm-is-reflective/adapters/data_objects_adapter.rb
+++ b/lib/dm-is-reflective/adapters/data_objects_adapter.rb
@@ -1,150 +1,150 @@
module DmIsReflective::DataObjectsAdapter
include DataMapper
# returns all tables' name in the repository.
# e.g.
# ['comments', 'users']
def storages
reflective_auto_load_adapter_extension
storages # call the overrided method
end
# returns all indices in the storage.
def indices storage
reflective_auto_load_adapter_extension
indices(storage) # call the overrided method
end
# returns all fields, with format [[name, type, attrs]]
# e.g.
# [[:created_at, DateTime, {:required => false}],
# [:email, String, {:required => false, :size => 255,
# :default => '[email protected]'}],
# [:id, DataMapper::Property::Serial, {:required => true, :serial => true,
# :key => true}],
# [:salt_first, String, {:required => false, :size => 50}],
# [:salt_second, String, {:required => false, :size => 50}]]
def fields storage
reflective_query_storage(storage).map{ |field|
attr = reflective_attributes(field)
type = reflective_lookup_primitive(reflective_primitive(field))
pick = if attr[:serial] && type == Integer
Property::Serial
else
type
end
[reflective_field_name(field).to_sym, pick, attr]
}
end
# returns a hash with storage names in keys and
# corresponded fields in values. e.g.
# {'users' => [[:id, Integer, {:required => true,
# :serial => true,
# :key => true}],
# [:email, String, {:required => false,
# :default => '[email protected]'}],
# [:created_at, DateTime, {:required => false}],
# [:salt_first, String, {:required => false, :size => 50}],
# [:salt_second, String, {:required => false, :size => 50}]]}
# see AbstractAdapter#storages and AbstractAdapter#fields for detail
def storages_and_fields
storages.inject({}){ |result, storage|
result[storage] = fields(storage)
result
}
end
# automaticly generate model class(es) and reflect
# all fields with reflect /.*/ for you.
# e.g.
# dm.auto_genclass!
# # => [DataMapper::Is::Reflective::User,
# # DataMapper::Is::Reflective::SchemaInfo,
# # DataMapper::Is::Reflective::Session]
#
# you can change the scope of generated models:
# e.g.
# dm.auto_genclass! :scope => Object
# # => [User, SchemaInfo, Session]
#
# you can generate classes for tables you specified only:
# e.g.
# dm.auto_genclass! :scope => Object, :storages => /^phpbb_/
# # => [PhpbbUser, PhpbbPost, PhpbbConfig]
#
# you can generate classes with String too:
# e.g.
# dm.auto_genclass! :storages => ['users', 'config'], :scope => Object
# # => [User, Config]
#
# you can generate a class only:
# e.g.
# dm.auto_genclass! :storages => 'users'
# # => [DataMapper::Is::Reflective::User]
def auto_genclass! opts = {}
opts[:scope] ||= DmIsReflective
opts[:storages] ||= /.*/
opts[:storages] = [opts[:storages]].flatten
storages.map{ |storage|
mapped = opts[:storages].each{ |target|
case target
when Regexp;
break storage if storage =~ target
when Symbol, String;
break storage if storage == target.to_s
else
raise ArgumentError.new("invalid argument: #{target.inspect}")
end
}
reflective_genclass(mapped, opts[:scope]) if mapped.kind_of?(String)
}.compact
end
private
def reflective_query_storage storage
reflective_auto_load_adapter_extension
reflective_query_storage(storage) # call the overrided method
end
def reflective_genclass storage, scope
model = Class.new
model.__send__(:include, Resource)
model.is(:reflective)
model.storage_names[:default] = storage
scope.const_set(Inflector.classify(storage), model)
- model.__send__(:reflect, /^[^\d].*/)
+ model.__send__(:reflect, /.*/)
model
end
def reflective_lookup_primitive primitive
warn "#{primitive} not found for #{self.class}: #{caller.inspect}"
String # falling back to the universal interface
end
def reflective_auto_load_adapter_extension
# TODO: can we fix this shit in dm-mysql-adapter?
name = options[:adapter] || options['adapter']
# TODO: can we fix this adapter name in dm-sqlite-adapter?
adapter = name.sub(/\Asqlite3\Z/, 'sqlite')
require "dm-is-reflective/adapters/#{adapter}_adapter"
class_name = "#{Inflector.camelize(adapter)}Adapter"
Adapters.const_get(class_name).__send__(:include,
DmIsReflective.const_get(class_name))
end
def reflective_indices_hash key, idx_uni, idx_com
h = {}
h[:key] = key if key
h[:unique_index] = idx_uni if idx_uni
h[ :index] = idx_com if idx_com
h
end
end
|
godfat/dm-is-reflective
|
87d668bb085529b4c5f3f80eeaa7596c6f096306
|
skip jruby for now
|
diff --git a/.travis.yml b/.travis.yml
index b24ae96..4ac0c89 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,20 +1,25 @@
language: ruby
rvm:
- 2.0
- 2.1
- 2.2
- rbx-2
- jruby
- jruby-head
install: 'bundle install --retry=3'
script: 'ruby -r bundler/setup -S rake test'
addons:
postgresql: 9.3
before_install:
- sudo apt-get update
- sudo apt-get install postgresql-server-dev-9.3 libpq-dev
- mysql -e 'create database myapp_test;'
- psql -c 'create database myapp_test;' -U postgres
+
+matrix:
+ allow_failures:
+ - rvm: jruby
+ - rvm: jruby-head
|
godfat/dm-is-reflective
|
3bbc54bab58b915c2dd86a99cf9466d5f1c204f7
|
i hate ubuntu
|
diff --git a/.travis.yml b/.travis.yml
index f309d0b..b24ae96 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,18 +1,20 @@
language: ruby
rvm:
- 2.0
- 2.1
- 2.2
- rbx-2
- jruby
- jruby-head
install: 'bundle install --retry=3'
script: 'ruby -r bundler/setup -S rake test'
addons:
postgresql: 9.3
before_install:
+ - sudo apt-get update
+ - sudo apt-get install postgresql-server-dev-9.3 libpq-dev
- mysql -e 'create database myapp_test;'
- psql -c 'create database myapp_test;' -U postgres
|
godfat/dm-is-reflective
|
fb41ab076570b84ce7ac488eb07138630f2233b0
|
skip properties that start with digit, cannot be mapped to function
|
diff --git a/lib/dm-is-reflective/adapters/data_objects_adapter.rb b/lib/dm-is-reflective/adapters/data_objects_adapter.rb
index d176c25..b39d1d6 100644
--- a/lib/dm-is-reflective/adapters/data_objects_adapter.rb
+++ b/lib/dm-is-reflective/adapters/data_objects_adapter.rb
@@ -1,150 +1,150 @@
module DmIsReflective::DataObjectsAdapter
include DataMapper
# returns all tables' name in the repository.
# e.g.
# ['comments', 'users']
def storages
reflective_auto_load_adapter_extension
storages # call the overrided method
end
# returns all indices in the storage.
def indices storage
reflective_auto_load_adapter_extension
indices(storage) # call the overrided method
end
# returns all fields, with format [[name, type, attrs]]
# e.g.
# [[:created_at, DateTime, {:required => false}],
# [:email, String, {:required => false, :size => 255,
# :default => '[email protected]'}],
# [:id, DataMapper::Property::Serial, {:required => true, :serial => true,
# :key => true}],
# [:salt_first, String, {:required => false, :size => 50}],
# [:salt_second, String, {:required => false, :size => 50}]]
def fields storage
reflective_query_storage(storage).map{ |field|
attr = reflective_attributes(field)
type = reflective_lookup_primitive(reflective_primitive(field))
pick = if attr[:serial] && type == Integer
Property::Serial
else
type
end
[reflective_field_name(field).to_sym, pick, attr]
}
end
# returns a hash with storage names in keys and
# corresponded fields in values. e.g.
# {'users' => [[:id, Integer, {:required => true,
# :serial => true,
# :key => true}],
# [:email, String, {:required => false,
# :default => '[email protected]'}],
# [:created_at, DateTime, {:required => false}],
# [:salt_first, String, {:required => false, :size => 50}],
# [:salt_second, String, {:required => false, :size => 50}]]}
# see AbstractAdapter#storages and AbstractAdapter#fields for detail
def storages_and_fields
storages.inject({}){ |result, storage|
result[storage] = fields(storage)
result
}
end
# automaticly generate model class(es) and reflect
# all fields with reflect /.*/ for you.
# e.g.
# dm.auto_genclass!
# # => [DataMapper::Is::Reflective::User,
# # DataMapper::Is::Reflective::SchemaInfo,
# # DataMapper::Is::Reflective::Session]
#
# you can change the scope of generated models:
# e.g.
# dm.auto_genclass! :scope => Object
# # => [User, SchemaInfo, Session]
#
# you can generate classes for tables you specified only:
# e.g.
# dm.auto_genclass! :scope => Object, :storages => /^phpbb_/
# # => [PhpbbUser, PhpbbPost, PhpbbConfig]
#
# you can generate classes with String too:
# e.g.
# dm.auto_genclass! :storages => ['users', 'config'], :scope => Object
# # => [User, Config]
#
# you can generate a class only:
# e.g.
# dm.auto_genclass! :storages => 'users'
# # => [DataMapper::Is::Reflective::User]
def auto_genclass! opts = {}
opts[:scope] ||= DmIsReflective
opts[:storages] ||= /.*/
opts[:storages] = [opts[:storages]].flatten
storages.map{ |storage|
mapped = opts[:storages].each{ |target|
case target
when Regexp;
break storage if storage =~ target
when Symbol, String;
break storage if storage == target.to_s
else
raise ArgumentError.new("invalid argument: #{target.inspect}")
end
}
reflective_genclass(mapped, opts[:scope]) if mapped.kind_of?(String)
}.compact
end
private
def reflective_query_storage storage
reflective_auto_load_adapter_extension
reflective_query_storage(storage) # call the overrided method
end
def reflective_genclass storage, scope
model = Class.new
model.__send__(:include, Resource)
model.is(:reflective)
model.storage_names[:default] = storage
scope.const_set(Inflector.classify(storage), model)
- model.__send__(:reflect, /.*/)
+ model.__send__(:reflect, /^[^\d].*/)
model
end
def reflective_lookup_primitive primitive
warn "#{primitive} not found for #{self.class}: #{caller.inspect}"
String # falling back to the universal interface
end
def reflective_auto_load_adapter_extension
# TODO: can we fix this shit in dm-mysql-adapter?
name = options[:adapter] || options['adapter']
# TODO: can we fix this adapter name in dm-sqlite-adapter?
adapter = name.sub(/\Asqlite3\Z/, 'sqlite')
require "dm-is-reflective/adapters/#{adapter}_adapter"
class_name = "#{Inflector.camelize(adapter)}Adapter"
Adapters.const_get(class_name).__send__(:include,
DmIsReflective.const_get(class_name))
end
def reflective_indices_hash key, idx_uni, idx_com
h = {}
h[:key] = key if key
h[:unique_index] = idx_uni if idx_uni
h[ :index] = idx_com if idx_com
h
end
end
|
godfat/dm-is-reflective
|
61163d5de023fbd84d2c35ff5f99431e7724a629
|
.travis.yml: maybe the only way to get rbx running
|
diff --git a/.travis.yml b/.travis.yml
index 502a832..b6d0cd5 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,15 +1,12 @@
before_install:
- 'git submodule update --init'
- mysql -e 'create database myapp_test;'
- psql -c 'create database myapp_test;' -U postgres
script: 'ruby -r bundler/setup -S rake test'
-env:
- - 'RBXOPT=-X19'
-
rvm:
- 1.9.3
- 2.0.0
- - rbx-head
+ - rbx-2.1.1
- jruby-head
|
godfat/dm-is-reflective
|
eb5d6ef1f8603b766ba4b09ba1b6500b0b8d2d3f
|
runner.rb: should be slightly faster
|
diff --git a/lib/dm-is-reflective/runner.rb b/lib/dm-is-reflective/runner.rb
index ca00a07..a491ab4 100644
--- a/lib/dm-is-reflective/runner.rb
+++ b/lib/dm-is-reflective/runner.rb
@@ -1,87 +1,88 @@
require 'dm-is-reflective'
module DmIsReflective::Runner
module_function
def options
@options ||=
[['-s, --scope SCOPE' ,
'SCOPE where the models should go (default: Object)' ],
['-o, --output DIRECTORY' ,
'DIRECTORY where the output goes (default: dm-is-reflective)'],
['-h, --help' , 'Print this message' ],
['-v, --version', 'Print the version' ]]
end
def run argv=ARGV
puts(help) and exit if argv.empty?
generate(*parse(argv))
end
def generate uri, scope, output
require 'fileutils'
FileUtils.mkdir_p(output)
DataMapper.setup(:default, uri).auto_genclass!(:scope => scope).
each do |model|
path = "#{output}/#{model.name.gsub(/::/, '').
gsub(/([A-Z])/, '_\1').
downcase[1..-1]}.rb"
File.open(path, 'w') do |file|
file.puts model.to_source
end
end
end
def parse argv
uri, scope, output = ['sqlite::memory:', Object, 'dm-is-reflective']
until argv.empty?
case arg = argv.shift
when /^-s=?(.+)?/, /^--scope=?(.+)?/
name = $1 || argv.shift
scope = if Object.const_defined?(name)
Object.const_get(name)
else
mkconst_p(name)
end
when /^-o=?(.+)?/, /^--output=?(.+)?/
output = $1 || argv.shift
when /^-h/, '--help'
puts(help)
exit
when /^-v/, '--version'
puts(DmIsReflective::VERSION)
exit
else
uri = arg
end
end
[uri, scope, output]
end
def mkconst_p name
name.split('::').inject(Object) do |ret, mod|
if Object.const_defined?(mod)
ret.const_get(mod)
else
ret.const_set(mod, Module.new)
end
end
end
def help
- maxn = options.transpose.first.map(&:size).max
- maxd = options.transpose.last .map(&:size).max
+ optt = options.transpose
+ maxn = optt.first.map(&:size).max
+ maxd = optt.last .map(&:size).max
"Usage: dm-is-reflective DATABASE_URI\n" +
options.map{ |(name, desc)|
if desc.empty?
name
else
sprintf(" %-*s %-*s", maxn, name, maxd, desc)
end
}.join("\n")
end
end
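Taken together, `parse` turns the argument vector into a URI, scope and output directory, and `generate` then writes one file per reflected model using `to_source`. Below is a minimal sketch of driving the same flow from Ruby rather than the shell; the database path, scope name and output directory are assumptions:

``` ruby
# Sketch: invoking the runner programmatically with made-up paths.
# (assumes dm-sqlite-adapter is installed and the database file exists)
require 'dm-is-reflective/runner'

DmIsReflective::Runner.run %w[sqlite:db/production.sqlite3
                              --scope MyApp --output app/models]
# roughly the same as the shell invocation:
#   dm-is-reflective sqlite:db/production.sqlite3 -s MyApp -o app/models
```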
|
godfat/dm-is-reflective
|
a97e5429715f3468626a29b9de0aa3091fba4085
|
prepare release 1.3.1
|
diff --git a/CHANGES.md b/CHANGES.md
index 71422f3..a40cbc5 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,249 +1,253 @@
# CHANGES
+## dm-is-reflective 1.3.1, 2013-05-22
+
+* Introduce `indices` method which would return all indices in the storage.
+
## dm-is-reflective 1.3.0, 2013-05-20
* Warn instead of raising a TypeError if a datatype cannot be found.
We fallback to use String.
* Now it works for multiple composite keys.
* If there's no key defined, it would pick the first unique index as the key.
* If a field name is conflicted, it would try to resolve it by appending a
underscore to the field name.
## dm-is-reflective 1.2.0, 2013-05-14
* We got a bunch of internal renaming.
* Added DataMapper::Resource#to_source.
* Added an executable which generates sources for you.
* Fixed MySQL issues with setting up with a hash rather than URI.
* Fixed SQLite issues without loading dm-migrations.
## dm-is-reflective 1.1.0, 2013-01-11
* The need for dm-migrations is now removed.
* Added a few more datatypes. Thanks @onewheelskyward
* Tested against dm-core 1.2.0.
## dm-is-reflective 1.0.1, 2012-05-16
* allow_nil is closer to db's semantics, not required. Thanks miaout17.
  `:allow_nil` allows empty values, but `:required` does not. So here we
always use `:allow_nil` to match db's semantics.
## dm-is-reflective 1.0.0, 2011-06-16
* updated against dm-core 1.1.0
## dm-is-reflective 0.9.0, 2010-07-05
* adapted to dm-core 1.0.0
* renamed AbstractAdapter to DataObjectsAdapter
## dm-is-reflective 0.8.0, 2009-09-16
* require dm-core 0.10.0 and above now
* Serial would map to Serial not Integer now
* no more type_map now
* no more Extlib::Hook to load adapter
## dm-mapping 0.7.1, never released as a gem
don't open module Migration and edit it, instead, use include, more see:
* added DataMapper::Mapping::AbstractAdapter
* added DataMapper::Mapping::Sqlite3Adapter
* added DataMapper::Mapping::MysqlAdapter
* added DataMapper::Mapping::PostgresAdapter
* each adapter was included in related adapter in DataMapper.
* Model#fields now accept repository name as argument
there's differences between adapters,
Sqlite3 added default => 'UL' in Boolean type,
Mysql can't tell whether it's a Boolean or Tinyint,
and Postgres is fine. see test/abstract.rb: super_user_fields for detail.
## dm-mapping 0.7.0, 2008-09-01
* feature added
- added postgres support.
* bug fixed
- fixed key mapping in mysql adapter. PRI and MUL are all keys.
- use DM::Text.size as default text size in sqlite3.
## dm-mapping 0.6.2, 2008-08-30
* mapping more data types for mysql.
* don't map TINYINT to TrueClass with mysql, skip it in type_map.
## dm-mapping 0.6.1, 2008-08-22
* gem 'dm-core', '>=0.9.3' instead of '=0.9.3'
## dm-mapping 0.6.0, 2008-08-16
* mapping returns an array of properties indicating fields it mapped.
* performance boosted by refactored mapping implementation.
* changed the way using auto_genclass!, now accepts args like mapping!
* changed fields to return field name with Symbol instead of String.
this would make it be more consistent with DataMapper.
* storage names remain String.
* added more mysql data type to map
* use Extlib::Hook to setup dm-mapping instead of stupid alias_method.
* removed ensure_require in model. always setup DataMapper before define model.
## dm-mapping 0.5.0, 2008-08-14
* feature added
- added mysql support.
- reflect size 65535 in TEXT for sqlite3.
* bug fixed
- reflect VARCHAR(size) instead of default size in sqlite3.
* misc
- renamed sqlite3adapter to sqlite3_adapter.
## dm-mapping 0.4.1, 2008-08-14
* removed type hack, replaced with rejecting special type to lookup.
## dm-mapping 0.4.0, 2008-08-04
* added Migration#auto_genclass!.
* updated README.
* added more rdoc.
## dm-mapping 0.3.0, 2008-08-04
* added support of mapping Integer, DateTime, etc.
* renamed some internals.
* changed the way requiring adapter. no more setup first.
* added Migration#storages_and_fields
* added mapping :serial => true for primary key.
* added mapping :default, and :nullable.
* added support of mapping name. (through passing symbol or string)
* added support of multiple arguments.
* removed Mapping::All, use /.*/ instead.
## dm-mapping 0.2.1, 2008-08-03
* fixed a bug that type map should lookup for parent.
* fixed a bug that sql type could be lower case.
fixed by calling upcase.
## dm-mapping 0.2.0, 2008-08-02
* added Sqlite3Adapter::Migration#fields
* added DataMapper::Model#mapping
* added DataMapper::Model#fields
* added DataMapper::TypeMap#find_primitive for reversed lookup.
mapping SQL type back to Ruby type.
* added corresponded test.
## dm-mapping 0.1.0, 2008-07-27
* birthday!
* added DataMapper.repository.storages for sqlite3.
* please refer:
<http://groups.google.com/group/datamapper/browse_thread/thread/b9ca41120c5c9389>
original message:
from Lin Jen-Shin
to DataMapper
cc godfat
date Sun, Jul 27, 2008 at 5:40 PM
subject Manipulate an existing database.
mailed-by gmail.com
Greetings,
DataMapper looks very promising for me, so I am thinking of
using it in the near future. I hate separate my domain objects into
two parts in Rails, writing migration and switching to ActiveRecord,
vice versa, is very annoying to me.
But there's a very convenient feature to me in ActiveRecord,
that is ActiveRecord automatically mapping all fields in a table.
It makes me easily control an existing database without any domain object.
For example,
require 'active_record'
ActiveRecord::Base.establish_connection(
:adapter => 'sqlite3',
:database => 'db/development.sqlite3'
)
clsas User < ActiveRecord::Base
end
User.find 1
=> #<User id: 1, account: "admin", created_at: "2008-05-18 20:08:37", etc.>
Some people would use database admin such as phpMyAdmin to
accomplish this kind of task, but I prefer anything in Ruby,
calling Ruby function, manipulating data without SQL and
any domain object. (i.e. I didn't have to load up entire environment.)
In DataMapper, I didn't find an easy way to accomplish this.
I am sorry if there's one but I didn't find it, please point out,
many thanks. In short, I would like to do this in DataMapper:
class User
include DataMapper::Resource
mapping :account, :created_at
end
or
class User
include DataMapper::Resource
mapping All
end
class User
include DataMapper::ResourceAll
end
or
class User
include DataMapper::Resource
mapping *storage_fields
end
The above User.storage_fields should return an Array,
telling all the fields in the table, e.g. [:account, :created_at, :etc]
or a Hash includes data type, e.g. {:account => String,
:created_at => DateTime}
then mapping *storage_fields should change to:
mapping *storage_fields.each_key.to_a
If it's possible, a feature returning the database schema as well:
DataMapper.repository.storages
# => [:users, :posts, :etc]
DataMapper.repository.storages_and_fields
# => {:users => {:account => String},
:posts => {:title => String, :content => Text}}
or returning DataObject::Field, DataObject::Storage, etc.
DataMapper.repository.storage
# => [#<DataObject::Storage @name='users' @fields=
[#<DataObject::Field @name='account' @type=String>]>]
If you feel this kind of feature is indeed needed or not bad for
adding it, I could try to provide a patch for it. Though I didn't
read the source code deeply, not knowning it's easy or not.
sincerely,
diff --git a/dm-is-reflective.gemspec b/dm-is-reflective.gemspec
index 06d62e4..adb130d 100644
--- a/dm-is-reflective.gemspec
+++ b/dm-is-reflective.gemspec
@@ -1,78 +1,78 @@
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = "dm-is-reflective"
- s.version = "1.3.0"
+ s.version = "1.3.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Lin Jen-Shin (godfat)"]
- s.date = "2013-05-20"
+ s.date = "2013-05-22"
s.description = "DataMapper plugin that helps you manipulate an existing database.\nIt creates mappings between existing columns and model's properties."
s.email = ["godfat (XD) godfat.org"]
s.executables = ["dm-is-reflective"]
s.files = [
".gitignore",
".gitmodules",
".travis.yml",
"CHANGES.md",
"Gemfile",
"LICENSE",
"README.md",
"Rakefile",
"TODO.md",
"bin/dm-is-reflective",
"dm-is-reflective.gemspec",
"lib/dm-is-reflective.rb",
"lib/dm-is-reflective/adapters/data_objects_adapter.rb",
"lib/dm-is-reflective/adapters/mysql_adapter.rb",
"lib/dm-is-reflective/adapters/postgres_adapter.rb",
"lib/dm-is-reflective/adapters/sqlite_adapter.rb",
"lib/dm-is-reflective/reflective.rb",
"lib/dm-is-reflective/runner.rb",
"lib/dm-is-reflective/test.rb",
"lib/dm-is-reflective/version.rb",
"task/.gitignore",
"task/gemgem.rb",
"test/setup_db.sh",
"test/test_mysql.rb",
"test/test_postgres.rb",
"test/test_sqlite.rb",
"test/test_to_source.rb"]
s.homepage = "https://github.com/godfat/dm-is-reflective"
s.licenses = ["Apache License 2.0"]
s.require_paths = ["lib"]
s.rubygems_version = "2.0.3"
s.summary = "DataMapper plugin that helps you manipulate an existing database."
s.test_files = [
"test/test_mysql.rb",
"test/test_postgres.rb",
"test/test_sqlite.rb",
"test/test_to_source.rb"]
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<dm-core>, [">= 0"])
s.add_runtime_dependency(%q<dm-do-adapter>, [">= 0"])
s.add_development_dependency(%q<dm-migrations>, [">= 0"])
s.add_development_dependency(%q<dm-sqlite-adapter>, [">= 0"])
s.add_development_dependency(%q<dm-mysql-adapter>, [">= 0"])
s.add_development_dependency(%q<dm-postgres-adapter>, [">= 0"])
else
s.add_dependency(%q<dm-core>, [">= 0"])
s.add_dependency(%q<dm-do-adapter>, [">= 0"])
s.add_dependency(%q<dm-migrations>, [">= 0"])
s.add_dependency(%q<dm-sqlite-adapter>, [">= 0"])
s.add_dependency(%q<dm-mysql-adapter>, [">= 0"])
s.add_dependency(%q<dm-postgres-adapter>, [">= 0"])
end
else
s.add_dependency(%q<dm-core>, [">= 0"])
s.add_dependency(%q<dm-do-adapter>, [">= 0"])
s.add_dependency(%q<dm-migrations>, [">= 0"])
s.add_dependency(%q<dm-sqlite-adapter>, [">= 0"])
s.add_dependency(%q<dm-mysql-adapter>, [">= 0"])
s.add_dependency(%q<dm-postgres-adapter>, [">= 0"])
end
end
diff --git a/lib/dm-is-reflective/version.rb b/lib/dm-is-reflective/version.rb
index 9824f75..b703330 100644
--- a/lib/dm-is-reflective/version.rb
+++ b/lib/dm-is-reflective/version.rb
@@ -1,4 +1,4 @@
module DmIsReflective
- VERSION = '1.3.0'
+ VERSION = '1.3.1'
end
|
godfat/dm-is-reflective
|
1fb8192433e62feace743f832d7ecf66785b7551
|
fix test for sqlite, i am too tired to make sqlite report primary key
|
diff --git a/lib/dm-is-reflective/test.rb b/lib/dm-is-reflective/test.rb
index e114a77..48b4496 100644
--- a/lib/dm-is-reflective/test.rb
+++ b/lib/dm-is-reflective/test.rb
@@ -1,327 +1,335 @@
require 'bacon'
Bacon.summary_on_exit
require 'dm-core'
require 'dm-migrations'
require 'dm-is-reflective'
module Abstract
class Cat
include DataMapper::Resource
property :id, Serial
belongs_to :user
belongs_to :super_user
property :user_id , Integer,
:unique_index => [:usu, :u]
property :super_user_id, Integer,
:unique_index => [:usu],
:index => [:su]
end
class Comment
include DataMapper::Resource
belongs_to :user, :required => false
property :id, Serial
property :title, String, :length => 50, :default => 'default title',
:allow_nil => false
property :body, Text
is :reflective
end
class User
include DataMapper::Resource
has n, :comments
property :id, Serial
property :login, String, :length => 70
property :sig, Text
property :created_at, DateTime
is :reflective
end
class SuperUser
include DataMapper::Resource
property :id, Serial
property :bool, Boolean
is :reflective
end
Tables = %w[abstract_cats abstract_comments
abstract_super_users abstract_users]
AttrCommon = {:allow_nil => true}
AttrCommonPK = {:serial => true, :key => true, :allow_nil => false}
AttrText = {:length => 65535}.merge(AttrCommon)
def self.next_id
@id ||= 0
@id += 1
end
end
include Abstract
shared :reflective do
def cat_indices
- @cat_indices ||=
- [[ :id, {:unique_index => :abstract_cats_pkey, :key => true}],
+ @cat_indices ||= begin
+ id = case DataMapper.repository.adapter.class.name
+ when 'DataMapper::Adapters::SqliteAdapter'
+ nil
+ else
+ [:id, {:unique_index => :abstract_cats_pkey, :key => true}]
+ end
+ [id ,
[:super_user_id, {:unique_index => :unique_abstract_cats_usu ,
:index => :index_abstract_cats_su }] ,
[ :user_id, {:unique_index => [:unique_abstract_cats_usu ,
- :unique_abstract_cats_u]}] ]
+ :unique_abstract_cats_u]}] ].
+ compact
+ end
end
def cat_fields
@cat_fields ||=
[[:id, DataMapper::Property::Serial,
{:unique_index => :abstract_cats_pkey}.merge(AttrCommonPK)],
[:super_user_id, Integer,
{:unique_index => :unique_abstract_cats_usu,
:index => :index_abstract_cats_su }.merge(AttrCommon)],
[:user_id , Integer,
{:unique_index => [:unique_abstract_cats_usu,
:unique_abstract_cats_u]}.merge(AttrCommon)]]
end
def comment_fields
@comment_fields ||= begin
[[:body , DataMapper::Property::Text , AttrText],
[:id , DataMapper::Property::Serial,
{:unique_index => :abstract_comments_pkey}.merge(AttrCommonPK)],
[:title , String ,
{:length => 50, :default => 'default title', :allow_nil => false}],
[:user_id, Integer ,
{:index => :index_abstract_comments_user}.merge(AttrCommon)]]
end
end
def user_fields
@user_fields ||=
[[:created_at, DateTime, AttrCommon],
[:id, DataMapper::Property::Serial,
{:unique_index => :abstract_users_pkey}.merge(AttrCommonPK)],
[:login, String, {:length => 70}.merge(AttrCommon)],
[:sig, DataMapper::Property::Text, AttrText]]
end
def super_user_fields
@super_user_fields ||= begin
type = case DataMapper.repository.adapter.class.name
when 'DataMapper::Adapters::MysqlAdapter'
Integer
else
DataMapper::Property::Boolean
end
[[:bool, type, AttrCommon],
[:id , DataMapper::Property::Serial,
{:unique_index => :abstract_super_users_pkey}.merge(AttrCommonPK)]]
end
end
before do
@dm = setup_data_mapper
[Cat, Comment, User, SuperUser].each(&:auto_migrate!)
end
def sort_fields fields
fields.sort_by{ |f| f.first.to_s }
end
def create_fake_model
model = Class.new
model.module_eval do
include DataMapper::Resource
property :id, DataMapper::Property::Serial
is :reflective
end
Abstract.const_set("Model#{Abstract.next_id}", model)
[model, setup_data_mapper]
end
def new_scope
Abstract.const_set("Scope#{Abstract.next_id}", Module.new)
end
def test_create_comment
Comment.create(:title => 'XD')
Comment.first.title.should.eq 'XD'
end
def test_create_user
now = Time.now
User.create(:created_at => now)
User.first.created_at.asctime.should.eq now.asctime
now
end
should 'create comment' do
test_create_comment
end
should 'create user' do
test_create_user
end
should 'storages' do
@dm.storages.sort.should.eq Tables
sort_fields(@dm.fields('abstract_comments')).should.eq comment_fields
end
should 'reflect all' do
test_create_comment # for fixtures
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
local_dm.storages.sort.should.eq Tables
model.storage_name.should.eq 'abstract_comments'
model.send :reflect
model.all.size .should.eq 1
sort_fields(model.fields).should.eq comment_fields
model.first.title .should.eq 'XD'
end
should 'reflect and create' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect
model.create(:title => 'orz')
model.first.title.should.eq 'orz'
model.create
model.last.title.should.eq 'default title'
end
should 'storages and fields' do
sort_fields(@dm.fields('abstract_users')).should.eq user_fields
@dm.storages_and_fields.inject({}){ |r, i|
key, value = i
r[key] = value.sort_by{ |v| v.first.to_s }
r
}.should.eq('abstract_cats' => cat_fields,
'abstract_comments' => comment_fields,
'abstract_users' => user_fields,
'abstract_super_users' => super_user_fields)
end
should 'indices' do
sort_fields(@dm.indices('abstract_cats')).should.eq cat_indices
end
should 'reflect type' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect, DataMapper::Property::Serial
model.properties.map(&:name).map(&:to_s).sort.should.eq ['id']
model.send :reflect, Integer
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'user_id']
end
should 'reflect multiple' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_users'
model.send :reflect, :login, DataMapper::Property::Serial
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'login']
end
should 'reflect regexp' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect, /id$/
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'user_id']
end
should 'raise ArgumentError when giving invalid argument' do
lambda{
User.send :reflect, 29
}.should.raise ArgumentError
end
should 'allow empty string' do
Comment.new(:title => '').save.should.eq true
end
should 'auto_genclasses' do
scope = new_scope
@dm.auto_genclass!(:scope => scope).map(&:to_s).sort.should.eq \
["#{scope}::AbstractCat" ,
"#{scope}::AbstractComment" ,
"#{scope}::AbstractSuperUser",
"#{scope}::AbstractUser"]
comment = scope.const_get('AbstractComment')
sort_fields(comment.fields).should.eq comment_fields
test_create_comment
comment.first.title.should.eq 'XD'
comment.create(:title => 'orz', :body => 'dm-reflect')
comment.last.body.should.eq 'dm-reflect'
end
should 'auto_genclass' do
scope = new_scope
@dm.auto_genclass!(:scope => scope,
:storages => 'abstract_users').map(&:to_s).should.eq \
["#{scope}::AbstractUser"]
user = scope.const_get('AbstractUser')
sort_fields(user.fields).should.eq user_fields
now = test_create_user
user.first.created_at.asctime.should.eq now.asctime
user.create(:login => 'godfat')
user.last.login.should.eq 'godfat'
end
should 'auto_genclass with regexp' do
scope = new_scope
@dm.auto_genclass!(:scope => scope,
:storages => /_users$/).map(&:to_s).sort.should.eq \
["#{scope}::AbstractSuperUser", "#{scope}::AbstractUser"]
user = scope.const_get('AbstractSuperUser')
sort_fields(user.fields).should.eq sort_fields(SuperUser.fields)
end
should 'reflect return value' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
mapped = model.send :reflect, /.*/
mapped.map(&:object_id).sort.should.eq \
model.properties.map(&:object_id).sort
end
end
module Kernel
def eq? rhs
self == rhs
end
def require_adapter adapter
require "dm-#{adapter}-adapter"
rescue LoadError
puts "skip #{adapter} test since it's not installed"
end
end
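The file above only defines the shared `:reflective` behaviour; each adapter-specific test supplies `setup_data_mapper` and pulls the shared block in. A hedged sketch of what such a test file might look like (the real `test/test_sqlite.rb` may differ in detail):

``` ruby
# Hedged sketch of an adapter-specific test hooking into the shared spec.
require 'dm-is-reflective/test'
require_adapter :sqlite # prints a skip message if dm-sqlite-adapter is missing

describe 'sqlite' do
  def setup_data_mapper
    DataMapper.setup :default, 'sqlite::memory:'
  end

  behaves_like :reflective
end
```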
|
godfat/dm-is-reflective
|
6b72c63ee4268fed9aad72cc942d25a86ea01994
|
mysql works now
|
diff --git a/lib/dm-is-reflective/adapters/mysql_adapter.rb b/lib/dm-is-reflective/adapters/mysql_adapter.rb
index 11fec07..661e621 100644
--- a/lib/dm-is-reflective/adapters/mysql_adapter.rb
+++ b/lib/dm-is-reflective/adapters/mysql_adapter.rb
@@ -1,102 +1,105 @@
module DmIsReflective::MysqlAdapter
include DataMapper
def storages
select('SHOW TABLES')
end
- private
- # construct needed table metadata
- def reflective_query_storage storage
- sql_indices = <<-SQL
+ def indices storage
+ sql = <<-SQL
SELECT column_name, index_name, non_unique
FROM `information_schema`.`statistics`
WHERE table_schema = ? AND table_name = ?
SQL
- sql_columns = <<-SQL
+ select(Ext::String.compress_lines(sql),
+ reflective_table_schema, storage).group_by(&:column_name).
+ inject({}) do |r, (column, idxs)|
+ primary = idxs.find{ |i| i.index_name == 'PRIMARY' }
+ primary.index_name = :"#{storage}_pkey" if primary
+ key = !!primary
+ idx_uni, idx_com = idxs.partition{ |i| i.non_unique == 0 }.map{ |i|
+ if i.empty?
+ nil
+ elsif i.size == 1
+ i.first.index_name.to_sym
+ else
+ i.map{ |ii| ii.index_name.to_sym }
+ end
+ }
+
+ r[column.to_sym] = reflective_indices_hash(key, idx_uni, idx_com)
+ r
+ end
+ end
+
+ private
+ # construct needed table metadata
+ def reflective_query_storage storage
+ sql = <<-SQL
SELECT column_name, column_key, column_default, is_nullable,
data_type, character_maximum_length, extra, table_name
FROM `information_schema`.`columns`
WHERE table_schema = ? AND table_name = ?
SQL
- # TODO: can we fix this shit in dm-mysql-adapter?
- path = (options[:path] || options['path'] ||
- options[:database] || options['database']).sub('/', '')
-
- indices =
- select(Ext::String.compress_lines(sql_indices), path, storage).
- group_by(&:column_name)
-
- select(Ext::String.compress_lines(sql_columns), path, storage).
- map do |column|
- if idx = indices[column.column_name]
- idx_uni, idx_com = idx.partition{ |i| i.non_unique == 0 }.map{ |i|
- if i.empty?
- nil
- elsif i.size == 1
- i.first.index_name.to_sym
- else
- i.map{ |ii| ii.index_name.to_sym }
- end
- }
- else
- idx_uni, idx_com = nil
- end
-
- column.instance_eval <<-RUBY
- def unique_index; #{idx_uni.inspect}; end
- def index ; #{idx_com.inspect}; end
- RUBY
+ idxs = indices(storage)
- column
- end
+ select(Ext::String.compress_lines(sql),
+ reflective_table_schema, storage).map do |f|
+ f.define_singleton_method(:indices){ idxs[f.column_name.to_sym] }
+ f
+ end
end
def reflective_field_name field
field.column_name
end
def reflective_primitive field
field.data_type
end
- def reflective_attributes field, attrs = {}
+ def reflective_attributes field, attrs={}
+ attrs.merge!(field.indices) if field.indices
+
attrs[:serial] = true if field.extra == 'auto_increment'
if field.column_key == 'PRI'
attrs[:key] = true
attrs[:unique_index] = :"#{field.table_name}_pkey"
- else
- attrs[:unique_index] = field.unique_index if field.unique_index
- attrs[ :index] = field. index if field. index
end
attrs[:allow_nil] = field.is_nullable == 'YES'
attrs[:default] = field.column_default if
field.column_default
attrs[:length] = field.character_maximum_length if
field.character_maximum_length
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when 'YEAR' ; Integer
when /\w*INT(EGER)?( SIGNED| UNSIGNED)?( ZEROFILL)?/
; Integer
when /(DOUBLE|FLOAT|DECIMAL)( SIGNED| UNSIGNED)?( ZEROFILL)?/
; Property::Decimal
when /\w*BLOB|\w*BINARY|ENUM|SET|CHAR/; String
when 'TIME' ; Time
when 'DATE' ; Date
when 'DATETIME', 'TIMESTAMP' ; DateTime
when 'BOOL', 'BOOLEAN' ; Property::Boolean
when /\w*TEXT/ ; Property::Text
end || super(primitive)
end
+
+ def reflective_table_schema
+ # TODO: can we fix this shit in dm-mysql-adapter?
+ (options[:path] || options['path'] ||
+ options[:database] || options['database']).sub('/', '')
+ end
end
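With this rework, the MySQL adapter's `indices` builds the same per-column hash (via `reflective_indices_hash`) that the Postgres adapter already returned. For the `abstract_cats` fixture used in the shared tests, the result is roughly the following; the exact index names and the presence of the primary-key entry vary by adapter, and the connection URI here is illustrative:

``` ruby
# Approximate return shape of adapter#indices, inferred from the
# cat_indices expectation in lib/dm-is-reflective/test.rb.
dm = DataMapper.setup :default, 'mysql://localhost/myapp_test'

dm.indices('abstract_cats')
# => {:id            => {:key => true, :unique_index => :abstract_cats_pkey},
#     :super_user_id => {:unique_index => :unique_abstract_cats_usu,
#                        :index        => :index_abstract_cats_su},
#     :user_id       => {:unique_index => [:unique_abstract_cats_usu,
#                                          :unique_abstract_cats_u]}}
```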
|
godfat/dm-is-reflective
|
f6a408e4c8ba6506c3f19dd495b1e5f6cdc1c639
|
sqlite doesn't treat primary key an unique index :(
|
diff --git a/lib/dm-is-reflective/adapters/sqlite_adapter.rb b/lib/dm-is-reflective/adapters/sqlite_adapter.rb
index cddfc88..9ace5d7 100644
--- a/lib/dm-is-reflective/adapters/sqlite_adapter.rb
+++ b/lib/dm-is-reflective/adapters/sqlite_adapter.rb
@@ -1,102 +1,98 @@
module DmIsReflective::SqliteAdapter
include DataMapper
def storages
sql = <<-SQL
SELECT name FROM sqlite_master
WHERE type = 'table' AND NOT name = 'sqlite_sequence'
SQL
select(Ext::String.compress_lines(sql))
end
- private
- def reflective_query_storage storage
- sql_indices = <<-SQL
+ def indices storage
+ sql = <<-SQL
SELECT name, sql FROM sqlite_master
WHERE type = 'index' AND tbl_name = ?
SQL
- indices = select(sql_indices, storage).inject({}){ |r, field|
- columns = field.sql[/\(.+\)/].scan(/\w+/)
- uniqueness = !!field.sql[/CREATE UNIQUE INDEX/]
+ select(Ext::String.compress_lines(sql), storage).inject({}){ |r, idx|
+ columns = idx.sql[/\(.+\)/].scan(/\w+/)
+ uniqueness = !!idx.sql[/CREATE UNIQUE INDEX/]
columns.each{ |c|
type = if uniqueness then :unique_index else :index end
r[c] ||= {:unique_index => [], :index => []}
- r[c][type] << field.name
+ r[c][type] << idx.name.to_sym
}
+ r
+ }.inject({}){ |r, (column, idxs)|
+ idx_uni, idx_com = [:unique_index, :index].map{ |type|
+ i = idxs[type]
+ if i.empty?
+ nil
+ elsif i.size == 1
+ i.first.to_sym
+ else
+ i.map(&:to_sym)
+ end
+ }
+ r[column.to_sym] = reflective_indices_hash(false, idx_uni, idx_com)
r
}
+ end
- select('PRAGMA table_info(?)', storage).map{ |field|
- if idx = indices[field.name]
- idx_uni, idx_com = [:unique_index, :index].map{ |type|
- i = idx[type]
- if i.empty?
- nil
- elsif i.size == 1
- i.first.to_sym
- else
- i.map(&:to_sym)
- end
- }
- else
- idx_uni, idx_com = nil
- end
-
- field.instance_eval <<-RUBY
- def table_name ; '#{storage}' ; end
- def index ; #{idx_com.inspect}; end
- def unique_index; #{idx_uni.inspect}; end
- RUBY
-
- field
+ private
+ def reflective_query_storage storage
+ idxs = indices(storage)
+ select('PRAGMA table_info(?)', storage).map{ |f|
+ f.define_singleton_method(:storage){ storage }
+ f.define_singleton_method(:indices){ idxs[f.name.to_sym] }
+ f
}
end
def reflective_field_name field
field.name
end
def reflective_primitive field
field.type.gsub(/\(\d+\)/, '')
end
- def reflective_attributes field, attrs = {}
+ def reflective_attributes field, attrs={}
+ attrs.merge!(field.indices) if field.indices
+
if field.pk != 0
attrs[:key] = true
attrs[:serial] = true
- attrs[:unique_index] = :"#{field.table_name}_pkey"
+ attrs[:unique_index] = :"#{field.storage}_pkey"
end
- attrs[:unique_index] = field.unique_index if field.unique_index
- attrs[ :index] = field. index if field. index
-
attrs[:allow_nil] = field.notnull == 0
attrs[:default] = field.dflt_value[1..-2] if field.dflt_value
if field.type.upcase == 'TEXT'
attrs[:length] = Property::Text.length
else
ergo = field.type.match(/\((\d+)\)/)
size = ergo && ergo[1].to_i
attrs[:length] = size if size
end
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when 'INTEGER' ; Integer
when 'REAL', 'NUMERIC'; Float
when 'VARCHAR' ; String
when 'TIMESTAMP' ; DateTime
when 'BOOLEAN' ; Property::Boolean
when 'TEXT' ; Property::Text
end || super(primitive)
end
end
|
godfat/dm-is-reflective
|
bca34b07b4d3b2036d8a6c506d0695b47c34e407
|
simpler code
|
diff --git a/lib/dm-is-reflective/adapters/postgres_adapter.rb b/lib/dm-is-reflective/adapters/postgres_adapter.rb
index 4143445..92fd9e8 100644
--- a/lib/dm-is-reflective/adapters/postgres_adapter.rb
+++ b/lib/dm-is-reflective/adapters/postgres_adapter.rb
@@ -1,104 +1,103 @@
module DmIsReflective::PostgresAdapter
include DataMapper
def storages
sql = <<-SQL
SELECT table_name FROM "information_schema"."tables"
WHERE table_schema = current_schema()
SQL
select(Ext::String.compress_lines(sql))
end
def indices storage
sql = <<-SQL
SELECT a.attname, i.relname, ix.indisprimary, ix.indisunique
FROM pg_class t, pg_class i, pg_index ix, pg_attribute a
WHERE t.oid = ix.indrelid
AND i.oid = ix.indexrelid
AND a.attrelid = t.oid
AND a.attnum = ANY(ix.indkey)
AND t.relkind = 'r'
AND t.relname = ?
SQL
select(Ext::String.compress_lines(sql), storage).group_by(&:attname).
inject({}) do |r, (column, idxs)|
key = !!idxs.find(&:indisprimary)
idx_uni, idx_com = idxs.partition(&:indisunique).map{ |i|
if i.empty?
nil
elsif i.size == 1
i.first.relname.to_sym
else
i.map{ |ii| ii.relname.to_sym }
end
}
r[column.to_sym] = reflective_indices_hash(key, idx_uni, idx_com)
r
end
end
private
def reflective_query_storage storage
sql = <<-SQL
SELECT column_name, column_default, is_nullable,
character_maximum_length, udt_name
FROM "information_schema"."columns"
WHERE table_schema = current_schema() AND table_name = ?
SQL
idxs = indices(storage)
- select(Ext::String.compress_lines(sql), storage).map do |field|
- field.define_singleton_method(:indices){ idxs }
- field
+ select(Ext::String.compress_lines(sql), storage).map do |f|
+ f.define_singleton_method(:indices){ idxs[f.column_name.to_sym] }
+ f
end
end
def reflective_field_name field
field.column_name
end
def reflective_primitive field
field.udt_name
end
def reflective_attributes field, attrs={}
# strip data type
if field.column_default
field.column_default.gsub!(/(.*?)::[\w\s]*/, '\1')
end
- idx = field.indices[field.column_name.to_sym]
- attrs.merge!(idx) if idx
+ attrs.merge!(field.indices) if field.indices
attrs[:serial] = true if field.column_default =~ /nextval\('\w+'\)/
attrs[:allow_nil] = field.is_nullable == 'YES'
# strip string quotation
attrs[:default] = field.column_default.gsub(/^'(.*?)'$/, '\1') if
field.column_default && !attrs[:serial]
if field.character_maximum_length
attrs[:length] = field.character_maximum_length
elsif field.udt_name.upcase == 'TEXT'
attrs[:length] = Property::Text.length
end
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when /^INT\d+$/ ; Integer
when /^FLOAT\d+$/ ; Float
when 'VARCHAR', 'BPCHAR'; String
when 'TIMESTAMP', 'DATE'; DateTime
when 'TEXT' ; Property::Text
when 'BOOL' ; Property::Boolean
when 'NUMERIC' ; Property::Decimal
end || super(primitive)
end
end
|
godfat/dm-is-reflective
|
182982740eb16285d5d21c5c7b6e6f4604f3be1f
|
consistent name
|
diff --git a/lib/dm-is-reflective/adapters/postgres_adapter.rb b/lib/dm-is-reflective/adapters/postgres_adapter.rb
index 7de8f5d..4143445 100644
--- a/lib/dm-is-reflective/adapters/postgres_adapter.rb
+++ b/lib/dm-is-reflective/adapters/postgres_adapter.rb
@@ -1,104 +1,104 @@
module DmIsReflective::PostgresAdapter
include DataMapper
def storages
sql = <<-SQL
SELECT table_name FROM "information_schema"."tables"
WHERE table_schema = current_schema()
SQL
select(Ext::String.compress_lines(sql))
end
def indices storage
sql = <<-SQL
SELECT a.attname, i.relname, ix.indisprimary, ix.indisunique
FROM pg_class t, pg_class i, pg_index ix, pg_attribute a
WHERE t.oid = ix.indrelid
AND i.oid = ix.indexrelid
AND a.attrelid = t.oid
AND a.attnum = ANY(ix.indkey)
AND t.relkind = 'r'
AND t.relname = ?
SQL
select(Ext::String.compress_lines(sql), storage).group_by(&:attname).
inject({}) do |r, (column, idxs)|
key = !!idxs.find(&:indisprimary)
idx_uni, idx_com = idxs.partition(&:indisunique).map{ |i|
if i.empty?
nil
elsif i.size == 1
i.first.relname.to_sym
else
i.map{ |ii| ii.relname.to_sym }
end
}
r[column.to_sym] = reflective_indices_hash(key, idx_uni, idx_com)
r
end
end
private
def reflective_query_storage storage
sql = <<-SQL
SELECT column_name, column_default, is_nullable,
character_maximum_length, udt_name
FROM "information_schema"."columns"
WHERE table_schema = current_schema() AND table_name = ?
SQL
idxs = indices(storage)
- select(Ext::String.compress_lines(sql), storage).map do |column|
- column.define_singleton_method(:indices){ idxs }
- column
+ select(Ext::String.compress_lines(sql), storage).map do |field|
+ field.define_singleton_method(:indices){ idxs }
+ field
end
end
def reflective_field_name field
field.column_name
end
def reflective_primitive field
field.udt_name
end
def reflective_attributes field, attrs={}
# strip data type
if field.column_default
field.column_default.gsub!(/(.*?)::[\w\s]*/, '\1')
end
idx = field.indices[field.column_name.to_sym]
attrs.merge!(idx) if idx
attrs[:serial] = true if field.column_default =~ /nextval\('\w+'\)/
attrs[:allow_nil] = field.is_nullable == 'YES'
# strip string quotation
attrs[:default] = field.column_default.gsub(/^'(.*?)'$/, '\1') if
field.column_default && !attrs[:serial]
if field.character_maximum_length
attrs[:length] = field.character_maximum_length
elsif field.udt_name.upcase == 'TEXT'
attrs[:length] = Property::Text.length
end
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when /^INT\d+$/ ; Integer
when /^FLOAT\d+$/ ; Float
when 'VARCHAR', 'BPCHAR'; String
when 'TIMESTAMP', 'DATE'; DateTime
when 'TEXT' ; Property::Text
when 'BOOL' ; Property::Boolean
when 'NUMERIC' ; Property::Decimal
end || super(primitive)
end
end
|
godfat/dm-is-reflective
|
2fbaa01cbb4ba068781ffbae47f2132e55a8088d
|
make sure indices method is available
|
diff --git a/lib/dm-is-reflective/adapters/data_objects_adapter.rb b/lib/dm-is-reflective/adapters/data_objects_adapter.rb
index 4c506e5..d176c25 100644
--- a/lib/dm-is-reflective/adapters/data_objects_adapter.rb
+++ b/lib/dm-is-reflective/adapters/data_objects_adapter.rb
@@ -1,144 +1,150 @@
module DmIsReflective::DataObjectsAdapter
include DataMapper
# returns all tables' name in the repository.
# e.g.
# ['comments', 'users']
def storages
reflective_auto_load_adapter_extension
storages # call the overrided method
end
+ # returns all indices in the storage.
+ def indices storage
+ reflective_auto_load_adapter_extension
+ indices(storage) # call the overrided method
+ end
+
# returns all fields, with format [[name, type, attrs]]
# e.g.
# [[:created_at, DateTime, {:required => false}],
# [:email, String, {:required => false, :size => 255,
# :default => '[email protected]'}],
# [:id, DataMapper::Property::Serial, {:required => true, :serial => true,
# :key => true}],
# [:salt_first, String, {:required => false, :size => 50}],
# [:salt_second, String, {:required => false, :size => 50}]]
def fields storage
reflective_query_storage(storage).map{ |field|
attr = reflective_attributes(field)
type = reflective_lookup_primitive(reflective_primitive(field))
pick = if attr[:serial] && type == Integer
Property::Serial
else
type
end
[reflective_field_name(field).to_sym, pick, attr]
}
end
# returns a hash with storage names in keys and
# corresponded fields in values. e.g.
# {'users' => [[:id, Integer, {:required => true,
# :serial => true,
# :key => true}],
# [:email, String, {:required => false,
# :default => '[email protected]'}],
# [:created_at, DateTime, {:required => false}],
# [:salt_first, String, {:required => false, :size => 50}],
# [:salt_second, String, {:required => false, :size => 50}]]}
# see AbstractAdapter#storages and AbstractAdapter#fields for detail
def storages_and_fields
storages.inject({}){ |result, storage|
result[storage] = fields(storage)
result
}
end
# automaticly generate model class(es) and reflect
# all fields with reflect /.*/ for you.
# e.g.
# dm.auto_genclass!
# # => [DataMapper::Is::Reflective::User,
# # DataMapper::Is::Reflective::SchemaInfo,
# # DataMapper::Is::Reflective::Session]
#
# you can change the scope of generated models:
# e.g.
# dm.auto_genclass! :scope => Object
# # => [User, SchemaInfo, Session]
#
# you can generate classes for tables you specified only:
# e.g.
# dm.auto_genclass! :scope => Object, :storages => /^phpbb_/
# # => [PhpbbUser, PhpbbPost, PhpbbConfig]
#
# you can generate classes with String too:
# e.g.
# dm.auto_genclass! :storages => ['users', 'config'], :scope => Object
# # => [User, Config]
#
# you can generate a class only:
# e.g.
# dm.auto_genclass! :storages => 'users'
# # => [DataMapper::Is::Reflective::User]
def auto_genclass! opts = {}
opts[:scope] ||= DmIsReflective
opts[:storages] ||= /.*/
opts[:storages] = [opts[:storages]].flatten
storages.map{ |storage|
mapped = opts[:storages].each{ |target|
case target
when Regexp;
break storage if storage =~ target
when Symbol, String;
break storage if storage == target.to_s
else
raise ArgumentError.new("invalid argument: #{target.inspect}")
end
}
reflective_genclass(mapped, opts[:scope]) if mapped.kind_of?(String)
}.compact
end
private
def reflective_query_storage storage
reflective_auto_load_adapter_extension
reflective_query_storage(storage) # call the overrided method
end
def reflective_genclass storage, scope
model = Class.new
model.__send__(:include, Resource)
model.is(:reflective)
model.storage_names[:default] = storage
scope.const_set(Inflector.classify(storage), model)
model.__send__(:reflect, /.*/)
model
end
def reflective_lookup_primitive primitive
warn "#{primitive} not found for #{self.class}: #{caller.inspect}"
String # falling back to the universal interface
end
def reflective_auto_load_adapter_extension
# TODO: can we fix this shit in dm-mysql-adapter?
name = options[:adapter] || options['adapter']
# TODO: can we fix this adapter name in dm-sqlite-adapter?
adapter = name.sub(/\Asqlite3\Z/, 'sqlite')
require "dm-is-reflective/adapters/#{adapter}_adapter"
class_name = "#{Inflector.camelize(adapter)}Adapter"
Adapters.const_get(class_name).__send__(:include,
DmIsReflective.const_get(class_name))
end
def reflective_indices_hash key, idx_uni, idx_com
h = {}
h[:key] = key if key
h[:unique_index] = idx_uni if idx_uni
h[ :index] = idx_com if idx_com
h
end
end
|
godfat/dm-is-reflective
|
c2b511eeb28f2652fe0f0bb3b55d197eb5528d0e
|
introduce indices for accessing indices
|
diff --git a/lib/dm-is-reflective/adapters/data_objects_adapter.rb b/lib/dm-is-reflective/adapters/data_objects_adapter.rb
index f77d215..4c506e5 100644
--- a/lib/dm-is-reflective/adapters/data_objects_adapter.rb
+++ b/lib/dm-is-reflective/adapters/data_objects_adapter.rb
@@ -1,136 +1,144 @@
module DmIsReflective::DataObjectsAdapter
include DataMapper
# returns all tables' name in the repository.
# e.g.
# ['comments', 'users']
def storages
reflective_auto_load_adapter_extension
storages # call the overridden method
end
# returns all fields, with format [[name, type, attrs]]
# e.g.
# [[:created_at, DateTime, {:required => false}],
# [:email, String, {:required => false, :size => 255,
# :default => '[email protected]'}],
# [:id, DataMapper::Property::Serial, {:required => true, :serial => true,
# :key => true}],
# [:salt_first, String, {:required => false, :size => 50}],
# [:salt_second, String, {:required => false, :size => 50}]]
def fields storage
reflective_query_storage(storage).map{ |field|
attr = reflective_attributes(field)
type = reflective_lookup_primitive(reflective_primitive(field))
pick = if attr[:serial] && type == Integer
Property::Serial
else
type
end
[reflective_field_name(field).to_sym, pick, attr]
}
end
# returns a hash with storage names in keys and
# corresponding fields in values. e.g.
# {'users' => [[:id, Integer, {:required => true,
# :serial => true,
# :key => true}],
# [:email, String, {:required => false,
# :default => '[email protected]'}],
# [:created_at, DateTime, {:required => false}],
# [:salt_first, String, {:required => false, :size => 50}],
# [:salt_second, String, {:required => false, :size => 50}]]}
# see AbstractAdapter#storages and AbstractAdapter#fields for detail
def storages_and_fields
storages.inject({}){ |result, storage|
result[storage] = fields(storage)
result
}
end
# automatically generate model class(es) and reflect
# all fields with reflect /.*/ for you.
# e.g.
# dm.auto_genclass!
# # => [DataMapper::Is::Reflective::User,
# # DataMapper::Is::Reflective::SchemaInfo,
# # DataMapper::Is::Reflective::Session]
#
# you can change the scope of generated models:
# e.g.
# dm.auto_genclass! :scope => Object
# # => [User, SchemaInfo, Session]
#
# you can generate classes for tables you specified only:
# e.g.
# dm.auto_genclass! :scope => Object, :storages => /^phpbb_/
# # => [PhpbbUser, PhpbbPost, PhpbbConfig]
#
# you can generate classes with String too:
# e.g.
# dm.auto_genclass! :storages => ['users', 'config'], :scope => Object
# # => [User, Config]
#
# you can generate a class only:
# e.g.
# dm.auto_genclass! :storages => 'users'
# # => [DataMapper::Is::Reflective::User]
def auto_genclass! opts = {}
opts[:scope] ||= DmIsReflective
opts[:storages] ||= /.*/
opts[:storages] = [opts[:storages]].flatten
storages.map{ |storage|
mapped = opts[:storages].each{ |target|
case target
when Regexp;
break storage if storage =~ target
when Symbol, String;
break storage if storage == target.to_s
else
raise ArgumentError.new("invalid argument: #{target.inspect}")
end
}
reflective_genclass(mapped, opts[:scope]) if mapped.kind_of?(String)
}.compact
end
private
def reflective_query_storage storage
reflective_auto_load_adapter_extension
reflective_query_storage(storage) # call the overridden method
end
def reflective_genclass storage, scope
model = Class.new
model.__send__(:include, Resource)
model.is(:reflective)
model.storage_names[:default] = storage
scope.const_set(Inflector.classify(storage), model)
model.__send__(:reflect, /.*/)
model
end
def reflective_lookup_primitive primitive
warn "#{primitive} not found for #{self.class}: #{caller.inspect}"
String # falling back to the universal interface
end
def reflective_auto_load_adapter_extension
# TODO: can we fix this shit in dm-mysql-adapter?
name = options[:adapter] || options['adapter']
# TODO: can we fix this adapter name in dm-sqlite-adapter?
adapter = name.sub(/\Asqlite3\Z/, 'sqlite')
require "dm-is-reflective/adapters/#{adapter}_adapter"
class_name = "#{Inflector.camelize(adapter)}Adapter"
Adapters.const_get(class_name).__send__(:include,
DmIsReflective.const_get(class_name))
end
+
+ def reflective_indices_hash key, idx_uni, idx_com
+ h = {}
+ h[:key] = key if key
+ h[:unique_index] = idx_uni if idx_uni
+ h[ :index] = idx_com if idx_com
+ h
+ end
end
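
The new reflective_indices_hash helper above simply folds the three optional pieces into one options hash; for instance (index names invented):

reflective_indices_hash(true, :users_pkey, nil)
# => {:key => true, :unique_index => :users_pkey}

reflective_indices_hash(false, [:usu, :u], :su)
# => {:unique_index => [:usu, :u], :index => :su}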
diff --git a/lib/dm-is-reflective/adapters/postgres_adapter.rb b/lib/dm-is-reflective/adapters/postgres_adapter.rb
index c96f952..7de8f5d 100644
--- a/lib/dm-is-reflective/adapters/postgres_adapter.rb
+++ b/lib/dm-is-reflective/adapters/postgres_adapter.rb
@@ -1,110 +1,104 @@
module DmIsReflective::PostgresAdapter
include DataMapper
def storages
sql = <<-SQL
SELECT table_name FROM "information_schema"."tables"
WHERE table_schema = current_schema()
SQL
select(Ext::String.compress_lines(sql))
end
- private
- def reflective_query_storage storage
- sql_indices = <<-SQL
+ def indices storage
+ sql = <<-SQL
SELECT a.attname, i.relname, ix.indisprimary, ix.indisunique
FROM pg_class t, pg_class i, pg_index ix, pg_attribute a
WHERE t.oid = ix.indrelid
AND i.oid = ix.indexrelid
AND a.attrelid = t.oid
AND a.attnum = ANY(ix.indkey)
AND t.relkind = 'r'
AND t.relname = ?
SQL
- sql_columns = <<-SQL
- SELECT column_name, column_default, is_nullable,
- character_maximum_length, udt_name
- FROM "information_schema"."columns"
- WHERE table_schema = current_schema() AND table_name = ?
- SQL
-
- indices =
- select(Ext::String.compress_lines(sql_indices), storage).
- group_by(&:attname)
-
- select(Ext::String.compress_lines(sql_columns), storage).map do |column|
- if idx = indices[column.column_name]
- is_key = !!idx.find{ |i| i.indisprimary }
- idx_uni, idx_com = idx.partition{ |i| i.indisunique }.map{ |i|
+ select(Ext::String.compress_lines(sql), storage).group_by(&:attname).
+ inject({}) do |r, (column, idxs)|
+ key = !!idxs.find(&:indisprimary)
+ idx_uni, idx_com = idxs.partition(&:indisunique).map{ |i|
if i.empty?
nil
elsif i.size == 1
i.first.relname.to_sym
else
i.map{ |ii| ii.relname.to_sym }
end
}
- else
- is_key = false
- idx_uni, idx_com = nil
+
+ r[column.to_sym] = reflective_indices_hash(key, idx_uni, idx_com)
+ r
end
+ end
- column.instance_eval <<-RUBY
- def key? ; #{is_key} ; end
- def unique_index; #{idx_uni.inspect}; end
- def index ; #{idx_com.inspect}; end
- RUBY
+ private
+ def reflective_query_storage storage
+ sql = <<-SQL
+ SELECT column_name, column_default, is_nullable,
+ character_maximum_length, udt_name
+ FROM "information_schema"."columns"
+ WHERE table_schema = current_schema() AND table_name = ?
+ SQL
+
+ idxs = indices(storage)
+ select(Ext::String.compress_lines(sql), storage).map do |column|
+ column.define_singleton_method(:indices){ idxs }
column
end
end
def reflective_field_name field
field.column_name
end
def reflective_primitive field
field.udt_name
end
- def reflective_attributes field, attrs = {}
+ def reflective_attributes field, attrs={}
# strip data type
if field.column_default
field.column_default.gsub!(/(.*?)::[\w\s]*/, '\1')
end
- attrs[:serial] = true if field.column_default =~ /nextval\('\w+'\)/
- attrs[:key] = true if field.key?
-
- attrs[:unique_index] = field.unique_index if field.unique_index
- attrs[ :index] = field. index if field. index
+ idx = field.indices[field.column_name.to_sym]
+ attrs.merge!(idx) if idx
+ attrs[:serial] = true if field.column_default =~ /nextval\('\w+'\)/
attrs[:allow_nil] = field.is_nullable == 'YES'
# strip string quotation
attrs[:default] = field.column_default.gsub(/^'(.*?)'$/, '\1') if
field.column_default && !attrs[:serial]
if field.character_maximum_length
attrs[:length] = field.character_maximum_length
elsif field.udt_name.upcase == 'TEXT'
attrs[:length] = Property::Text.length
end
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when /^INT\d+$/ ; Integer
when /^FLOAT\d+$/ ; Float
when 'VARCHAR', 'BPCHAR'; String
when 'TIMESTAMP', 'DATE'; DateTime
when 'TEXT' ; Property::Text
when 'BOOL' ; Property::Boolean
when 'NUMERIC' ; Property::Decimal
end || super(primitive)
end
end
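
For orientation, a sketch of what the new indices method above hands back for a table carrying a serial primary key, one composite unique index and one plain index; the table and index names are invented:

# Illustrative only: indices('cats'), assembled via reflective_indices_hash.
{
  :id            => {:key => true, :unique_index => :cats_pkey},
  :user_id       => {:unique_index => [:unique_cats_usu, :unique_cats_u]},
  :super_user_id => {:unique_index => :unique_cats_usu, :index => :index_cats_su}
}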
diff --git a/lib/dm-is-reflective/test.rb b/lib/dm-is-reflective/test.rb
index 46e190e..e114a77 100644
--- a/lib/dm-is-reflective/test.rb
+++ b/lib/dm-is-reflective/test.rb
@@ -1,314 +1,327 @@
require 'bacon'
Bacon.summary_on_exit
require 'dm-core'
require 'dm-migrations'
require 'dm-is-reflective'
module Abstract
class Cat
include DataMapper::Resource
property :id, Serial
belongs_to :user
belongs_to :super_user
property :user_id , Integer,
:unique_index => [:usu, :u]
property :super_user_id, Integer,
:unique_index => [:usu],
:index => [:su]
end
class Comment
include DataMapper::Resource
belongs_to :user, :required => false
property :id, Serial
property :title, String, :length => 50, :default => 'default title',
:allow_nil => false
property :body, Text
is :reflective
end
class User
include DataMapper::Resource
has n, :comments
property :id, Serial
property :login, String, :length => 70
property :sig, Text
property :created_at, DateTime
is :reflective
end
class SuperUser
include DataMapper::Resource
property :id, Serial
property :bool, Boolean
is :reflective
end
Tables = %w[abstract_cats abstract_comments
abstract_super_users abstract_users]
AttrCommon = {:allow_nil => true}
AttrCommonPK = {:serial => true, :key => true, :allow_nil => false}
AttrText = {:length => 65535}.merge(AttrCommon)
def self.next_id
@id ||= 0
@id += 1
end
end
include Abstract
shared :reflective do
+ def cat_indices
+ @cat_indices ||=
+ [[ :id, {:unique_index => :abstract_cats_pkey, :key => true}],
+ [:super_user_id, {:unique_index => :unique_abstract_cats_usu ,
+ :index => :index_abstract_cats_su }] ,
+ [ :user_id, {:unique_index => [:unique_abstract_cats_usu ,
+ :unique_abstract_cats_u]}] ]
+ end
+
def cat_fields
@cat_fields ||=
[[:id, DataMapper::Property::Serial,
{:unique_index => :abstract_cats_pkey}.merge(AttrCommonPK)],
[:super_user_id, Integer,
{:unique_index => :unique_abstract_cats_usu,
:index => :index_abstract_cats_su }.merge(AttrCommon)],
[:user_id , Integer,
{:unique_index => [:unique_abstract_cats_usu,
:unique_abstract_cats_u]}.merge(AttrCommon)]]
end
def comment_fields
@comment_fields ||= begin
[[:body , DataMapper::Property::Text , AttrText],
[:id , DataMapper::Property::Serial,
{:unique_index => :abstract_comments_pkey}.merge(AttrCommonPK)],
[:title , String ,
{:length => 50, :default => 'default title', :allow_nil => false}],
[:user_id, Integer ,
{:index => :index_abstract_comments_user}.merge(AttrCommon)]]
end
end
def user_fields
@user_fields ||=
[[:created_at, DateTime, AttrCommon],
[:id, DataMapper::Property::Serial,
{:unique_index => :abstract_users_pkey}.merge(AttrCommonPK)],
[:login, String, {:length => 70}.merge(AttrCommon)],
[:sig, DataMapper::Property::Text, AttrText]]
end
def super_user_fields
@super_user_fields ||= begin
type = case DataMapper.repository.adapter.class.name
when 'DataMapper::Adapters::MysqlAdapter'
Integer
else
DataMapper::Property::Boolean
end
[[:bool, type, AttrCommon],
[:id , DataMapper::Property::Serial,
{:unique_index => :abstract_super_users_pkey}.merge(AttrCommonPK)]]
end
end
before do
@dm = setup_data_mapper
[Cat, Comment, User, SuperUser].each(&:auto_migrate!)
end
def sort_fields fields
fields.sort_by{ |f| f.first.to_s }
end
def create_fake_model
model = Class.new
model.module_eval do
include DataMapper::Resource
property :id, DataMapper::Property::Serial
is :reflective
end
Abstract.const_set("Model#{Abstract.next_id}", model)
[model, setup_data_mapper]
end
def new_scope
Abstract.const_set("Scope#{Abstract.next_id}", Module.new)
end
def test_create_comment
Comment.create(:title => 'XD')
Comment.first.title.should.eq 'XD'
end
def test_create_user
now = Time.now
User.create(:created_at => now)
User.first.created_at.asctime.should.eq now.asctime
now
end
should 'create comment' do
test_create_comment
end
should 'create user' do
test_create_user
end
should 'storages' do
@dm.storages.sort.should.eq Tables
sort_fields(@dm.fields('abstract_comments')).should.eq comment_fields
end
should 'reflect all' do
test_create_comment # for fixtures
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
local_dm.storages.sort.should.eq Tables
model.storage_name.should.eq 'abstract_comments'
model.send :reflect
model.all.size .should.eq 1
sort_fields(model.fields).should.eq comment_fields
model.first.title .should.eq 'XD'
end
should 'reflect and create' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect
model.create(:title => 'orz')
model.first.title.should.eq 'orz'
model.create
model.last.title.should.eq 'default title'
end
should 'storages and fields' do
sort_fields(@dm.fields('abstract_users')).should.eq user_fields
@dm.storages_and_fields.inject({}){ |r, i|
key, value = i
r[key] = value.sort_by{ |v| v.first.to_s }
r
}.should.eq('abstract_cats' => cat_fields,
'abstract_comments' => comment_fields,
'abstract_users' => user_fields,
'abstract_super_users' => super_user_fields)
end
+ should 'indices' do
+ sort_fields(@dm.indices('abstract_cats')).should.eq cat_indices
+ end
+
should 'reflect type' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect, DataMapper::Property::Serial
model.properties.map(&:name).map(&:to_s).sort.should.eq ['id']
model.send :reflect, Integer
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'user_id']
end
should 'reflect multiple' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_users'
model.send :reflect, :login, DataMapper::Property::Serial
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'login']
end
should 'reflect regexp' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect, /id$/
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'user_id']
end
should 'raise ArgumentError when giving invalid argument' do
lambda{
User.send :reflect, 29
}.should.raise ArgumentError
end
should 'allow empty string' do
Comment.new(:title => '').save.should.eq true
end
should 'auto_genclasses' do
scope = new_scope
@dm.auto_genclass!(:scope => scope).map(&:to_s).sort.should.eq \
["#{scope}::AbstractCat" ,
"#{scope}::AbstractComment" ,
"#{scope}::AbstractSuperUser",
"#{scope}::AbstractUser"]
comment = scope.const_get('AbstractComment')
sort_fields(comment.fields).should.eq comment_fields
test_create_comment
comment.first.title.should.eq 'XD'
comment.create(:title => 'orz', :body => 'dm-reflect')
comment.last.body.should.eq 'dm-reflect'
end
should 'auto_genclass' do
scope = new_scope
@dm.auto_genclass!(:scope => scope,
:storages => 'abstract_users').map(&:to_s).should.eq \
["#{scope}::AbstractUser"]
user = scope.const_get('AbstractUser')
sort_fields(user.fields).should.eq user_fields
now = test_create_user
user.first.created_at.asctime.should.eq now.asctime
user.create(:login => 'godfat')
user.last.login.should.eq 'godfat'
end
should 'auto_genclass with regexp' do
scope = new_scope
@dm.auto_genclass!(:scope => scope,
:storages => /_users$/).map(&:to_s).sort.should.eq \
["#{scope}::AbstractSuperUser", "#{scope}::AbstractUser"]
user = scope.const_get('AbstractSuperUser')
sort_fields(user.fields).should.eq sort_fields(SuperUser.fields)
end
should 'reflect return value' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
mapped = model.send :reflect, /.*/
mapped.map(&:object_id).sort.should.eq \
model.properties.map(&:object_id).sort
end
end
module Kernel
def eq? rhs
self == rhs
end
def require_adapter adapter
require "dm-#{adapter}-adapter"
rescue LoadError
puts "skip #{adapter} test since it's not installed"
end
end
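
The shared :reflective block above is meant to be pulled into one small spec file per adapter. A minimal sketch of such a driver; the in-memory sqlite URI is an assumption and the real test/test_*.rb files may differ:

require 'dm-is-reflective/test'

describe 'sqlite' do
  require_adapter 'sqlite'

  def setup_data_mapper
    DataMapper.setup(:default, 'sqlite::memory:')
  end

  behaves_like :reflective
end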
|
godfat/dm-is-reflective
|
e0f581d8463110c24bf314df95e14497dc6dfe54
|
version bump and prepare release
|
diff --git a/CHANGES.md b/CHANGES.md
index 481a11e..71422f3 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,240 +1,249 @@
# CHANGES
+## dm-is-reflective 1.3.0, 2013-05-20
+
+* Warn instead of raising a TypeError if a datatype cannot be found.
+ We fall back to using String.
+* Now it works for multiple composite keys.
+* If there's no key defined, it would pick the first unique index as the key.
+* If a field name conflicts, it would try to resolve it by appending an
+ underscore to the field name.
+
## dm-is-reflective 1.2.0, 2013-05-14
* We got a bunch of internal renaming.
* Added DataMapper::Resource#to_source.
* Added an executable which generates sources for you.
* Fixed MySQL issues with setting up with a hash rather than URI.
* Fixed SQLite issues without loading dm-migrations.
## dm-is-reflective 1.1.0, 2013-01-11
* The need for dm-migrations is now removed.
* Added a few more datatypes. Thanks @onewheelskyward
* Tested against dm-core 1.2.0.
## dm-is-reflective 1.0.1, 2012-05-16
* allow_nil is closer to db's semantics, not required. Thanks miaout17.
`:allow_nil` allows empty value, but `:required` does not. So here we
always use `:allow_nil` to match db's semantics.
## dm-is-reflective 1.0.0, 2011-06-16
* updated against dm-core 1.1.0
## dm-is-reflective 0.9.0, 2010-07-05
* adapted to dm-core 1.0.0
* renamed AbstractAdapter to DataObjectsAdapter
## dm-is-reflective 0.8.0, 2009-09-16
* require dm-core 0.10.0 and above now
* Serial would map to Serial not Integer now
* no more type_map now
* no more Extlib::Hook to load adapter
## dm-mapping 0.7.1, never released as a gem
don't open module Migration and edit it, instead, use include, more see:
* added DataMapper::Mapping::AbstractAdapter
* added DataMapper::Mapping::Sqlite3Adapter
* added DataMapper::Mapping::MysqlAdapter
* added DataMapper::Mapping::PostgresAdapter
* each adapter was included in related adapter in DataMapper.
* Model#fields now accept repository name as argument
there's differences between adapters,
Sqlite3 added default => 'UL' in Boolean type,
Mysql can't tell whether it's a Boolean or Tinyint,
and Postgres is fine. see test/abstract.rb: super_user_fields for detail.
## dm-mapping 0.7.0, 2008-09-01
* feature added
- added postgres support.
* bug fixed
- fixed key mapping in mysql adapter. PRI and MUL are all keys.
- use DM::Text.size as default text size in sqlite3.
## dm-mapping 0.6.2, 2008-08-30
* mapping more data types for mysql.
* don't map TINYINT to TrueClass with mysql, skip it in type_map.
## dm-mapping 0.6.1, 2008-08-22
* gem 'dm-core', '>=0.9.3' instead of '=0.9.3'
## dm-mapping 0.6.0, 2008-08-16
* mapping returns an array of properties indicating fields it mapped.
* performance boosted by refactored mapping implementation.
* changed the way using auto_genclass!, now accepts args like mapping!
* changed fields to return field name with Symbol instead of String.
this would make it be more consistent with DataMapper.
* storage names remain String.
* added more mysql data type to map
* use Extlib::Hook to setup dm-mapping instead of stupid alias_method.
* removed ensure_require in model. always setup DataMapper before define model.
## dm-mapping 0.5.0, 2008-08-14
* feature added
- added mysql support.
- reflect size 65535 in TEXT for sqlite3.
* bug fixed
- reflect VARCHAR(size) instead of default size in sqlite3.
* misc
- renamed sqlite3adapter to sqlite3_adapter.
## dm-mapping 0.4.1, 2008-08-14
* removed type hack, replaced with rejecting special type to lookup.
## dm-mapping 0.4.0, 2008-08-04
* added Migration#auto_genclass!.
* updated README.
* added more rdoc.
## dm-mapping 0.3.0, 2008-08-04
* added support of mapping Integer, DateTime, etc.
* renamed some internals.
* changed the way requiring adapter. no more setup first.
* added Migration#storages_and_fields
* added mapping :serial => true for primary key.
* added mapping :default, and :nullable.
* added support of mapping name. (through passing symbol or string)
* added support of multiple arguments.
* removed Mapping::All, use /.*/ instead.
## dm-mapping 0.2.1, 2008-08-03
* fixed a bug that type map should lookup for parent.
* fixed a bug that sql type could be lower case.
fixed by calling upcase.
## dm-mapping 0.2.0, 2008-08-02
* added Sqlite3Adapter::Migration#fields
* added DataMapper::Model#mapping
* added DataMapper::Model#fields
* added DataMapper::TypeMap#find_primitive for reversed lookup.
mapping SQL type back to Ruby type.
* added corresponded test.
## dm-mapping 0.1.0, 2008-07-27
* birthday!
* added DataMapper.repository.storages for sqlite3.
* please refer:
<http://groups.google.com/group/datamapper/browse_thread/thread/b9ca41120c5c9389>
original message:
from Lin Jen-Shin
to DataMapper
cc godfat
date Sun, Jul 27, 2008 at 5:40 PM
subject Manipulate an existing database.
mailed-by gmail.com
Greetings,
DataMapper looks very promising for me, so I am thinking of
using it in the near future. I hate separate my domain objects into
two parts in Rails, writing migration and switching to ActiveRecord,
vice versa, is very annoying to me.
But there's a very convenient feature to me in ActiveRecord,
that is ActiveRecord automatically mapping all fields in a table.
It makes me easily control an existing database without any domain object.
For example,
require 'active_record'
ActiveRecord::Base.establish_connection(
:adapter => 'sqlite3',
:database => 'db/development.sqlite3'
)
clsas User < ActiveRecord::Base
end
User.find 1
=> #<User id: 1, account: "admin", created_at: "2008-05-18 20:08:37", etc.>
Some people would use database admin such as phpMyAdmin to
accomplish this kind of task, but I prefer anything in Ruby,
calling Ruby function, manipulating data without SQL and
any domain object. (i.e. I didn't have to load up entire environment.)
In DataMapper, I didn't find an easy way to accomplish this.
I am sorry if there's one but I didn't find it, please point out,
many thanks. In short, I would like to do this in DataMapper:
class User
include DataMapper::Resource
mapping :account, :created_at
end
or
class User
include DataMapper::Resource
mapping All
end
class User
include DataMapper::ResourceAll
end
or
class User
include DataMapper::Resource
mapping *storage_fields
end
The above User.storage_fields should return an Array,
telling all the fields in the table, e.g. [:account, :created_at, :etc]
or a Hash includes data type, e.g. {:account => String,
:created_at => DateTime}
then mapping *storage_fields should change to:
mapping *storage_fields.each_key.to_a
If it's possible, a feature returning the database schema as well:
DataMapper.repository.storages
# => [:users, :posts, :etc]
DataMapper.repository.storages_and_fields
# => {:users => {:account => String},
:posts => {:title => String, :content => Text}}
or returning DataObject::Field, DataObject::Storage, etc.
DataMapper.repository.storage
# => [#<DataObject::Storage @name='users' @fields=
[#<DataObject::Field @name='account' @type=String>]>]
If you feel this kind of feature is indeed needed or not bad for
adding it, I could try to provide a patch for it. Though I didn't
read the source code deeply, not knowning it's easy or not.
sincerely,
diff --git a/dm-is-reflective.gemspec b/dm-is-reflective.gemspec
index 8fa3284..06d62e4 100644
--- a/dm-is-reflective.gemspec
+++ b/dm-is-reflective.gemspec
@@ -1,78 +1,78 @@
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = "dm-is-reflective"
- s.version = "1.2.0"
+ s.version = "1.3.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Lin Jen-Shin (godfat)"]
s.date = "2013-05-20"
s.description = "DataMapper plugin that helps you manipulate an existing database.\nIt creates mappings between existing columns and model's properties."
s.email = ["godfat (XD) godfat.org"]
s.executables = ["dm-is-reflective"]
s.files = [
".gitignore",
".gitmodules",
".travis.yml",
"CHANGES.md",
"Gemfile",
"LICENSE",
"README.md",
"Rakefile",
"TODO.md",
"bin/dm-is-reflective",
"dm-is-reflective.gemspec",
"lib/dm-is-reflective.rb",
"lib/dm-is-reflective/adapters/data_objects_adapter.rb",
"lib/dm-is-reflective/adapters/mysql_adapter.rb",
"lib/dm-is-reflective/adapters/postgres_adapter.rb",
"lib/dm-is-reflective/adapters/sqlite_adapter.rb",
"lib/dm-is-reflective/reflective.rb",
"lib/dm-is-reflective/runner.rb",
"lib/dm-is-reflective/test.rb",
"lib/dm-is-reflective/version.rb",
"task/.gitignore",
"task/gemgem.rb",
"test/setup_db.sh",
"test/test_mysql.rb",
"test/test_postgres.rb",
"test/test_sqlite.rb",
"test/test_to_source.rb"]
s.homepage = "https://github.com/godfat/dm-is-reflective"
s.licenses = ["Apache License 2.0"]
s.require_paths = ["lib"]
s.rubygems_version = "2.0.3"
s.summary = "DataMapper plugin that helps you manipulate an existing database."
s.test_files = [
"test/test_mysql.rb",
"test/test_postgres.rb",
"test/test_sqlite.rb",
"test/test_to_source.rb"]
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<dm-core>, [">= 0"])
s.add_runtime_dependency(%q<dm-do-adapter>, [">= 0"])
s.add_development_dependency(%q<dm-migrations>, [">= 0"])
s.add_development_dependency(%q<dm-sqlite-adapter>, [">= 0"])
s.add_development_dependency(%q<dm-mysql-adapter>, [">= 0"])
s.add_development_dependency(%q<dm-postgres-adapter>, [">= 0"])
else
s.add_dependency(%q<dm-core>, [">= 0"])
s.add_dependency(%q<dm-do-adapter>, [">= 0"])
s.add_dependency(%q<dm-migrations>, [">= 0"])
s.add_dependency(%q<dm-sqlite-adapter>, [">= 0"])
s.add_dependency(%q<dm-mysql-adapter>, [">= 0"])
s.add_dependency(%q<dm-postgres-adapter>, [">= 0"])
end
else
s.add_dependency(%q<dm-core>, [">= 0"])
s.add_dependency(%q<dm-do-adapter>, [">= 0"])
s.add_dependency(%q<dm-migrations>, [">= 0"])
s.add_dependency(%q<dm-sqlite-adapter>, [">= 0"])
s.add_dependency(%q<dm-mysql-adapter>, [">= 0"])
s.add_dependency(%q<dm-postgres-adapter>, [">= 0"])
end
end
diff --git a/lib/dm-is-reflective/version.rb b/lib/dm-is-reflective/version.rb
index f02522a..9824f75 100644
--- a/lib/dm-is-reflective/version.rb
+++ b/lib/dm-is-reflective/version.rb
@@ -1,4 +1,4 @@
module DmIsReflective
- VERSION = '1.2.0'
+ VERSION = '1.3.0'
end
|
godfat/dm-is-reflective
|
e7a27cbb2168b5d152f84e6cb6aed63ad5d6f1d7
|
warn instead of raising TypeError if the datatype cannot be found
|
diff --git a/dm-is-reflective.gemspec b/dm-is-reflective.gemspec
index a8c34e2..8fa3284 100644
--- a/dm-is-reflective.gemspec
+++ b/dm-is-reflective.gemspec
@@ -1,78 +1,78 @@
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = "dm-is-reflective"
s.version = "1.2.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Lin Jen-Shin (godfat)"]
- s.date = "2013-05-14"
+ s.date = "2013-05-20"
s.description = "DataMapper plugin that helps you manipulate an existing database.\nIt creates mappings between existing columns and model's properties."
s.email = ["godfat (XD) godfat.org"]
s.executables = ["dm-is-reflective"]
s.files = [
".gitignore",
".gitmodules",
".travis.yml",
"CHANGES.md",
"Gemfile",
"LICENSE",
"README.md",
"Rakefile",
"TODO.md",
"bin/dm-is-reflective",
"dm-is-reflective.gemspec",
"lib/dm-is-reflective.rb",
"lib/dm-is-reflective/adapters/data_objects_adapter.rb",
"lib/dm-is-reflective/adapters/mysql_adapter.rb",
"lib/dm-is-reflective/adapters/postgres_adapter.rb",
"lib/dm-is-reflective/adapters/sqlite_adapter.rb",
"lib/dm-is-reflective/reflective.rb",
"lib/dm-is-reflective/runner.rb",
"lib/dm-is-reflective/test.rb",
"lib/dm-is-reflective/version.rb",
"task/.gitignore",
"task/gemgem.rb",
"test/setup_db.sh",
"test/test_mysql.rb",
"test/test_postgres.rb",
"test/test_sqlite.rb",
"test/test_to_source.rb"]
s.homepage = "https://github.com/godfat/dm-is-reflective"
s.licenses = ["Apache License 2.0"]
s.require_paths = ["lib"]
s.rubygems_version = "2.0.3"
s.summary = "DataMapper plugin that helps you manipulate an existing database."
s.test_files = [
"test/test_mysql.rb",
"test/test_postgres.rb",
"test/test_sqlite.rb",
"test/test_to_source.rb"]
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<dm-core>, [">= 0"])
s.add_runtime_dependency(%q<dm-do-adapter>, [">= 0"])
s.add_development_dependency(%q<dm-migrations>, [">= 0"])
s.add_development_dependency(%q<dm-sqlite-adapter>, [">= 0"])
s.add_development_dependency(%q<dm-mysql-adapter>, [">= 0"])
s.add_development_dependency(%q<dm-postgres-adapter>, [">= 0"])
else
s.add_dependency(%q<dm-core>, [">= 0"])
s.add_dependency(%q<dm-do-adapter>, [">= 0"])
s.add_dependency(%q<dm-migrations>, [">= 0"])
s.add_dependency(%q<dm-sqlite-adapter>, [">= 0"])
s.add_dependency(%q<dm-mysql-adapter>, [">= 0"])
s.add_dependency(%q<dm-postgres-adapter>, [">= 0"])
end
else
s.add_dependency(%q<dm-core>, [">= 0"])
s.add_dependency(%q<dm-do-adapter>, [">= 0"])
s.add_dependency(%q<dm-migrations>, [">= 0"])
s.add_dependency(%q<dm-sqlite-adapter>, [">= 0"])
s.add_dependency(%q<dm-mysql-adapter>, [">= 0"])
s.add_dependency(%q<dm-postgres-adapter>, [">= 0"])
end
end
diff --git a/lib/dm-is-reflective/adapters/data_objects_adapter.rb b/lib/dm-is-reflective/adapters/data_objects_adapter.rb
index a58d40e..f77d215 100644
--- a/lib/dm-is-reflective/adapters/data_objects_adapter.rb
+++ b/lib/dm-is-reflective/adapters/data_objects_adapter.rb
@@ -1,135 +1,136 @@
module DmIsReflective::DataObjectsAdapter
include DataMapper
# returns all tables' names in the repository.
# e.g.
# ['comments', 'users']
def storages
reflective_auto_load_adapter_extension
storages # call the overridden method
end
# returns all fields, with format [[name, type, attrs]]
# e.g.
# [[:created_at, DateTime, {:required => false}],
# [:email, String, {:required => false, :size => 255,
# :default => '[email protected]'}],
# [:id, DataMapper::Property::Serial, {:required => true, :serial => true,
# :key => true}],
# [:salt_first, String, {:required => false, :size => 50}],
# [:salt_second, String, {:required => false, :size => 50}]]
def fields storage
reflective_query_storage(storage).map{ |field|
attr = reflective_attributes(field)
type = reflective_lookup_primitive(reflective_primitive(field))
pick = if attr[:serial] && type == Integer
Property::Serial
else
type
end
[reflective_field_name(field).to_sym, pick, attr]
}
end
# returns a hash with storage names in keys and
# corresponding fields in values. e.g.
# {'users' => [[:id, Integer, {:required => true,
# :serial => true,
# :key => true}],
# [:email, String, {:required => false,
# :default => '[email protected]'}],
# [:created_at, DateTime, {:required => false}],
# [:salt_first, String, {:required => false, :size => 50}],
# [:salt_second, String, {:required => false, :size => 50}]]}
# see AbstractAdapter#storages and AbstractAdapter#fields for detail
def storages_and_fields
storages.inject({}){ |result, storage|
result[storage] = fields(storage)
result
}
end
# automatically generate model class(es) and reflect
# all fields with reflect /.*/ for you.
# e.g.
# dm.auto_genclass!
# # => [DataMapper::Is::Reflective::User,
# # DataMapper::Is::Reflective::SchemaInfo,
# # DataMapper::Is::Reflective::Session]
#
# you can change the scope of generated models:
# e.g.
# dm.auto_genclass! :scope => Object
# # => [User, SchemaInfo, Session]
#
# you can generate classes for tables you specified only:
# e.g.
# dm.auto_genclass! :scope => Object, :storages => /^phpbb_/
# # => [PhpbbUser, PhpbbPost, PhpbbConfig]
#
# you can generate classes with String too:
# e.g.
# dm.auto_genclass! :storages => ['users', 'config'], :scope => Object
# # => [User, Config]
#
# you can generate a class only:
# e.g.
# dm.auto_genclass! :storages => 'users'
# # => [DataMapper::Is::Reflective::User]
def auto_genclass! opts = {}
opts[:scope] ||= DmIsReflective
opts[:storages] ||= /.*/
opts[:storages] = [opts[:storages]].flatten
storages.map{ |storage|
mapped = opts[:storages].each{ |target|
case target
when Regexp;
break storage if storage =~ target
when Symbol, String;
break storage if storage == target.to_s
else
raise ArgumentError.new("invalid argument: #{target.inspect}")
end
}
reflective_genclass(mapped, opts[:scope]) if mapped.kind_of?(String)
}.compact
end
private
def reflective_query_storage storage
reflective_auto_load_adapter_extension
reflective_query_storage(storage) # call the overridden method
end
def reflective_genclass storage, scope
model = Class.new
model.__send__(:include, Resource)
model.is(:reflective)
model.storage_names[:default] = storage
scope.const_set(Inflector.classify(storage), model)
model.__send__(:reflect, /.*/)
model
end
def reflective_lookup_primitive primitive
- raise TypeError.new("#{primitive} not found for #{self.class}")
+ warn "#{primitive} not found for #{self.class}: #{caller.inspect}"
+ String # falling back to the universal interface
end
def reflective_auto_load_adapter_extension
# TODO: can we fix this shit in dm-mysql-adapter?
name = options[:adapter] || options['adapter']
# TODO: can we fix this adapter name in dm-sqlite-adapter?
adapter = name.sub(/\Asqlite3\Z/, 'sqlite')
require "dm-is-reflective/adapters/#{adapter}_adapter"
class_name = "#{Inflector.camelize(adapter)}Adapter"
Adapters.const_get(class_name).__send__(:include,
DmIsReflective.const_get(class_name))
end
end
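
The effect of the change above, sketched against a connected adapter; the type name SOMETYPE is made up, and the send is needed because reflective_lookup_primitive is private:

adapter = DataMapper.repository.adapter
adapter.send(:reflective_lookup_primitive, 'SOMETYPE')
# warns "SOMETYPE not found for #{adapter.class}: [...]" and returns String,
# so the column still becomes a property instead of aborting with TypeError.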
|
godfat/dm-is-reflective
|
dec0d59e7be9bfd463187582f08fc1a94998818f
|
now mysql supports multiple composite keys
|
diff --git a/lib/dm-is-reflective/adapters/mysql_adapter.rb b/lib/dm-is-reflective/adapters/mysql_adapter.rb
index c2b0044..11fec07 100644
--- a/lib/dm-is-reflective/adapters/mysql_adapter.rb
+++ b/lib/dm-is-reflective/adapters/mysql_adapter.rb
@@ -1,76 +1,102 @@
module DmIsReflective::MysqlAdapter
include DataMapper
def storages
select('SHOW TABLES')
end
private
# construct needed table metadata
def reflective_query_storage storage
- sql = <<-SQL
- SELECT c.column_name, c.column_key, c.column_default, c.is_nullable,
- c.data_type, c.character_maximum_length, c.extra, c.table_name,
- s.index_name
- FROM `information_schema`.`columns` c
- LEFT JOIN `information_schema`.`statistics` s
- ON c.column_name = s.column_name
- WHERE c.table_schema = ? AND c.table_name = ?
- GROUP BY c.column_name;
+ sql_indices = <<-SQL
+ SELECT column_name, index_name, non_unique
+ FROM `information_schema`.`statistics`
+ WHERE table_schema = ? AND table_name = ?
+ SQL
+
+ sql_columns = <<-SQL
+ SELECT column_name, column_key, column_default, is_nullable,
+ data_type, character_maximum_length, extra, table_name
+ FROM `information_schema`.`columns`
+ WHERE table_schema = ? AND table_name = ?
SQL
# TODO: can we fix this shit in dm-mysql-adapter?
path = (options[:path] || options['path'] ||
options[:database] || options['database']).sub('/', '')
- select(Ext::String.compress_lines(sql), path, storage)
+ indices =
+ select(Ext::String.compress_lines(sql_indices), path, storage).
+ group_by(&:column_name)
+
+ select(Ext::String.compress_lines(sql_columns), path, storage).
+ map do |column|
+ if idx = indices[column.column_name]
+ idx_uni, idx_com = idx.partition{ |i| i.non_unique == 0 }.map{ |i|
+ if i.empty?
+ nil
+ elsif i.size == 1
+ i.first.index_name.to_sym
+ else
+ i.map{ |ii| ii.index_name.to_sym }
+ end
+ }
+ else
+ idx_uni, idx_com = nil
+ end
+
+ column.instance_eval <<-RUBY
+ def unique_index; #{idx_uni.inspect}; end
+ def index ; #{idx_com.inspect}; end
+ RUBY
+
+ column
+ end
end
def reflective_field_name field
field.column_name
end
def reflective_primitive field
field.data_type
end
def reflective_attributes field, attrs = {}
- attrs[:serial] = true if field.extra == 'auto_increment'
+ attrs[:serial] = true if field.extra == 'auto_increment'
- case field.column_key
- when 'PRI'
+ if field.column_key == 'PRI'
attrs[:key] = true
attrs[:unique_index] = :"#{field.table_name}_pkey"
- when 'UNI'
- attrs[:unique_index] = :"#{field.index_name}"
- when 'MUL'
- attrs[:index] = :"#{field.index_name}"
+ else
+ attrs[:unique_index] = field.unique_index if field.unique_index
+ attrs[ :index] = field. index if field. index
end
attrs[:allow_nil] = field.is_nullable == 'YES'
- attrs[:default] = field.column_default if
+ attrs[:default] = field.column_default if
field.column_default
- attrs[:length] = field.character_maximum_length if
+ attrs[:length] = field.character_maximum_length if
field.character_maximum_length
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when 'YEAR' ; Integer
when /\w*INT(EGER)?( SIGNED| UNSIGNED)?( ZEROFILL)?/
; Integer
when /(DOUBLE|FLOAT|DECIMAL)( SIGNED| UNSIGNED)?( ZEROFILL)?/
; Property::Decimal
when /\w*BLOB|\w*BINARY|ENUM|SET|CHAR/; String
when 'TIME' ; Time
when 'DATE' ; Date
when 'DATETIME', 'TIMESTAMP' ; DateTime
when 'BOOL', 'BOOLEAN' ; Property::Boolean
when /\w*TEXT/ ; Property::Text
end || super(primitive)
end
end
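
For orientation, a sketch of the per-column options reflective_attributes above ends up building from those two queries; the table and index names are invented and only loosely follow the test fixtures that appear later in this history:

# Illustrative only: a table `cats` with a serial primary key, a composite
# unique index (usu), a single-column unique index (u) and a plain index (su).
{
  :id            => {:serial => true, :key => true,
                     :unique_index => :cats_pkey, :allow_nil => false},
  :user_id       => {:unique_index => [:usu, :u], :allow_nil => true},
  :super_user_id => {:unique_index => :usu, :index => :su, :allow_nil => true}
}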
|
godfat/dm-is-reflective
|
f1506ef707cc80d980606b4c9e368a4064d1f920
|
more compact
|
diff --git a/lib/dm-is-reflective/adapters/postgres_adapter.rb b/lib/dm-is-reflective/adapters/postgres_adapter.rb
index b856eef..c96f952 100644
--- a/lib/dm-is-reflective/adapters/postgres_adapter.rb
+++ b/lib/dm-is-reflective/adapters/postgres_adapter.rb
@@ -1,113 +1,110 @@
module DmIsReflective::PostgresAdapter
include DataMapper
def storages
sql = <<-SQL
SELECT table_name FROM "information_schema"."tables"
WHERE table_schema = current_schema()
SQL
select(Ext::String.compress_lines(sql))
end
private
def reflective_query_storage storage
sql_indices = <<-SQL
- SELECT
- a.attname, i.relname, ix.indisprimary, ix.indisunique
- FROM
- pg_class t, pg_class i, pg_index ix, pg_attribute a
- WHERE
- t.oid = ix.indrelid
- AND i.oid = ix.indexrelid
- AND a.attrelid = t.oid
- AND a.attnum = ANY(ix.indkey)
- AND t.relkind = 'r'
- AND t.relname = ?
+ SELECT a.attname, i.relname, ix.indisprimary, ix.indisunique
+ FROM pg_class t, pg_class i, pg_index ix, pg_attribute a
+ WHERE t.oid = ix.indrelid
+ AND i.oid = ix.indexrelid
+ AND a.attrelid = t.oid
+ AND a.attnum = ANY(ix.indkey)
+ AND t.relkind = 'r'
+ AND t.relname = ?
SQL
sql_columns = <<-SQL
SELECT column_name, column_default, is_nullable,
character_maximum_length, udt_name
- FROM "information_schema"."columns"
- WHERE table_schema = current_schema() AND table_name = ?
+ FROM "information_schema"."columns"
+ WHERE table_schema = current_schema() AND table_name = ?
SQL
indices =
select(Ext::String.compress_lines(sql_indices), storage).
group_by(&:attname)
select(Ext::String.compress_lines(sql_columns), storage).map do |column|
if idx = indices[column.column_name]
is_key = !!idx.find{ |i| i.indisprimary }
idx_uni, idx_com = idx.partition{ |i| i.indisunique }.map{ |i|
if i.empty?
nil
elsif i.size == 1
i.first.relname.to_sym
else
i.map{ |ii| ii.relname.to_sym }
end
}
else
is_key = false
idx_uni, idx_com = nil
end
column.instance_eval <<-RUBY
def key? ; #{is_key} ; end
def unique_index; #{idx_uni.inspect}; end
def index ; #{idx_com.inspect}; end
RUBY
column
end
end
def reflective_field_name field
field.column_name
end
def reflective_primitive field
field.udt_name
end
def reflective_attributes field, attrs = {}
# strip data type
if field.column_default
field.column_default.gsub!(/(.*?)::[\w\s]*/, '\1')
end
attrs[:serial] = true if field.column_default =~ /nextval\('\w+'\)/
attrs[:key] = true if field.key?
attrs[:unique_index] = field.unique_index if field.unique_index
attrs[ :index] = field. index if field. index
attrs[:allow_nil] = field.is_nullable == 'YES'
# strip string quotation
attrs[:default] = field.column_default.gsub(/^'(.*?)'$/, '\1') if
field.column_default && !attrs[:serial]
if field.character_maximum_length
attrs[:length] = field.character_maximum_length
elsif field.udt_name.upcase == 'TEXT'
attrs[:length] = Property::Text.length
end
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when /^INT\d+$/ ; Integer
when /^FLOAT\d+$/ ; Float
when 'VARCHAR', 'BPCHAR'; String
when 'TIMESTAMP', 'DATE'; DateTime
when 'TEXT' ; Property::Text
when 'BOOL' ; Property::Boolean
when 'NUMERIC' ; Property::Decimal
end || super(primitive)
end
end
|
godfat/dm-is-reflective
|
fcb61f9a7f8d676a3478cf79c2fe9b403f00924e
|
consistent name
|
diff --git a/lib/dm-is-reflective/adapters/sqlite_adapter.rb b/lib/dm-is-reflective/adapters/sqlite_adapter.rb
index ad61752..18de2ea 100644
--- a/lib/dm-is-reflective/adapters/sqlite_adapter.rb
+++ b/lib/dm-is-reflective/adapters/sqlite_adapter.rb
@@ -1,101 +1,101 @@
module DmIsReflective::SqliteAdapter
include DataMapper
def storages
sql = <<-SQL
SELECT name FROM sqlite_master
WHERE type = 'table' AND NOT name = 'sqlite_sequence'
SQL
select(Ext::String.compress_lines(sql))
end
private
def reflective_query_storage storage
- sql = <<-SQL
+ sql_indices = <<-SQL
SELECT name, sql FROM sqlite_master
WHERE type = 'index' AND tbl_name = ?
SQL
- indices = select(sql, storage).inject({}){ |r, field|
+ indices = select(sql_indices, storage).inject({}){ |r, field|
columns = field.sql[/\(.+\)/].scan(/\w+/)
uniqueness = !!field.sql[/CREATE UNIQUE INDEX/]
columns.each{ |c|
type = if uniqueness then :unique_index else :index end
r[c] ||= {:unique_index => [], :index => []}
r[c][type] << field.name
}
r
}
select('PRAGMA table_info(?)', storage).map{ |field|
if idx = indices[field.name]
idx_uni, idx_com = [:unique_index, :index].map{ |type|
i = idx[type]
if i.empty?
nil
elsif i.size == 1
i.first.to_sym
else
i.map(&:to_sym)
end
}
else
idx_uni, idx_com = nil
end
field.instance_eval <<-RUBY
def table_name ; '#{storage}' ; end
def index ; #{idx_com.inspect}; end
def unique_index; #{idx_uni.inspect}; end
RUBY
field
}
end
def reflective_field_name field
field.name
end
def reflective_primitive field
field.type.gsub(/\(\d+\)/, '')
end
def reflective_attributes field, attrs = {}
if field.pk != 0
attrs[:key] = true
attrs[:serial] = true
attrs[:unique_index] = :"#{field.table_name}_pkey"
end
attrs[:unique_index] = field.unique_index if field.unique_index
attrs[ :index] = field. index if field. index
attrs[:allow_nil] = field.notnull == 0
attrs[:default] = field.dflt_value[1..-2] if field.dflt_value
if field.type.upcase == 'TEXT'
attrs[:length] = Property::Text.length
else
ergo = field.type.match(/\((\d+)\)/)
size = ergo && ergo[1].to_i
attrs[:length] = size if size
end
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when 'INTEGER' ; Integer
when 'REAL', 'NUMERIC'; Float
when 'VARCHAR' ; String
when 'TIMESTAMP' ; DateTime
when 'BOOLEAN' ; Property::Boolean
when 'TEXT' ; Property::Text
end || super(primitive)
end
end
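
The index grouping above rests on two small regexps applied to sqlite_master.sql. Here is the same logic pulled out into a standalone sketch, with made-up index DDL so it can be run on its own:

# Standalone illustration of the parsing in reflective_query_storage above.
rows = [
  {:name => 'unique_cats_usu',
   :sql  => 'CREATE UNIQUE INDEX unique_cats_usu ON cats (user_id, super_user_id)'},
  {:name => 'index_cats_su',
   :sql  => 'CREATE INDEX index_cats_su ON cats (super_user_id)'}
]

indices = rows.inject({}) do |r, field|
  columns    = field[:sql][/\(.+\)/].scan(/\w+/)     # column names inside (...)
  uniqueness = !!field[:sql][/CREATE UNIQUE INDEX/]
  columns.each do |c|
    type = uniqueness ? :unique_index : :index
    r[c] ||= {:unique_index => [], :index => []}
    r[c][type] << field[:name]
  end
  r
end

indices['super_user_id']
# => {:unique_index => ["unique_cats_usu"], :index => ["index_cats_su"]}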
|
godfat/dm-is-reflective
|
eab05e376d744dea21d4fd0401def39adba90d2a
|
sqlite for multiple composite keys support
|
diff --git a/lib/dm-is-reflective/adapters/sqlite_adapter.rb b/lib/dm-is-reflective/adapters/sqlite_adapter.rb
index e03150e..ad61752 100644
--- a/lib/dm-is-reflective/adapters/sqlite_adapter.rb
+++ b/lib/dm-is-reflective/adapters/sqlite_adapter.rb
@@ -1,90 +1,101 @@
module DmIsReflective::SqliteAdapter
include DataMapper
def storages
sql = <<-SQL
SELECT name FROM sqlite_master
WHERE type = 'table' AND NOT name = 'sqlite_sequence'
SQL
select(Ext::String.compress_lines(sql))
end
private
def reflective_query_storage storage
sql = <<-SQL
SELECT name, sql FROM sqlite_master
WHERE type = 'index' AND tbl_name = ?
SQL
- indices = select(sql, storage)
+ indices = select(sql, storage).inject({}){ |r, field|
+ columns = field.sql[/\(.+\)/].scan(/\w+/)
+ uniqueness = !!field.sql[/CREATE UNIQUE INDEX/]
- select('PRAGMA table_info(?)', storage).map{ |field|
- index = indices.find{ |idx|
- idx.sql =~ /ON "#{storage}" \("#{field.name}"\)/ }
+ columns.each{ |c|
+ type = if uniqueness then :unique_index else :index end
+ r[c] ||= {:unique_index => [], :index => []}
+ r[c][type] << field.name
+ }
- field.instance_eval <<-RUBY
- def index_name
- #{"'#{index.name}'" if index}
- end
+ r
+ }
- def uniqueness
- #{!!(index.sql =~ /UNIQUE INDEX/) if index}
- end
+ select('PRAGMA table_info(?)', storage).map{ |field|
+ if idx = indices[field.name]
+ idx_uni, idx_com = [:unique_index, :index].map{ |type|
+ i = idx[type]
+ if i.empty?
+ nil
+ elsif i.size == 1
+ i.first.to_sym
+ else
+ i.map(&:to_sym)
+ end
+ }
+ else
+ idx_uni, idx_com = nil
+ end
- def table_name
- '#{storage}'
- end
+ field.instance_eval <<-RUBY
+ def table_name ; '#{storage}' ; end
+ def index ; #{idx_com.inspect}; end
+ def unique_index; #{idx_uni.inspect}; end
RUBY
+
field
}
end
def reflective_field_name field
field.name
end
def reflective_primitive field
field.type.gsub(/\(\d+\)/, '')
end
def reflective_attributes field, attrs = {}
if field.pk != 0
attrs[:key] = true
attrs[:serial] = true
attrs[:unique_index] = :"#{field.table_name}_pkey"
end
- if field.index_name
- if field.uniqueness
- attrs[:unique_index] = :"#{field.index_name}"
- else
- attrs[:index] = :"#{field.index_name}"
- end
- end
+ attrs[:unique_index] = field.unique_index if field.unique_index
+ attrs[ :index] = field. index if field. index
attrs[:allow_nil] = field.notnull == 0
attrs[:default] = field.dflt_value[1..-2] if field.dflt_value
if field.type.upcase == 'TEXT'
attrs[:length] = Property::Text.length
else
ergo = field.type.match(/\((\d+)\)/)
size = ergo && ergo[1].to_i
attrs[:length] = size if size
end
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when 'INTEGER' ; Integer
when 'REAL', 'NUMERIC'; Float
when 'VARCHAR' ; String
when 'TIMESTAMP' ; DateTime
when 'BOOLEAN' ; Property::Boolean
when 'TEXT' ; Property::Text
end || super(primitive)
end
end
|
godfat/dm-is-reflective
|
0c575784826fa3a7c6607d022bd2a59503dbf3b7
|
add test case for multiple composite keys
|
diff --git a/lib/dm-is-reflective/test.rb b/lib/dm-is-reflective/test.rb
index a8480f6..46e190e 100644
--- a/lib/dm-is-reflective/test.rb
+++ b/lib/dm-is-reflective/test.rb
@@ -1,285 +1,314 @@
require 'bacon'
Bacon.summary_on_exit
require 'dm-core'
require 'dm-migrations'
require 'dm-is-reflective'
module Abstract
+ class Cat
+ include DataMapper::Resource
+ property :id, Serial
+
+ belongs_to :user
+ belongs_to :super_user
+
+ property :user_id , Integer,
+ :unique_index => [:usu, :u]
+ property :super_user_id, Integer,
+ :unique_index => [:usu],
+ :index => [:su]
+ end
+
+ class Comment
+ include DataMapper::Resource
+ belongs_to :user, :required => false
+
+ property :id, Serial
+ property :title, String, :length => 50, :default => 'default title',
+ :allow_nil => false
+ property :body, Text
+
+ is :reflective
+ end
+
class User
include DataMapper::Resource
has n, :comments
property :id, Serial
property :login, String, :length => 70
property :sig, Text
property :created_at, DateTime
is :reflective
end
class SuperUser
include DataMapper::Resource
property :id, Serial
property :bool, Boolean
is :reflective
end
- class Comment
- include DataMapper::Resource
- belongs_to :user, :required => false
-
- property :id, Serial
- property :title, String, :length => 50, :default => 'default title',
- :allow_nil => false
- property :body, Text
-
- is :reflective
- end
-
- Tables = ['abstract_comments', 'abstract_super_users', 'abstract_users']
+ Tables = %w[abstract_cats abstract_comments
+ abstract_super_users abstract_users]
AttrCommon = {:allow_nil => true}
AttrCommonPK = {:serial => true, :key => true, :allow_nil => false}
AttrText = {:length => 65535}.merge(AttrCommon)
def self.next_id
@id ||= 0
@id += 1
end
end
include Abstract
shared :reflective do
- def user_fields
- @user_fields ||=
- [[:created_at, DateTime, AttrCommon],
- [:id, DataMapper::Property::Serial,
- {:unique_index => :abstract_users_pkey}.merge(AttrCommonPK)],
- [:login, String, {:length => 70}.merge(AttrCommon)],
- [:sig, DataMapper::Property::Text, AttrText]]
+ def cat_fields
+ @cat_fields ||=
+ [[:id, DataMapper::Property::Serial,
+ {:unique_index => :abstract_cats_pkey}.merge(AttrCommonPK)],
+ [:super_user_id, Integer,
+ {:unique_index => :unique_abstract_cats_usu,
+ :index => :index_abstract_cats_su }.merge(AttrCommon)],
+ [:user_id , Integer,
+ {:unique_index => [:unique_abstract_cats_usu,
+ :unique_abstract_cats_u]}.merge(AttrCommon)]]
end
def comment_fields
@comment_fields ||= begin
[[:body , DataMapper::Property::Text , AttrText],
[:id , DataMapper::Property::Serial,
{:unique_index => :abstract_comments_pkey}.merge(AttrCommonPK)],
[:title , String ,
{:length => 50, :default => 'default title', :allow_nil => false}],
[:user_id, Integer ,
{:index => :index_abstract_comments_user}.merge(AttrCommon)]]
end
end
+ def user_fields
+ @user_fields ||=
+ [[:created_at, DateTime, AttrCommon],
+ [:id, DataMapper::Property::Serial,
+ {:unique_index => :abstract_users_pkey}.merge(AttrCommonPK)],
+ [:login, String, {:length => 70}.merge(AttrCommon)],
+ [:sig, DataMapper::Property::Text, AttrText]]
+ end
+
def super_user_fields
@super_user_fields ||= begin
type = case DataMapper.repository.adapter.class.name
when 'DataMapper::Adapters::MysqlAdapter'
Integer
else
DataMapper::Property::Boolean
end
[[:bool, type, AttrCommon],
[:id , DataMapper::Property::Serial,
{:unique_index => :abstract_super_users_pkey}.merge(AttrCommonPK)]]
end
end
before do
@dm = setup_data_mapper
- [User, Comment, SuperUser].each(&:auto_migrate!)
+ [Cat, Comment, User, SuperUser].each(&:auto_migrate!)
end
def sort_fields fields
fields.sort_by{ |f| f.first.to_s }
end
def create_fake_model
model = Class.new
model.module_eval do
include DataMapper::Resource
property :id, DataMapper::Property::Serial
is :reflective
end
Abstract.const_set("Model#{Abstract.next_id}", model)
[model, setup_data_mapper]
end
def new_scope
Abstract.const_set("Scope#{Abstract.next_id}", Module.new)
end
def test_create_comment
Comment.create(:title => 'XD')
Comment.first.title.should.eq 'XD'
end
def test_create_user
now = Time.now
User.create(:created_at => now)
User.first.created_at.asctime.should.eq now.asctime
now
end
should 'create comment' do
test_create_comment
end
should 'create user' do
test_create_user
end
should 'storages' do
@dm.storages.sort.should.eq Tables
sort_fields(@dm.fields('abstract_comments')).should.eq comment_fields
end
should 'reflect all' do
test_create_comment # for fixtures
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
local_dm.storages.sort.should.eq Tables
model.storage_name.should.eq 'abstract_comments'
model.send :reflect
model.all.size .should.eq 1
sort_fields(model.fields).should.eq comment_fields
model.first.title .should.eq 'XD'
end
should 'reflect and create' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect
model.create(:title => 'orz')
model.first.title.should.eq 'orz'
model.create
model.last.title.should.eq 'default title'
end
should 'storages and fields' do
sort_fields(@dm.fields('abstract_users')).should.eq user_fields
@dm.storages_and_fields.inject({}){ |r, i|
key, value = i
r[key] = value.sort_by{ |v| v.first.to_s }
r
- }.should.eq('abstract_users' => user_fields ,
- 'abstract_comments' => comment_fields ,
+ }.should.eq('abstract_cats' => cat_fields,
+ 'abstract_comments' => comment_fields,
+ 'abstract_users' => user_fields,
'abstract_super_users' => super_user_fields)
end
should 'reflect type' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect, DataMapper::Property::Serial
model.properties.map(&:name).map(&:to_s).sort.should.eq ['id']
model.send :reflect, Integer
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'user_id']
end
should 'reflect multiple' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_users'
model.send :reflect, :login, DataMapper::Property::Serial
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'login']
end
should 'reflect regexp' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect, /id$/
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'user_id']
end
should 'raise ArgumentError when giving invalid argument' do
lambda{
User.send :reflect, 29
}.should.raise ArgumentError
end
should 'allow empty string' do
Comment.new(:title => '').save.should.eq true
end
should 'auto_genclasses' do
scope = new_scope
@dm.auto_genclass!(:scope => scope).map(&:to_s).sort.should.eq \
- ["#{scope}::AbstractComment",
+ ["#{scope}::AbstractCat" ,
+ "#{scope}::AbstractComment" ,
"#{scope}::AbstractSuperUser",
"#{scope}::AbstractUser"]
comment = scope.const_get('AbstractComment')
sort_fields(comment.fields).should.eq comment_fields
test_create_comment
comment.first.title.should.eq 'XD'
comment.create(:title => 'orz', :body => 'dm-reflect')
comment.last.body.should.eq 'dm-reflect'
end
should 'auto_genclass' do
scope = new_scope
@dm.auto_genclass!(:scope => scope,
:storages => 'abstract_users').map(&:to_s).should.eq \
["#{scope}::AbstractUser"]
user = scope.const_get('AbstractUser')
sort_fields(user.fields).should.eq user_fields
now = test_create_user
user.first.created_at.asctime.should.eq now.asctime
user.create(:login => 'godfat')
user.last.login.should.eq 'godfat'
end
should 'auto_genclass with regexp' do
scope = new_scope
@dm.auto_genclass!(:scope => scope,
:storages => /_users$/).map(&:to_s).sort.should.eq \
["#{scope}::AbstractSuperUser", "#{scope}::AbstractUser"]
user = scope.const_get('AbstractSuperUser')
sort_fields(user.fields).should.eq sort_fields(SuperUser.fields)
end
should 'reflect return value' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
mapped = model.send :reflect, /.*/
mapped.map(&:object_id).sort.should.eq \
model.properties.map(&:object_id).sort
end
end
module Kernel
def eq? rhs
self == rhs
end
def require_adapter adapter
require "dm-#{adapter}-adapter"
rescue LoadError
puts "skip #{adapter} test since it's not installed"
end
end
|
godfat/dm-is-reflective
|
e83895f5e2d81b8ccadd29caa2cbb9c8a9efa813
|
consistent order
|
diff --git a/lib/dm-is-reflective/adapters/postgres_adapter.rb b/lib/dm-is-reflective/adapters/postgres_adapter.rb
index c67e69a..b856eef 100644
--- a/lib/dm-is-reflective/adapters/postgres_adapter.rb
+++ b/lib/dm-is-reflective/adapters/postgres_adapter.rb
@@ -1,113 +1,113 @@
module DmIsReflective::PostgresAdapter
include DataMapper
def storages
sql = <<-SQL
SELECT table_name FROM "information_schema"."tables"
WHERE table_schema = current_schema()
SQL
select(Ext::String.compress_lines(sql))
end
private
def reflective_query_storage storage
sql_indices = <<-SQL
SELECT
a.attname, i.relname, ix.indisprimary, ix.indisunique
FROM
pg_class t, pg_class i, pg_index ix, pg_attribute a
WHERE
t.oid = ix.indrelid
AND i.oid = ix.indexrelid
AND a.attrelid = t.oid
AND a.attnum = ANY(ix.indkey)
AND t.relkind = 'r'
AND t.relname = ?
SQL
sql_columns = <<-SQL
SELECT column_name, column_default, is_nullable,
character_maximum_length, udt_name
FROM "information_schema"."columns"
WHERE table_schema = current_schema() AND table_name = ?
SQL
indices =
select(Ext::String.compress_lines(sql_indices), storage).
group_by(&:attname)
select(Ext::String.compress_lines(sql_columns), storage).map do |column|
if idx = indices[column.column_name]
is_key = !!idx.find{ |i| i.indisprimary }
idx_uni, idx_com = idx.partition{ |i| i.indisunique }.map{ |i|
if i.empty?
nil
elsif i.size == 1
i.first.relname.to_sym
else
i.map{ |ii| ii.relname.to_sym }
end
}
else
is_key = false
idx_uni, idx_com = nil
end
column.instance_eval <<-RUBY
def key? ; #{is_key} ; end
- def index ; #{idx_com.inspect}; end
def unique_index; #{idx_uni.inspect}; end
+ def index ; #{idx_com.inspect}; end
RUBY
column
end
end
def reflective_field_name field
field.column_name
end
def reflective_primitive field
field.udt_name
end
def reflective_attributes field, attrs = {}
# strip data type
if field.column_default
field.column_default.gsub!(/(.*?)::[\w\s]*/, '\1')
end
attrs[:serial] = true if field.column_default =~ /nextval\('\w+'\)/
attrs[:key] = true if field.key?
attrs[:unique_index] = field.unique_index if field.unique_index
attrs[ :index] = field. index if field. index
attrs[:allow_nil] = field.is_nullable == 'YES'
# strip string quotation
attrs[:default] = field.column_default.gsub(/^'(.*?)'$/, '\1') if
field.column_default && !attrs[:serial]
if field.character_maximum_length
attrs[:length] = field.character_maximum_length
elsif field.udt_name.upcase == 'TEXT'
attrs[:length] = Property::Text.length
end
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when /^INT\d+$/ ; Integer
when /^FLOAT\d+$/ ; Float
when 'VARCHAR', 'BPCHAR'; String
when 'TIMESTAMP', 'DATE'; DateTime
when 'TEXT' ; Property::Text
when 'BOOL' ; Property::Boolean
when 'NUMERIC' ; Property::Decimal
end || super(primitive)
end
end
|
godfat/dm-is-reflective
|
96188f53dcb1e7882ef09b8c965a3a0173e728bb
|
new indention
|
diff --git a/lib/dm-is-reflective/reflective.rb b/lib/dm-is-reflective/reflective.rb
index 3f5eda4..6972f29 100644
--- a/lib/dm-is-reflective/reflective.rb
+++ b/lib/dm-is-reflective/reflective.rb
@@ -1,106 +1,106 @@
module DmIsReflective
autoload :VERSION, 'dm-is-reflective/version'
include DataMapper
def is_reflective
extend ClassMethod
end
module ClassMethod
# it simply calls Migration#fields(self.storage_name)
# e.g.
# DataMapper.repository.adapter.fields storage_name
def fields repo = default_repository_name
DataMapper.repository(repo).adapter.fields(storage_name(repo))
end
# it automatically creates reflection from storage fields to properties.
# i.e. you don't have to specify any property if you are connecting
# to an existing database.
# you can pass it Regexp to map any field it matched, or just
# the field name in Symbol or String, or a Class telling it
# map any field which type equals to the Class.
# returned value is an array of properties indicating fields it mapped
# e.g.
# class User
# include DataMapper::Resource
# # reflect all
# reflect /.*/ # e.g. => [#<Property:#<Class:0x18f89b8>:id>,
# # #<Property:#<Class:0x18f89b8>:title>,
# # #<Property:#<Class:0x18f89b8>:body>,
# # #<Property:#<Class:0x18f89b8>:user_id>]
#
# # reflect all (with no argument at all)
# reflect
#
# # reflect for field name ended with _at, and started with salt_
# reflect /_at$/, /^salt_/
#
# # reflect id and email
# reflect :id, :email
#
# # reflect all fields with type String, and id
# reflect String, :id
#
# # reflect login, and all fields with type Integer
# reflect :login, Integer
# end
def reflect *targets
targets << /.*/ if targets.empty?
result = fields.map{ |field|
name, type, attrs = field
reflected = targets.each{ |target|
case target
- when Regexp;
- break name if name.to_s =~ target
+ when Regexp;
+ break name if name.to_s =~ target
- when Symbol, String;
- break name if name == target.to_sym
+ when Symbol, String;
+ break name if name == target.to_sym
- when Class;
- break name if type == target
+ when Class;
+ break name if type == target
- else
- raise ArgumentError.new("invalid argument: #{target.inspect}")
+ else
+ raise ArgumentError.new("invalid argument: #{target.inspect}")
end
}
reflect_property(reflected, type, attrs) if
reflected.kind_of?(Symbol)
}.compact
if key.empty? && k = properties.find{ |p| p.unique_index }
property k.name, k.primitive, :key => true
end
finalize if respond_to?(:finalize)
result
end
def reflect_property reflected, type, attrs
property reflected, type, attrs
rescue ArgumentError => e
if e.message =~ /cannot be used as a property name/
reflect_property "#{reflected}_", type,
{:field => reflected.to_s}.merge(attrs)
end
end
def to_source scope=nil
<<-RUBY
class #{scope}::#{name} < #{superclass}
include DataMapper::Resource
#{
properties.map do |prop|
"property :#{prop.name}, #{prop.class.name}, #{prop.options}"
end.join("\n")
}
end
RUBY
end
end # of ClassMethod
end # of DmIsReflective
|
godfat/dm-is-reflective
|
687a3ce9571c9f6116c0545b52b45c347c513675
|
so now composite keys should work properly with postgres
|
diff --git a/lib/dm-is-reflective/adapters/postgres_adapter.rb b/lib/dm-is-reflective/adapters/postgres_adapter.rb
index 0d67fc3..9471856 100644
--- a/lib/dm-is-reflective/adapters/postgres_adapter.rb
+++ b/lib/dm-is-reflective/adapters/postgres_adapter.rb
@@ -1,104 +1,115 @@
module DmIsReflective::PostgresAdapter
include DataMapper
def storages
sql = <<-SQL
SELECT table_name FROM "information_schema"."tables"
WHERE table_schema = current_schema()
SQL
select(Ext::String.compress_lines(sql))
end
private
def reflective_query_storage storage
- sql_key = <<-SQL
- SELECT column_name FROM "information_schema"."key_column_usage"
- WHERE table_schema = current_schema() AND table_name = ?
- SQL
-
- sql_index = <<-SQL
+ sql_indices = <<-SQL
SELECT
- (i.relname, ix.indisunique)
+ a.attname, i.relname, ix.indisprimary, ix.indisunique
FROM
pg_class t, pg_class i, pg_index ix, pg_attribute a
WHERE
t.oid = ix.indrelid
AND i.oid = ix.indexrelid
AND a.attrelid = t.oid
AND a.attnum = ANY(ix.indkey)
- AND a.attname = column_name
AND t.relkind = 'r'
AND t.relname = ?
SQL
- sql = <<-SQL
+ sql_columns = <<-SQL
SELECT column_name, column_default, is_nullable,
- character_maximum_length, udt_name,
- (#{sql_key}) AS key, (#{sql_index}) AS indexname_uniqueness
+ character_maximum_length, udt_name
FROM "information_schema"."columns"
WHERE table_schema = current_schema() AND table_name = ?
SQL
- select(Ext::String.compress_lines(sql), storage, storage, storage)
- end
+ indices =
+ select(Ext::String.compress_lines(sql_indices), storage).
+ group_by(&:attname)
+
+ select(Ext::String.compress_lines(sql_columns), storage).map do |column|
+ idx = indices[column.column_name]
+
+ if idx
+ is_key = !!idx.find{ |i| i.indisprimary }
+ idx_uni, idx_com = idx.partition{ |i| i.indisunique }.map{ |i|
+ if i.empty?
+ nil
+ elsif i.size == 1
+ i.first.relname.to_sym
+ else
+ i.map{ |ii| ii.relname.to_sym }
+ end
+ }
+ else
+ is_key = false
+ idx_uni, idx_com = nil
+ end
+ column.instance_eval <<-RUBY
+ def key? ; #{is_key} ; end
+ def index ; #{idx_com.inspect}; end
+ def unique_index; #{idx_uni.inspect}; end
+ RUBY
+ column
+ end
+ end
def reflective_field_name field
field.column_name
end
def reflective_primitive field
field.udt_name
end
def reflective_attributes field, attrs = {}
# strip data type
if field.column_default
field.column_default.gsub!(/(.*?)::[\w\s]*/, '\1')
end
- # find out index and unique index
- if field.indexname_uniqueness
- index_name, uniqueness = field.indexname_uniqueness[1..-2].split(',')
- end
-
attrs[:serial] = true if field.column_default =~ /nextval\('\w+'\)/
- attrs[:key] = true if field.column_name == field.key
+ attrs[:key] = true if field.key?
- if index_name
- if uniqueness
- attrs[:unique_index] = :"#{index_name}"
- else
- attrs[:index] = :"#{index_name}"
- end
- end
+ attrs[:unique_index] = field.unique_index if field.unique_index
+ attrs[ :index] = field. index if field. index
attrs[:allow_nil] = field.is_nullable == 'YES'
# strip string quotation
attrs[:default] = field.column_default.gsub(/^'(.*?)'$/, '\1') if
field.column_default && !attrs[:serial]
if field.character_maximum_length
attrs[:length] = field.character_maximum_length
elsif field.udt_name.upcase == 'TEXT'
attrs[:length] = Property::Text.length
end
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when /^INT\d+$/ ; Integer
when /^FLOAT\d+$/ ; Float
when 'VARCHAR', 'BPCHAR'; String
when 'TIMESTAMP', 'DATE'; DateTime
when 'TEXT' ; Property::Text
when 'BOOL' ; Property::Boolean
when 'NUMERIC' ; Property::Decimal
end || super(primitive)
end
end
diff --git a/lib/dm-is-reflective/test.rb b/lib/dm-is-reflective/test.rb
index ae831ff..a8480f6 100644
--- a/lib/dm-is-reflective/test.rb
+++ b/lib/dm-is-reflective/test.rb
@@ -1,291 +1,285 @@
require 'bacon'
Bacon.summary_on_exit
require 'dm-core'
require 'dm-migrations'
require 'dm-is-reflective'
module Abstract
class User
include DataMapper::Resource
has n, :comments
property :id, Serial
property :login, String, :length => 70
property :sig, Text
property :created_at, DateTime
is :reflective
end
class SuperUser
include DataMapper::Resource
property :id, Serial
property :bool, Boolean
is :reflective
end
class Comment
include DataMapper::Resource
belongs_to :user, :required => false
property :id, Serial
property :title, String, :length => 50, :default => 'default title',
:allow_nil => false
property :body, Text
is :reflective
end
Tables = ['abstract_comments', 'abstract_super_users', 'abstract_users']
AttrCommon = {:allow_nil => true}
AttrCommonPK = {:serial => true, :key => true, :allow_nil => false}
AttrText = {:length => 65535}.merge(AttrCommon)
def self.next_id
@id ||= 0
@id += 1
end
end
include Abstract
shared :reflective do
def user_fields
@user_fields ||=
[[:created_at, DateTime, AttrCommon],
[:id, DataMapper::Property::Serial,
{:unique_index => :abstract_users_pkey}.merge(AttrCommonPK)],
[:login, String, {:length => 70}.merge(AttrCommon)],
[:sig, DataMapper::Property::Text, AttrText]]
end
def comment_fields
@comment_fields ||= begin
- index_name = case DataMapper.repository.adapter.class.name
- when 'DataMapper::Adapters::PostgresAdapter'
- :unique_index
- else
- :index
- end
[[:body , DataMapper::Property::Text , AttrText],
[:id , DataMapper::Property::Serial,
{:unique_index => :abstract_comments_pkey}.merge(AttrCommonPK)],
[:title , String ,
{:length => 50, :default => 'default title', :allow_nil => false}],
[:user_id, Integer ,
- {index_name => :index_abstract_comments_user}.merge(AttrCommon)]]
+ {:index => :index_abstract_comments_user}.merge(AttrCommon)]]
end
end
def super_user_fields
@super_user_fields ||= begin
type = case DataMapper.repository.adapter.class.name
when 'DataMapper::Adapters::MysqlAdapter'
Integer
else
DataMapper::Property::Boolean
end
[[:bool, type, AttrCommon],
[:id , DataMapper::Property::Serial,
{:unique_index => :abstract_super_users_pkey}.merge(AttrCommonPK)]]
end
end
before do
@dm = setup_data_mapper
[User, Comment, SuperUser].each(&:auto_migrate!)
end
def sort_fields fields
fields.sort_by{ |f| f.first.to_s }
end
def create_fake_model
model = Class.new
model.module_eval do
include DataMapper::Resource
property :id, DataMapper::Property::Serial
is :reflective
end
Abstract.const_set("Model#{Abstract.next_id}", model)
[model, setup_data_mapper]
end
def new_scope
Abstract.const_set("Scope#{Abstract.next_id}", Module.new)
end
def test_create_comment
Comment.create(:title => 'XD')
Comment.first.title.should.eq 'XD'
end
def test_create_user
now = Time.now
User.create(:created_at => now)
User.first.created_at.asctime.should.eq now.asctime
now
end
should 'create comment' do
test_create_comment
end
should 'create user' do
test_create_user
end
should 'storages' do
@dm.storages.sort.should.eq Tables
sort_fields(@dm.fields('abstract_comments')).should.eq comment_fields
end
should 'reflect all' do
test_create_comment # for fixtures
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
local_dm.storages.sort.should.eq Tables
model.storage_name.should.eq 'abstract_comments'
model.send :reflect
model.all.size .should.eq 1
sort_fields(model.fields).should.eq comment_fields
model.first.title .should.eq 'XD'
end
should 'reflect and create' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect
model.create(:title => 'orz')
model.first.title.should.eq 'orz'
model.create
model.last.title.should.eq 'default title'
end
should 'storages and fields' do
sort_fields(@dm.fields('abstract_users')).should.eq user_fields
@dm.storages_and_fields.inject({}){ |r, i|
key, value = i
r[key] = value.sort_by{ |v| v.first.to_s }
r
}.should.eq('abstract_users' => user_fields ,
'abstract_comments' => comment_fields ,
'abstract_super_users' => super_user_fields)
end
should 'reflect type' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect, DataMapper::Property::Serial
model.properties.map(&:name).map(&:to_s).sort.should.eq ['id']
model.send :reflect, Integer
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'user_id']
end
should 'reflect multiple' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_users'
model.send :reflect, :login, DataMapper::Property::Serial
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'login']
end
should 'reflect regexp' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect, /id$/
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'user_id']
end
should 'raise ArgumentError when giving invalid argument' do
lambda{
User.send :reflect, 29
}.should.raise ArgumentError
end
should 'allow empty string' do
Comment.new(:title => '').save.should.eq true
end
should 'auto_genclasses' do
scope = new_scope
@dm.auto_genclass!(:scope => scope).map(&:to_s).sort.should.eq \
["#{scope}::AbstractComment",
"#{scope}::AbstractSuperUser",
"#{scope}::AbstractUser"]
comment = scope.const_get('AbstractComment')
sort_fields(comment.fields).should.eq comment_fields
test_create_comment
comment.first.title.should.eq 'XD'
comment.create(:title => 'orz', :body => 'dm-reflect')
comment.last.body.should.eq 'dm-reflect'
end
should 'auto_genclass' do
scope = new_scope
@dm.auto_genclass!(:scope => scope,
:storages => 'abstract_users').map(&:to_s).should.eq \
["#{scope}::AbstractUser"]
user = scope.const_get('AbstractUser')
sort_fields(user.fields).should.eq user_fields
now = test_create_user
user.first.created_at.asctime.should.eq now.asctime
user.create(:login => 'godfat')
user.last.login.should.eq 'godfat'
end
should 'auto_genclass with regexp' do
scope = new_scope
@dm.auto_genclass!(:scope => scope,
:storages => /_users$/).map(&:to_s).sort.should.eq \
["#{scope}::AbstractSuperUser", "#{scope}::AbstractUser"]
user = scope.const_get('AbstractSuperUser')
sort_fields(user.fields).should.eq sort_fields(SuperUser.fields)
end
should 'reflect return value' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
mapped = model.send :reflect, /.*/
mapped.map(&:object_id).sort.should.eq \
model.properties.map(&:object_id).sort
end
end
module Kernel
def eq? rhs
self == rhs
end
def require_adapter adapter
require "dm-#{adapter}-adapter"
rescue LoadError
puts "skip #{adapter} test since it's not installed"
end
end
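
A minimal usage sketch of what this change enables; dm-core, dm-migrations and dm-is-reflective are assumed to be loaded against a Postgres repository configured as in the tests, and the Order model with its two-column key is hypothetical.

require 'dm-core'
require 'dm-migrations'
require 'dm-is-reflective'

# Placeholder connection details; point this at your own Postgres database.
DataMapper.setup(:default, :adapter => 'postgres', :database => 'dm_is_reflective')

class Order
  include DataMapper::Resource
  property :region_id, Integer, :key => true   # composite primary key, i.e.
  property :order_id,  Integer, :key => true   # PRIMARY KEY (region_id, order_id)
  property :total,     Integer
  is :reflective
end

DataMapper.finalize
Order.auto_migrate!

# A blank model pointed at the same table recovers both key columns, because
# the adapter now reports pg_index.indisprimary per attribute.
class ReflectedOrder
  include DataMapper::Resource
  storage_names[:default] = 'orders'
  is :reflective
  reflect
end

ReflectedOrder.key.map(&:name).sort   # expected: [:order_id, :region_id]
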
|
godfat/dm-is-reflective
|
b851f823e77e2fd22c873eb86eaf85a063f9619b
|
choose another name if the name is already taken
|
diff --git a/lib/dm-is-reflective/reflective.rb b/lib/dm-is-reflective/reflective.rb
index d7dfbad..3f5eda4 100644
--- a/lib/dm-is-reflective/reflective.rb
+++ b/lib/dm-is-reflective/reflective.rb
@@ -1,96 +1,106 @@
module DmIsReflective
autoload :VERSION, 'dm-is-reflective/version'
include DataMapper
def is_reflective
extend ClassMethod
end
module ClassMethod
# it simply calls Migration#fields(self.storage_name)
# e.g.
# DataMapper.repository.adapter.fields storage_name
def fields repo = default_repository_name
DataMapper.repository(repo).adapter.fields(storage_name(repo))
end
# it automatically creates reflection from storage fields to properties.
# i.e. you don't have to specify any property if you are connecting
# to an existing database.
# you can pass it Regexp to map any field it matched, or just
# the field name in Symbol or String, or a Class telling it
# map any field which type equals to the Class.
# returned value is an array of properties indicating fields it mapped
# e.g.
# class User
# include DataMapper::Resource
# # reflect all
# reflect /.*/ # e.g. => [#<Property:#<Class:0x18f89b8>:id>,
# # #<Property:#<Class:0x18f89b8>:title>,
# # #<Property:#<Class:0x18f89b8>:body>,
# # #<Property:#<Class:0x18f89b8>:user_id>]
#
# # reflect all (with no argument at all)
# reflect
#
# # reflect for field name ended with _at, and started with salt_
# reflect /_at$/, /^salt_/
#
# # reflect id and email
# reflect :id, :email
#
# # reflect all fields with type String, and id
# reflect String, :id
#
# # reflect login, and all fields with type Integer
# reflect :login, Integer
# end
def reflect *targets
targets << /.*/ if targets.empty?
result = fields.map{ |field|
name, type, attrs = field
reflected = targets.each{ |target|
case target
when Regexp;
break name if name.to_s =~ target
when Symbol, String;
break name if name == target.to_sym
when Class;
break name if type == target
else
raise ArgumentError.new("invalid argument: #{target.inspect}")
end
}
- property(reflected, type, attrs) if reflected.kind_of?(Symbol)
+ reflect_property(reflected, type, attrs) if
+ reflected.kind_of?(Symbol)
}.compact
if key.empty? && k = properties.find{ |p| p.unique_index }
property k.name, k.primitive, :key => true
end
finalize if respond_to?(:finalize)
result
end
+ def reflect_property reflected, type, attrs
+ property reflected, type, attrs
+ rescue ArgumentError => e
+ if e.message =~ /cannot be used as a property name/
+ reflect_property "#{reflected}_", type,
+ {:field => reflected.to_s}.merge(attrs)
+ end
+ end
+
def to_source scope=nil
<<-RUBY
class #{scope}::#{name} < #{superclass}
include DataMapper::Resource
#{
properties.map do |prop|
"property :#{prop.name}, #{prop.class.name}, #{prop.options}"
end.join("\n")
}
end
RUBY
end
end # of ClassMethod
end # of DmIsReflective
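
A sketch of the new fallback, under the same assumed connection as the previous sketch; legacy_records is a hypothetical existing table with a column literally named fields, a name DataMapper rejects as a property name because it collides with the :fields query option.

# Reflecting such a table used to raise ArgumentError; now the property is
# registered under a trailing-underscore name that maps back to the original
# column via the :field option.
class LegacyRecord
  include DataMapper::Resource
  storage_names[:default] = 'legacy_records'   # hypothetical existing table
  is :reflective
  reflect
end

LegacyRecord.properties[:fields_].field   # expected: "fields"
LegacyRecord.first.fields_                # reads the original "fields" column
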
|
godfat/dm-is-reflective
|
9a01142fe7a0d4587f08050d28a20aae06d45d9d
|
find a unique index for the key if there's no key
|
diff --git a/lib/dm-is-reflective/reflective.rb b/lib/dm-is-reflective/reflective.rb
index bec1fa3..d7dfbad 100644
--- a/lib/dm-is-reflective/reflective.rb
+++ b/lib/dm-is-reflective/reflective.rb
@@ -1,92 +1,96 @@
module DmIsReflective
autoload :VERSION, 'dm-is-reflective/version'
include DataMapper
def is_reflective
extend ClassMethod
end
module ClassMethod
# it simply calls Migration#fields(self.storage_name)
# e.g.
# DataMapper.repository.adapter.fields storage_name
def fields repo = default_repository_name
DataMapper.repository(repo).adapter.fields(storage_name(repo))
end
# it automatically creates reflection from storage fields to properties.
# i.e. you don't have to specify any property if you are connecting
# to an existing database.
# you can pass it Regexp to map any field it matched, or just
# the field name in Symbol or String, or a Class telling it
# map any field which type equals to the Class.
# returned value is an array of properties indicating fields it mapped
# e.g.
# class User
# include DataMapper::Resource
# # reflect all
# reflect /.*/ # e.g. => [#<Property:#<Class:0x18f89b8>:id>,
# # #<Property:#<Class:0x18f89b8>:title>,
# # #<Property:#<Class:0x18f89b8>:body>,
# # #<Property:#<Class:0x18f89b8>:user_id>]
#
# # reflect all (with no argument at all)
# reflect
#
# # reflect for field name ended with _at, and started with salt_
# reflect /_at$/, /^salt_/
#
# # reflect id and email
# reflect :id, :email
#
# # reflect all fields with type String, and id
# reflect String, :id
#
# # reflect login, and all fields with type Integer
# reflect :login, Integer
# end
def reflect *targets
targets << /.*/ if targets.empty?
result = fields.map{ |field|
name, type, attrs = field
reflected = targets.each{ |target|
case target
when Regexp;
break name if name.to_s =~ target
when Symbol, String;
break name if name == target.to_sym
when Class;
break name if type == target
else
raise ArgumentError.new("invalid argument: #{target.inspect}")
end
}
property(reflected, type, attrs) if reflected.kind_of?(Symbol)
}.compact
+ if key.empty? && k = properties.find{ |p| p.unique_index }
+ property k.name, k.primitive, :key => true
+ end
+
finalize if respond_to?(:finalize)
result
end
def to_source scope=nil
<<-RUBY
class #{scope}::#{name} < #{superclass}
include DataMapper::Resource
#{
properties.map do |prop|
"property :#{prop.name}, #{prop.class.name}, #{prop.options}"
end.join("\n")
}
end
RUBY
end
end # of ClassMethod
end # of DmIsReflective
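
A sketch of the effect, again under the same assumed connection; subscribers is a hypothetical existing table that has no PRIMARY KEY but carries a UNIQUE index on its email column.

# Without a key DataMapper cannot finalize the model; reflection now promotes
# the unique-indexed column to the key instead of leaving the model keyless.
class Subscriber
  include DataMapper::Resource
  storage_names[:default] = 'subscribers'   # hypothetical existing table
  is :reflective
  reflect
end

Subscriber.key.map(&:name)              # expected: [:email]
Subscriber.get('someone@example.org')   # keyed lookups now work
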
|
godfat/dm-is-reflective
|
2da66468442c962972b6705a85f21a2041074ac3
|
we already finalize when reflecting
|
diff --git a/lib/dm-is-reflective/adapters/data_objects_adapter.rb b/lib/dm-is-reflective/adapters/data_objects_adapter.rb
index c83def9..a58d40e 100644
--- a/lib/dm-is-reflective/adapters/data_objects_adapter.rb
+++ b/lib/dm-is-reflective/adapters/data_objects_adapter.rb
@@ -1,136 +1,135 @@
module DmIsReflective::DataObjectsAdapter
include DataMapper
# returns all tables' name in the repository.
# e.g.
# ['comments', 'users']
def storages
reflective_auto_load_adapter_extension
storages # call the overrided method
end
# returns all fields, with format [[name, type, attrs]]
# e.g.
# [[:created_at, DateTime, {:required => false}],
# [:email, String, {:required => false, :size => 255,
# :default => '[email protected]'}],
# [:id, DataMapper::Property::Serial, {:required => true, :serial => true,
# :key => true}],
# [:salt_first, String, {:required => false, :size => 50}],
# [:salt_second, String, {:required => false, :size => 50}]]
def fields storage
reflective_query_storage(storage).map{ |field|
attr = reflective_attributes(field)
type = reflective_lookup_primitive(reflective_primitive(field))
pick = if attr[:serial] && type == Integer
Property::Serial
else
type
end
[reflective_field_name(field).to_sym, pick, attr]
}
end
# returns a hash with storage names in keys and
# corresponded fields in values. e.g.
# {'users' => [[:id, Integer, {:required => true,
# :serial => true,
# :key => true}],
# [:email, String, {:required => false,
# :default => '[email protected]'}],
# [:created_at, DateTime, {:required => false}],
# [:salt_first, String, {:required => false, :size => 50}],
# [:salt_second, String, {:required => false, :size => 50}]]}
# see AbstractAdapter#storages and AbstractAdapter#fields for detail
def storages_and_fields
storages.inject({}){ |result, storage|
result[storage] = fields(storage)
result
}
end
# automaticly generate model class(es) and reflect
# all fields with reflect /.*/ for you.
# e.g.
# dm.auto_genclass!
# # => [DataMapper::Is::Reflective::User,
# # DataMapper::Is::Reflective::SchemaInfo,
# # DataMapper::Is::Reflective::Session]
#
# you can change the scope of generated models:
# e.g.
# dm.auto_genclass! :scope => Object
# # => [User, SchemaInfo, Session]
#
# you can generate classes for tables you specified only:
# e.g.
# dm.auto_genclass! :scope => Object, :storages => /^phpbb_/
# # => [PhpbbUser, PhpbbPost, PhpbbConfig]
#
# you can generate classes with String too:
# e.g.
# dm.auto_genclass! :storages => ['users', 'config'], :scope => Object
# # => [User, Config]
#
# you can generate a class only:
# e.g.
# dm.auto_genclass! :storages => 'users'
# # => [DataMapper::Is::Reflective::User]
def auto_genclass! opts = {}
opts[:scope] ||= DmIsReflective
opts[:storages] ||= /.*/
opts[:storages] = [opts[:storages]].flatten
storages.map{ |storage|
mapped = opts[:storages].each{ |target|
case target
when Regexp;
break storage if storage =~ target
when Symbol, String;
break storage if storage == target.to_s
else
raise ArgumentError.new("invalid argument: #{target.inspect}")
end
}
reflective_genclass(mapped, opts[:scope]) if mapped.kind_of?(String)
}.compact
end
private
def reflective_query_storage storage
reflective_auto_load_adapter_extension
reflective_query_storage(storage) # call the overrided method
end
def reflective_genclass storage, scope
model = Class.new
model.__send__(:include, Resource)
model.is(:reflective)
model.storage_names[:default] = storage
scope.const_set(Inflector.classify(storage), model)
model.__send__(:reflect, /.*/)
- model.finalize if model.respond_to?(:finalize)
model
end
def reflective_lookup_primitive primitive
raise TypeError.new("#{primitive} not found for #{self.class}")
end
def reflective_auto_load_adapter_extension
# TODO: can we fix this shit in dm-mysql-adapter?
name = options[:adapter] || options['adapter']
# TODO: can we fix this adapter name in dm-sqlite-adapter?
adapter = name.sub(/\Asqlite3\Z/, 'sqlite')
require "dm-is-reflective/adapters/#{adapter}_adapter"
class_name = "#{Inflector.camelize(adapter)}Adapter"
Adapters.const_get(class_name).__send__(:include,
DmIsReflective.const_get(class_name))
end
end
|
godfat/dm-is-reflective
|
e11fdf01b57f37b0d27e3de189860573ccc5ffc4
|
now mysql can also pick up unique indices and common indices
|
diff --git a/lib/dm-is-reflective/adapters/mysql_adapter.rb b/lib/dm-is-reflective/adapters/mysql_adapter.rb
index 83b3a0b..c2b0044 100644
--- a/lib/dm-is-reflective/adapters/mysql_adapter.rb
+++ b/lib/dm-is-reflective/adapters/mysql_adapter.rb
@@ -1,63 +1,76 @@
module DmIsReflective::MysqlAdapter
include DataMapper
def storages
select('SHOW TABLES')
end
private
# construct needed table metadata
def reflective_query_storage storage
sql = <<-SQL
- SELECT column_name, column_default, is_nullable, data_type,
- character_maximum_length, column_key, extra
- FROM `information_schema`.`columns`
- WHERE `table_schema` = ? AND `table_name` = ?
+ SELECT c.column_name, c.column_key, c.column_default, c.is_nullable,
+ c.data_type, c.character_maximum_length, c.extra, c.table_name,
+ s.index_name
+ FROM `information_schema`.`columns` c
+ LEFT JOIN `information_schema`.`statistics` s
+ ON c.column_name = s.column_name
+ WHERE c.table_schema = ? AND c.table_name = ?
+ GROUP BY c.column_name;
SQL
# TODO: can we fix this shit in dm-mysql-adapter?
- path = options[:path] || options['path'] ||
- options[:database] || options['database']
+ path = (options[:path] || options['path'] ||
+ options[:database] || options['database']).sub('/', '')
- select(Ext::String.compress_lines(sql), path.sub('/', ''), storage)
+ select(Ext::String.compress_lines(sql), path, storage)
end
def reflective_field_name field
field.column_name
end
def reflective_primitive field
field.data_type
end
def reflective_attributes field, attrs = {}
attrs[:serial] = true if field.extra == 'auto_increment'
- attrs[:key] = true if field.column_key == 'PRI'
+
+ case field.column_key
+ when 'PRI'
+ attrs[:key] = true
+ attrs[:unique_index] = :"#{field.table_name}_pkey"
+ when 'UNI'
+ attrs[:unique_index] = :"#{field.index_name}"
+ when 'MUL'
+ attrs[:index] = :"#{field.index_name}"
+ end
attrs[:allow_nil] = field.is_nullable == 'YES'
attrs[:default] = field.column_default if
field.column_default
attrs[:length] = field.character_maximum_length if
field.character_maximum_length
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when 'YEAR' ; Integer
when /\w*INT(EGER)?( SIGNED| UNSIGNED)?( ZEROFILL)?/
; Integer
when /(DOUBLE|FLOAT|DECIMAL)( SIGNED| UNSIGNED)?( ZEROFILL)?/
; Property::Decimal
when /\w*BLOB|\w*BINARY|ENUM|SET|CHAR/; String
when 'TIME' ; Time
when 'DATE' ; Date
when 'DATETIME', 'TIMESTAMP' ; DateTime
when 'BOOL', 'BOOLEAN' ; Property::Boolean
when /\w*TEXT/ ; Property::Text
end || super(primitive)
end
end
diff --git a/lib/dm-is-reflective/adapters/postgres_adapter.rb b/lib/dm-is-reflective/adapters/postgres_adapter.rb
index 8cf5fa0..0d67fc3 100644
--- a/lib/dm-is-reflective/adapters/postgres_adapter.rb
+++ b/lib/dm-is-reflective/adapters/postgres_adapter.rb
@@ -1,104 +1,104 @@
module DmIsReflective::PostgresAdapter
include DataMapper
def storages
sql = <<-SQL
SELECT table_name FROM "information_schema"."tables"
WHERE table_schema = current_schema()
SQL
select(Ext::String.compress_lines(sql))
end
private
def reflective_query_storage storage
sql_key = <<-SQL
SELECT column_name FROM "information_schema"."key_column_usage"
WHERE table_schema = current_schema() AND table_name = ?
SQL
sql_index = <<-SQL
SELECT
(i.relname, ix.indisunique)
FROM
pg_class t, pg_class i, pg_index ix, pg_attribute a
WHERE
t.oid = ix.indrelid
AND i.oid = ix.indexrelid
AND a.attrelid = t.oid
AND a.attnum = ANY(ix.indkey)
AND a.attname = column_name
AND t.relkind = 'r'
AND t.relname = ?
SQL
sql = <<-SQL
SELECT column_name, column_default, is_nullable,
character_maximum_length, udt_name,
(#{sql_key}) AS key, (#{sql_index}) AS indexname_uniqueness
FROM "information_schema"."columns"
WHERE table_schema = current_schema() AND table_name = ?
SQL
select(Ext::String.compress_lines(sql), storage, storage, storage)
end
def reflective_field_name field
field.column_name
end
def reflective_primitive field
field.udt_name
end
def reflective_attributes field, attrs = {}
# strip data type
if field.column_default
field.column_default.gsub!(/(.*?)::[\w\s]*/, '\1')
end
# find out index and unique index
if field.indexname_uniqueness
index_name, uniqueness = field.indexname_uniqueness[1..-2].split(',')
end
attrs[:serial] = true if field.column_default =~ /nextval\('\w+'\)/
attrs[:key] = true if field.column_name == field.key
if index_name
if uniqueness
- attrs[:unique_index] = index_name.to_sym
+ attrs[:unique_index] = :"#{index_name}"
else
- attrs[:index] = index_name.to_sym
+ attrs[:index] = :"#{index_name}"
end
end
attrs[:allow_nil] = field.is_nullable == 'YES'
# strip string quotation
attrs[:default] = field.column_default.gsub(/^'(.*?)'$/, '\1') if
field.column_default && !attrs[:serial]
if field.character_maximum_length
attrs[:length] = field.character_maximum_length
elsif field.udt_name.upcase == 'TEXT'
attrs[:length] = Property::Text.length
end
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when /^INT\d+$/ ; Integer
when /^FLOAT\d+$/ ; Float
when 'VARCHAR', 'BPCHAR'; String
when 'TIMESTAMP', 'DATE'; DateTime
when 'TEXT' ; Property::Text
when 'BOOL' ; Property::Boolean
when 'NUMERIC' ; Property::Decimal
end || super(primitive)
end
end
diff --git a/lib/dm-is-reflective/adapters/sqlite_adapter.rb b/lib/dm-is-reflective/adapters/sqlite_adapter.rb
index 0443105..e03150e 100644
--- a/lib/dm-is-reflective/adapters/sqlite_adapter.rb
+++ b/lib/dm-is-reflective/adapters/sqlite_adapter.rb
@@ -1,90 +1,90 @@
module DmIsReflective::SqliteAdapter
include DataMapper
def storages
sql = <<-SQL
SELECT name FROM sqlite_master
WHERE type = 'table' AND NOT name = 'sqlite_sequence'
SQL
select(Ext::String.compress_lines(sql))
end
private
def reflective_query_storage storage
sql = <<-SQL
SELECT name, sql FROM sqlite_master
WHERE type = 'index' AND tbl_name = ?
SQL
indices = select(sql, storage)
select('PRAGMA table_info(?)', storage).map{ |field|
index = indices.find{ |idx|
idx.sql =~ /ON "#{storage}" \("#{field.name}"\)/ }
field.instance_eval <<-RUBY
def index_name
#{"'#{index.name}'" if index}
end
def uniqueness
#{!!(index.sql =~ /UNIQUE INDEX/) if index}
end
def table_name
'#{storage}'
end
RUBY
field
}
end
def reflective_field_name field
field.name
end
def reflective_primitive field
field.type.gsub(/\(\d+\)/, '')
end
def reflective_attributes field, attrs = {}
if field.pk != 0
attrs[:key] = true
attrs[:serial] = true
attrs[:unique_index] = :"#{field.table_name}_pkey"
end
if field.index_name
if field.uniqueness
- attrs[:unique_index] = field.index_name.to_sym
+ attrs[:unique_index] = :"#{field.index_name}"
else
- attrs[:index] = field.index_name.to_sym
+ attrs[:index] = :"#{field.index_name}"
end
end
attrs[:allow_nil] = field.notnull == 0
attrs[:default] = field.dflt_value[1..-2] if field.dflt_value
if field.type.upcase == 'TEXT'
attrs[:length] = Property::Text.length
else
ergo = field.type.match(/\((\d+)\)/)
size = ergo && ergo[1].to_i
attrs[:length] = size if size
end
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when 'INTEGER' ; Integer
when 'REAL', 'NUMERIC'; Float
when 'VARCHAR' ; String
when 'TIMESTAMP' ; DateTime
when 'BOOLEAN' ; Property::Boolean
when 'TEXT' ; Property::Text
end || super(primitive)
end
end
diff --git a/lib/dm-is-reflective/test.rb b/lib/dm-is-reflective/test.rb
index 31d81d0..ae831ff 100644
--- a/lib/dm-is-reflective/test.rb
+++ b/lib/dm-is-reflective/test.rb
@@ -1,285 +1,291 @@
require 'bacon'
Bacon.summary_on_exit
require 'dm-core'
require 'dm-migrations'
require 'dm-is-reflective'
module Abstract
class User
include DataMapper::Resource
has n, :comments
property :id, Serial
property :login, String, :length => 70
property :sig, Text
property :created_at, DateTime
is :reflective
end
class SuperUser
include DataMapper::Resource
property :id, Serial
property :bool, Boolean
is :reflective
end
class Comment
include DataMapper::Resource
belongs_to :user, :required => false
property :id, Serial
property :title, String, :length => 50, :default => 'default title',
:allow_nil => false
property :body, Text
is :reflective
end
Tables = ['abstract_comments', 'abstract_super_users', 'abstract_users']
AttrCommon = {:allow_nil => true}
AttrCommonPK = {:serial => true, :key => true, :allow_nil => false}
AttrText = {:length => 65535}.merge(AttrCommon)
def self.next_id
@id ||= 0
@id += 1
end
end
include Abstract
shared :reflective do
def user_fields
@user_fields ||=
[[:created_at, DateTime, AttrCommon],
[:id, DataMapper::Property::Serial,
{:unique_index => :abstract_users_pkey}.merge(AttrCommonPK)],
[:login, String, {:length => 70}.merge(AttrCommon)],
[:sig, DataMapper::Property::Text, AttrText]]
end
def comment_fields
@comment_fields ||= begin
index_name = case DataMapper.repository.adapter.class.name
- when 'DataMapper::Adapters::SqliteAdapter'
- :index
- else
+ when 'DataMapper::Adapters::PostgresAdapter'
:unique_index
+ else
+ :index
end
[[:body , DataMapper::Property::Text , AttrText],
[:id , DataMapper::Property::Serial,
{:unique_index => :abstract_comments_pkey}.merge(AttrCommonPK)],
[:title , String ,
{:length => 50, :default => 'default title', :allow_nil => false}],
[:user_id, Integer ,
{index_name => :index_abstract_comments_user}.merge(AttrCommon)]]
end
end
- # there's differences between adapters
def super_user_fields
- @super_user_fields ||=
- [[:bool, DataMapper::Property::Boolean, AttrCommon],
- [:id, DataMapper::Property::Serial,
- {:unique_index => :abstract_super_users_pkey}.merge(AttrCommonPK)]]
+ @super_user_fields ||= begin
+ type = case DataMapper.repository.adapter.class.name
+ when 'DataMapper::Adapters::MysqlAdapter'
+ Integer
+ else
+ DataMapper::Property::Boolean
+ end
+ [[:bool, type, AttrCommon],
+ [:id , DataMapper::Property::Serial,
+ {:unique_index => :abstract_super_users_pkey}.merge(AttrCommonPK)]]
+ end
end
before do
@dm = setup_data_mapper
[User, Comment, SuperUser].each(&:auto_migrate!)
end
def sort_fields fields
fields.sort_by{ |f| f.first.to_s }
end
def create_fake_model
model = Class.new
model.module_eval do
include DataMapper::Resource
property :id, DataMapper::Property::Serial
is :reflective
end
Abstract.const_set("Model#{Abstract.next_id}", model)
[model, setup_data_mapper]
end
def new_scope
Abstract.const_set("Scope#{Abstract.next_id}", Module.new)
end
def test_create_comment
Comment.create(:title => 'XD')
Comment.first.title.should.eq 'XD'
end
def test_create_user
now = Time.now
User.create(:created_at => now)
User.first.created_at.asctime.should.eq now.asctime
now
end
should 'create comment' do
test_create_comment
end
should 'create user' do
test_create_user
end
should 'storages' do
@dm.storages.sort.should.eq Tables
sort_fields(@dm.fields('abstract_comments')).should.eq comment_fields
end
should 'reflect all' do
test_create_comment # for fixtures
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
local_dm.storages.sort.should.eq Tables
model.storage_name.should.eq 'abstract_comments'
model.send :reflect
model.all.size .should.eq 1
sort_fields(model.fields).should.eq comment_fields
model.first.title .should.eq 'XD'
end
should 'reflect and create' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect
model.create(:title => 'orz')
model.first.title.should.eq 'orz'
model.create
model.last.title.should.eq 'default title'
end
should 'storages and fields' do
sort_fields(@dm.fields('abstract_users')).should.eq user_fields
@dm.storages_and_fields.inject({}){ |r, i|
key, value = i
r[key] = value.sort_by{ |v| v.first.to_s }
r
}.should.eq('abstract_users' => user_fields ,
'abstract_comments' => comment_fields ,
'abstract_super_users' => super_user_fields)
end
should 'reflect type' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect, DataMapper::Property::Serial
model.properties.map(&:name).map(&:to_s).sort.should.eq ['id']
model.send :reflect, Integer
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'user_id']
end
should 'reflect multiple' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_users'
model.send :reflect, :login, DataMapper::Property::Serial
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'login']
end
should 'reflect regexp' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect, /id$/
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'user_id']
end
should 'raise ArgumentError when giving invalid argument' do
lambda{
User.send :reflect, 29
}.should.raise ArgumentError
end
should 'allow empty string' do
Comment.new(:title => '').save.should.eq true
end
should 'auto_genclasses' do
scope = new_scope
@dm.auto_genclass!(:scope => scope).map(&:to_s).sort.should.eq \
["#{scope}::AbstractComment",
"#{scope}::AbstractSuperUser",
"#{scope}::AbstractUser"]
comment = scope.const_get('AbstractComment')
sort_fields(comment.fields).should.eq comment_fields
test_create_comment
comment.first.title.should.eq 'XD'
comment.create(:title => 'orz', :body => 'dm-reflect')
comment.last.body.should.eq 'dm-reflect'
end
should 'auto_genclass' do
scope = new_scope
@dm.auto_genclass!(:scope => scope,
:storages => 'abstract_users').map(&:to_s).should.eq \
["#{scope}::AbstractUser"]
user = scope.const_get('AbstractUser')
sort_fields(user.fields).should.eq user_fields
now = test_create_user
user.first.created_at.asctime.should.eq now.asctime
user.create(:login => 'godfat')
user.last.login.should.eq 'godfat'
end
should 'auto_genclass with regexp' do
scope = new_scope
@dm.auto_genclass!(:scope => scope,
:storages => /_users$/).map(&:to_s).sort.should.eq \
["#{scope}::AbstractSuperUser", "#{scope}::AbstractUser"]
user = scope.const_get('AbstractSuperUser')
sort_fields(user.fields).should.eq sort_fields(SuperUser.fields)
end
should 'reflect return value' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
mapped = model.send :reflect, /.*/
mapped.map(&:object_id).sort.should.eq \
model.properties.map(&:object_id).sort
end
end
module Kernel
def eq? rhs
self == rhs
end
def require_adapter adapter
require "dm-#{adapter}-adapter"
rescue LoadError
puts "skip #{adapter} test since it's not installed"
end
end
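
A sketch of the reflected attributes after this change, assuming a MySQL repository configured as in the tests; the Article model, its columns and the reported index names are illustrative.

class Article
  include DataMapper::Resource
  property :id,    Serial
  property :slug,  String, :unique_index => true   # becomes a UNIQUE index
  property :topic, String, :index        => true   # becomes a plain index
  is :reflective
end

DataMapper.finalize
Article.auto_migrate!

# #fields returns [[name, type, attrs]]; on MySQL the attrs now carry the
# index names as well (column_key UNI / MUL plus information_schema.statistics).
slug_attrs  = Article.fields.assoc(:slug).last
topic_attrs = Article.fields.assoc(:topic).last
slug_attrs[:unique_index]   # expected: the UNIQUE index name, as a Symbol
topic_attrs[:index]         # expected: the plain index name, as a Symbol
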
|
godfat/dm-is-reflective
|
aad6cb91f2381b7ba4d82924f74a0f2bdcba08d3
|
fix indentation
|
diff --git a/lib/dm-is-reflective/adapters/mysql_adapter.rb b/lib/dm-is-reflective/adapters/mysql_adapter.rb
index deda4dc..83b3a0b 100644
--- a/lib/dm-is-reflective/adapters/mysql_adapter.rb
+++ b/lib/dm-is-reflective/adapters/mysql_adapter.rb
@@ -1,63 +1,63 @@
module DmIsReflective::MysqlAdapter
include DataMapper
def storages
select('SHOW TABLES')
end
private
# construct needed table metadata
def reflective_query_storage storage
sql = <<-SQL
SELECT column_name, column_default, is_nullable, data_type,
character_maximum_length, column_key, extra
FROM `information_schema`.`columns`
WHERE `table_schema` = ? AND `table_name` = ?
SQL
# TODO: can we fix this shit in dm-mysql-adapter?
path = options[:path] || options['path'] ||
options[:database] || options['database']
select(Ext::String.compress_lines(sql), path.sub('/', ''), storage)
end
def reflective_field_name field
field.column_name
end
def reflective_primitive field
field.data_type
end
def reflective_attributes field, attrs = {}
- attrs[:serial] = true if field.extra == 'auto_increment'
- attrs[:key] = true if field.column_key == 'PRI'
+ attrs[:serial] = true if field.extra == 'auto_increment'
+ attrs[:key] = true if field.column_key == 'PRI'
- attrs[:allow_nil] = field.is_nullable == 'YES'
- attrs[:default] = field.column_default if
- field.column_default
+ attrs[:allow_nil] = field.is_nullable == 'YES'
+ attrs[:default] = field.column_default if
+ field.column_default
- attrs[:length] = field.character_maximum_length if
- field.character_maximum_length
+ attrs[:length] = field.character_maximum_length if
+ field.character_maximum_length
- attrs
+ attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when 'YEAR' ; Integer
when /\w*INT(EGER)?( SIGNED| UNSIGNED)?( ZEROFILL)?/
; Integer
when /(DOUBLE|FLOAT|DECIMAL)( SIGNED| UNSIGNED)?( ZEROFILL)?/
; Property::Decimal
when /\w*BLOB|\w*BINARY|ENUM|SET|CHAR/; String
when 'TIME' ; Time
when 'DATE' ; Date
when 'DATETIME', 'TIMESTAMP' ; DateTime
when 'BOOL', 'BOOLEAN' ; Property::Boolean
when /\w*TEXT/ ; Property::Text
end || super(primitive)
end
end
|
godfat/dm-is-reflective
|
979efc0f6d67b4d58a22a6ad01ef5baa0dafc461
|
sqlite_adapter.rb: respect index uniqueness
|
diff --git a/lib/dm-is-reflective/adapters/sqlite_adapter.rb b/lib/dm-is-reflective/adapters/sqlite_adapter.rb
index a1e2ec4..0443105 100644
--- a/lib/dm-is-reflective/adapters/sqlite_adapter.rb
+++ b/lib/dm-is-reflective/adapters/sqlite_adapter.rb
@@ -1,80 +1,90 @@
module DmIsReflective::SqliteAdapter
include DataMapper
def storages
sql = <<-SQL
SELECT name FROM sqlite_master
WHERE type = 'table' AND NOT name = 'sqlite_sequence'
SQL
select(Ext::String.compress_lines(sql))
end
private
def reflective_query_storage storage
sql = <<-SQL
SELECT name, sql FROM sqlite_master
WHERE type = 'index' AND tbl_name = ?
SQL
indices = select(sql, storage)
select('PRAGMA table_info(?)', storage).map{ |field|
- index_name = indices.find{ |idx|
+ index = indices.find{ |idx|
idx.sql =~ /ON "#{storage}" \("#{field.name}"\)/ }
field.instance_eval <<-RUBY
def index_name
- #{"'#{index_name.name}'" if index_name}
+ #{"'#{index.name}'" if index}
+ end
+
+ def uniqueness
+ #{!!(index.sql =~ /UNIQUE INDEX/) if index}
end
def table_name
'#{storage}'
end
RUBY
field
}
end
def reflective_field_name field
field.name
end
def reflective_primitive field
field.type.gsub(/\(\d+\)/, '')
end
def reflective_attributes field, attrs = {}
if field.pk != 0
attrs[:key] = true
attrs[:serial] = true
attrs[:unique_index] = :"#{field.table_name}_pkey"
end
- attrs[:index] = field.index_name.to_sym if field.index_name
+ if field.index_name
+ if field.uniqueness
+ attrs[:unique_index] = field.index_name.to_sym
+ else
+ attrs[:index] = field.index_name.to_sym
+ end
+ end
attrs[:allow_nil] = field.notnull == 0
attrs[:default] = field.dflt_value[1..-2] if field.dflt_value
if field.type.upcase == 'TEXT'
attrs[:length] = Property::Text.length
else
ergo = field.type.match(/\((\d+)\)/)
size = ergo && ergo[1].to_i
attrs[:length] = size if size
end
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when 'INTEGER' ; Integer
when 'REAL', 'NUMERIC'; Float
when 'VARCHAR' ; String
when 'TIMESTAMP' ; DateTime
when 'BOOLEAN' ; Property::Boolean
when 'TEXT' ; Property::Text
end || super(primitive)
end
end
|
godfat/dm-is-reflective
|
43ed2b25e0d09a809d3606e2e6e9d8564cf5b617
|
now sqlite matches postgres
|
diff --git a/lib/dm-is-reflective/adapters/sqlite_adapter.rb b/lib/dm-is-reflective/adapters/sqlite_adapter.rb
index 4dc20e0..a1e2ec4 100644
--- a/lib/dm-is-reflective/adapters/sqlite_adapter.rb
+++ b/lib/dm-is-reflective/adapters/sqlite_adapter.rb
@@ -1,56 +1,80 @@
module DmIsReflective::SqliteAdapter
include DataMapper
def storages
sql = <<-SQL
SELECT name FROM sqlite_master
WHERE type = 'table' AND NOT name = 'sqlite_sequence'
SQL
select(Ext::String.compress_lines(sql))
end
private
def reflective_query_storage storage
- select('PRAGMA table_info(?)', storage)
+ sql = <<-SQL
+ SELECT name, sql FROM sqlite_master
+ WHERE type = 'index' AND tbl_name = ?
+ SQL
+ indices = select(sql, storage)
+
+ select('PRAGMA table_info(?)', storage).map{ |field|
+ index_name = indices.find{ |idx|
+ idx.sql =~ /ON "#{storage}" \("#{field.name}"\)/ }
+
+ field.instance_eval <<-RUBY
+ def index_name
+ #{"'#{index_name.name}'" if index_name}
+ end
+
+ def table_name
+ '#{storage}'
+ end
+ RUBY
+ field
+ }
end
def reflective_field_name field
field.name
end
def reflective_primitive field
field.type.gsub(/\(\d+\)/, '')
end
def reflective_attributes field, attrs = {}
if field.pk != 0
- attrs[:key] = true
- attrs[:serial] = true
+ attrs[:key] = true
+ attrs[:serial] = true
+ attrs[:unique_index] = :"#{field.table_name}_pkey"
end
+
+ attrs[:index] = field.index_name.to_sym if field.index_name
+
attrs[:allow_nil] = field.notnull == 0
attrs[:default] = field.dflt_value[1..-2] if field.dflt_value
if field.type.upcase == 'TEXT'
attrs[:length] = Property::Text.length
else
ergo = field.type.match(/\((\d+)\)/)
size = ergo && ergo[1].to_i
attrs[:length] = size if size
end
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when 'INTEGER' ; Integer
when 'REAL', 'NUMERIC'; Float
when 'VARCHAR' ; String
when 'TIMESTAMP' ; DateTime
when 'BOOLEAN' ; Property::Boolean
when 'TEXT' ; Property::Text
end || super(primitive)
end
end
diff --git a/lib/dm-is-reflective/test.rb b/lib/dm-is-reflective/test.rb
index ff6dc29..31d81d0 100644
--- a/lib/dm-is-reflective/test.rb
+++ b/lib/dm-is-reflective/test.rb
@@ -1,250 +1,285 @@
require 'bacon'
Bacon.summary_on_exit
require 'dm-core'
require 'dm-migrations'
require 'dm-is-reflective'
module Abstract
class User
include DataMapper::Resource
has n, :comments
property :id, Serial
property :login, String, :length => 70
property :sig, Text
property :created_at, DateTime
is :reflective
end
class SuperUser
include DataMapper::Resource
property :id, Serial
property :bool, Boolean
is :reflective
end
class Comment
include DataMapper::Resource
belongs_to :user, :required => false
property :id, Serial
property :title, String, :length => 50, :default => 'default title',
:allow_nil => false
property :body, Text
is :reflective
end
Tables = ['abstract_comments', 'abstract_super_users', 'abstract_users']
AttrCommon = {:allow_nil => true}
AttrCommonPK = {:serial => true, :key => true, :allow_nil => false}
AttrText = {:length => 65535}.merge(AttrCommon)
def self.next_id
@id ||= 0
@id += 1
end
end
include Abstract
shared :reflective do
+ def user_fields
+ @user_fields ||=
+ [[:created_at, DateTime, AttrCommon],
+ [:id, DataMapper::Property::Serial,
+ {:unique_index => :abstract_users_pkey}.merge(AttrCommonPK)],
+ [:login, String, {:length => 70}.merge(AttrCommon)],
+ [:sig, DataMapper::Property::Text, AttrText]]
+ end
+ def comment_fields
+ @comment_fields ||= begin
+ index_name = case DataMapper.repository.adapter.class.name
+ when 'DataMapper::Adapters::SqliteAdapter'
+ :index
+ else
+ :unique_index
+ end
+ [[:body , DataMapper::Property::Text , AttrText],
+ [:id , DataMapper::Property::Serial,
+ {:unique_index => :abstract_comments_pkey}.merge(AttrCommonPK)],
+
+ [:title , String ,
+ {:length => 50, :default => 'default title', :allow_nil => false}],
+
+ [:user_id, Integer ,
+ {index_name => :index_abstract_comments_user}.merge(AttrCommon)]]
+ end
+ end
+
+ # there's differences between adapters
+ def super_user_fields
+ @super_user_fields ||=
+ [[:bool, DataMapper::Property::Boolean, AttrCommon],
+ [:id, DataMapper::Property::Serial,
+ {:unique_index => :abstract_super_users_pkey}.merge(AttrCommonPK)]]
+ end
before do
@dm = setup_data_mapper
[User, Comment, SuperUser].each(&:auto_migrate!)
end
def sort_fields fields
fields.sort_by{ |f| f.first.to_s }
end
def create_fake_model
model = Class.new
model.module_eval do
include DataMapper::Resource
property :id, DataMapper::Property::Serial
is :reflective
end
Abstract.const_set("Model#{Abstract.next_id}", model)
[model, setup_data_mapper]
end
def new_scope
Abstract.const_set("Scope#{Abstract.next_id}", Module.new)
end
def test_create_comment
Comment.create(:title => 'XD')
Comment.first.title.should.eq 'XD'
end
def test_create_user
now = Time.now
User.create(:created_at => now)
User.first.created_at.asctime.should.eq now.asctime
now
end
should 'create comment' do
test_create_comment
end
should 'create user' do
test_create_user
end
should 'storages' do
@dm.storages.sort.should.eq Tables
sort_fields(@dm.fields('abstract_comments')).should.eq comment_fields
end
should 'reflect all' do
test_create_comment # for fixtures
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
local_dm.storages.sort.should.eq Tables
model.storage_name.should.eq 'abstract_comments'
model.send :reflect
model.all.size .should.eq 1
sort_fields(model.fields).should.eq comment_fields
model.first.title .should.eq 'XD'
end
should 'reflect and create' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect
model.create(:title => 'orz')
model.first.title.should.eq 'orz'
model.create
model.last.title.should.eq 'default title'
end
should 'storages and fields' do
sort_fields(@dm.fields('abstract_users')).should.eq user_fields
@dm.storages_and_fields.inject({}){ |r, i|
key, value = i
r[key] = value.sort_by{ |v| v.first.to_s }
r
}.should.eq('abstract_users' => user_fields ,
'abstract_comments' => comment_fields ,
'abstract_super_users' => super_user_fields)
end
should 'reflect type' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect, DataMapper::Property::Serial
model.properties.map(&:name).map(&:to_s).sort.should.eq ['id']
model.send :reflect, Integer
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'user_id']
end
should 'reflect multiple' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_users'
model.send :reflect, :login, DataMapper::Property::Serial
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'login']
end
should 'reflect regexp' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect, /id$/
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'user_id']
end
should 'raise ArgumentError when giving invalid argument' do
lambda{
User.send :reflect, 29
}.should.raise ArgumentError
end
should 'allow empty string' do
Comment.new(:title => '').save.should.eq true
end
should 'auto_genclasses' do
scope = new_scope
@dm.auto_genclass!(:scope => scope).map(&:to_s).sort.should.eq \
["#{scope}::AbstractComment",
"#{scope}::AbstractSuperUser",
"#{scope}::AbstractUser"]
comment = scope.const_get('AbstractComment')
sort_fields(comment.fields).should.eq comment_fields
test_create_comment
comment.first.title.should.eq 'XD'
comment.create(:title => 'orz', :body => 'dm-reflect')
comment.last.body.should.eq 'dm-reflect'
end
should 'auto_genclass' do
scope = new_scope
@dm.auto_genclass!(:scope => scope,
:storages => 'abstract_users').map(&:to_s).should.eq \
["#{scope}::AbstractUser"]
user = scope.const_get('AbstractUser')
sort_fields(user.fields).should.eq user_fields
now = test_create_user
user.first.created_at.asctime.should.eq now.asctime
user.create(:login => 'godfat')
user.last.login.should.eq 'godfat'
end
should 'auto_genclass with regexp' do
scope = new_scope
@dm.auto_genclass!(:scope => scope,
:storages => /_users$/).map(&:to_s).sort.should.eq \
["#{scope}::AbstractSuperUser", "#{scope}::AbstractUser"]
user = scope.const_get('AbstractSuperUser')
sort_fields(user.fields).should.eq sort_fields(SuperUser.fields)
end
should 'reflect return value' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
mapped = model.send :reflect, /.*/
mapped.map(&:object_id).sort.should.eq \
model.properties.map(&:object_id).sort
end
end
module Kernel
def eq? rhs
self == rhs
end
def require_adapter adapter
require "dm-#{adapter}-adapter"
rescue LoadError
puts "skip #{adapter} test since it's not installed"
end
end
diff --git a/test/test_mysql.rb b/test/test_mysql.rb
index d8a1dfa..f5a61d1 100644
--- a/test/test_mysql.rb
+++ b/test/test_mysql.rb
@@ -1,52 +1,24 @@
require 'dm-is-reflective/test'
require_adapter 'mysql'
describe 'mysql' do
if ENV['TRAVIS']
def setup_data_mapper
DataMapper.setup(:default, :adapter => 'mysql' ,
:username => 'root' ,
:host => 'localhost' ,
:database => 'myapp_test')
end
else
def setup_data_mapper
DataMapper.setup(:default, :adapter => 'mysql' ,
:username => 'dm_is_reflective',
:password => 'godfat' ,
:host => 'localhost' ,
:database => 'dm_is_reflective')
end
end
- def user_fields
- @user_fields ||=
- [[:created_at, DateTime, AttrCommon],
- [:id, DataMapper::Property::Serial,
- {:unique_index => :abstract_users_pkey}.merge(AttrCommonPK)],
- [:login, String, {:length => 70}.merge(AttrCommon)],
- [:sig, DataMapper::Property::Text, AttrText]]
- end
-
- def comment_fields
- @comment_fields ||=
- [[:body, DataMapper::Property::Text, AttrText],
- [:id, DataMapper::Property::Serial,
- {:unique_index => :abstract_comments_pkey}.merge(AttrCommonPK)],
- [:title, String, {:length => 50, :default => 'default title',
- :allow_nil => false}],
- [:user_id, Integer,
- {:unique_index => :index_abstract_comments_user}.merge(AttrCommon)]]
- end
-
- # there's differences between adapters
- def super_user_fields
- @super_user_fields ||=
- [[:bool, Integer, AttrCommon],
- [:id, DataMapper::Property::Serial,
- {:unique_index => :abstract_super_users_pkey}.merge(AttrCommonPK)]]
- end
-
behaves_like :reflective
end if defined?(DataMapper::Adapters::MysqlAdapter)
diff --git a/test/test_postgres.rb b/test/test_postgres.rb
index b7e4aa0..fc739dc 100644
--- a/test/test_postgres.rb
+++ b/test/test_postgres.rb
@@ -1,52 +1,24 @@
require 'dm-is-reflective/test'
require_adapter 'postgres'
describe 'postgres' do
if ENV['TRAVIS']
def setup_data_mapper
DataMapper.setup(:default, :adapter => 'postgres' ,
:username => 'postgres' ,
:host => 'localhost' ,
:database => 'myapp_test')
end
else
def setup_data_mapper
DataMapper.setup(:default, :adapter => 'postgres' ,
:username => 'dm_is_reflective',
:password => 'godfat' ,
:host => 'localhost' ,
:database => 'dm_is_reflective')
end
end
- def user_fields
- @user_fields ||=
- [[:created_at, DateTime, AttrCommon],
- [:id, DataMapper::Property::Serial,
- {:unique_index => :abstract_users_pkey}.merge(AttrCommonPK)],
- [:login, String, {:length => 70}.merge(AttrCommon)],
- [:sig, DataMapper::Property::Text, AttrText]]
- end
-
- def comment_fields
- @comment_fields ||=
- [[:body, DataMapper::Property::Text, AttrText],
- [:id, DataMapper::Property::Serial,
- {:unique_index => :abstract_comments_pkey}.merge(AttrCommonPK)],
- [:title, String, {:length => 50, :default => 'default title',
- :allow_nil => false}],
- [:user_id, Integer,
- {:unique_index => :index_abstract_comments_user}.merge(AttrCommon)]]
- end
-
- # there's differences between adapters
- def super_user_fields
- @super_user_fields ||=
- [[:bool, DataMapper::Property::Boolean, AttrCommon],
- [:id, DataMapper::Property::Serial,
- {:unique_index => :abstract_super_users_pkey}.merge(AttrCommonPK)]]
- end
-
behaves_like :reflective
end if defined?(DataMapper::Adapters::PostgresAdapter)
diff --git a/test/test_sqlite.rb b/test/test_sqlite.rb
index 6d50b34..1c67510 100644
--- a/test/test_sqlite.rb
+++ b/test/test_sqlite.rb
@@ -1,36 +1,11 @@
require 'dm-is-reflective/test'
require_adapter 'sqlite'
describe 'sqlite' do
def setup_data_mapper
DataMapper.setup(:default, :adapter => 'sqlite', :database => ':memory:')
end
- def user_fields
- @user_fields ||=
- [[:created_at, DateTime, AttrCommon],
- [:id, DataMapper::Property::Serial, AttrCommonPK],
- [:login, String, {:length => 70}.merge(AttrCommon)],
- [:sig, DataMapper::Property::Text, AttrText]]
- end
-
- def comment_fields
- @comment_fields ||=
- [[:body, DataMapper::Property::Text, AttrText],
- [:id, DataMapper::Property::Serial, AttrCommonPK],
- [:title, String, {:length => 50, :default => 'default title',
- :allow_nil => false}],
- [:user_id, Integer,
- {:index => :index_abstract_comments_user}.merge(AttrCommon)]]
- end
-
- # there's differences between adapters
- def super_user_fields
- @super_user_fields ||=
- [[:bool, DataMapper::Property::Boolean, AttrCommon],
- [:id, DataMapper::Property::Serial, AttrCommonPK]]
- end
-
behaves_like :reflective
end if defined?(DataMapper::Adapters::SqliteAdapter)
|
godfat/dm-is-reflective
|
cf989b3ea4283706c2d2435b1146a4022f11b430
|
reduce lines
|
diff --git a/lib/dm-is-reflective/adapters/sqlite_adapter.rb b/lib/dm-is-reflective/adapters/sqlite_adapter.rb
index cb90f52..4dc20e0 100644
--- a/lib/dm-is-reflective/adapters/sqlite_adapter.rb
+++ b/lib/dm-is-reflective/adapters/sqlite_adapter.rb
@@ -1,57 +1,56 @@
module DmIsReflective::SqliteAdapter
include DataMapper
def storages
sql = <<-SQL
- SELECT name
- FROM sqlite_master
+ SELECT name FROM sqlite_master
WHERE type = 'table' AND NOT name = 'sqlite_sequence'
SQL
select(Ext::String.compress_lines(sql))
end
private
def reflective_query_storage storage
select('PRAGMA table_info(?)', storage)
end
def reflective_field_name field
field.name
end
def reflective_primitive field
field.type.gsub(/\(\d+\)/, '')
end
def reflective_attributes field, attrs = {}
if field.pk != 0
attrs[:key] = true
attrs[:serial] = true
end
attrs[:allow_nil] = field.notnull == 0
attrs[:default] = field.dflt_value[1..-2] if field.dflt_value
if field.type.upcase == 'TEXT'
attrs[:length] = Property::Text.length
else
ergo = field.type.match(/\((\d+)\)/)
size = ergo && ergo[1].to_i
attrs[:length] = size if size
end
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when 'INTEGER' ; Integer
when 'REAL', 'NUMERIC'; Float
when 'VARCHAR' ; String
when 'TIMESTAMP' ; DateTime
when 'BOOLEAN' ; Property::Boolean
when 'TEXT' ; Property::Text
end || super(primitive)
end
end
|
godfat/dm-is-reflective
|
22fc29a09746c742810b66499827a9a3ee4391df
|
now we properly set up :unique_index for postgres. TODO: sqlite, mysql
|
diff --git a/lib/dm-is-reflective/adapters/postgres_adapter.rb b/lib/dm-is-reflective/adapters/postgres_adapter.rb
index ddba375..8cf5fa0 100644
--- a/lib/dm-is-reflective/adapters/postgres_adapter.rb
+++ b/lib/dm-is-reflective/adapters/postgres_adapter.rb
@@ -1,72 +1,104 @@
module DmIsReflective::PostgresAdapter
include DataMapper
def storages
sql = <<-SQL
SELECT table_name FROM "information_schema"."tables"
WHERE table_schema = current_schema()
SQL
select(Ext::String.compress_lines(sql))
end
private
def reflective_query_storage storage
sql_key = <<-SQL
SELECT column_name FROM "information_schema"."key_column_usage"
WHERE table_schema = current_schema() AND table_name = ?
SQL
+ sql_index = <<-SQL
+ SELECT
+ (i.relname, ix.indisunique)
+ FROM
+ pg_class t, pg_class i, pg_index ix, pg_attribute a
+ WHERE
+ t.oid = ix.indrelid
+ AND i.oid = ix.indexrelid
+ AND a.attrelid = t.oid
+ AND a.attnum = ANY(ix.indkey)
+ AND a.attname = column_name
+ AND t.relkind = 'r'
+ AND t.relname = ?
+ SQL
+
sql = <<-SQL
SELECT column_name, column_default, is_nullable,
character_maximum_length, udt_name,
- (#{sql_key}) AS key
+ (#{sql_key}) AS key, (#{sql_index}) AS indexname_uniqueness
FROM "information_schema"."columns"
WHERE table_schema = current_schema() AND table_name = ?
SQL
- select(Ext::String.compress_lines(sql), storage, storage)
+ select(Ext::String.compress_lines(sql), storage, storage, storage)
end
+
+
def reflective_field_name field
field.column_name
end
def reflective_primitive field
field.udt_name
end
def reflective_attributes field, attrs = {}
# strip data type
- field.column_default.gsub!(/(.*?)::[\w\s]*/, '\1') if
- field.column_default
+ if field.column_default
+ field.column_default.gsub!(/(.*?)::[\w\s]*/, '\1')
+ end
+
+ # find out index and unique index
+ if field.indexname_uniqueness
+ index_name, uniqueness = field.indexname_uniqueness[1..-2].split(',')
+ end
attrs[:serial] = true if field.column_default =~ /nextval\('\w+'\)/
- attrs[:key] = true if field.key == field.column_name
+ attrs[:key] = true if field.column_name == field.key
+
+ if index_name
+ if uniqueness
+ attrs[:unique_index] = index_name.to_sym
+ else
+ attrs[:index] = index_name.to_sym
+ end
+ end
+
attrs[:allow_nil] = field.is_nullable == 'YES'
# strip string quotation
attrs[:default] = field.column_default.gsub(/^'(.*?)'$/, '\1') if
field.column_default && !attrs[:serial]
if field.character_maximum_length
attrs[:length] = field.character_maximum_length
elsif field.udt_name.upcase == 'TEXT'
attrs[:length] = Property::Text.length
end
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when /^INT\d+$/ ; Integer
when /^FLOAT\d+$/ ; Float
when 'VARCHAR', 'BPCHAR'; String
when 'TIMESTAMP', 'DATE'; DateTime
when 'TEXT' ; Property::Text
when 'BOOL' ; Property::Boolean
when 'NUMERIC' ; Property::Decimal
end || super(primitive)
end
end
diff --git a/lib/dm-is-reflective/test.rb b/lib/dm-is-reflective/test.rb
index ce8125b..ff6dc29 100644
--- a/lib/dm-is-reflective/test.rb
+++ b/lib/dm-is-reflective/test.rb
@@ -1,279 +1,250 @@
require 'bacon'
Bacon.summary_on_exit
require 'dm-core'
require 'dm-migrations'
require 'dm-is-reflective'
module Abstract
class User
include DataMapper::Resource
has n, :comments
property :id, Serial
property :login, String, :length => 70
property :sig, Text
property :created_at, DateTime
is :reflective
end
class SuperUser
include DataMapper::Resource
property :id, Serial
property :bool, Boolean
is :reflective
end
class Comment
include DataMapper::Resource
belongs_to :user, :required => false
property :id, Serial
property :title, String, :length => 50, :default => 'default title',
:allow_nil => false
property :body, Text
is :reflective
end
Tables = ['abstract_comments', 'abstract_super_users', 'abstract_users']
AttrCommon = {:allow_nil => true}
AttrCommonPK = {:serial => true, :key => true, :allow_nil => false}
AttrText = {:length => 65535}.merge(AttrCommon)
def self.next_id
@id ||= 0
@id += 1
end
end
include Abstract
shared :reflective do
- def user_fields
- [[:created_at, DateTime, AttrCommon],
- [:id, DataMapper::Property::Serial, AttrCommonPK],
- [:login, String, {:length => 70}.merge(AttrCommon)],
- [:sig, DataMapper::Property::Text, AttrText]]
- end
-
- def comment_fields
- [[:body, DataMapper::Property::Text, AttrText],
- [:id, DataMapper::Property::Serial, AttrCommonPK],
- [:title, String, {:length => 50, :default => 'default title',
- :allow_nil => false}],
- [:user_id, Integer, AttrCommon]]
- end
- # there's differences between adapters
- def super_user_fields
- mysql = defined?(DataMapper::Adapters::MysqlAdapter) &&
- DataMapper::Adapters::MysqlAdapter
- case DataMapper.repository.adapter
- when mysql
- # Mysql couldn't tell it's boolean or tinyint
- [[:bool, Integer, AttrCommon],
- [:id, DataMapper::Property::Serial, AttrCommonPK]]
-
- else
- [[:bool, DataMapper::Property::Boolean, AttrCommon],
- [:id, DataMapper::Property::Serial, AttrCommonPK]]
- end
- end
before do
@dm = setup_data_mapper
[User, Comment, SuperUser].each(&:auto_migrate!)
end
def sort_fields fields
fields.sort_by{ |f| f.first.to_s }
end
def create_fake_model
model = Class.new
model.module_eval do
include DataMapper::Resource
property :id, DataMapper::Property::Serial
is :reflective
end
Abstract.const_set("Model#{Abstract.next_id}", model)
[model, setup_data_mapper]
end
def new_scope
Abstract.const_set("Scope#{Abstract.next_id}", Module.new)
end
def test_create_comment
Comment.create(:title => 'XD')
Comment.first.title.should.eq 'XD'
end
def test_create_user
now = Time.now
User.create(:created_at => now)
User.first.created_at.asctime.should.eq now.asctime
now
end
should 'create comment' do
test_create_comment
end
should 'create user' do
test_create_user
end
should 'storages' do
@dm.storages.sort.should.eq Tables
sort_fields(@dm.fields('abstract_comments')).should.eq comment_fields
end
should 'reflect all' do
test_create_comment # for fixtures
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
local_dm.storages.sort.should.eq Tables
model.storage_name.should.eq 'abstract_comments'
model.send :reflect
model.all.size .should.eq 1
sort_fields(model.fields).should.eq comment_fields
model.first.title .should.eq 'XD'
end
should 'reflect and create' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect
model.create(:title => 'orz')
model.first.title.should.eq 'orz'
model.create
model.last.title.should.eq 'default title'
end
should 'storages and fields' do
sort_fields(@dm.fields('abstract_users')).should.eq user_fields
@dm.storages_and_fields.inject({}){ |r, i|
key, value = i
r[key] = value.sort_by{ |v| v.first.to_s }
r
}.should.eq('abstract_users' => user_fields ,
'abstract_comments' => comment_fields ,
'abstract_super_users' => super_user_fields)
end
should 'reflect type' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect, DataMapper::Property::Serial
model.properties.map(&:name).map(&:to_s).sort.should.eq ['id']
model.send :reflect, Integer
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'user_id']
end
should 'reflect multiple' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_users'
model.send :reflect, :login, DataMapper::Property::Serial
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'login']
end
should 'reflect regexp' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect, /id$/
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'user_id']
end
should 'raise ArgumentError when giving invalid argument' do
lambda{
User.send :reflect, 29
}.should.raise ArgumentError
end
should 'allow empty string' do
Comment.new(:title => '').save.should.eq true
end
should 'auto_genclasses' do
scope = new_scope
@dm.auto_genclass!(:scope => scope).map(&:to_s).sort.should.eq \
["#{scope}::AbstractComment",
"#{scope}::AbstractSuperUser",
"#{scope}::AbstractUser"]
comment = scope.const_get('AbstractComment')
sort_fields(comment.fields).should.eq comment_fields
test_create_comment
comment.first.title.should.eq 'XD'
comment.create(:title => 'orz', :body => 'dm-reflect')
comment.last.body.should.eq 'dm-reflect'
end
should 'auto_genclass' do
scope = new_scope
@dm.auto_genclass!(:scope => scope,
:storages => 'abstract_users').map(&:to_s).should.eq \
["#{scope}::AbstractUser"]
user = scope.const_get('AbstractUser')
sort_fields(user.fields).should.eq user_fields
now = test_create_user
user.first.created_at.asctime.should.eq now.asctime
user.create(:login => 'godfat')
user.last.login.should.eq 'godfat'
end
should 'auto_genclass with regexp' do
scope = new_scope
@dm.auto_genclass!(:scope => scope,
:storages => /_users$/).map(&:to_s).sort.should.eq \
["#{scope}::AbstractSuperUser", "#{scope}::AbstractUser"]
user = scope.const_get('AbstractSuperUser')
sort_fields(user.fields).should.eq sort_fields(SuperUser.fields)
end
should 'reflect return value' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
mapped = model.send :reflect, /.*/
mapped.map(&:object_id).sort.should.eq \
model.properties.map(&:object_id).sort
end
end
module Kernel
def eq? rhs
self == rhs
end
def require_adapter adapter
require "dm-#{adapter}-adapter"
rescue LoadError
puts "skip #{adapter} test since it's not installed"
end
end
diff --git a/test/test_mysql.rb b/test/test_mysql.rb
index 7c08f74..d8a1dfa 100644
--- a/test/test_mysql.rb
+++ b/test/test_mysql.rb
@@ -1,23 +1,52 @@
require 'dm-is-reflective/test'
require_adapter 'mysql'
describe 'mysql' do
if ENV['TRAVIS']
def setup_data_mapper
DataMapper.setup(:default, :adapter => 'mysql' ,
:username => 'root' ,
:host => 'localhost' ,
:database => 'myapp_test')
end
else
def setup_data_mapper
DataMapper.setup(:default, :adapter => 'mysql' ,
:username => 'dm_is_reflective',
:password => 'godfat' ,
:host => 'localhost' ,
:database => 'dm_is_reflective')
end
end
+
+ def user_fields
+ @user_fields ||=
+ [[:created_at, DateTime, AttrCommon],
+ [:id, DataMapper::Property::Serial,
+ {:unique_index => :abstract_users_pkey}.merge(AttrCommonPK)],
+ [:login, String, {:length => 70}.merge(AttrCommon)],
+ [:sig, DataMapper::Property::Text, AttrText]]
+ end
+
+ def comment_fields
+ @comment_fields ||=
+ [[:body, DataMapper::Property::Text, AttrText],
+ [:id, DataMapper::Property::Serial,
+ {:unique_index => :abstract_comments_pkey}.merge(AttrCommonPK)],
+ [:title, String, {:length => 50, :default => 'default title',
+ :allow_nil => false}],
+ [:user_id, Integer,
+ {:unique_index => :index_abstract_comments_user}.merge(AttrCommon)]]
+ end
+
+ # there's differences between adapters
+ def super_user_fields
+ @super_user_fields ||=
+ [[:bool, Integer, AttrCommon],
+ [:id, DataMapper::Property::Serial,
+ {:unique_index => :abstract_super_users_pkey}.merge(AttrCommonPK)]]
+ end
+
behaves_like :reflective
end if defined?(DataMapper::Adapters::MysqlAdapter)
diff --git a/test/test_postgres.rb b/test/test_postgres.rb
index 479fc61..b7e4aa0 100644
--- a/test/test_postgres.rb
+++ b/test/test_postgres.rb
@@ -1,23 +1,52 @@
require 'dm-is-reflective/test'
require_adapter 'postgres'
describe 'postgres' do
if ENV['TRAVIS']
def setup_data_mapper
DataMapper.setup(:default, :adapter => 'postgres' ,
:username => 'postgres' ,
:host => 'localhost' ,
:database => 'myapp_test')
end
else
def setup_data_mapper
DataMapper.setup(:default, :adapter => 'postgres' ,
:username => 'dm_is_reflective',
:password => 'godfat' ,
:host => 'localhost' ,
:database => 'dm_is_reflective')
end
end
+
+ def user_fields
+ @user_fields ||=
+ [[:created_at, DateTime, AttrCommon],
+ [:id, DataMapper::Property::Serial,
+ {:unique_index => :abstract_users_pkey}.merge(AttrCommonPK)],
+ [:login, String, {:length => 70}.merge(AttrCommon)],
+ [:sig, DataMapper::Property::Text, AttrText]]
+ end
+
+ def comment_fields
+ @comment_fields ||=
+ [[:body, DataMapper::Property::Text, AttrText],
+ [:id, DataMapper::Property::Serial,
+ {:unique_index => :abstract_comments_pkey}.merge(AttrCommonPK)],
+ [:title, String, {:length => 50, :default => 'default title',
+ :allow_nil => false}],
+ [:user_id, Integer,
+ {:unique_index => :index_abstract_comments_user}.merge(AttrCommon)]]
+ end
+
+ # there's differences between adapters
+ def super_user_fields
+ @super_user_fields ||=
+ [[:bool, DataMapper::Property::Boolean, AttrCommon],
+ [:id, DataMapper::Property::Serial,
+ {:unique_index => :abstract_super_users_pkey}.merge(AttrCommonPK)]]
+ end
+
behaves_like :reflective
end if defined?(DataMapper::Adapters::PostgresAdapter)
diff --git a/test/test_sqlite.rb b/test/test_sqlite.rb
index ffd3e84..6d50b34 100644
--- a/test/test_sqlite.rb
+++ b/test/test_sqlite.rb
@@ -1,10 +1,36 @@
require 'dm-is-reflective/test'
require_adapter 'sqlite'
describe 'sqlite' do
def setup_data_mapper
DataMapper.setup(:default, :adapter => 'sqlite', :database => ':memory:')
end
+
+ def user_fields
+ @user_fields ||=
+ [[:created_at, DateTime, AttrCommon],
+ [:id, DataMapper::Property::Serial, AttrCommonPK],
+ [:login, String, {:length => 70}.merge(AttrCommon)],
+ [:sig, DataMapper::Property::Text, AttrText]]
+ end
+
+ def comment_fields
+ @comment_fields ||=
+ [[:body, DataMapper::Property::Text, AttrText],
+ [:id, DataMapper::Property::Serial, AttrCommonPK],
+ [:title, String, {:length => 50, :default => 'default title',
+ :allow_nil => false}],
+ [:user_id, Integer,
+ {:index => :index_abstract_comments_user}.merge(AttrCommon)]]
+ end
+
+ # there's differences between adapters
+ def super_user_fields
+ @super_user_fields ||=
+ [[:bool, DataMapper::Property::Boolean, AttrCommon],
+ [:id, DataMapper::Property::Serial, AttrCommonPK]]
+ end
+
behaves_like :reflective
end if defined?(DataMapper::Adapters::SqliteAdapter)
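
With this commit the Postgres adapter joins `pg_class`/`pg_index` in a subquery, so reflected attributes can carry `:index` or `:unique_index` alongside `:key` and `:allow_nil`, which is why the adapter-specific fixtures above now expect options like `:unique_index => :abstract_users_pkey`. A rough way to observe that through the public `fields` API, assuming a local Postgres database prepared as in `test/setup_db.sh` (the credentials are the throwaway test ones):

``` ruby
require 'dm-is-reflective'

dm = DataMapper.setup(:default, :adapter  => 'postgres',
                                :username => 'dm_is_reflective',
                                :password => 'godfat',
                                :host     => 'localhost',
                                :database => 'dm_is_reflective')

# Each entry is [name, type, attrs]; indexed columns should now also show
# :index or :unique_index in attrs, e.g.
#   [:id, DataMapper::Property::Serial,
#    {:unique_index => :abstract_users_pkey, :serial => true, :key => true, :allow_nil => false}]
dm.fields('abstract_users').each { |field| p field }
```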
|
godfat/dm-is-reflective
|
998a5688887dba59c11a53481f96c10bbb48f203
|
postgres_adapter.rb: use subquery to reduce query number
|
diff --git a/lib/dm-is-reflective/adapters/postgres_adapter.rb b/lib/dm-is-reflective/adapters/postgres_adapter.rb
index 61d5224..ddba375 100644
--- a/lib/dm-is-reflective/adapters/postgres_adapter.rb
+++ b/lib/dm-is-reflective/adapters/postgres_adapter.rb
@@ -1,74 +1,72 @@
module DmIsReflective::PostgresAdapter
include DataMapper
def storages
sql = <<-SQL
SELECT table_name FROM "information_schema"."tables"
WHERE table_schema = current_schema()
SQL
select(Ext::String.compress_lines(sql))
end
private
def reflective_query_storage storage
- sql = <<-SQL
+ sql_key = <<-SQL
SELECT column_name FROM "information_schema"."key_column_usage"
WHERE table_schema = current_schema() AND table_name = ?
SQL
- keys = select(Ext::String.compress_lines(sql), storage)
-
sql = <<-SQL
SELECT column_name, column_default, is_nullable,
character_maximum_length, udt_name,
- '#{keys.first}' AS key
+ (#{sql_key}) AS key
FROM "information_schema"."columns"
WHERE table_schema = current_schema() AND table_name = ?
SQL
- select(Ext::String.compress_lines(sql), storage)
+ select(Ext::String.compress_lines(sql), storage, storage)
end
def reflective_field_name field
field.column_name
end
def reflective_primitive field
field.udt_name
end
def reflective_attributes field, attrs = {}
# strip data type
field.column_default.gsub!(/(.*?)::[\w\s]*/, '\1') if
field.column_default
attrs[:serial] = true if field.column_default =~ /nextval\('\w+'\)/
attrs[:key] = true if field.key == field.column_name
attrs[:allow_nil] = field.is_nullable == 'YES'
# strip string quotation
attrs[:default] = field.column_default.gsub(/^'(.*?)'$/, '\1') if
field.column_default && !attrs[:serial]
if field.character_maximum_length
attrs[:length] = field.character_maximum_length
elsif field.udt_name.upcase == 'TEXT'
attrs[:length] = Property::Text.length
end
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when /^INT\d+$/ ; Integer
when /^FLOAT\d+$/ ; Float
when 'VARCHAR', 'BPCHAR'; String
when 'TIMESTAMP', 'DATE'; DateTime
when 'TEXT' ; Property::Text
when 'BOOL' ; Property::Boolean
when 'NUMERIC' ; Property::Decimal
end || super(primitive)
end
end
|
godfat/dm-is-reflective
|
d8d3f71449cdf9fd4797ae627066b9262f55f821
|
postgres_adapter.rb: this way we don't have to instance_eval
|
diff --git a/lib/dm-is-reflective/adapters/postgres_adapter.rb b/lib/dm-is-reflective/adapters/postgres_adapter.rb
index 005b354..61d5224 100644
--- a/lib/dm-is-reflective/adapters/postgres_adapter.rb
+++ b/lib/dm-is-reflective/adapters/postgres_adapter.rb
@@ -1,80 +1,74 @@
module DmIsReflective::PostgresAdapter
include DataMapper
def storages
sql = <<-SQL
SELECT table_name FROM "information_schema"."tables"
WHERE table_schema = current_schema()
SQL
select(Ext::String.compress_lines(sql))
end
private
def reflective_query_storage storage
sql = <<-SQL
SELECT column_name FROM "information_schema"."key_column_usage"
WHERE table_schema = current_schema() AND table_name = ?
SQL
- keys = select(Ext::String.compress_lines(sql), storage).to_set
+ keys = select(Ext::String.compress_lines(sql), storage)
sql = <<-SQL
SELECT column_name, column_default, is_nullable,
- character_maximum_length, udt_name
+ character_maximum_length, udt_name,
+ '#{keys.first}' AS key
FROM "information_schema"."columns"
WHERE table_schema = current_schema() AND table_name = ?
SQL
- select(Ext::String.compress_lines(sql), storage).map{ |struct|
- struct.instance_eval <<-RUBY
- def key?
- #{keys.member?(struct.column_name)}
- end
- RUBY
- struct
- }
+ select(Ext::String.compress_lines(sql), storage)
end
def reflective_field_name field
field.column_name
end
def reflective_primitive field
field.udt_name
end
def reflective_attributes field, attrs = {}
# strip data type
field.column_default.gsub!(/(.*?)::[\w\s]*/, '\1') if
field.column_default
attrs[:serial] = true if field.column_default =~ /nextval\('\w+'\)/
- attrs[:key] = true if field.key?
+ attrs[:key] = true if field.key == field.column_name
attrs[:allow_nil] = field.is_nullable == 'YES'
# strip string quotation
attrs[:default] = field.column_default.gsub(/^'(.*?)'$/, '\1') if
field.column_default && !attrs[:serial]
if field.character_maximum_length
attrs[:length] = field.character_maximum_length
elsif field.udt_name.upcase == 'TEXT'
attrs[:length] = Property::Text.length
end
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when /^INT\d+$/ ; Integer
when /^FLOAT\d+$/ ; Float
when 'VARCHAR', 'BPCHAR'; String
when 'TIMESTAMP', 'DATE'; DateTime
when 'TEXT' ; Property::Text
when 'BOOL' ; Property::Boolean
when 'NUMERIC' ; Property::Decimal
end || super(primitive)
end
end
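
The behavioural change here is only in how the key flag reaches `reflective_attributes`: previously every row struct got a singleton `key?` method grafted on with `instance_eval`, now the key column name is selected as one more column and compared per row. An illustration of the two styles with a stand-in Struct (`Field` below is not the real DataObjects row class):

``` ruby
Field = Struct.new(:column_name, :key)
row   = Field.new('id', 'id')

# old style: define a singleton method on every returned row
row.instance_eval 'def key?; true; end'
row.key?                      # => true

# new style: no per-row metaprogramming, just compare the extra column
row.key == row.column_name    # => true
```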
|
godfat/dm-is-reflective
|
87eef6c36ea0eef924810aed0d62ec393cebb534
|
CHANGES.md: prepare release!
|
diff --git a/CHANGES.md b/CHANGES.md
index 08ad50b..481a11e 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,232 +1,240 @@
# CHANGES
+## dm-is-reflective 1.2.0, 2013-05-14
+
+* We got a bunch of internal renaming.
+* Added DataMapper::Resource#to_source.
+* Added an executable which generates sources for you.
+* Fixed MySQL issues with setting up with a hash rather than URI.
+* Fixed SQLite issues without loading dm-migrations.
+
## dm-is-reflective 1.1.0, 2013-01-11
* The need for dm-migrations is now removed.
* Added a few more datatypes. Thanks @onewheelskyward
* Tested against dm-core 1.2.0.
## dm-is-reflective 1.0.1, 2012-05-16
* allow_nil is more close to db's semantics, not required. Thanks miaout17.
`:allow_nil` allows empty value, but `:required` does not. So here we
always use `:allow_nil` to match db's semantics.
## dm-is-reflective 1.0.0, 2011-06-16
* updated against dm-core 1.1.0
## dm-is-reflective 0.9.0, 2010-07-05
* adapted to dm-core 1.0.0
* renamed AbstractAdapter to DataObjectsAdapter
## dm-is-reflective 0.8.0, 2009-09-16
* require dm-core 0.10.0 and above now
* Serial would map to Serial not Integer now
* no more type_map now
* no more Extlib::Hook to load adapter
## dm-mapping 0.7.1, never released as a gem
don't open module Migration and edit it, instead, use include, more see:
* added DataMapper::Mapping::AbstractAdapter
* added DataMapper::Mapping::Sqlite3Adapter
* added DataMapper::Mapping::MysqlAdapter
* added DataMapper::Mapping::PostgresAdapter
* each adapter was included in related adapter in DataMapper.
* Model#fields now accept repository name as argument
there's differences between adapters,
Sqlite3 added default => 'UL' in Boolean type,
Mysql can't tell whether it's a Boolean or Tinyint,
and Postgres is fine. see test/abstract.rb: super_user_fields for detail.
## dm-mapping 0.7.0, 2008-09-01
* feature added
- added postgres support.
* bug fixed
- fixed key mapping in mysql adapter. PRI and MUL are all keys.
- use DM::Text.size as default text size in sqlite3.
## dm-mapping 0.6.2, 2008-08-30
* mapping more data types for mysql.
* don't map TINYINT to TrueClass with mysql, skip it in type_map.
## dm-mapping 0.6.1, 2008-08-22
* gem 'dm-core', '>=0.9.3' instead of '=0.9.3'
## dm-mapping 0.6.0, 2008-08-16
* mapping returns an array of properties indicating fields it mapped.
* performance boosted by refactored mapping implementation.
* changed the way using auto_genclass!, now accepts args like mapping!
* changed fields to return field name with Symbol instead of String.
this would make it be more consistent with DataMapper.
* storage names remain String.
* added more mysql data type to map
* use Extlib::Hook to setup dm-mapping instead of stupid alias_method.
* removed ensure_require in model. always setup DataMapper before define model.
## dm-mapping 0.5.0, 2008-08-14
* feature added
- added mysql support.
- reflect size 65535 in TEXT for sqlite3.
* bug fixed
- reflect VARCHAR(size) instead of default size in sqlite3.
* misc
- renamed sqlite3adapter to sqlite3_adapter.
## dm-mapping 0.4.1, 2008-08-14
* removed type hack, replaced with rejecting special type to lookup.
## dm-mapping 0.4.0, 2008-08-04
* added Migration#auto_genclass!.
* updated README.
* added more rdoc.
## dm-mapping 0.3.0, 2008-08-04
* added support of mapping Integer, DateTime, etc.
* renamed some internals.
* changed the way requiring adapter. no more setup first.
* added Migration#storages_and_fields
* added mapping :serial => true for primary key.
* added mapping :default, and :nullable.
* added support of mapping name. (through passing symbol or string)
* added support of multiple arguments.
* removed Mapping::All, use /.*/ instead.
## dm-mapping 0.2.1, 2008-08-03
* fixed a bug that type map should lookup for parent.
* fixed a bug that sql type could be lower case.
fixed by calling upcase.
## dm-mapping 0.2.0, 2008-08-02
* added Sqlite3Adapter::Migration#fields
* added DataMapper::Model#mapping
* added DataMapper::Model#fields
* added DataMapper::TypeMap#find_primitive for reversed lookup.
mapping SQL type back to Ruby type.
* added corresponded test.
## dm-mapping 0.1.0, 2008-07-27
* birthday!
* added DataMapper.repository.storages for sqlite3.
* please refer:
<http://groups.google.com/group/datamapper/browse_thread/thread/b9ca41120c5c9389>
original message:
from Lin Jen-Shin
to DataMapper
cc godfat
date Sun, Jul 27, 2008 at 5:40 PM
subject Manipulate an existing database.
mailed-by gmail.com
Greetings,
DataMapper looks very promising for me, so I am thinking of
using it in the near future. I hate separate my domain objects into
two parts in Rails, writing migration and switching to ActiveRecord,
vice versa, is very annoying to me.
But there's a very convenient feature to me in ActiveRecord,
that is ActiveRecord automatically mapping all fields in a table.
It makes me easily control an existing database without any domain object.
For example,
require 'active_record'
ActiveRecord::Base.establish_connection(
:adapter => 'sqlite3',
:database => 'db/development.sqlite3'
)
class User < ActiveRecord::Base
end
User.find 1
=> #<User id: 1, account: "admin", created_at: "2008-05-18 20:08:37", etc.>
Some people would use database admin such as phpMyAdmin to
accomplish this kind of task, but I prefer anything in Ruby,
calling Ruby function, manipulating data without SQL and
any domain object. (i.e. I didn't have to load up entire environment.)
In DataMapper, I didn't find an easy way to accomplish this.
I am sorry if there's one but I didn't find it, please point out,
many thanks. In short, I would like to do this in DataMapper:
class User
include DataMapper::Resource
mapping :account, :created_at
end
or
class User
include DataMapper::Resource
mapping All
end
class User
include DataMapper::ResourceAll
end
or
class User
include DataMapper::Resource
mapping *storage_fields
end
The above User.storage_fields should return an Array,
telling all the fields in the table, e.g. [:account, :created_at, :etc]
or a Hash includes data type, e.g. {:account => String,
:created_at => DateTime}
then mapping *storage_fields should change to:
mapping *storage_fields.each_key.to_a
If it's possible, a feature returning the database schema as well:
DataMapper.repository.storages
# => [:users, :posts, :etc]
DataMapper.repository.storages_and_fields
# => {:users => {:account => String},
:posts => {:title => String, :content => Text}}
or returning DataObject::Field, DataObject::Storage, etc.
DataMapper.repository.storage
# => [#<DataObject::Storage @name='users' @fields=
[#<DataObject::Field @name='account' @type=String>]>]
If you feel this kind of feature is indeed needed or not bad for
adding it, I could try to provide a patch for it. Though I didn't
read the source code deeply, not knowing whether it's easy or not.
sincerely,
|
godfat/dm-is-reflective
|
3fd03677239ab1f9976317a31fc353e3f9549b54
|
i am too tired to write doc, but some words to the executable
|
diff --git a/README.md b/README.md
index 8ba912c..e5ed9bd 100644
--- a/README.md
+++ b/README.md
@@ -1,143 +1,157 @@
# dm-is-reflective [Build Status](http://travis-ci.org/godfat/dm-is-reflective)
by Lin Jen-Shin ([godfat](http://godfat.org))
## LINKS:
* [github](https://github.com/godfat/dm-is-reflective)
* [rubygems](https://rubygems.org/gems/dm-is-reflective)
* [rdoc](http://rdoc.info/github/godfat/dm-is-reflective)
## DESCRIPTION:
DataMapper plugin that helps you manipulate an existing database.
It creates mappings between existing columns and model's properties.
## REQUIREMENTS:
* dm-core
* choose one: dm-sqlite-adapter, dm-postgres-adapter, dm-mysql-adapter
## INSTALLATION:
``` shell
gem install dm-is-reflective
```
``` ruby
gem 'dm-is-reflective',
:git => 'git://github.com/godfat/dm-is-reflective.git',
:submodules => true
```
## SYNOPSIS:
+### Generating sources from a DATABASE_URI
+
+We also have an executable to generate sources for you.
+
+```
+Usage: dm-is-reflective DATABASE_URI
+ -s, --scope SCOPE SCOPE where the models should go (default: Object)
+ -o, --output DIRECTORY DIRECTORY where the output goes (default: dm-is-reflective)
+ -h, --help Print this message
+ -v, --version Print the version
+```
+
+### API
+
``` ruby
require 'dm-is-reflective' # this would require 'dm-core'
dm = DataMapper.setup :default, 'sqlite::memory:'
class User
include DataMapper::Resource
is :reflective
# map all, returning an array of properties indicating fields it mapped
reflect /.*/ # e.g. => [#<Property:#<Class:0x18f89b8>:id>,
# #<Property:#<Class:0x18f89b8>:title>,
# #<Property:#<Class:0x18f89b8>:body>,
# #<Property:#<Class:0x18f89b8>:user_id>]
# map all (with no argument at all)
reflect
# mapping for field name ended with _at, and started with salt_
reflect /_at$/, /^salt_/
# mapping id and email
reflect :id, :email
# mapping all fields with type String, and id
reflect String, :id
# mapping login, and all fields with type Integer
reflect :login, Integer
end
# there's no guarantee of the order in storages array
dm.storages
# => ['users']
# there's no guarantee of the order in fields array
User.fields
# => [[:created_at, DateTime, {:required => false}],
[:email, String, {:required => false, :length => 255,
:default => '[email protected]'}],
[:id, Serial, {:required => true, :serial => true,
:key => true}],
[:salt_first, String, {:required => false, :length => 50}],
[:salt_second, String, {:required => false, :length => 50}]]
dm.fields('users').sort_by{ |field| field.first.to_s } ==
User.fields.sort_by{ |field| field.first.to_s }
# => true
dm.storages_and_fields
# => {'users' => [[:id, Serial, {:required => true,
:serial => true,
:key => true}],
[:email, String, {:required => false,
:default =>
'[email protected]'}],
[:created_at, DateTime, {:required => false}],
[:salt_first, String, {:required => false,
:length => 50}],
[:salt_second, String, {:required => false,
:length => 50}]]}
# there's no guarantee of the order in returned array
dm.auto_genclass!
# => [DataMapper::Is::Reflective::User,
DataMapper::Is::Reflective::SchemaInfo,
DataMapper::Is::Reflective::Session]
# you can change the scope of generated models:
dm.auto_genclass! :scope => Object
# => [User, SchemaInfo, Session]
# you can generate classes for tables you specified only:
dm.auto_genclass! :scope => Object, :storages => /^phpbb_/
# => [PhpbbUser, PhpbbPost, PhpbbConfig]
# you can generate classes with String too:
dm.auto_genclass! :storages => ['users', 'config'], :scope => Object
# => [User, Config]
# you can generate a class only:
dm.auto_genclass! :storages => 'users'
# => [DataMapper::Is::Reflective::User]
# you can also generate the source from models:
puts User.to_source
```
## CONTRIBUTORS:
* Andrew Kreps (@onewheelskyward)
* Lin Jen-Shin (@godfat)
## LICENSE:
Apache License 2.0
Copyright (c) 2008-2013, Lin Jen-Shin (godfat)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
|
godfat/dm-is-reflective
|
9b3c6c1fed3d22d220d781e28b6903ee1e4eaada
|
now an executable dm-is-reflective is added. close #2
|
diff --git a/bin/dm-is-reflective b/bin/dm-is-reflective
new file mode 100755
index 0000000..3ba4452
--- /dev/null
+++ b/bin/dm-is-reflective
@@ -0,0 +1,4 @@
+#!/usr/bin/env ruby
+
+require 'dm-is-reflective/runner'
+DmIsReflective::Runner.run
diff --git a/dm-is-reflective.gemspec b/dm-is-reflective.gemspec
index aa9da3a..a8c34e2 100644
--- a/dm-is-reflective.gemspec
+++ b/dm-is-reflective.gemspec
@@ -1,76 +1,78 @@
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = "dm-is-reflective"
- s.version = "1.1.0"
+ s.version = "1.2.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Lin Jen-Shin (godfat)"]
- s.date = "2013-05-13"
+ s.date = "2013-05-14"
s.description = "DataMapper plugin that helps you manipulate an existing database.\nIt creates mappings between existing columns and model's properties."
s.email = ["godfat (XD) godfat.org"]
+ s.executables = ["dm-is-reflective"]
s.files = [
".gitignore",
".gitmodules",
".travis.yml",
"CHANGES.md",
"Gemfile",
"LICENSE",
"README.md",
"Rakefile",
"TODO.md",
- "aaa/dm-is-reflective",
+ "bin/dm-is-reflective",
"dm-is-reflective.gemspec",
"lib/dm-is-reflective.rb",
"lib/dm-is-reflective/adapters/data_objects_adapter.rb",
"lib/dm-is-reflective/adapters/mysql_adapter.rb",
"lib/dm-is-reflective/adapters/postgres_adapter.rb",
"lib/dm-is-reflective/adapters/sqlite_adapter.rb",
"lib/dm-is-reflective/reflective.rb",
+ "lib/dm-is-reflective/runner.rb",
"lib/dm-is-reflective/test.rb",
"lib/dm-is-reflective/version.rb",
"task/.gitignore",
"task/gemgem.rb",
"test/setup_db.sh",
"test/test_mysql.rb",
"test/test_postgres.rb",
"test/test_sqlite.rb",
"test/test_to_source.rb"]
s.homepage = "https://github.com/godfat/dm-is-reflective"
s.licenses = ["Apache License 2.0"]
s.require_paths = ["lib"]
s.rubygems_version = "2.0.3"
s.summary = "DataMapper plugin that helps you manipulate an existing database."
s.test_files = [
"test/test_mysql.rb",
"test/test_postgres.rb",
"test/test_sqlite.rb",
"test/test_to_source.rb"]
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<dm-core>, [">= 0"])
s.add_runtime_dependency(%q<dm-do-adapter>, [">= 0"])
s.add_development_dependency(%q<dm-migrations>, [">= 0"])
s.add_development_dependency(%q<dm-sqlite-adapter>, [">= 0"])
s.add_development_dependency(%q<dm-mysql-adapter>, [">= 0"])
s.add_development_dependency(%q<dm-postgres-adapter>, [">= 0"])
else
s.add_dependency(%q<dm-core>, [">= 0"])
s.add_dependency(%q<dm-do-adapter>, [">= 0"])
s.add_dependency(%q<dm-migrations>, [">= 0"])
s.add_dependency(%q<dm-sqlite-adapter>, [">= 0"])
s.add_dependency(%q<dm-mysql-adapter>, [">= 0"])
s.add_dependency(%q<dm-postgres-adapter>, [">= 0"])
end
else
s.add_dependency(%q<dm-core>, [">= 0"])
s.add_dependency(%q<dm-do-adapter>, [">= 0"])
s.add_dependency(%q<dm-migrations>, [">= 0"])
s.add_dependency(%q<dm-sqlite-adapter>, [">= 0"])
s.add_dependency(%q<dm-mysql-adapter>, [">= 0"])
s.add_dependency(%q<dm-postgres-adapter>, [">= 0"])
end
end
diff --git a/lib/dm-is-reflective/runner.rb b/lib/dm-is-reflective/runner.rb
new file mode 100644
index 0000000..ca00a07
--- /dev/null
+++ b/lib/dm-is-reflective/runner.rb
@@ -0,0 +1,87 @@
+
+require 'dm-is-reflective'
+
+module DmIsReflective::Runner
+ module_function
+ def options
+ @options ||=
+ [['-s, --scope SCOPE' ,
+ 'SCOPE where the models should go (default: Object)' ],
+ ['-o, --output DIRECTORY' ,
+ 'DIRECTORY where the output goes (default: dm-is-reflective)'],
+ ['-h, --help' , 'Print this message' ],
+ ['-v, --version', 'Print the version' ]]
+ end
+
+ def run argv=ARGV
+ puts(help) and exit if argv.empty?
+ generate(*parse(argv))
+ end
+
+ def generate uri, scope, output
+ require 'fileutils'
+ FileUtils.mkdir_p(output)
+ DataMapper.setup(:default, uri).auto_genclass!(:scope => scope).
+ each do |model|
+ path = "#{output}/#{model.name.gsub(/::/, '').
+ gsub(/([A-Z])/, '_\1').
+ downcase[1..-1]}.rb"
+ File.open(path, 'w') do |file|
+ file.puts model.to_source
+ end
+ end
+ end
+
+ def parse argv
+ uri, scope, output = ['sqlite::memory:', Object, 'dm-is-reflective']
+ until argv.empty?
+ case arg = argv.shift
+ when /^-s=?(.+)?/, /^--scope=?(.+)?/
+ name = $1 || argv.shift
+ scope = if Object.const_defined?(name)
+ Object.const_get(name)
+ else
+ mkconst_p(name)
+ end
+
+ when /^-o=?(.+)?/, /^--output=?(.+)?/
+ output = $1 || argv.shift
+
+ when /^-h/, '--help'
+ puts(help)
+ exit
+
+ when /^-v/, '--version'
+ puts(DmIsReflective::VERSION)
+ exit
+
+ else
+ uri = arg
+ end
+ end
+ [uri, scope, output]
+ end
+
+ def mkconst_p name
+ name.split('::').inject(Object) do |ret, mod|
+ if Object.const_defined?(mod)
+ ret.const_get(mod)
+ else
+ ret.const_set(mod, Module.new)
+ end
+ end
+ end
+
+ def help
+ maxn = options.transpose.first.map(&:size).max
+ maxd = options.transpose.last .map(&:size).max
+ "Usage: dm-is-reflective DATABASE_URI\n" +
+ options.map{ |(name, desc)|
+ if desc.empty?
+ name
+ else
+ sprintf(" %-*s %-*s", maxn, name, maxd, desc)
+ end
+ }.join("\n")
+ end
+end
diff --git a/lib/dm-is-reflective/version.rb b/lib/dm-is-reflective/version.rb
index 9c91933..f02522a 100644
--- a/lib/dm-is-reflective/version.rb
+++ b/lib/dm-is-reflective/version.rb
@@ -1,4 +1,4 @@
module DmIsReflective
- VERSION = '1.1.0'
+ VERSION = '1.2.0'
end
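
`Runner.parse` walks argv by hand rather than using optparse, treating anything that is not a flag as the DATABASE_URI, so the executable can also be driven programmatically. A sketch with an invented database path, scope and output directory:

``` ruby
require 'dm-is-reflective/runner'

# Equivalent to: dm-is-reflective sqlite:///path/to/app.db -s MyApp -o app/models
DmIsReflective::Runner.run(
  ['sqlite:///path/to/app.db', '--scope', 'MyApp', '--output', 'app/models'])

# parse    -> ['sqlite:///path/to/app.db', MyApp, 'app/models']
# generate -> DataMapper.setup(:default, uri).auto_genclass!(:scope => MyApp),
#             then one app/models/<model>.rb is written via Model#to_source
```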
|
godfat/dm-is-reflective
|
edeab507abe1b0047f92b7259480f4fc6896f674
|
autoload :VERSION
|
diff --git a/lib/dm-is-reflective/reflective.rb b/lib/dm-is-reflective/reflective.rb
index 7b6fa39..bec1fa3 100644
--- a/lib/dm-is-reflective/reflective.rb
+++ b/lib/dm-is-reflective/reflective.rb
@@ -1,90 +1,92 @@
module DmIsReflective
+ autoload :VERSION, 'dm-is-reflective/version'
+
include DataMapper
def is_reflective
extend ClassMethod
end
module ClassMethod
# it simply calls Migration#fields(self.storage_name)
# e.g.
# DataMapper.repository.adapter.fields storage_name
def fields repo = default_repository_name
DataMapper.repository(repo).adapter.fields(storage_name(repo))
end
# it automatically creates reflection from storage fields to properties.
# i.e. you don't have to specify any property if you are connecting
# to an existing database.
# you can pass it Regexp to map any field it matched, or just
# the field name in Symbol or String, or a Class telling it
# map any field which type equals to the Class.
# returned value is an array of properties indicating fields it mapped
# e.g.
# class User
# include DataMapper::Resource
# # reflect all
# reflect /.*/ # e.g. => [#<Property:#<Class:0x18f89b8>:id>,
# # #<Property:#<Class:0x18f89b8>:title>,
# # #<Property:#<Class:0x18f89b8>:body>,
# # #<Property:#<Class:0x18f89b8>:user_id>]
#
# # reflect all (with no argument at all)
# reflect
#
# # reflect for field name ended with _at, and started with salt_
# reflect /_at$/, /^salt_/
#
# # reflect id and email
# reflect :id, :email
#
# # reflect all fields with type String, and id
# reflect String, :id
#
# # reflect login, and all fields with type Integer
# reflect :login, Integer
# end
def reflect *targets
targets << /.*/ if targets.empty?
result = fields.map{ |field|
name, type, attrs = field
reflected = targets.each{ |target|
case target
when Regexp;
break name if name.to_s =~ target
when Symbol, String;
break name if name == target.to_sym
when Class;
break name if type == target
else
raise ArgumentError.new("invalid argument: #{target.inspect}")
end
}
property(reflected, type, attrs) if reflected.kind_of?(Symbol)
}.compact
finalize if respond_to?(:finalize)
result
end
def to_source scope=nil
<<-RUBY
class #{scope}::#{name} < #{superclass}
include DataMapper::Resource
#{
properties.map do |prop|
"property :#{prop.name}, #{prop.class.name}, #{prop.options}"
end.join("\n")
}
end
RUBY
end
end # of ClassMethod
end # of DmIsReflective
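
`reflect` and `to_source` compose: `reflect` pulls properties out of an existing table, and `to_source` renders them back as a plain property-based class body. A self-contained sketch against an in-memory SQLite database (`Post`/`ReflectedPost` are invented names; dm-sqlite-adapter and dm-migrations assumed installed):

``` ruby
require 'dm-is-reflective'
require 'dm-migrations'

DataMapper.setup(:default, :adapter => 'sqlite', :database => ':memory:')

class Post                          # stands in for a pre-existing table
  include DataMapper::Resource
  property :id,    Serial
  property :title, String, :length => 50
end
DataMapper.finalize
Post.auto_migrate!

class ReflectedPost
  include DataMapper::Resource
  is :reflective
  storage_names[:default] = 'posts' # point at the existing table
  reflect                           # maps :id and :title back as properties
end

puts ReflectedPost.to_source
# => class ::ReflectedPost < Object ... property :id, ... property :title, ...
```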
|
godfat/dm-is-reflective
|
706ee92f8da35b7642b43729edc41508be3fb7c4
|
there's no longer a Is::Reflective
|
diff --git a/lib/dm-is-reflective/adapters/data_objects_adapter.rb b/lib/dm-is-reflective/adapters/data_objects_adapter.rb
index a52806f..c83def9 100644
--- a/lib/dm-is-reflective/adapters/data_objects_adapter.rb
+++ b/lib/dm-is-reflective/adapters/data_objects_adapter.rb
@@ -1,136 +1,136 @@
module DmIsReflective::DataObjectsAdapter
include DataMapper
# returns all tables' name in the repository.
# e.g.
# ['comments', 'users']
def storages
reflective_auto_load_adapter_extension
    storages # call the overridden method
end
# returns all fields, with format [[name, type, attrs]]
# e.g.
# [[:created_at, DateTime, {:required => false}],
# [:email, String, {:required => false, :size => 255,
# :default => '[email protected]'}],
# [:id, DataMapper::Property::Serial, {:required => true, :serial => true,
# :key => true}],
# [:salt_first, String, {:required => false, :size => 50}],
# [:salt_second, String, {:required => false, :size => 50}]]
def fields storage
reflective_query_storage(storage).map{ |field|
attr = reflective_attributes(field)
type = reflective_lookup_primitive(reflective_primitive(field))
pick = if attr[:serial] && type == Integer
Property::Serial
else
type
end
[reflective_field_name(field).to_sym, pick, attr]
}
end
# returns a hash with storage names in keys and
# corresponded fields in values. e.g.
# {'users' => [[:id, Integer, {:required => true,
# :serial => true,
# :key => true}],
# [:email, String, {:required => false,
# :default => '[email protected]'}],
# [:created_at, DateTime, {:required => false}],
# [:salt_first, String, {:required => false, :size => 50}],
# [:salt_second, String, {:required => false, :size => 50}]]}
# see AbstractAdapter#storages and AbstractAdapter#fields for detail
def storages_and_fields
storages.inject({}){ |result, storage|
result[storage] = fields(storage)
result
}
end
  # automatically generate model class(es) and reflect
# all fields with reflect /.*/ for you.
# e.g.
# dm.auto_genclass!
# # => [DataMapper::Is::Reflective::User,
# # DataMapper::Is::Reflective::SchemaInfo,
# # DataMapper::Is::Reflective::Session]
#
# you can change the scope of generated models:
# e.g.
# dm.auto_genclass! :scope => Object
# # => [User, SchemaInfo, Session]
#
# you can generate classes for tables you specified only:
# e.g.
# dm.auto_genclass! :scope => Object, :storages => /^phpbb_/
# # => [PhpbbUser, PhpbbPost, PhpbbConfig]
#
# you can generate classes with String too:
# e.g.
# dm.auto_genclass! :storages => ['users', 'config'], :scope => Object
# # => [User, Config]
#
# you can generate a class only:
# e.g.
# dm.auto_genclass! :storages => 'users'
# # => [DataMapper::Is::Reflective::User]
def auto_genclass! opts = {}
- opts[:scope] ||= Is::Reflective
+ opts[:scope] ||= DmIsReflective
opts[:storages] ||= /.*/
opts[:storages] = [opts[:storages]].flatten
storages.map{ |storage|
mapped = opts[:storages].each{ |target|
case target
when Regexp;
break storage if storage =~ target
when Symbol, String;
break storage if storage == target.to_s
else
raise ArgumentError.new("invalid argument: #{target.inspect}")
end
}
reflective_genclass(mapped, opts[:scope]) if mapped.kind_of?(String)
}.compact
end
private
def reflective_query_storage storage
reflective_auto_load_adapter_extension
    reflective_query_storage(storage) # call the overridden method
end
def reflective_genclass storage, scope
model = Class.new
model.__send__(:include, Resource)
model.is(:reflective)
model.storage_names[:default] = storage
scope.const_set(Inflector.classify(storage), model)
model.__send__(:reflect, /.*/)
model.finalize if model.respond_to?(:finalize)
model
end
def reflective_lookup_primitive primitive
raise TypeError.new("#{primitive} not found for #{self.class}")
end
def reflective_auto_load_adapter_extension
# TODO: can we fix this shit in dm-mysql-adapter?
name = options[:adapter] || options['adapter']
# TODO: can we fix this adapter name in dm-sqlite-adapter?
adapter = name.sub(/\Asqlite3\Z/, 'sqlite')
require "dm-is-reflective/adapters/#{adapter}_adapter"
class_name = "#{Inflector.camelize(adapter)}Adapter"
Adapters.const_get(class_name).__send__(:include,
DmIsReflective.const_get(class_name))
end
end
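
Since `Is::Reflective` no longer exists, `auto_genclass!` now defaults its `:scope` to the `DmIsReflective` module itself. A sketch of where the generated classes land, assuming a repository that has already been set up and contains a `users` table:

``` ruby
require 'dm-is-reflective'

adapter = DataMapper.repository(:default).adapter

adapter.auto_genclass!(:storages => 'users')
# => [DmIsReflective::User]    (previously DataMapper::Is::Reflective::User)

adapter.auto_genclass!(:storages => 'users', :scope => Object)
# => [User]
```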
|
godfat/dm-is-reflective
|
fbd0469d09946f3a15835ef17210affd056dea7a
|
supports_serial? is defined in dm-migrations! :(
|
diff --git a/lib/dm-is-reflective/adapters/sqlite_adapter.rb b/lib/dm-is-reflective/adapters/sqlite_adapter.rb
index f593665..cb90f52 100644
--- a/lib/dm-is-reflective/adapters/sqlite_adapter.rb
+++ b/lib/dm-is-reflective/adapters/sqlite_adapter.rb
@@ -1,57 +1,57 @@
module DmIsReflective::SqliteAdapter
include DataMapper
def storages
sql = <<-SQL
SELECT name
FROM sqlite_master
WHERE type = 'table' AND NOT name = 'sqlite_sequence'
SQL
select(Ext::String.compress_lines(sql))
end
private
def reflective_query_storage storage
select('PRAGMA table_info(?)', storage)
end
def reflective_field_name field
field.name
end
def reflective_primitive field
field.type.gsub(/\(\d+\)/, '')
end
def reflective_attributes field, attrs = {}
if field.pk != 0
attrs[:key] = true
- attrs[:serial] = true if supports_serial?
+ attrs[:serial] = true
end
attrs[:allow_nil] = field.notnull == 0
attrs[:default] = field.dflt_value[1..-2] if field.dflt_value
if field.type.upcase == 'TEXT'
attrs[:length] = Property::Text.length
else
ergo = field.type.match(/\((\d+)\)/)
size = ergo && ergo[1].to_i
attrs[:length] = size if size
end
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when 'INTEGER' ; Integer
when 'REAL', 'NUMERIC'; Float
when 'VARCHAR' ; String
when 'TIMESTAMP' ; DateTime
when 'BOOLEAN' ; Property::Boolean
when 'TEXT' ; Property::Text
end || super(primitive)
end
end
|
godfat/dm-is-reflective
|
5c1a5629b809f5b067798b72466d058cb3ce13a7
|
avoid using addressable would be faster
|
diff --git a/lib/dm-is-reflective/adapters/data_objects_adapter.rb b/lib/dm-is-reflective/adapters/data_objects_adapter.rb
index 93c072e..a52806f 100644
--- a/lib/dm-is-reflective/adapters/data_objects_adapter.rb
+++ b/lib/dm-is-reflective/adapters/data_objects_adapter.rb
@@ -1,134 +1,136 @@
module DmIsReflective::DataObjectsAdapter
include DataMapper
# returns all tables' name in the repository.
# e.g.
# ['comments', 'users']
def storages
reflective_auto_load_adapter_extension
    storages # call the overridden method
end
# returns all fields, with format [[name, type, attrs]]
# e.g.
# [[:created_at, DateTime, {:required => false}],
# [:email, String, {:required => false, :size => 255,
# :default => '[email protected]'}],
# [:id, DataMapper::Property::Serial, {:required => true, :serial => true,
# :key => true}],
# [:salt_first, String, {:required => false, :size => 50}],
# [:salt_second, String, {:required => false, :size => 50}]]
def fields storage
reflective_query_storage(storage).map{ |field|
attr = reflective_attributes(field)
type = reflective_lookup_primitive(reflective_primitive(field))
pick = if attr[:serial] && type == Integer
Property::Serial
else
type
end
[reflective_field_name(field).to_sym, pick, attr]
}
end
# returns a hash with storage names in keys and
# corresponded fields in values. e.g.
# {'users' => [[:id, Integer, {:required => true,
# :serial => true,
# :key => true}],
# [:email, String, {:required => false,
# :default => '[email protected]'}],
# [:created_at, DateTime, {:required => false}],
# [:salt_first, String, {:required => false, :size => 50}],
# [:salt_second, String, {:required => false, :size => 50}]]}
# see AbstractAdapter#storages and AbstractAdapter#fields for detail
def storages_and_fields
storages.inject({}){ |result, storage|
result[storage] = fields(storage)
result
}
end
  # automatically generate model class(es) and reflect
# all fields with reflect /.*/ for you.
# e.g.
# dm.auto_genclass!
# # => [DataMapper::Is::Reflective::User,
# # DataMapper::Is::Reflective::SchemaInfo,
# # DataMapper::Is::Reflective::Session]
#
# you can change the scope of generated models:
# e.g.
# dm.auto_genclass! :scope => Object
# # => [User, SchemaInfo, Session]
#
# you can generate classes for tables you specified only:
# e.g.
# dm.auto_genclass! :scope => Object, :storages => /^phpbb_/
# # => [PhpbbUser, PhpbbPost, PhpbbConfig]
#
# you can generate classes with String too:
# e.g.
# dm.auto_genclass! :storages => ['users', 'config'], :scope => Object
# # => [User, Config]
#
# you can generate a class only:
# e.g.
# dm.auto_genclass! :storages => 'users'
# # => [DataMapper::Is::Reflective::User]
def auto_genclass! opts = {}
opts[:scope] ||= Is::Reflective
opts[:storages] ||= /.*/
opts[:storages] = [opts[:storages]].flatten
storages.map{ |storage|
mapped = opts[:storages].each{ |target|
case target
when Regexp;
break storage if storage =~ target
when Symbol, String;
break storage if storage == target.to_s
else
raise ArgumentError.new("invalid argument: #{target.inspect}")
end
}
reflective_genclass(mapped, opts[:scope]) if mapped.kind_of?(String)
}.compact
end
private
def reflective_query_storage storage
reflective_auto_load_adapter_extension
    reflective_query_storage(storage) # call the overridden method
end
def reflective_genclass storage, scope
model = Class.new
model.__send__(:include, Resource)
model.is(:reflective)
model.storage_names[:default] = storage
scope.const_set(Inflector.classify(storage), model)
model.__send__(:reflect, /.*/)
model.finalize if model.respond_to?(:finalize)
model
end
def reflective_lookup_primitive primitive
raise TypeError.new("#{primitive} not found for #{self.class}")
end
def reflective_auto_load_adapter_extension
+ # TODO: can we fix this shit in dm-mysql-adapter?
+ name = options[:adapter] || options['adapter']
# TODO: can we fix this adapter name in dm-sqlite-adapter?
- adapter = options[:adapter].sub(/\Asqlite3\Z/, 'sqlite')
+ adapter = name.sub(/\Asqlite3\Z/, 'sqlite')
require "dm-is-reflective/adapters/#{adapter}_adapter"
class_name = "#{Inflector.camelize(adapter)}Adapter"
Adapters.const_get(class_name).__send__(:include,
DmIsReflective.const_get(class_name))
end
end
diff --git a/lib/dm-is-reflective/adapters/mysql_adapter.rb b/lib/dm-is-reflective/adapters/mysql_adapter.rb
index 35c07c8..deda4dc 100644
--- a/lib/dm-is-reflective/adapters/mysql_adapter.rb
+++ b/lib/dm-is-reflective/adapters/mysql_adapter.rb
@@ -1,60 +1,63 @@
module DmIsReflective::MysqlAdapter
include DataMapper
def storages
select('SHOW TABLES')
end
private
# construct needed table metadata
def reflective_query_storage storage
sql = <<-SQL
SELECT column_name, column_default, is_nullable, data_type,
character_maximum_length, column_key, extra
FROM `information_schema`.`columns`
WHERE `table_schema` = ? AND `table_name` = ?
SQL
- select(Ext::String.compress_lines(sql),
- options[:path].sub('/', ''), storage)
+ # TODO: can we fix this shit in dm-mysql-adapter?
+ path = options[:path] || options['path'] ||
+ options[:database] || options['database']
+
+ select(Ext::String.compress_lines(sql), path.sub('/', ''), storage)
end
def reflective_field_name field
field.column_name
end
def reflective_primitive field
field.data_type
end
def reflective_attributes field, attrs = {}
attrs[:serial] = true if field.extra == 'auto_increment'
attrs[:key] = true if field.column_key == 'PRI'
attrs[:allow_nil] = field.is_nullable == 'YES'
attrs[:default] = field.column_default if
field.column_default
attrs[:length] = field.character_maximum_length if
field.character_maximum_length
attrs
end
def reflective_lookup_primitive primitive
case primitive.upcase
when 'YEAR' ; Integer
when /\w*INT(EGER)?( SIGNED| UNSIGNED)?( ZEROFILL)?/
; Integer
when /(DOUBLE|FLOAT|DECIMAL)( SIGNED| UNSIGNED)?( ZEROFILL)?/
; Property::Decimal
when /\w*BLOB|\w*BINARY|ENUM|SET|CHAR/; String
when 'TIME' ; Time
when 'DATE' ; Date
when 'DATETIME', 'TIMESTAMP' ; DateTime
when 'BOOL', 'BOOLEAN' ; Property::Boolean
when /\w*TEXT/ ; Property::Text
end || super(primitive)
end
end
diff --git a/test/test_mysql.rb b/test/test_mysql.rb
index 67100e2..7c08f74 100644
--- a/test/test_mysql.rb
+++ b/test/test_mysql.rb
@@ -1,17 +1,23 @@
require 'dm-is-reflective/test'
require_adapter 'mysql'
describe 'mysql' do
if ENV['TRAVIS']
def setup_data_mapper
- DataMapper.setup(:default, 'mysql://root@localhost/myapp_test')
+ DataMapper.setup(:default, :adapter => 'mysql' ,
+ :username => 'root' ,
+ :host => 'localhost' ,
+ :database => 'myapp_test')
end
else
def setup_data_mapper
- DataMapper.setup(:default,
- 'mysql://dm_is_reflective:godfat@localhost/dm_is_reflective')
+ DataMapper.setup(:default, :adapter => 'mysql' ,
+ :username => 'dm_is_reflective',
+ :password => 'godfat' ,
+ :host => 'localhost' ,
+ :database => 'dm_is_reflective')
end
end
behaves_like :reflective
end if defined?(DataMapper::Adapters::MysqlAdapter)
diff --git a/test/test_postgres.rb b/test/test_postgres.rb
index 7b78a0d..479fc61 100644
--- a/test/test_postgres.rb
+++ b/test/test_postgres.rb
@@ -1,17 +1,23 @@
require 'dm-is-reflective/test'
require_adapter 'postgres'
describe 'postgres' do
if ENV['TRAVIS']
def setup_data_mapper
- DataMapper.setup(:default, 'postgres://postgres@localhost/myapp_test')
+ DataMapper.setup(:default, :adapter => 'postgres' ,
+ :username => 'postgres' ,
+ :host => 'localhost' ,
+ :database => 'myapp_test')
end
else
def setup_data_mapper
- DataMapper.setup(:default,
- 'postgres://dm_is_reflective:godfat@localhost/dm_is_reflective')
+ DataMapper.setup(:default, :adapter => 'postgres' ,
+ :username => 'dm_is_reflective',
+ :password => 'godfat' ,
+ :host => 'localhost' ,
+ :database => 'dm_is_reflective')
end
end
behaves_like :reflective
end if defined?(DataMapper::Adapters::PostgresAdapter)
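
The adapters now look options up under both Symbol and String keys (`options[:adapter] || options['adapter']`, and `:path`/`:database` for MySQL), so configuring with a plain options Hash works the same as a URI string while skipping Addressable parsing. Both forms below reach the same adapter; the credentials are the throwaway ones from `test/setup_db.sh`:

``` ruby
require 'dm-is-reflective'

# URI form, parsed via Addressable
DataMapper.setup(:default,
  'mysql://dm_is_reflective:godfat@localhost/dm_is_reflective')

# Hash form, as the tests now use; no URI parsing involved
DataMapper.setup(:default, :adapter  => 'mysql',
                           :username => 'dm_is_reflective',
                           :password => 'godfat',
                           :host     => 'localhost',
                           :database => 'dm_is_reflective')
```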
|
godfat/dm-is-reflective
|
4748dfd2bd338583e15b8bfad160e0575a96f17d
|
fix to_source test with other tests running together
|
diff --git a/test/test_to_source.rb b/test/test_to_source.rb
index 03b0d5e..e7e068c 100644
--- a/test/test_to_source.rb
+++ b/test/test_to_source.rb
@@ -1,52 +1,55 @@
require 'dm-is-reflective/test'
describe 'DataMapper::Resource#to_source' do
DataMapper.setup(:default, :adapter => 'in_memory')
+ Comment.create # enforce Comment#user_id generated
should 'match Abstract::User' do
Abstract::User.to_source.should.eq <<-RUBY
class ::Abstract::User < Object
include DataMapper::Resource
property :id, DataMapper::Property::Serial, {:primitive=>Integer, :min=>1, :serial=>true}
property :login, DataMapper::Property::String, {:primitive=>String, :length=>70}
property :sig, DataMapper::Property::Text, {:primitive=>String, :lazy=>true, :length=>65535}
property :created_at, DataMapper::Property::DateTime, {:primitive=>DateTime}
end
RUBY
end
should 'match Abstract::Comment' do
Abstract::Comment.to_source.should.eq <<-RUBY
class ::Abstract::Comment < Object
include DataMapper::Resource
property :id, DataMapper::Property::Serial, {:primitive=>Integer, :min=>1, :serial=>true}
property :title, DataMapper::Property::String, {:primitive=>String, :length=>50, :default=>"default title", :allow_nil=>false}
property :body, DataMapper::Property::Text, {:primitive=>String, :lazy=>true, :length=>65535}
+property :user_id, DataMapper::Property::Integer, {:primitive=>Integer, :index=>:user, :required=>false, :key=>false, :unique=>false, :min=>1, :max=>2147483647}
end
RUBY
end
should 'match Abstract::Comment::Abstract::Comment' do
Abstract::Comment.to_source(Abstract::Comment).should.eq <<-RUBY
class Abstract::Comment::Abstract::Comment < Object
include DataMapper::Resource
property :id, DataMapper::Property::Serial, {:primitive=>Integer, :min=>1, :serial=>true}
property :title, DataMapper::Property::String, {:primitive=>String, :length=>50, :default=>"default title", :allow_nil=>false}
property :body, DataMapper::Property::Text, {:primitive=>String, :lazy=>true, :length=>65535}
+property :user_id, DataMapper::Property::Integer, {:primitive=>Integer, :index=>:user, :required=>false, :key=>false, :unique=>false, :min=>1, :max=>2147483647}
end
RUBY
end
should 'match Abstract::Comment::Abstract::User' do
Abstract::User.to_source('Abstract::Comment').should.eq <<-RUBY
class Abstract::Comment::Abstract::User < Object
include DataMapper::Resource
property :id, DataMapper::Property::Serial, {:primitive=>Integer, :min=>1, :serial=>true}
property :login, DataMapper::Property::String, {:primitive=>String, :length=>70}
property :sig, DataMapper::Property::Text, {:primitive=>String, :lazy=>true, :length=>65535}
property :created_at, DataMapper::Property::DateTime, {:primitive=>DateTime}
end
RUBY
end
end
|
godfat/dm-is-reflective
|
763d876a797e96ed3f53cc5e8368a01fe6f19286
|
i am very tired of those extra scopes
|
diff --git a/Rakefile b/Rakefile
index a6c0dca..7d5a237 100644
--- a/Rakefile
+++ b/Rakefile
@@ -1,28 +1,28 @@
# encoding: utf-8
begin
require "#{dir = File.dirname(__FILE__)}/task/gemgem"
rescue LoadError
sh "git submodule update --init"
exec Gem.ruby, "-S", "rake", *ARGV
end
Gemgem.dir = dir
($LOAD_PATH << File.expand_path("#{Gemgem.dir}/lib" )).uniq!
desc 'Generate gemspec'
task 'gem:spec' do
Gemgem.spec = Gemgem.create do |s|
require 'dm-is-reflective/version'
s.name = 'dm-is-reflective'
- s.version = DataMapper::Is::Reflective::VERSION
+ s.version = DmIsReflective::VERSION
%w[dm-core dm-do-adapter].each{ |g| s.add_runtime_dependency(g) }
%w[dm-migrations
dm-sqlite-adapter
dm-mysql-adapter
dm-postgres-adapter].each{ |g| s.add_development_dependency(g) }
end
Gemgem.write
end
diff --git a/dm-is-reflective.gemspec b/dm-is-reflective.gemspec
index 161e547..aa9da3a 100644
--- a/dm-is-reflective.gemspec
+++ b/dm-is-reflective.gemspec
@@ -1,67 +1,76 @@
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = "dm-is-reflective"
s.version = "1.1.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Lin Jen-Shin (godfat)"]
- s.date = "2013-05-11"
+ s.date = "2013-05-13"
s.description = "DataMapper plugin that helps you manipulate an existing database.\nIt creates mappings between existing columns and model's properties."
s.email = ["godfat (XD) godfat.org"]
s.files = [
".gitignore",
".gitmodules",
+ ".travis.yml",
"CHANGES.md",
"Gemfile",
"LICENSE",
"README.md",
"Rakefile",
"TODO.md",
+ "aaa/dm-is-reflective",
"dm-is-reflective.gemspec",
"lib/dm-is-reflective.rb",
- "lib/dm-is-reflective/is/adapters/data_objects_adapter.rb",
- "lib/dm-is-reflective/is/adapters/mysql_adapter.rb",
- "lib/dm-is-reflective/is/adapters/postgres_adapter.rb",
- "lib/dm-is-reflective/is/adapters/sqlite_adapter.rb",
- "lib/dm-is-reflective/is/reflective.rb",
+ "lib/dm-is-reflective/adapters/data_objects_adapter.rb",
+ "lib/dm-is-reflective/adapters/mysql_adapter.rb",
+ "lib/dm-is-reflective/adapters/postgres_adapter.rb",
+ "lib/dm-is-reflective/adapters/sqlite_adapter.rb",
+ "lib/dm-is-reflective/reflective.rb",
+ "lib/dm-is-reflective/test.rb",
"lib/dm-is-reflective/version.rb",
"task/.gitignore",
"task/gemgem.rb",
- "test/abstract.rb",
"test/setup_db.sh",
- "test/test_dm-is-reflective.rb"]
+ "test/test_mysql.rb",
+ "test/test_postgres.rb",
+ "test/test_sqlite.rb",
+ "test/test_to_source.rb"]
s.homepage = "https://github.com/godfat/dm-is-reflective"
s.licenses = ["Apache License 2.0"]
s.require_paths = ["lib"]
s.rubygems_version = "2.0.3"
s.summary = "DataMapper plugin that helps you manipulate an existing database."
- s.test_files = ["test/test_dm-is-reflective.rb"]
+ s.test_files = [
+ "test/test_mysql.rb",
+ "test/test_postgres.rb",
+ "test/test_sqlite.rb",
+ "test/test_to_source.rb"]
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<dm-core>, [">= 0"])
s.add_runtime_dependency(%q<dm-do-adapter>, [">= 0"])
s.add_development_dependency(%q<dm-migrations>, [">= 0"])
s.add_development_dependency(%q<dm-sqlite-adapter>, [">= 0"])
s.add_development_dependency(%q<dm-mysql-adapter>, [">= 0"])
s.add_development_dependency(%q<dm-postgres-adapter>, [">= 0"])
else
s.add_dependency(%q<dm-core>, [">= 0"])
s.add_dependency(%q<dm-do-adapter>, [">= 0"])
s.add_dependency(%q<dm-migrations>, [">= 0"])
s.add_dependency(%q<dm-sqlite-adapter>, [">= 0"])
s.add_dependency(%q<dm-mysql-adapter>, [">= 0"])
s.add_dependency(%q<dm-postgres-adapter>, [">= 0"])
end
else
s.add_dependency(%q<dm-core>, [">= 0"])
s.add_dependency(%q<dm-do-adapter>, [">= 0"])
s.add_dependency(%q<dm-migrations>, [">= 0"])
s.add_dependency(%q<dm-sqlite-adapter>, [">= 0"])
s.add_dependency(%q<dm-mysql-adapter>, [">= 0"])
s.add_dependency(%q<dm-postgres-adapter>, [">= 0"])
end
end
diff --git a/lib/dm-is-reflective.rb b/lib/dm-is-reflective.rb
index ceeffab..7bbe2db 100644
--- a/lib/dm-is-reflective.rb
+++ b/lib/dm-is-reflective.rb
@@ -1,12 +1,11 @@
-gem 'dm-core', '>=1.0.0'
require 'dm-core'
require 'dm-do-adapter'
-require 'dm-is-reflective/is/reflective'
-require 'dm-is-reflective/is/adapters/data_objects_adapter'
+require 'dm-is-reflective/reflective'
+require 'dm-is-reflective/adapters/data_objects_adapter'
-DataMapper::Model.append_extensions(DataMapper::Is::Reflective)
+DataMapper::Model.append_extensions(DmIsReflective)
DataMapper::Adapters::DataObjectsAdapter.__send__(:include,
- DataMapper::Is::Reflective::DataObjectsAdapter)
+ DmIsReflective::DataObjectsAdapter)
diff --git a/lib/dm-is-reflective/adapters/data_objects_adapter.rb b/lib/dm-is-reflective/adapters/data_objects_adapter.rb
new file mode 100644
index 0000000..93c072e
--- /dev/null
+++ b/lib/dm-is-reflective/adapters/data_objects_adapter.rb
@@ -0,0 +1,134 @@
+
+module DmIsReflective::DataObjectsAdapter
+ include DataMapper
+
+  # returns all table names in the repository.
+ # e.g.
+ # ['comments', 'users']
+ def storages
+ reflective_auto_load_adapter_extension
+    storages # call the overridden method
+ end
+
+ # returns all fields, with format [[name, type, attrs]]
+ # e.g.
+ # [[:created_at, DateTime, {:required => false}],
+ # [:email, String, {:required => false, :size => 255,
+ # :default => '[email protected]'}],
+ # [:id, DataMapper::Property::Serial, {:required => true, :serial => true,
+ # :key => true}],
+ # [:salt_first, String, {:required => false, :size => 50}],
+ # [:salt_second, String, {:required => false, :size => 50}]]
+ def fields storage
+ reflective_query_storage(storage).map{ |field|
+ attr = reflective_attributes(field)
+ type = reflective_lookup_primitive(reflective_primitive(field))
+ pick = if attr[:serial] && type == Integer
+ Property::Serial
+ else
+ type
+ end
+ [reflective_field_name(field).to_sym, pick, attr]
+ }
+ end
+
+  # returns a hash with storage names as keys and
+  # corresponding fields as values. e.g.
+ # {'users' => [[:id, Integer, {:required => true,
+ # :serial => true,
+ # :key => true}],
+ # [:email, String, {:required => false,
+ # :default => '[email protected]'}],
+ # [:created_at, DateTime, {:required => false}],
+ # [:salt_first, String, {:required => false, :size => 50}],
+ # [:salt_second, String, {:required => false, :size => 50}]]}
+ # see AbstractAdapter#storages and AbstractAdapter#fields for detail
+ def storages_and_fields
+ storages.inject({}){ |result, storage|
+ result[storage] = fields(storage)
+ result
+ }
+ end
+
+  # automatically generate model class(es) and reflect
+ # all fields with reflect /.*/ for you.
+ # e.g.
+ # dm.auto_genclass!
+ # # => [DataMapper::Is::Reflective::User,
+ # # DataMapper::Is::Reflective::SchemaInfo,
+ # # DataMapper::Is::Reflective::Session]
+ #
+ # you can change the scope of generated models:
+ # e.g.
+ # dm.auto_genclass! :scope => Object
+ # # => [User, SchemaInfo, Session]
+ #
+ # you can generate classes for tables you specified only:
+ # e.g.
+ # dm.auto_genclass! :scope => Object, :storages => /^phpbb_/
+ # # => [PhpbbUser, PhpbbPost, PhpbbConfig]
+ #
+ # you can generate classes with String too:
+ # e.g.
+ # dm.auto_genclass! :storages => ['users', 'config'], :scope => Object
+ # # => [User, Config]
+ #
+ # you can generate a class only:
+ # e.g.
+ # dm.auto_genclass! :storages => 'users'
+ # # => [DataMapper::Is::Reflective::User]
+ def auto_genclass! opts = {}
+ opts[:scope] ||= Is::Reflective
+ opts[:storages] ||= /.*/
+ opts[:storages] = [opts[:storages]].flatten
+
+ storages.map{ |storage|
+
+ mapped = opts[:storages].each{ |target|
+ case target
+ when Regexp;
+ break storage if storage =~ target
+
+ when Symbol, String;
+ break storage if storage == target.to_s
+
+ else
+ raise ArgumentError.new("invalid argument: #{target.inspect}")
+ end
+ }
+
+ reflective_genclass(mapped, opts[:scope]) if mapped.kind_of?(String)
+ }.compact
+ end
+
+ private
+ def reflective_query_storage storage
+ reflective_auto_load_adapter_extension
+    reflective_query_storage(storage) # call the overridden method
+ end
+
+ def reflective_genclass storage, scope
+ model = Class.new
+ model.__send__(:include, Resource)
+ model.is(:reflective)
+ model.storage_names[:default] = storage
+ scope.const_set(Inflector.classify(storage), model)
+ model.__send__(:reflect, /.*/)
+ model.finalize if model.respond_to?(:finalize)
+ model
+ end
+
+ def reflective_lookup_primitive primitive
+ raise TypeError.new("#{primitive} not found for #{self.class}")
+ end
+
+ def reflective_auto_load_adapter_extension
+ # TODO: can we fix this adapter name in dm-sqlite-adapter?
+ adapter = options[:adapter].sub(/\Asqlite3\Z/, 'sqlite')
+
+ require "dm-is-reflective/adapters/#{adapter}_adapter"
+ class_name = "#{Inflector.camelize(adapter)}Adapter"
+ Adapters.const_get(class_name).__send__(:include,
+ DmIsReflective.const_get(class_name))
+ end
+end
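
Aside, not part of this patch: the `storages` and `reflective_query_storage` definitions above rely on a lazy-loading trick. The generic module loads and includes the adapter-specific module on first use, and then calls the same method again so the freshly included implementation handles it. A minimal standalone sketch of that dispatch pattern, with made-up module names:

``` ruby
# Sketch only: Lazy stands in for DmIsReflective::DataObjectsAdapter,
# Concrete for an adapter-specific module such as DmIsReflective::SqliteAdapter.
module Lazy
  def storages
    # stands in for reflective_auto_load_adapter_extension: include the
    # concrete module on first use, then re-dispatch to it
    self.class.__send__(:include, Concrete)
    storages
  end
end

module Concrete
  def storages
    %w[users comments]
  end
end

class FakeAdapter
  include Lazy
end

p FakeAdapter.new.storages # => ["users", "comments"]
```

Because `Concrete` is included after `Lazy`, it sits closer to the class in the ancestor chain, so the second call resolves to `Concrete#storages` instead of recursing.
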
diff --git a/lib/dm-is-reflective/adapters/mysql_adapter.rb b/lib/dm-is-reflective/adapters/mysql_adapter.rb
new file mode 100644
index 0000000..35c07c8
--- /dev/null
+++ b/lib/dm-is-reflective/adapters/mysql_adapter.rb
@@ -0,0 +1,60 @@
+
+module DmIsReflective::MysqlAdapter
+ include DataMapper
+
+ def storages
+ select('SHOW TABLES')
+ end
+
+ private
+ # construct needed table metadata
+ def reflective_query_storage storage
+ sql = <<-SQL
+ SELECT column_name, column_default, is_nullable, data_type,
+ character_maximum_length, column_key, extra
+ FROM `information_schema`.`columns`
+ WHERE `table_schema` = ? AND `table_name` = ?
+ SQL
+
+ select(Ext::String.compress_lines(sql),
+ options[:path].sub('/', ''), storage)
+ end
+
+ def reflective_field_name field
+ field.column_name
+ end
+
+ def reflective_primitive field
+ field.data_type
+ end
+
+ def reflective_attributes field, attrs = {}
+ attrs[:serial] = true if field.extra == 'auto_increment'
+ attrs[:key] = true if field.column_key == 'PRI'
+
+ attrs[:allow_nil] = field.is_nullable == 'YES'
+ attrs[:default] = field.column_default if
+ field.column_default
+
+ attrs[:length] = field.character_maximum_length if
+ field.character_maximum_length
+
+ attrs
+ end
+
+ def reflective_lookup_primitive primitive
+ case primitive.upcase
+ when 'YEAR' ; Integer
+ when /\w*INT(EGER)?( SIGNED| UNSIGNED)?( ZEROFILL)?/
+ ; Integer
+ when /(DOUBLE|FLOAT|DECIMAL)( SIGNED| UNSIGNED)?( ZEROFILL)?/
+ ; Property::Decimal
+ when /\w*BLOB|\w*BINARY|ENUM|SET|CHAR/; String
+ when 'TIME' ; Time
+ when 'DATE' ; Date
+ when 'DATETIME', 'TIMESTAMP' ; DateTime
+ when 'BOOL', 'BOOLEAN' ; Property::Boolean
+ when /\w*TEXT/ ; Property::Text
+ end || super(primitive)
+ end
+end
diff --git a/lib/dm-is-reflective/adapters/postgres_adapter.rb b/lib/dm-is-reflective/adapters/postgres_adapter.rb
new file mode 100644
index 0000000..005b354
--- /dev/null
+++ b/lib/dm-is-reflective/adapters/postgres_adapter.rb
@@ -0,0 +1,80 @@
+
+module DmIsReflective::PostgresAdapter
+ include DataMapper
+
+ def storages
+ sql = <<-SQL
+ SELECT table_name FROM "information_schema"."tables"
+ WHERE table_schema = current_schema()
+ SQL
+
+ select(Ext::String.compress_lines(sql))
+ end
+
+ private
+ def reflective_query_storage storage
+ sql = <<-SQL
+ SELECT column_name FROM "information_schema"."key_column_usage"
+ WHERE table_schema = current_schema() AND table_name = ?
+ SQL
+
+ keys = select(Ext::String.compress_lines(sql), storage).to_set
+
+ sql = <<-SQL
+ SELECT column_name, column_default, is_nullable,
+ character_maximum_length, udt_name
+ FROM "information_schema"."columns"
+ WHERE table_schema = current_schema() AND table_name = ?
+ SQL
+
+ select(Ext::String.compress_lines(sql), storage).map{ |struct|
+ struct.instance_eval <<-RUBY
+ def key?
+ #{keys.member?(struct.column_name)}
+ end
+ RUBY
+ struct
+ }
+ end
+
+ def reflective_field_name field
+ field.column_name
+ end
+
+ def reflective_primitive field
+ field.udt_name
+ end
+
+ def reflective_attributes field, attrs = {}
+ # strip data type
+ field.column_default.gsub!(/(.*?)::[\w\s]*/, '\1') if
+ field.column_default
+
+ attrs[:serial] = true if field.column_default =~ /nextval\('\w+'\)/
+ attrs[:key] = true if field.key?
+ attrs[:allow_nil] = field.is_nullable == 'YES'
+ # strip string quotation
+ attrs[:default] = field.column_default.gsub(/^'(.*?)'$/, '\1') if
+ field.column_default && !attrs[:serial]
+
+ if field.character_maximum_length
+ attrs[:length] = field.character_maximum_length
+ elsif field.udt_name.upcase == 'TEXT'
+ attrs[:length] = Property::Text.length
+ end
+
+ attrs
+ end
+
+ def reflective_lookup_primitive primitive
+ case primitive.upcase
+ when /^INT\d+$/ ; Integer
+ when /^FLOAT\d+$/ ; Float
+ when 'VARCHAR', 'BPCHAR'; String
+ when 'TIMESTAMP', 'DATE'; DateTime
+ when 'TEXT' ; Property::Text
+ when 'BOOL' ; Property::Boolean
+ when 'NUMERIC' ; Property::Decimal
+ end || super(primitive)
+ end
+end
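
A side note on the `instance_eval <<-RUBY` block in `reflective_query_storage` above: the heredoc is interpolated before it is evaluated, so each row struct gets its own `key?` singleton method whose body is a literal `true` or `false` computed at reflection time. A tiny standalone illustration of the same trick, using stand-in objects rather than real DataObjects rows:

``` ruby
require 'set'
require 'ostruct'

keys   = Set['id']                              # columns that belong to the key
struct = OpenStruct.new(:column_name => 'id')   # stands in for one result row

# the interpolation below runs in this scope, so the method body is just "true"
struct.instance_eval <<-RUBY
  def key?
    #{keys.member?(struct.column_name)}
  end
RUBY

p struct.key? # => true
```
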
diff --git a/lib/dm-is-reflective/adapters/sqlite_adapter.rb b/lib/dm-is-reflective/adapters/sqlite_adapter.rb
new file mode 100644
index 0000000..f593665
--- /dev/null
+++ b/lib/dm-is-reflective/adapters/sqlite_adapter.rb
@@ -0,0 +1,57 @@
+
+module DmIsReflective::SqliteAdapter
+ include DataMapper
+
+ def storages
+ sql = <<-SQL
+ SELECT name
+ FROM sqlite_master
+ WHERE type = 'table' AND NOT name = 'sqlite_sequence'
+ SQL
+
+ select(Ext::String.compress_lines(sql))
+ end
+
+ private
+ def reflective_query_storage storage
+ select('PRAGMA table_info(?)', storage)
+ end
+
+ def reflective_field_name field
+ field.name
+ end
+
+ def reflective_primitive field
+ field.type.gsub(/\(\d+\)/, '')
+ end
+
+ def reflective_attributes field, attrs = {}
+ if field.pk != 0
+ attrs[:key] = true
+ attrs[:serial] = true if supports_serial?
+ end
+ attrs[:allow_nil] = field.notnull == 0
+ attrs[:default] = field.dflt_value[1..-2] if field.dflt_value
+
+ if field.type.upcase == 'TEXT'
+ attrs[:length] = Property::Text.length
+ else
+ ergo = field.type.match(/\((\d+)\)/)
+ size = ergo && ergo[1].to_i
+ attrs[:length] = size if size
+ end
+
+ attrs
+ end
+
+ def reflective_lookup_primitive primitive
+ case primitive.upcase
+ when 'INTEGER' ; Integer
+ when 'REAL', 'NUMERIC'; Float
+ when 'VARCHAR' ; String
+ when 'TIMESTAMP' ; DateTime
+ when 'BOOLEAN' ; Property::Boolean
+ when 'TEXT' ; Property::Text
+ end || super(primitive)
+ end
+end
diff --git a/lib/dm-is-reflective/is/adapters/data_objects_adapter.rb b/lib/dm-is-reflective/is/adapters/data_objects_adapter.rb
deleted file mode 100644
index 3232cbd..0000000
--- a/lib/dm-is-reflective/is/adapters/data_objects_adapter.rb
+++ /dev/null
@@ -1,136 +0,0 @@
-
-module DataMapper
- module Is::Reflective
- module DataObjectsAdapter
- # returns all tables' name in the repository.
- # e.g.
- # ['comments', 'users']
- def storages
- reflective_auto_load_adapter_extension
- storages # call the overrided method
- end
-
- # returns all fields, with format [[name, type, attrs]]
- # e.g.
- # [[:created_at, DateTime, {:required => false}],
- # [:email, String, {:required => false, :size => 255,
- # :default => '[email protected]'}],
- # [:id, DataMapper::Property::Serial, {:required => true, :serial => true,
- # :key => true}],
- # [:salt_first, String, {:required => false, :size => 50}],
- # [:salt_second, String, {:required => false, :size => 50}]]
- def fields storage
- reflective_query_storage(storage).map{ |field|
- attr = reflective_attributes(field)
- type = reflective_lookup_primitive(reflective_primitive(field))
- pick = if attr[:serial] && type == Integer
- Property::Serial
- else
- type
- end
- [reflective_field_name(field).to_sym, pick, attr]
- }
- end
-
- # returns a hash with storage names in keys and
- # corresponded fields in values. e.g.
- # {'users' => [[:id, Integer, {:required => true,
- # :serial => true,
- # :key => true}],
- # [:email, String, {:required => false,
- # :default => '[email protected]'}],
- # [:created_at, DateTime, {:required => false}],
- # [:salt_first, String, {:required => false, :size => 50}],
- # [:salt_second, String, {:required => false, :size => 50}]]}
- # see AbstractAdapter#storages and AbstractAdapter#fields for detail
- def storages_and_fields
- storages.inject({}){ |result, storage|
- result[storage] = fields(storage)
- result
- }
- end
-
- # automaticly generate model class(es) and reflect
- # all fields with reflect /.*/ for you.
- # e.g.
- # dm.auto_genclass!
- # # => [DataMapper::Is::Reflective::User,
- # # DataMapper::Is::Reflective::SchemaInfo,
- # # DataMapper::Is::Reflective::Session]
- #
- # you can change the scope of generated models:
- # e.g.
- # dm.auto_genclass! :scope => Object
- # # => [User, SchemaInfo, Session]
- #
- # you can generate classes for tables you specified only:
- # e.g.
- # dm.auto_genclass! :scope => Object, :storages => /^phpbb_/
- # # => [PhpbbUser, PhpbbPost, PhpbbConfig]
- #
- # you can generate classes with String too:
- # e.g.
- # dm.auto_genclass! :storages => ['users', 'config'], :scope => Object
- # # => [User, Config]
- #
- # you can generate a class only:
- # e.g.
- # dm.auto_genclass! :storages => 'users'
- # # => [DataMapper::Is::Reflective::User]
- def auto_genclass! opts = {}
- opts[:scope] ||= Is::Reflective
- opts[:storages] ||= /.*/
- opts[:storages] = [opts[:storages]].flatten
-
- storages.map{ |storage|
-
- mapped = opts[:storages].each{ |target|
- case target
- when Regexp;
- break storage if storage =~ target
-
- when Symbol, String;
- break storage if storage == target.to_s
-
- else
- raise ArgumentError.new("invalid argument: #{target.inspect}")
- end
- }
-
- reflective_genclass(mapped, opts[:scope]) if mapped.kind_of?(String)
- }.compact
- end
-
- private
- def reflective_query_storage storage
- reflective_auto_load_adapter_extension
- reflective_query_storage(storage) # call the overrided method
- end
-
- def reflective_genclass storage, scope
- model = Class.new
- model.__send__(:include, Resource)
- model.is(:reflective)
- model.storage_names[:default] = storage
- scope.const_set(Inflector.classify(storage), model)
- model.__send__(:reflect, /.*/)
- model.finalize if model.respond_to?(:finalize)
- model
- end
-
- def reflective_lookup_primitive primitive
- raise TypeError.new("#{primitive} not found for #{self.class}")
- end
-
- def reflective_auto_load_adapter_extension
- # TODO: can we fix this adapter name in dm-sqlite-adapter?
- adapter = options[:adapter].sub(/\Asqlite3\Z/, 'sqlite')
-
- require "dm-is-reflective/is/adapters/#{adapter}_adapter"
- class_name = "#{Inflector.camelize(adapter)}Adapter"
- Adapters.const_get(class_name).__send__(:include,
- Is::Reflective.const_get(class_name))
- end
- end
- end
-end
diff --git a/lib/dm-is-reflective/is/adapters/mysql_adapter.rb b/lib/dm-is-reflective/is/adapters/mysql_adapter.rb
deleted file mode 100644
index 999e032..0000000
--- a/lib/dm-is-reflective/is/adapters/mysql_adapter.rb
+++ /dev/null
@@ -1,62 +0,0 @@
-
-module DataMapper
- module Is::Reflective
- module MysqlAdapter
- def storages
- select('SHOW TABLES')
- end
-
- private
- # construct needed table metadata
- def reflective_query_storage storage
- sql = <<-SQL
- SELECT column_name, column_default, is_nullable, data_type,
- character_maximum_length, column_key, extra
- FROM `information_schema`.`columns`
- WHERE `table_schema` = ? AND `table_name` = ?
- SQL
-
- select(Ext::String.compress_lines(sql),
- options[:path].sub('/', ''), storage)
- end
-
- def reflective_field_name field
- field.column_name
- end
-
- def reflective_primitive field
- field.data_type
- end
-
- def reflective_attributes field, attrs = {}
- attrs[:serial] = true if field.extra == 'auto_increment'
- attrs[:key] = true if field.column_key == 'PRI'
-
- attrs[:allow_nil] = field.is_nullable == 'YES'
- attrs[:default] = field.column_default if
- field.column_default
-
- attrs[:length] = field.character_maximum_length if
- field.character_maximum_length
-
- attrs
- end
-
- def reflective_lookup_primitive primitive
- case primitive.upcase
- when 'YEAR' ; Integer
- when /\w*INT(EGER)?( SIGNED| UNSIGNED)?( ZEROFILL)?/
- ; Integer
- when /(DOUBLE|FLOAT|DECIMAL)( SIGNED| UNSIGNED)?( ZEROFILL)?/
- ; Property::Decimal
- when /\w*BLOB|\w*BINARY|ENUM|SET|CHAR/; String
- when 'TIME' ; Time
- when 'DATE' ; Date
- when 'DATETIME', 'TIMESTAMP' ; DateTime
- when 'BOOL', 'BOOLEAN' ; Property::Boolean
- when /\w*TEXT/ ; Property::Text
- end || super(primitive)
- end
- end
- end
-end
diff --git a/lib/dm-is-reflective/is/adapters/postgres_adapter.rb b/lib/dm-is-reflective/is/adapters/postgres_adapter.rb
deleted file mode 100644
index 5d4ca58..0000000
--- a/lib/dm-is-reflective/is/adapters/postgres_adapter.rb
+++ /dev/null
@@ -1,82 +0,0 @@
-
-module DataMapper
- module Is::Reflective
- module PostgresAdapter
- def storages
- sql = <<-SQL
- SELECT table_name FROM "information_schema"."tables"
- WHERE table_schema = current_schema()
- SQL
-
- select(Ext::String.compress_lines(sql))
- end
-
- private
- def reflective_query_storage storage
- sql = <<-SQL
- SELECT column_name FROM "information_schema"."key_column_usage"
- WHERE table_schema = current_schema() AND table_name = ?
- SQL
-
- keys = select(Ext::String.compress_lines(sql), storage).to_set
-
- sql = <<-SQL
- SELECT column_name, column_default, is_nullable,
- character_maximum_length, udt_name
- FROM "information_schema"."columns"
- WHERE table_schema = current_schema() AND table_name = ?
- SQL
-
- select(Ext::String.compress_lines(sql), storage).map{ |struct|
- struct.instance_eval <<-RUBY
- def key?
- #{keys.member?(struct.column_name)}
- end
- RUBY
- struct
- }
- end
-
- def reflective_field_name field
- field.column_name
- end
-
- def reflective_primitive field
- field.udt_name
- end
-
- def reflective_attributes field, attrs = {}
- # strip data type
- field.column_default.gsub!(/(.*?)::[\w\s]*/, '\1') if
- field.column_default
-
- attrs[:serial] = true if field.column_default =~ /nextval\('\w+'\)/
- attrs[:key] = true if field.key?
- attrs[:allow_nil] = field.is_nullable == 'YES'
- # strip string quotation
- attrs[:default] = field.column_default.gsub(/^'(.*?)'$/, '\1') if
- field.column_default && !attrs[:serial]
-
- if field.character_maximum_length
- attrs[:length] = field.character_maximum_length
- elsif field.udt_name.upcase == 'TEXT'
- attrs[:length] = Property::Text.length
- end
-
- attrs
- end
-
- def reflective_lookup_primitive primitive
- case primitive.upcase
- when /^INT\d+$/ ; Integer
- when /^FLOAT\d+$/ ; Float
- when 'VARCHAR', 'BPCHAR'; String
- when 'TIMESTAMP', 'DATE'; DateTime
- when 'TEXT' ; Property::Text
- when 'BOOL' ; Property::Boolean
- when 'NUMERIC' ; Property::Decimal
- end || super(primitive)
- end
- end
- end
-end
diff --git a/lib/dm-is-reflective/is/adapters/sqlite_adapter.rb b/lib/dm-is-reflective/is/adapters/sqlite_adapter.rb
deleted file mode 100644
index ed6156f..0000000
--- a/lib/dm-is-reflective/is/adapters/sqlite_adapter.rb
+++ /dev/null
@@ -1,59 +0,0 @@
-
-module DataMapper
- module Is::Reflective
- module SqliteAdapter
- def storages
- sql = <<-SQL
- SELECT name
- FROM sqlite_master
- WHERE type = 'table' AND NOT name = 'sqlite_sequence'
- SQL
-
- select(Ext::String.compress_lines(sql))
- end
-
- private
- def reflective_query_storage storage
- select('PRAGMA table_info(?)', storage)
- end
-
- def reflective_field_name field
- field.name
- end
-
- def reflective_primitive field
- field.type.gsub(/\(\d+\)/, '')
- end
-
- def reflective_attributes field, attrs = {}
- if field.pk != 0
- attrs[:key] = true
- attrs[:serial] = true if supports_serial?
- end
- attrs[:allow_nil] = field.notnull == 0
- attrs[:default] = field.dflt_value[1..-2] if field.dflt_value
-
- if field.type.upcase == 'TEXT'
- attrs[:length] = Property::Text.length
- else
- ergo = field.type.match(/\((\d+)\)/)
- size = ergo && ergo[1].to_i
- attrs[:length] = size if size
- end
-
- attrs
- end
-
- def reflective_lookup_primitive primitive
- case primitive.upcase
- when 'INTEGER' ; Integer
- when 'REAL', 'NUMERIC'; Float
- when 'VARCHAR' ; String
- when 'TIMESTAMP' ; DateTime
- when 'BOOLEAN' ; Property::Boolean
- when 'TEXT' ; Property::Text
- end || super(primitive)
- end
- end
- end
-end
diff --git a/lib/dm-is-reflective/is/reflective.rb b/lib/dm-is-reflective/reflective.rb
similarity index 96%
rename from lib/dm-is-reflective/is/reflective.rb
rename to lib/dm-is-reflective/reflective.rb
index 4bd8102..7b6fa39 100644
--- a/lib/dm-is-reflective/is/reflective.rb
+++ b/lib/dm-is-reflective/reflective.rb
@@ -1,94 +1,90 @@
-module DataMapper
-module Is
-module Reflective
+module DmIsReflective
+ include DataMapper
def is_reflective
extend ClassMethod
end
module ClassMethod
# it simply calls Migration#fields(self.storage_name)
# e.g.
# DataMapper.repository.adapter.fields storage_name
def fields repo = default_repository_name
DataMapper.repository(repo).adapter.fields(storage_name(repo))
end
# it automatically creates reflection from storage fields to properties.
# i.e. you don't have to specify any property if you are connecting
# to an existing database.
# you can pass it Regexp to map any field it matched, or just
# the field name in Symbol or String, or a Class telling it
# map any field which type equals to the Class.
# returned value is an array of properties indicating fields it mapped
# e.g.
# class User
# include DataMapper::Resource
# # reflect all
# reflect /.*/ # e.g. => [#<Property:#<Class:0x18f89b8>:id>,
# # #<Property:#<Class:0x18f89b8>:title>,
# # #<Property:#<Class:0x18f89b8>:body>,
# # #<Property:#<Class:0x18f89b8>:user_id>]
#
# # reflect all (with no argument at all)
# reflect
#
# # reflect for field name ended with _at, and started with salt_
# reflect /_at$/, /^salt_/
#
# # reflect id and email
# reflect :id, :email
#
# # reflect all fields with type String, and id
# reflect String, :id
#
# # reflect login, and all fields with type Integer
# reflect :login, Integer
# end
def reflect *targets
targets << /.*/ if targets.empty?
result = fields.map{ |field|
name, type, attrs = field
reflected = targets.each{ |target|
case target
when Regexp;
break name if name.to_s =~ target
when Symbol, String;
break name if name == target.to_sym
when Class;
break name if type == target
else
raise ArgumentError.new("invalid argument: #{target.inspect}")
end
}
property(reflected, type, attrs) if reflected.kind_of?(Symbol)
}.compact
finalize if respond_to?(:finalize)
result
end
def to_source scope=nil
<<-RUBY
class #{scope}::#{name} < #{superclass}
include DataMapper::Resource
#{
properties.map do |prop|
"property :#{prop.name}, #{prop.class.name}, #{prop.options}"
end.join("\n")
}
end
RUBY
end
end # of ClassMethod
-
-end # of Reflective
-end # of Is
-end # of DataMapper
+end # of DmIsReflective
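
For context on `to_source` above: it emits the reflected model as plain Ruby, so a schema discovered at runtime can be frozen into an explicit class definition. A hedged usage sketch; the database URI, table, and output file name are assumptions rather than anything taken from this patch:

``` ruby
require 'dm-is-reflective'

# assumes an existing SQLite database that already has a users table
DataMapper.setup :default, 'sqlite:db/dev.sqlite3'

class User
  include DataMapper::Resource
  is :reflective
  reflect # map every existing column into a property
end

puts User.to_source                            # inspect the generated class body
File.write 'user_snapshot.rb', User.to_source  # or keep it as an explicit model file
```
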
diff --git a/lib/dm-is-reflective/test.rb b/lib/dm-is-reflective/test.rb
index 4cfa243..ce8125b 100644
--- a/lib/dm-is-reflective/test.rb
+++ b/lib/dm-is-reflective/test.rb
@@ -1,279 +1,279 @@
require 'bacon'
Bacon.summary_on_exit
require 'dm-core'
require 'dm-migrations'
require 'dm-is-reflective'
module Abstract
class User
include DataMapper::Resource
has n, :comments
property :id, Serial
property :login, String, :length => 70
property :sig, Text
property :created_at, DateTime
is :reflective
end
class SuperUser
include DataMapper::Resource
property :id, Serial
property :bool, Boolean
is :reflective
end
class Comment
include DataMapper::Resource
belongs_to :user, :required => false
property :id, Serial
property :title, String, :length => 50, :default => 'default title',
:allow_nil => false
property :body, Text
is :reflective
end
Tables = ['abstract_comments', 'abstract_super_users', 'abstract_users']
AttrCommon = {:allow_nil => true}
AttrCommonPK = {:serial => true, :key => true, :allow_nil => false}
AttrText = {:length => 65535}.merge(AttrCommon)
def self.next_id
@id ||= 0
@id += 1
end
end
include Abstract
shared :reflective do
def user_fields
[[:created_at, DateTime, AttrCommon],
[:id, DataMapper::Property::Serial, AttrCommonPK],
[:login, String, {:length => 70}.merge(AttrCommon)],
[:sig, DataMapper::Property::Text, AttrText]]
end
def comment_fields
[[:body, DataMapper::Property::Text, AttrText],
[:id, DataMapper::Property::Serial, AttrCommonPK],
[:title, String, {:length => 50, :default => 'default title',
:allow_nil => false}],
[:user_id, Integer, AttrCommon]]
end
# there's differences between adapters
def super_user_fields
mysql = defined?(DataMapper::Adapters::MysqlAdapter) &&
DataMapper::Adapters::MysqlAdapter
case DataMapper.repository.adapter
when mysql
# Mysql couldn't tell it's boolean or tinyint
[[:bool, Integer, AttrCommon],
[:id, DataMapper::Property::Serial, AttrCommonPK]]
else
[[:bool, DataMapper::Property::Boolean, AttrCommon],
[:id, DataMapper::Property::Serial, AttrCommonPK]]
end
end
before do
@dm = setup_data_mapper
[User, Comment, SuperUser].each(&:auto_migrate!)
end
def sort_fields fields
fields.sort_by{ |f| f.first.to_s }
end
def create_fake_model
model = Class.new
model.module_eval do
include DataMapper::Resource
property :id, DataMapper::Property::Serial
is :reflective
end
Abstract.const_set("Model#{Abstract.next_id}", model)
[model, setup_data_mapper]
end
def new_scope
Abstract.const_set("Scope#{Abstract.next_id}", Module.new)
end
def test_create_comment
Comment.create(:title => 'XD')
Comment.first.title.should.eq 'XD'
end
def test_create_user
now = Time.now
User.create(:created_at => now)
User.first.created_at.asctime.should.eq now.asctime
now
end
should 'create comment' do
test_create_comment
end
should 'create user' do
test_create_user
end
should 'storages' do
@dm.storages.sort.should.eq Tables
sort_fields(@dm.fields('abstract_comments')).should.eq comment_fields
end
should 'reflect all' do
test_create_comment # for fixtures
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
local_dm.storages.sort.should.eq Tables
model.storage_name.should.eq 'abstract_comments'
model.send :reflect
model.all.size .should.eq 1
sort_fields(model.fields).should.eq comment_fields
model.first.title .should.eq 'XD'
end
should 'reflect and create' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect
model.create(:title => 'orz')
model.first.title.should.eq 'orz'
model.create
model.last.title.should.eq 'default title'
end
should 'storages and fields' do
sort_fields(@dm.fields('abstract_users')).should.eq user_fields
@dm.storages_and_fields.inject({}){ |r, i|
key, value = i
r[key] = value.sort_by{ |v| v.first.to_s }
r
}.should.eq('abstract_users' => user_fields ,
'abstract_comments' => comment_fields ,
'abstract_super_users' => super_user_fields)
end
should 'reflect type' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect, DataMapper::Property::Serial
model.properties.map(&:name).map(&:to_s).sort.should.eq ['id']
model.send :reflect, Integer
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'user_id']
end
should 'reflect multiple' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_users'
model.send :reflect, :login, DataMapper::Property::Serial
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'login']
end
should 'reflect regexp' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
model.send :reflect, /id$/
model.properties.map(&:name).map(&:to_s).sort.should.eq \
['id', 'user_id']
end
should 'raise ArgumentError when giving invalid argument' do
lambda{
User.send :reflect, 29
}.should.raise ArgumentError
end
should 'allow empty string' do
Comment.new(:title => '').save.should.eq true
end
should 'auto_genclasses' do
scope = new_scope
@dm.auto_genclass!(:scope => scope).map(&:to_s).sort.should.eq \
- ["#{scope == Object ? '' : "#{scope}::"}AbstractComment",
+ ["#{scope}::AbstractComment",
"#{scope}::AbstractSuperUser",
"#{scope}::AbstractUser"]
comment = scope.const_get('AbstractComment')
sort_fields(comment.fields).should.eq comment_fields
test_create_comment
comment.first.title.should.eq 'XD'
comment.create(:title => 'orz', :body => 'dm-reflect')
comment.last.body.should.eq 'dm-reflect'
end
should 'auto_genclass' do
scope = new_scope
@dm.auto_genclass!(:scope => scope,
:storages => 'abstract_users').map(&:to_s).should.eq \
["#{scope}::AbstractUser"]
user = scope.const_get('AbstractUser')
sort_fields(user.fields).should.eq user_fields
now = test_create_user
user.first.created_at.asctime.should.eq now.asctime
user.create(:login => 'godfat')
user.last.login.should.eq 'godfat'
end
should 'auto_genclass with regexp' do
scope = new_scope
@dm.auto_genclass!(:scope => scope,
:storages => /_users$/).map(&:to_s).sort.should.eq \
["#{scope}::AbstractSuperUser", "#{scope}::AbstractUser"]
user = scope.const_get('AbstractSuperUser')
sort_fields(user.fields).should.eq sort_fields(SuperUser.fields)
end
should 'reflect return value' do
model, local_dm = create_fake_model
model.storage_names[:default] = 'abstract_comments'
mapped = model.send :reflect, /.*/
mapped.map(&:object_id).sort.should.eq \
model.properties.map(&:object_id).sort
end
end
module Kernel
def eq? rhs
self == rhs
end
def require_adapter adapter
require "dm-#{adapter}-adapter"
rescue LoadError
puts "skip #{adapter} test since it's not installed"
end
end
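
The per-adapter test files themselves (test_sqlite.rb, test_mysql.rb, test_postgres.rb) are not shown in this diff. Presumably each one only needs to require this helper, provide a `setup_data_mapper` for the shared `before` hook, and pull in the shared `:reflective` specs. A guess at the sqlite variant, with every detail assumed rather than taken from the patch:

``` ruby
require 'dm-is-reflective/test'
require_adapter :sqlite

describe 'sqlite' do
  # the shared :reflective block calls setup_data_mapper in its before hook
  def setup_data_mapper
    DataMapper.setup(:default, 'sqlite::memory:')
  end

  behaves_like :reflective
end if defined?(DataMapper::Adapters::SqliteAdapter)
```
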
diff --git a/lib/dm-is-reflective/version.rb b/lib/dm-is-reflective/version.rb
index 3013ddb..9c91933 100644
--- a/lib/dm-is-reflective/version.rb
+++ b/lib/dm-is-reflective/version.rb
@@ -1,8 +1,4 @@
-module DataMapper
- module Is
- module Reflective
- VERSION = '1.1.0'
- end
- end
+module DmIsReflective
+ VERSION = '1.1.0'
end
|
godfat/dm-is-reflective
|
4f37a0286e0f74571e438d49ff6b28f8f6a2a471
|
not used anymore
|
diff --git a/Rakefile b/Rakefile
index 8bb7d55..a6c0dca 100644
--- a/Rakefile
+++ b/Rakefile
@@ -1,41 +1,28 @@
# encoding: utf-8
begin
require "#{dir = File.dirname(__FILE__)}/task/gemgem"
rescue LoadError
sh "git submodule update --init"
exec Gem.ruby, "-S", "rake", *ARGV
end
Gemgem.dir = dir
($LOAD_PATH << File.expand_path("#{Gemgem.dir}/lib" )).uniq!
desc 'Generate gemspec'
task 'gem:spec' do
Gemgem.spec = Gemgem.create do |s|
require 'dm-is-reflective/version'
s.name = 'dm-is-reflective'
s.version = DataMapper::Is::Reflective::VERSION
%w[dm-core dm-do-adapter].each{ |g| s.add_runtime_dependency(g) }
%w[dm-migrations
dm-sqlite-adapter
dm-mysql-adapter
dm-postgres-adapter].each{ |g| s.add_development_dependency(g) }
end
Gemgem.write
end
-
-desc 'auto_migrate database'
-task 'auto_migrate' do
- require 'dm-migrations'
- require './test/abstract'
- require './test/test_dm-is-reflective'
- include Abstract
- [:SqliteTest, :PostgresTest, :MysqlTest].each do |db|
- next unless Object.const_defined?(db)
- Object.const_get(db).setup_data_mapper
- [User, Comment, SuperUser].each(&:auto_migrate!)
- end
-end
|
godfat/dm-is-reflective
|
9257220844366841f660c1e9fb36c87ccc013957
|
switch to in memory sqlite and talk about to_source
|
diff --git a/README.md b/README.md
index 785b00b..8ba912c 100644
--- a/README.md
+++ b/README.md
@@ -1,140 +1,143 @@
# dm-is-reflective [](http://travis-ci.org/godfat/dm-is-reflective)
by Lin Jen-Shin ([godfat](http://godfat.org))
## LINKS:
* [github](https://github.com/godfat/dm-is-reflective)
* [rubygems](https://rubygems.org/gems/dm-is-reflective)
* [rdoc](http://rdoc.info/github/godfat/dm-is-reflective)
## DESCRIPTION:
DataMapper plugin that helps you manipulate an existing database.
It creates mappings between existing columns and model's properties.
## REQUIREMENTS:
* dm-core
* choose one: dm-sqlite-adapter, dm-postgres-adapter, dm-mysql-adapter
## INSTALLATION:
``` shell
gem install dm-is-reflective
```
``` ruby
gem 'dm-is-reflective',
:git => 'git://github.com/godfat/dm-is-reflective.git',
:submodules => true
```
## SYNOPSIS:
``` ruby
require 'dm-is-reflective' # this would require 'dm-core'
-dm = DataMapper.setup :default, 'sqlite:db/dev.sqlite3'
+dm = DataMapper.setup :default, 'sqlite::memory:'
class User
include DataMapper::Resource
is :reflective
# map all, returning an array of properties indicating fields it mapped
reflect /.*/ # e.g. => [#<Property:#<Class:0x18f89b8>:id>,
# #<Property:#<Class:0x18f89b8>:title>,
# #<Property:#<Class:0x18f89b8>:body>,
# #<Property:#<Class:0x18f89b8>:user_id>]
# map all (with no argument at all)
reflect
# mapping for field name ended with _at, and started with salt_
reflect /_at$/, /^salt_/
# mapping id and email
reflect :id, :email
# mapping all fields with type String, and id
reflect String, :id
# mapping login, and all fields with type Integer
reflect :login, Integer
end
# there's no guarantee of the order in storages array
dm.storages
# => ['users']
# there's no guarantee of the order in fields array
User.fields
# => [[:created_at, DateTime, {:required => false}],
[:email, String, {:required => false, :length => 255,
:default => '[email protected]'}],
[:id, Serial, {:required => true, :serial => true,
:key => true}],
[:salt_first, String, {:required => false, :length => 50}],
[:salt_second, String, {:required => false, :length => 50}]]
dm.fields('users').sort_by{ |field| field.first.to_s } ==
User.fields.sort_by{ |field| field.first.to_s }
# => true
dm.storages_and_fields
# => {'users' => [[:id, Serial, {:required => true,
:serial => true,
:key => true}],
[:email, String, {:required => false,
:default =>
'[email protected]'}],
[:created_at, DateTime, {:required => false}],
[:salt_first, String, {:required => false,
:length => 50}],
[:salt_second, String, {:required => false,
:length => 50}]]}
# there's no guarantee of the order in returned array
dm.auto_genclass!
# => [DataMapper::Is::Reflective::User,
DataMapper::Is::Reflective::SchemaInfo,
DataMapper::Is::Reflective::Session]
# you can change the scope of generated models:
dm.auto_genclass! :scope => Object
# => [User, SchemaInfo, Session]
# you can generate classes for tables you specified only:
dm.auto_genclass! :scope => Object, :storages => /^phpbb_/
# => [PhpbbUser, PhpbbPost, PhpbbConfig]
# you can generate classes with String too:
dm.auto_genclass! :storages => ['users', 'config'], :scope => Object
# => [User, Config]
# you can generate a class only:
dm.auto_genclass! :storages => 'users'
# => [DataMapper::Is::Reflective::User]
+
+# you can also generate the source from models:
+puts User.to_source
```
## CONTRIBUTORS:
* Andrew Kreps (@onewheelskyward)
* Lin Jen-Shin (@godfat)
## LICENSE:
Apache License 2.0
Copyright (c) 2008-2013, Lin Jen-Shin (godfat)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
|
godfat/dm-is-reflective
|
52e05bc06362e3e5985301f89db4b326b28c171b
|
prefix test
|
diff --git a/test/test_to_source.rb b/test/test_to_source.rb
index 5e0dabc..03b0d5e 100644
--- a/test/test_to_source.rb
+++ b/test/test_to_source.rb
@@ -1,29 +1,52 @@
require 'dm-is-reflective/test'
describe 'DataMapper::Resource#to_source' do
DataMapper.setup(:default, :adapter => 'in_memory')
should 'match Abstract::User' do
Abstract::User.to_source.should.eq <<-RUBY
class ::Abstract::User < Object
include DataMapper::Resource
property :id, DataMapper::Property::Serial, {:primitive=>Integer, :min=>1, :serial=>true}
property :login, DataMapper::Property::String, {:primitive=>String, :length=>70}
property :sig, DataMapper::Property::Text, {:primitive=>String, :lazy=>true, :length=>65535}
property :created_at, DataMapper::Property::DateTime, {:primitive=>DateTime}
end
RUBY
end
should 'match Abstract::Comment' do
Abstract::Comment.to_source.should.eq <<-RUBY
class ::Abstract::Comment < Object
include DataMapper::Resource
property :id, DataMapper::Property::Serial, {:primitive=>Integer, :min=>1, :serial=>true}
property :title, DataMapper::Property::String, {:primitive=>String, :length=>50, :default=>"default title", :allow_nil=>false}
property :body, DataMapper::Property::Text, {:primitive=>String, :lazy=>true, :length=>65535}
+end
+ RUBY
+ end
+
+ should 'match Abstract::Comment::Abstract::Comment' do
+ Abstract::Comment.to_source(Abstract::Comment).should.eq <<-RUBY
+class Abstract::Comment::Abstract::Comment < Object
+ include DataMapper::Resource
+ property :id, DataMapper::Property::Serial, {:primitive=>Integer, :min=>1, :serial=>true}
+property :title, DataMapper::Property::String, {:primitive=>String, :length=>50, :default=>"default title", :allow_nil=>false}
+property :body, DataMapper::Property::Text, {:primitive=>String, :lazy=>true, :length=>65535}
+end
+ RUBY
+ end
+
+ should 'match Abstract::Comment::Abstract::User' do
+ Abstract::User.to_source('Abstract::Comment').should.eq <<-RUBY
+class Abstract::Comment::Abstract::User < Object
+ include DataMapper::Resource
+ property :id, DataMapper::Property::Serial, {:primitive=>Integer, :min=>1, :serial=>true}
+property :login, DataMapper::Property::String, {:primitive=>String, :length=>70}
+property :sig, DataMapper::Property::Text, {:primitive=>String, :lazy=>true, :length=>65535}
+property :created_at, DataMapper::Property::DateTime, {:primitive=>DateTime}
end
RUBY
end
end
|
godfat/dm-is-reflective
|
f1b1bece818e1b26b38940ad92b25dac85f772da
|
add test for to_source
|
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..b5a8ddf
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,7 @@
+tmp
+pkg
+doc
+ann-*
+Gemfile.lock
+
+*.rbc
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000..358f09a
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "task"]
+ path = task
+ url = git://github.com/godfat/gemgem.git
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..502a832
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,15 @@
+before_install:
+ - 'git submodule update --init'
+ - mysql -e 'create database myapp_test;'
+ - psql -c 'create database myapp_test;' -U postgres
+
+script: 'ruby -r bundler/setup -S rake test'
+
+env:
+ - 'RBXOPT=-X19'
+
+rvm:
+ - 1.9.3
+ - 2.0.0
+ - rbx-head
+ - jruby-head
diff --git a/CHANGES.md b/CHANGES.md
new file mode 100644
index 0000000..08ad50b
--- /dev/null
+++ b/CHANGES.md
@@ -0,0 +1,232 @@
+# CHANGES
+
+## dm-is-reflective 1.1.0, 2013-01-11
+
+* The need for dm-migrations is now removed.
+* Added a few more datatypes. Thanks @onewheelskyward
+* Tested against dm-core 1.2.0.
+
+## dm-is-reflective 1.0.1, 2012-05-16
+
+* allow_nil is closer to db's semantics than required. Thanks miaout17.
+  `:allow_nil` allows an empty value, but `:required` does not. So here we
+  always use `:allow_nil` to match db's semantics.
+
+## dm-is-reflective 1.0.0, 2011-06-16
+
+* updated against dm-core 1.1.0
+
+## dm-is-reflective 0.9.0, 2010-07-05
+
+* adapted to dm-core 1.0.0
+* renamed AbstractAdapter to DataObjectsAdapter
+
+## dm-is-reflective 0.8.0, 2009-09-16
+
+* require dm-core 0.10.0 and above now
+* Serial would map to Serial not Integer now
+* no more type_map now
+* no more Extlib::Hook to load adapter
+
+## dm-mapping 0.7.1, never released as a gem
+
+don't open module Migration and edit it; instead, use include. For more, see:
+
+* added DataMapper::Mapping::AbstractAdapter
+* added DataMapper::Mapping::Sqlite3Adapter
+* added DataMapper::Mapping::MysqlAdapter
+* added DataMapper::Mapping::PostgresAdapter
+* each adapter module is included in the related adapter in DataMapper.
+* Model#fields now accepts a repository name as argument
+
+there are differences between adapters:
+Sqlite3 added default => 'UL' in Boolean type,
+Mysql can't tell whether it's a Boolean or Tinyint,
+and Postgres is fine. See test/abstract.rb: super_user_fields for details.
+
+## dm-mapping 0.7.0, 2008-09-01
+
+* feature added
+
+ - added postgres support.
+
+* bug fixed
+
+ - fixed key mapping in mysql adapter. PRI and MUL are all keys.
+ - use DM::Text.size as default text size in sqlite3.
+
+## dm-mapping 0.6.2, 2008-08-30
+
+* mapping more data types for mysql.
+* don't map TINYINT to TrueClass with mysql, skip it in type_map.
+
+## dm-mapping 0.6.1, 2008-08-22
+
+* gem 'dm-core', '>=0.9.3' instead of '=0.9.3'
+
+## dm-mapping 0.6.0, 2008-08-16
+
+* mapping returns an array of properties indicating fields it mapped.
+* performance boosted by refactored mapping implementation.
+* changed the way auto_genclass! is called; it now accepts args like mapping!
+* changed fields to return field names as Symbols instead of Strings.
+  this makes it more consistent with DataMapper.
+* storage names remain Strings.
+* added more mysql data types to map
+* use Extlib::Hook to set up dm-mapping instead of stupid alias_method.
+* removed ensure_require in model. always set up DataMapper before defining a model.
+
+## dm-mapping 0.5.0, 2008-08-14
+
+* feature added
+
+ - added mysql support.
+ - reflect size 65535 in TEXT for sqlite3.
+
+* bug fixed
+
+ - reflect VARCHAR(size) instead of default size in sqlite3.
+
+* misc
+
+ - renamed sqlite3adapter to sqlite3_adapter.
+
+## dm-mapping 0.4.1, 2008-08-14
+
+* removed type hack, replaced with rejecting special type to lookup.
+
+## dm-mapping 0.4.0, 2008-08-04
+
+* added Migration#auto_genclass!.
+* updated README.
+* added more rdoc.
+
+## dm-mapping 0.3.0, 2008-08-04
+
+* added support of mapping Integer, DateTime, etc.
+* renamed some internals.
+* changed the way requiring adapter. no more setup first.
+* added Migration#storages_and_fields
+* added mapping :serial => true for primary key.
+* added mapping :default, and :nullable.
+* added support of mapping name. (through passing symbol or string)
+* added support of multiple arguments.
+* removed Mapping::All, use /.*/ instead.
+
+## dm-mapping 0.2.1, 2008-08-03
+
+* fixed a bug where the type map lookup should fall back to the parent.
+* fixed a bug where the sql type could be lower case;
+  fixed by calling upcase.
+
+## dm-mapping 0.2.0, 2008-08-02
+
+* added Sqlite3Adapter::Migration#fields
+* added DataMapper::Model#mapping
+* added DataMapper::Model#fields
+* added DataMapper::TypeMap#find_primitive for reverse lookup,
+  mapping SQL types back to Ruby types.
+* added corresponding tests.
+
+## dm-mapping 0.1.0, 2008-07-27
+
+* birthday!
+* added DataMapper.repository.storages for sqlite3.
+* please refer:
+ <http://groups.google.com/group/datamapper/browse_thread/thread/b9ca41120c5c9389>
+
+original message:
+
+ from Lin Jen-Shin
+ to DataMapper
+ cc godfat
+ date Sun, Jul 27, 2008 at 5:40 PM
+ subject Manipulate an existing database.
+ mailed-by gmail.com
+
+ Greetings,
+
+ DataMapper looks very promising for me, so I am thinking of
+ using it in the near future. I hate separate my domain objects into
+ two parts in Rails, writing migration and switching to ActiveRecord,
+ vice versa, is very annoying to me.
+
+ But there's a very convenient feature to me in ActiveRecord,
+ that is ActiveRecord automatically mapping all fields in a table.
+ It makes me easily control an existing database without any domain object.
+
+ For example,
+
+ require 'active_record'
+
+ ActiveRecord::Base.establish_connection(
+ :adapter => 'sqlite3',
+ :database => 'db/development.sqlite3'
+ )
+
+ clsas User < ActiveRecord::Base
+ end
+
+ User.find 1
+ => #<User id: 1, account: "admin", created_at: "2008-05-18 20:08:37", etc.>
+
+ Some people would use database admin such as phpMyAdmin to
+ accomplish this kind of task, but I prefer anything in Ruby,
+ calling Ruby function, manipulating data without SQL and
+ any domain object. (i.e. I didn't have to load up entire environment.)
+
+ In DataMapper, I didn't find an easy way to accomplish this.
+ I am sorry if there's one but I didn't find it, please point out,
+ many thanks. In short, I would like to do this in DataMapper:
+
+ class User
+ include DataMapper::Resource
+ mapping :account, :created_at
+ end
+
+ or
+
+ class User
+ include DataMapper::Resource
+ mapping All
+ end
+
+ class User
+ include DataMapper::ResourceAll
+ end
+
+ or
+
+ class User
+ include DataMapper::Resource
+ mapping *storage_fields
+ end
+
+ The above User.storage_fields should return an Array,
+ telling all the fields in the table, e.g. [:account, :created_at, :etc]
+ or a Hash includes data type, e.g. {:account => String,
+ :created_at => DateTime}
+ then mapping *storage_fields should change to:
+
+ mapping *storage_fields.each_key.to_a
+
+ If it's possible, a feature returning the database schema as well:
+
+ DataMapper.repository.storages
+ # => [:users, :posts, :etc]
+
+ DataMapper.repository.storages_and_fields
+ # => {:users => {:account => String},
+ :posts => {:title => String, :content => Text}}
+
+ or returning DataObject::Field, DataObject::Storage, etc.
+
+ DataMapper.repository.storage
+ # => [#<DataObject::Storage @name='users' @fields=
+ [#<DataObject::Field @name='account' @type=String>]>]
+
+ If you feel this kind of feature is indeed needed or not bad for
+ adding it, I could try to provide a patch for it. Though I didn't
+ read the source code deeply, not knowning it's easy or not.
+
+ sincerely,
diff --git a/Gemfile b/Gemfile
new file mode 100644
index 0000000..a8d7039
--- /dev/null
+++ b/Gemfile
@@ -0,0 +1,7 @@
+
+source 'https://rubygems.org'
+
+gemspec
+
+gem 'rake'
+gem 'bacon'
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..261eeb9
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..785b00b
--- /dev/null
+++ b/README.md
@@ -0,0 +1,140 @@
+# dm-is-reflective [](http://travis-ci.org/godfat/dm-is-reflective)
+
+by Lin Jen-Shin ([godfat](http://godfat.org))
+
+## LINKS:
+
+* [github](https://github.com/godfat/dm-is-reflective)
+* [rubygems](https://rubygems.org/gems/dm-is-reflective)
+* [rdoc](http://rdoc.info/github/godfat/dm-is-reflective)
+
+## DESCRIPTION:
+
+DataMapper plugin that helps you manipulate an existing database.
+It creates mappings between existing columns and model's properties.
+
+## REQUIREMENTS:
+
+* dm-core
+* choose one: dm-sqlite-adapter, dm-postgres-adapter, dm-mysql-adapter
+
+## INSTALLATION:
+
+``` shell
+gem install dm-is-reflective
+```
+
+``` ruby
+gem 'dm-is-reflective',
+ :git => 'git://github.com/godfat/dm-is-reflective.git',
+ :submodules => true
+```
+
+## SYNOPSIS:
+
+``` ruby
+require 'dm-is-reflective' # this would require 'dm-core'
+dm = DataMapper.setup :default, 'sqlite:db/dev.sqlite3'
+
+class User
+ include DataMapper::Resource
+ is :reflective
+
+ # map all, returning an array of properties indicating fields it mapped
+ reflect /.*/ # e.g. => [#<Property:#<Class:0x18f89b8>:id>,
+ # #<Property:#<Class:0x18f89b8>:title>,
+ # #<Property:#<Class:0x18f89b8>:body>,
+ # #<Property:#<Class:0x18f89b8>:user_id>]
+
+ # map all (with no argument at all)
+ reflect
+
+  # mapping fields whose names end with _at or start with salt_
+ reflect /_at$/, /^salt_/
+
+ # mapping id and email
+ reflect :id, :email
+
+ # mapping all fields with type String, and id
+ reflect String, :id
+
+ # mapping login, and all fields with type Integer
+ reflect :login, Integer
+end
+
+# there's no guarantee of the order in storages array
+dm.storages
+# => ['users']
+
+# there's no guarantee of the order in fields array
+User.fields
+# => [[:created_at, DateTime, {:required => false}],
+ [:email, String, {:required => false, :length => 255,
+ :default => '[email protected]'}],
+ [:id, Serial, {:required => true, :serial => true,
+ :key => true}],
+ [:salt_first, String, {:required => false, :length => 50}],
+ [:salt_second, String, {:required => false, :length => 50}]]
+
+dm.fields('users').sort_by{ |field| field.first.to_s } ==
+ User.fields.sort_by{ |field| field.first.to_s }
+# => true
+
+dm.storages_and_fields
+# => {'users' => [[:id, Serial, {:required => true,
+ :serial => true,
+ :key => true}],
+ [:email, String, {:required => false,
+ :default =>
+ '[email protected]'}],
+ [:created_at, DateTime, {:required => false}],
+ [:salt_first, String, {:required => false,
+ :length => 50}],
+ [:salt_second, String, {:required => false,
+ :length => 50}]]}
+
+# there's no guarantee of the order in returned array
+dm.auto_genclass!
+# => [DataMapper::Is::Reflective::User,
+ DataMapper::Is::Reflective::SchemaInfo,
+ DataMapper::Is::Reflective::Session]
+
+# you can change the scope of generated models:
+dm.auto_genclass! :scope => Object
+# => [User, SchemaInfo, Session]
+
+# you can generate classes for tables you specified only:
+dm.auto_genclass! :scope => Object, :storages => /^phpbb_/
+# => [PhpbbUser, PhpbbPost, PhpbbConfig]
+
+# you can generate classes with String too:
+dm.auto_genclass! :storages => ['users', 'config'], :scope => Object
+# => [User, Config]
+
+# you can generate a class only:
+dm.auto_genclass! :storages => 'users'
+# => [DataMapper::Is::Reflective::User]
+```
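+
+Once reflected, the model behaves like any other DataMapper model. Below is a
+minimal end-to-end sketch (assuming a `users` table with an `email` column, as
+in the `User.fields` output above; the connection URI is only an example):
+
+``` ruby
+require 'dm-is-reflective'
+
+DataMapper.setup :default, 'sqlite:db/dev.sqlite3'
+
+class User
+  include DataMapper::Resource
+  is :reflective
+  reflect   # map every column to a property
+end
+
+# query through the reflected properties as usual
+User.first(:email => '[email protected]')
+```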
+
+## CONTRIBUTORS:
+
+* Andrew Kreps (@onewheelskyward)
+* Lin Jen-Shin (@godfat)
+
+## LICENSE:
+
+Apache License 2.0
+
+Copyright (c) 2008-2013, Lin Jen-Shin (godfat)
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/Rakefile b/Rakefile
new file mode 100644
index 0000000..8bb7d55
--- /dev/null
+++ b/Rakefile
@@ -0,0 +1,41 @@
+# encoding: utf-8
+
+begin
+ require "#{dir = File.dirname(__FILE__)}/task/gemgem"
+rescue LoadError
+ sh "git submodule update --init"
+ exec Gem.ruby, "-S", "rake", *ARGV
+end
+
+Gemgem.dir = dir
+($LOAD_PATH << File.expand_path("#{Gemgem.dir}/lib" )).uniq!
+
+desc 'Generate gemspec'
+task 'gem:spec' do
+ Gemgem.spec = Gemgem.create do |s|
+ require 'dm-is-reflective/version'
+ s.name = 'dm-is-reflective'
+ s.version = DataMapper::Is::Reflective::VERSION
+
+ %w[dm-core dm-do-adapter].each{ |g| s.add_runtime_dependency(g) }
+ %w[dm-migrations
+ dm-sqlite-adapter
+ dm-mysql-adapter
+ dm-postgres-adapter].each{ |g| s.add_development_dependency(g) }
+ end
+
+ Gemgem.write
+end
+
+desc 'auto_migrate database'
+task 'auto_migrate' do
+ require 'dm-migrations'
+ require './test/abstract'
+ require './test/test_dm-is-reflective'
+ include Abstract
+ [:SqliteTest, :PostgresTest, :MysqlTest].each do |db|
+ next unless Object.const_defined?(db)
+ Object.const_get(db).setup_data_mapper
+ [User, Comment, SuperUser].each(&:auto_migrate!)
+ end
+end
diff --git a/TODO.md b/TODO.md
new file mode 100644
index 0000000..6c748b9
--- /dev/null
+++ b/TODO.md
@@ -0,0 +1,8 @@
+# TODO
+
+* make sure reflective_lookup_primitive has everything as
+ type_map from dm-migrations
+
+* fix `rake auto_migrate`
+
+* use bacon instead of minitest
diff --git a/dm-is-reflective.gemspec b/dm-is-reflective.gemspec
new file mode 100644
index 0000000..161e547
--- /dev/null
+++ b/dm-is-reflective.gemspec
@@ -0,0 +1,67 @@
+# -*- encoding: utf-8 -*-
+
+Gem::Specification.new do |s|
+ s.name = "dm-is-reflective"
+ s.version = "1.1.0"
+
+ s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
+ s.authors = ["Lin Jen-Shin (godfat)"]
+ s.date = "2013-05-11"
+ s.description = "DataMapper plugin that helps you manipulate an existing database.\nIt creates mappings between existing columns and model's properties."
+ s.email = ["godfat (XD) godfat.org"]
+ s.files = [
+ ".gitignore",
+ ".gitmodules",
+ "CHANGES.md",
+ "Gemfile",
+ "LICENSE",
+ "README.md",
+ "Rakefile",
+ "TODO.md",
+ "dm-is-reflective.gemspec",
+ "lib/dm-is-reflective.rb",
+ "lib/dm-is-reflective/is/adapters/data_objects_adapter.rb",
+ "lib/dm-is-reflective/is/adapters/mysql_adapter.rb",
+ "lib/dm-is-reflective/is/adapters/postgres_adapter.rb",
+ "lib/dm-is-reflective/is/adapters/sqlite_adapter.rb",
+ "lib/dm-is-reflective/is/reflective.rb",
+ "lib/dm-is-reflective/version.rb",
+ "task/.gitignore",
+ "task/gemgem.rb",
+ "test/abstract.rb",
+ "test/setup_db.sh",
+ "test/test_dm-is-reflective.rb"]
+ s.homepage = "https://github.com/godfat/dm-is-reflective"
+ s.licenses = ["Apache License 2.0"]
+ s.require_paths = ["lib"]
+ s.rubygems_version = "2.0.3"
+ s.summary = "DataMapper plugin that helps you manipulate an existing database."
+ s.test_files = ["test/test_dm-is-reflective.rb"]
+
+ if s.respond_to? :specification_version then
+ s.specification_version = 4
+
+ if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
+ s.add_runtime_dependency(%q<dm-core>, [">= 0"])
+ s.add_runtime_dependency(%q<dm-do-adapter>, [">= 0"])
+ s.add_development_dependency(%q<dm-migrations>, [">= 0"])
+ s.add_development_dependency(%q<dm-sqlite-adapter>, [">= 0"])
+ s.add_development_dependency(%q<dm-mysql-adapter>, [">= 0"])
+ s.add_development_dependency(%q<dm-postgres-adapter>, [">= 0"])
+ else
+ s.add_dependency(%q<dm-core>, [">= 0"])
+ s.add_dependency(%q<dm-do-adapter>, [">= 0"])
+ s.add_dependency(%q<dm-migrations>, [">= 0"])
+ s.add_dependency(%q<dm-sqlite-adapter>, [">= 0"])
+ s.add_dependency(%q<dm-mysql-adapter>, [">= 0"])
+ s.add_dependency(%q<dm-postgres-adapter>, [">= 0"])
+ end
+ else
+ s.add_dependency(%q<dm-core>, [">= 0"])
+ s.add_dependency(%q<dm-do-adapter>, [">= 0"])
+ s.add_dependency(%q<dm-migrations>, [">= 0"])
+ s.add_dependency(%q<dm-sqlite-adapter>, [">= 0"])
+ s.add_dependency(%q<dm-mysql-adapter>, [">= 0"])
+ s.add_dependency(%q<dm-postgres-adapter>, [">= 0"])
+ end
+end
diff --git a/lib/dm-is-reflective.rb b/lib/dm-is-reflective.rb
new file mode 100644
index 0000000..ceeffab
--- /dev/null
+++ b/lib/dm-is-reflective.rb
@@ -0,0 +1,12 @@
+
+gem 'dm-core', '>=1.0.0'
+require 'dm-core'
+require 'dm-do-adapter'
+
+require 'dm-is-reflective/is/reflective'
+require 'dm-is-reflective/is/adapters/data_objects_adapter'
+
+DataMapper::Model.append_extensions(DataMapper::Is::Reflective)
+
+DataMapper::Adapters::DataObjectsAdapter.__send__(:include,
+ DataMapper::Is::Reflective::DataObjectsAdapter)
diff --git a/lib/dm-is-reflective/is/adapters/data_objects_adapter.rb b/lib/dm-is-reflective/is/adapters/data_objects_adapter.rb
new file mode 100644
index 0000000..3232cbd
--- /dev/null
+++ b/lib/dm-is-reflective/is/adapters/data_objects_adapter.rb
@@ -0,0 +1,136 @@
+
+module DataMapper
+ module Is::Reflective
+ module DataObjectsAdapter
+ # returns all tables' name in the repository.
+ # e.g.
+ # ['comments', 'users']
+ def storages
+ reflective_auto_load_adapter_extension
+        storages # call the overridden method
+ end
+
+ # returns all fields, with format [[name, type, attrs]]
+ # e.g.
+ # [[:created_at, DateTime, {:required => false}],
+ # [:email, String, {:required => false, :size => 255,
+ # :default => '[email protected]'}],
+ # [:id, DataMapper::Property::Serial, {:required => true, :serial => true,
+ # :key => true}],
+ # [:salt_first, String, {:required => false, :size => 50}],
+ # [:salt_second, String, {:required => false, :size => 50}]]
+ def fields storage
+ reflective_query_storage(storage).map{ |field|
+ attr = reflective_attributes(field)
+ type = reflective_lookup_primitive(reflective_primitive(field))
+ pick = if attr[:serial] && type == Integer
+ Property::Serial
+ else
+ type
+ end
+ [reflective_field_name(field).to_sym, pick, attr]
+ }
+ end
+
+ # returns a hash with storage names in keys and
+ # corresponded fields in values. e.g.
+ # {'users' => [[:id, Integer, {:required => true,
+ # :serial => true,
+ # :key => true}],
+ # [:email, String, {:required => false,
+ # :default => '[email protected]'}],
+ # [:created_at, DateTime, {:required => false}],
+ # [:salt_first, String, {:required => false, :size => 50}],
+ # [:salt_second, String, {:required => false, :size => 50}]]}
+ # see AbstractAdapter#storages and AbstractAdapter#fields for detail
+ def storages_and_fields
+ storages.inject({}){ |result, storage|
+ result[storage] = fields(storage)
+ result
+ }
+ end
+
+      # automatically generate model class(es) and reflect
+ # all fields with reflect /.*/ for you.
+ # e.g.
+ # dm.auto_genclass!
+ # # => [DataMapper::Is::Reflective::User,
+ # # DataMapper::Is::Reflective::SchemaInfo,
+ # # DataMapper::Is::Reflective::Session]
+ #
+ # you can change the scope of generated models:
+ # e.g.
+ # dm.auto_genclass! :scope => Object
+ # # => [User, SchemaInfo, Session]
+ #
+ # you can generate classes for tables you specified only:
+ # e.g.
+ # dm.auto_genclass! :scope => Object, :storages => /^phpbb_/
+ # # => [PhpbbUser, PhpbbPost, PhpbbConfig]
+ #
+ # you can generate classes with String too:
+ # e.g.
+ # dm.auto_genclass! :storages => ['users', 'config'], :scope => Object
+ # # => [User, Config]
+ #
+ # you can generate a class only:
+ # e.g.
+ # dm.auto_genclass! :storages => 'users'
+ # # => [DataMapper::Is::Reflective::User]
+ def auto_genclass! opts = {}
+ opts[:scope] ||= Is::Reflective
+ opts[:storages] ||= /.*/
+ opts[:storages] = [opts[:storages]].flatten
+
+ storages.map{ |storage|
+
+ mapped = opts[:storages].each{ |target|
+ case target
+ when Regexp;
+ break storage if storage =~ target
+
+ when Symbol, String;
+ break storage if storage == target.to_s
+
+ else
+ raise ArgumentError.new("invalid argument: #{target.inspect}")
+ end
+ }
+
+ reflective_genclass(mapped, opts[:scope]) if mapped.kind_of?(String)
+ }.compact
+ end
+
+ private
+ def reflective_query_storage storage
+ reflective_auto_load_adapter_extension
+        reflective_query_storage(storage) # call the overridden method
+ end
+
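+      # builds an anonymous Resource class for the given storage, names it
+      # under the given scope via Inflector.classify, and reflects all fields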
+ def reflective_genclass storage, scope
+ model = Class.new
+ model.__send__(:include, Resource)
+ model.is(:reflective)
+ model.storage_names[:default] = storage
+ scope.const_set(Inflector.classify(storage), model)
+ model.__send__(:reflect, /.*/)
+ model.finalize if model.respond_to?(:finalize)
+ model
+ end
+
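+      # adapter-specific modules override this; their case statements fall
+      # back here (via super) when no primitive mapping matches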
+ def reflective_lookup_primitive primitive
+ raise TypeError.new("#{primitive} not found for #{self.class}")
+ end
+
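+      # requires the adapter-specific extension (sqlite/mysql/postgres)
+      # and mixes it into the adapter class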
+ def reflective_auto_load_adapter_extension
+ # TODO: can we fix this adapter name in dm-sqlite-adapter?
+ adapter = options[:adapter].sub(/\Asqlite3\Z/, 'sqlite')
+
+ require "dm-is-reflective/is/adapters/#{adapter}_adapter"
+ class_name = "#{Inflector.camelize(adapter)}Adapter"
+ Adapters.const_get(class_name).__send__(:include,
+ Is::Reflective.const_get(class_name))
+ end
+ end
+ end
+end
diff --git a/lib/dm-is-reflective/is/adapters/mysql_adapter.rb b/lib/dm-is-reflective/is/adapters/mysql_adapter.rb
new file mode 100644
index 0000000..999e032
--- /dev/null
+++ b/lib/dm-is-reflective/is/adapters/mysql_adapter.rb
@@ -0,0 +1,62 @@
+
+module DataMapper
+ module Is::Reflective
+ module MysqlAdapter
+ def storages
+ select('SHOW TABLES')
+ end
+
+ private
+ # construct needed table metadata
+ def reflective_query_storage storage
+ sql = <<-SQL
+ SELECT column_name, column_default, is_nullable, data_type,
+ character_maximum_length, column_key, extra
+ FROM `information_schema`.`columns`
+ WHERE `table_schema` = ? AND `table_name` = ?
+ SQL
+
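+        # the database (schema) name comes from the connection path
+        # ("/dbname" => "dbname")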
+ select(Ext::String.compress_lines(sql),
+ options[:path].sub('/', ''), storage)
+ end
+
+ def reflective_field_name field
+ field.column_name
+ end
+
+ def reflective_primitive field
+ field.data_type
+ end
+
+ def reflective_attributes field, attrs = {}
+ attrs[:serial] = true if field.extra == 'auto_increment'
+ attrs[:key] = true if field.column_key == 'PRI'
+
+ attrs[:allow_nil] = field.is_nullable == 'YES'
+ attrs[:default] = field.column_default if
+ field.column_default
+
+ attrs[:length] = field.character_maximum_length if
+ field.character_maximum_length
+
+ attrs
+ end
+
+ def reflective_lookup_primitive primitive
+ case primitive.upcase
+ when 'YEAR' ; Integer
+ when /\w*INT(EGER)?( SIGNED| UNSIGNED)?( ZEROFILL)?/
+ ; Integer
+ when /(DOUBLE|FLOAT|DECIMAL)( SIGNED| UNSIGNED)?( ZEROFILL)?/
+ ; Property::Decimal
+ when /\w*BLOB|\w*BINARY|ENUM|SET|CHAR/; String
+ when 'TIME' ; Time
+ when 'DATE' ; Date
+ when 'DATETIME', 'TIMESTAMP' ; DateTime
+ when 'BOOL', 'BOOLEAN' ; Property::Boolean
+ when /\w*TEXT/ ; Property::Text
+ end || super(primitive)
+ end
+ end
+ end
+end
diff --git a/lib/dm-is-reflective/is/adapters/postgres_adapter.rb b/lib/dm-is-reflective/is/adapters/postgres_adapter.rb
new file mode 100644
index 0000000..5d4ca58
--- /dev/null
+++ b/lib/dm-is-reflective/is/adapters/postgres_adapter.rb
@@ -0,0 +1,82 @@
+
+module DataMapper
+ module Is::Reflective
+ module PostgresAdapter
+ def storages
+ sql = <<-SQL
+ SELECT table_name FROM "information_schema"."tables"
+ WHERE table_schema = current_schema()
+ SQL
+
+ select(Ext::String.compress_lines(sql))
+ end
+
+ private
+ def reflective_query_storage storage
+ sql = <<-SQL
+ SELECT column_name FROM "information_schema"."key_column_usage"
+ WHERE table_schema = current_schema() AND table_name = ?
+ SQL
+
+ keys = select(Ext::String.compress_lines(sql), storage).to_set
+
+ sql = <<-SQL
+ SELECT column_name, column_default, is_nullable,
+ character_maximum_length, udt_name
+ FROM "information_schema"."columns"
+ WHERE table_schema = current_schema() AND table_name = ?
+ SQL
+
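+        # attach a key? predicate to each column struct, true when the
+        # column is in the primary-key set selected above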
+ select(Ext::String.compress_lines(sql), storage).map{ |struct|
+ struct.instance_eval <<-RUBY
+ def key?
+ #{keys.member?(struct.column_name)}
+ end
+ RUBY
+ struct
+ }
+ end
+
+ def reflective_field_name field
+ field.column_name
+ end
+
+ def reflective_primitive field
+ field.udt_name
+ end
+
+ def reflective_attributes field, attrs = {}
+ # strip data type
+ field.column_default.gsub!(/(.*?)::[\w\s]*/, '\1') if
+ field.column_default
+
+ attrs[:serial] = true if field.column_default =~ /nextval\('\w+'\)/
+ attrs[:key] = true if field.key?
+ attrs[:allow_nil] = field.is_nullable == 'YES'
+ # strip string quotation
+ attrs[:default] = field.column_default.gsub(/^'(.*?)'$/, '\1') if
+ field.column_default && !attrs[:serial]
+
+ if field.character_maximum_length
+ attrs[:length] = field.character_maximum_length
+ elsif field.udt_name.upcase == 'TEXT'
+ attrs[:length] = Property::Text.length
+ end
+
+ attrs
+ end
+
+ def reflective_lookup_primitive primitive
+ case primitive.upcase
+ when /^INT\d+$/ ; Integer
+ when /^FLOAT\d+$/ ; Float
+ when 'VARCHAR', 'BPCHAR'; String
+ when 'TIMESTAMP', 'DATE'; DateTime
+ when 'TEXT' ; Property::Text
+ when 'BOOL' ; Property::Boolean
+ when 'NUMERIC' ; Property::Decimal
+ end || super(primitive)
+ end
+ end
+ end
+end
diff --git a/lib/dm-is-reflective/is/adapters/sqlite_adapter.rb b/lib/dm-is-reflective/is/adapters/sqlite_adapter.rb
new file mode 100644
index 0000000..ed6156f
--- /dev/null
+++ b/lib/dm-is-reflective/is/adapters/sqlite_adapter.rb
@@ -0,0 +1,59 @@
+
+module DataMapper
+ module Is::Reflective
+ module SqliteAdapter
+ def storages
+ sql = <<-SQL
+ SELECT name
+ FROM sqlite_master
+ WHERE type = 'table' AND NOT name = 'sqlite_sequence'
+ SQL
+
+ select(Ext::String.compress_lines(sql))
+ end
+
+ private
+ def reflective_query_storage storage
+ select('PRAGMA table_info(?)', storage)
+ end
+
+ def reflective_field_name field
+ field.name
+ end
+
+ def reflective_primitive field
+ field.type.gsub(/\(\d+\)/, '')
+ end
+
+ def reflective_attributes field, attrs = {}
+ if field.pk != 0
+ attrs[:key] = true
+ attrs[:serial] = true if supports_serial?
+ end
+ attrs[:allow_nil] = field.notnull == 0
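+        # dflt_value is typically quoted (e.g. 'untitled');
+        # [1..-2] strips the surrounding quotes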
+ attrs[:default] = field.dflt_value[1..-2] if field.dflt_value
+
+ if field.type.upcase == 'TEXT'
+ attrs[:length] = Property::Text.length
+ else
+ ergo = field.type.match(/\((\d+)\)/)
+ size = ergo && ergo[1].to_i
+ attrs[:length] = size if size
+ end
+
+ attrs
+ end
+
+ def reflective_lookup_primitive primitive
+ case primitive.upcase
+ when 'INTEGER' ; Integer
+ when 'REAL', 'NUMERIC'; Float
+ when 'VARCHAR' ; String
+ when 'TIMESTAMP' ; DateTime
+ when 'BOOLEAN' ; Property::Boolean
+ when 'TEXT' ; Property::Text
+ end || super(primitive)
+ end
+ end
+ end
+end
diff --git a/lib/dm-is-reflective/is/reflective.rb b/lib/dm-is-reflective/is/reflective.rb
new file mode 100644
index 0000000..4bd8102
--- /dev/null
+++ b/lib/dm-is-reflective/is/reflective.rb
@@ -0,0 +1,94 @@
+
+module DataMapper
+module Is
+module Reflective
+
+ def is_reflective
+ extend ClassMethod
+ end
+
+ module ClassMethod
+ # it simply calls Migration#fields(self.storage_name)
+ # e.g.
+ # DataMapper.repository.adapter.fields storage_name
+ def fields repo = default_repository_name
+ DataMapper.repository(repo).adapter.fields(storage_name(repo))
+ end
+
+ # it automatically creates reflection from storage fields to properties.
+ # i.e. you don't have to specify any property if you are connecting
+ # to an existing database.
+    # you can pass it a Regexp to map any field it matches, or just
+    # the field name as a Symbol or String, or a Class telling it to
+    # map any field whose type equals that Class.
+ # returned value is an array of properties indicating fields it mapped
+ # e.g.
+ # class User
+ # include DataMapper::Resource
+ # # reflect all
+ # reflect /.*/ # e.g. => [#<Property:#<Class:0x18f89b8>:id>,
+ # # #<Property:#<Class:0x18f89b8>:title>,
+ # # #<Property:#<Class:0x18f89b8>:body>,
+ # # #<Property:#<Class:0x18f89b8>:user_id>]
+ #
+ # # reflect all (with no argument at all)
+ # reflect
+ #
+  #   # reflect fields whose names end with _at or start with salt_
+ # reflect /_at$/, /^salt_/
+ #
+ # # reflect id and email
+ # reflect :id, :email
+ #
+ # # reflect all fields with type String, and id
+ # reflect String, :id
+ #
+ # # reflect login, and all fields with type Integer
+ # reflect :login, Integer
+ # end
+ def reflect *targets
+ targets << /.*/ if targets.empty?
+
+ result = fields.map{ |field|
+ name, type, attrs = field
+
+ reflected = targets.each{ |target|
+ case target
+ when Regexp;
+ break name if name.to_s =~ target
+
+ when Symbol, String;
+ break name if name == target.to_sym
+
+ when Class;
+ break name if type == target
+
+ else
+ raise ArgumentError.new("invalid argument: #{target.inspect}")
+ end
+ }
+
+ property(reflected, type, attrs) if reflected.kind_of?(Symbol)
+ }.compact
+
+ finalize if respond_to?(:finalize)
+ result
+ end
+
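+    # returns a String of Ruby source that defines this model with each
+    # reflected property declared explicitly (useful for dumping a
+    # reflected schema into a regular model file)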
+ def to_source scope=nil
+<<-RUBY
+class #{scope}::#{name} < #{superclass}
+ include DataMapper::Resource
+ #{
+ properties.map do |prop|
+ "property :#{prop.name}, #{prop.class.name}, #{prop.options}"
+ end.join("\n")
+ }
+end
+RUBY
+ end
+ end # of ClassMethod
+
+end # of Reflective
+end # of Is
+end # of DataMapper
diff --git a/lib/dm-is-reflective/test.rb b/lib/dm-is-reflective/test.rb
new file mode 100644
index 0000000..4cfa243
--- /dev/null
+++ b/lib/dm-is-reflective/test.rb
@@ -0,0 +1,279 @@
+
+require 'bacon'
+Bacon.summary_on_exit
+
+require 'dm-core'
+require 'dm-migrations'
+require 'dm-is-reflective'
+
+module Abstract
+ class User
+ include DataMapper::Resource
+ has n, :comments
+
+ property :id, Serial
+ property :login, String, :length => 70
+ property :sig, Text
+ property :created_at, DateTime
+
+ is :reflective
+ end
+
+ class SuperUser
+ include DataMapper::Resource
+ property :id, Serial
+ property :bool, Boolean
+
+ is :reflective
+ end
+
+ class Comment
+ include DataMapper::Resource
+ belongs_to :user, :required => false
+
+ property :id, Serial
+ property :title, String, :length => 50, :default => 'default title',
+ :allow_nil => false
+ property :body, Text
+
+ is :reflective
+ end
+
+ Tables = ['abstract_comments', 'abstract_super_users', 'abstract_users']
+
+ AttrCommon = {:allow_nil => true}
+ AttrCommonPK = {:serial => true, :key => true, :allow_nil => false}
+ AttrText = {:length => 65535}.merge(AttrCommon)
+
+ def self.next_id
+ @id ||= 0
+ @id += 1
+ end
+end
+
+include Abstract
+
+shared :reflective do
+ def user_fields
+ [[:created_at, DateTime, AttrCommon],
+ [:id, DataMapper::Property::Serial, AttrCommonPK],
+ [:login, String, {:length => 70}.merge(AttrCommon)],
+ [:sig, DataMapper::Property::Text, AttrText]]
+ end
+
+ def comment_fields
+ [[:body, DataMapper::Property::Text, AttrText],
+ [:id, DataMapper::Property::Serial, AttrCommonPK],
+ [:title, String, {:length => 50, :default => 'default title',
+ :allow_nil => false}],
+ [:user_id, Integer, AttrCommon]]
+ end
+
+  # there are differences between adapters
+ def super_user_fields
+ mysql = defined?(DataMapper::Adapters::MysqlAdapter) &&
+ DataMapper::Adapters::MysqlAdapter
+ case DataMapper.repository.adapter
+ when mysql
+      # MySQL can't tell whether it's a boolean or a tinyint
+ [[:bool, Integer, AttrCommon],
+ [:id, DataMapper::Property::Serial, AttrCommonPK]]
+
+ else
+ [[:bool, DataMapper::Property::Boolean, AttrCommon],
+ [:id, DataMapper::Property::Serial, AttrCommonPK]]
+ end
+ end
+
+ before do
+ @dm = setup_data_mapper
+ [User, Comment, SuperUser].each(&:auto_migrate!)
+ end
+
+ def sort_fields fields
+ fields.sort_by{ |f| f.first.to_s }
+ end
+
+ def create_fake_model
+ model = Class.new
+ model.module_eval do
+ include DataMapper::Resource
+ property :id, DataMapper::Property::Serial
+ is :reflective
+ end
+ Abstract.const_set("Model#{Abstract.next_id}", model)
+ [model, setup_data_mapper]
+ end
+
+ def new_scope
+ Abstract.const_set("Scope#{Abstract.next_id}", Module.new)
+ end
+
+ def test_create_comment
+ Comment.create(:title => 'XD')
+ Comment.first.title.should.eq 'XD'
+ end
+
+ def test_create_user
+ now = Time.now
+ User.create(:created_at => now)
+ User.first.created_at.asctime.should.eq now.asctime
+ now
+ end
+
+ should 'create comment' do
+ test_create_comment
+ end
+
+ should 'create user' do
+ test_create_user
+ end
+
+ should 'storages' do
+ @dm.storages.sort.should.eq Tables
+ sort_fields(@dm.fields('abstract_comments')).should.eq comment_fields
+ end
+
+ should 'reflect all' do
+ test_create_comment # for fixtures
+ model, local_dm = create_fake_model
+ model.storage_names[:default] = 'abstract_comments'
+
+ local_dm.storages.sort.should.eq Tables
+ model.storage_name.should.eq 'abstract_comments'
+
+ model.send :reflect
+ model.all.size .should.eq 1
+ sort_fields(model.fields).should.eq comment_fields
+ model.first.title .should.eq 'XD'
+ end
+
+ should 'reflect and create' do
+ model, local_dm = create_fake_model
+ model.storage_names[:default] = 'abstract_comments'
+ model.send :reflect
+
+ model.create(:title => 'orz')
+ model.first.title.should.eq 'orz'
+
+ model.create
+ model.last.title.should.eq 'default title'
+ end
+
+ should 'storages and fields' do
+ sort_fields(@dm.fields('abstract_users')).should.eq user_fields
+
+ @dm.storages_and_fields.inject({}){ |r, i|
+ key, value = i
+ r[key] = value.sort_by{ |v| v.first.to_s }
+ r
+ }.should.eq('abstract_users' => user_fields ,
+ 'abstract_comments' => comment_fields ,
+ 'abstract_super_users' => super_user_fields)
+ end
+
+ should 'reflect type' do
+ model, local_dm = create_fake_model
+ model.storage_names[:default] = 'abstract_comments'
+
+ model.send :reflect, DataMapper::Property::Serial
+ model.properties.map(&:name).map(&:to_s).sort.should.eq ['id']
+
+ model.send :reflect, Integer
+ model.properties.map(&:name).map(&:to_s).sort.should.eq \
+ ['id', 'user_id']
+ end
+
+ should 'reflect multiple' do
+ model, local_dm = create_fake_model
+ model.storage_names[:default] = 'abstract_users'
+ model.send :reflect, :login, DataMapper::Property::Serial
+
+ model.properties.map(&:name).map(&:to_s).sort.should.eq \
+ ['id', 'login']
+ end
+
+ should 'reflect regexp' do
+ model, local_dm = create_fake_model
+ model.storage_names[:default] = 'abstract_comments'
+ model.send :reflect, /id$/
+
+ model.properties.map(&:name).map(&:to_s).sort.should.eq \
+ ['id', 'user_id']
+ end
+
+ should 'raise ArgumentError when giving invalid argument' do
+ lambda{
+ User.send :reflect, 29
+ }.should.raise ArgumentError
+ end
+
+ should 'allow empty string' do
+ Comment.new(:title => '').save.should.eq true
+ end
+
+ should 'auto_genclasses' do
+ scope = new_scope
+ @dm.auto_genclass!(:scope => scope).map(&:to_s).sort.should.eq \
+ ["#{scope == Object ? '' : "#{scope}::"}AbstractComment",
+ "#{scope}::AbstractSuperUser",
+ "#{scope}::AbstractUser"]
+
+ comment = scope.const_get('AbstractComment')
+
+ sort_fields(comment.fields).should.eq comment_fields
+
+ test_create_comment
+
+ comment.first.title.should.eq 'XD'
+ comment.create(:title => 'orz', :body => 'dm-reflect')
+ comment.last.body.should.eq 'dm-reflect'
+ end
+
+ should 'auto_genclass' do
+ scope = new_scope
+ @dm.auto_genclass!(:scope => scope,
+ :storages => 'abstract_users').map(&:to_s).should.eq \
+ ["#{scope}::AbstractUser"]
+
+ user = scope.const_get('AbstractUser')
+ sort_fields(user.fields).should.eq user_fields
+
+ now = test_create_user
+
+ user.first.created_at.asctime.should.eq now.asctime
+ user.create(:login => 'godfat')
+ user.last.login.should.eq 'godfat'
+ end
+
+ should 'auto_genclass with regexp' do
+ scope = new_scope
+ @dm.auto_genclass!(:scope => scope,
+ :storages => /_users$/).map(&:to_s).sort.should.eq \
+ ["#{scope}::AbstractSuperUser", "#{scope}::AbstractUser"]
+
+ user = scope.const_get('AbstractSuperUser')
+ sort_fields(user.fields).should.eq sort_fields(SuperUser.fields)
+ end
+
+ should 'reflect return value' do
+ model, local_dm = create_fake_model
+ model.storage_names[:default] = 'abstract_comments'
+ mapped = model.send :reflect, /.*/
+
+ mapped.map(&:object_id).sort.should.eq \
+ model.properties.map(&:object_id).sort
+ end
+end
+
+module Kernel
+ def eq? rhs
+ self == rhs
+ end
+
+ def require_adapter adapter
+ require "dm-#{adapter}-adapter"
+ rescue LoadError
+ puts "skip #{adapter} test since it's not installed"
+ end
+end
diff --git a/lib/dm-is-reflective/version.rb b/lib/dm-is-reflective/version.rb
new file mode 100644
index 0000000..3013ddb
--- /dev/null
+++ b/lib/dm-is-reflective/version.rb
@@ -0,0 +1,8 @@
+
+module DataMapper
+ module Is
+ module Reflective
+ VERSION = '1.1.0'
+ end
+ end
+end
diff --git a/task b/task
new file mode 160000
index 0000000..54beaa8
--- /dev/null
+++ b/task
@@ -0,0 +1 @@
+Subproject commit 54beaa85d0beaa498e0e4e2e54e2da806aa8800b
diff --git a/test/setup_db.sh b/test/setup_db.sh
new file mode 100755
index 0000000..8475928
--- /dev/null
+++ b/test/setup_db.sh
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+# postgres
+psql postgres -c 'CREATE USER dm_is_reflective'
+psql postgres -c "ALTER USER dm_is_reflective WITH PASSWORD 'godfat'"
+createdb 'dm_is_reflective'
+psql postgres -c 'ALTER DATABASE dm_is_reflective OWNER TO dm_is_reflective'
+
+# mysql
+mysql -u root -p -e 'GRANT USAGE ON dm_is_reflective.* TO dm_is_reflective@localhost IDENTIFIED BY "godfat"'
+mysql -u root -p -e 'CREATE DATABASE dm_is_reflective'
+mysql -u root -p -e 'GRANT ALL PRIVILEGES ON dm_is_reflective.* TO "dm_is_reflective"'
diff --git a/test/test_mysql.rb b/test/test_mysql.rb
new file mode 100644
index 0000000..67100e2
--- /dev/null
+++ b/test/test_mysql.rb
@@ -0,0 +1,17 @@
+
+require 'dm-is-reflective/test'
+require_adapter 'mysql'
+
+describe 'mysql' do
+ if ENV['TRAVIS']
+ def setup_data_mapper
+ DataMapper.setup(:default, 'mysql://root@localhost/myapp_test')
+ end
+ else
+ def setup_data_mapper
+ DataMapper.setup(:default,
+ 'mysql://dm_is_reflective:godfat@localhost/dm_is_reflective')
+ end
+ end
+ behaves_like :reflective
+end if defined?(DataMapper::Adapters::MysqlAdapter)
diff --git a/test/test_postgres.rb b/test/test_postgres.rb
new file mode 100644
index 0000000..7b78a0d
--- /dev/null
+++ b/test/test_postgres.rb
@@ -0,0 +1,17 @@
+
+require 'dm-is-reflective/test'
+require_adapter 'postgres'
+
+describe 'postgres' do
+ if ENV['TRAVIS']
+ def setup_data_mapper
+ DataMapper.setup(:default, 'postgres://postgres@localhost/myapp_test')
+ end
+ else
+ def setup_data_mapper
+ DataMapper.setup(:default,
+ 'postgres://dm_is_reflective:godfat@localhost/dm_is_reflective')
+ end
+ end
+ behaves_like :reflective
+end if defined?(DataMapper::Adapters::PostgresAdapter)
diff --git a/test/test_sqlite.rb b/test/test_sqlite.rb
new file mode 100644
index 0000000..ffd3e84
--- /dev/null
+++ b/test/test_sqlite.rb
@@ -0,0 +1,10 @@
+
+require 'dm-is-reflective/test'
+require_adapter 'sqlite'
+
+describe 'sqlite' do
+ def setup_data_mapper
+ DataMapper.setup(:default, :adapter => 'sqlite', :database => ':memory:')
+ end
+ behaves_like :reflective
+end if defined?(DataMapper::Adapters::SqliteAdapter)
diff --git a/test/test_to_source.rb b/test/test_to_source.rb
new file mode 100644
index 0000000..5e0dabc
--- /dev/null
+++ b/test/test_to_source.rb
@@ -0,0 +1,29 @@
+
+require 'dm-is-reflective/test'
+
+describe 'DataMapper::Resource#to_source' do
+ DataMapper.setup(:default, :adapter => 'in_memory')
+
+ should 'match Abstract::User' do
+ Abstract::User.to_source.should.eq <<-RUBY
+class ::Abstract::User < Object
+ include DataMapper::Resource
+ property :id, DataMapper::Property::Serial, {:primitive=>Integer, :min=>1, :serial=>true}
+property :login, DataMapper::Property::String, {:primitive=>String, :length=>70}
+property :sig, DataMapper::Property::Text, {:primitive=>String, :lazy=>true, :length=>65535}
+property :created_at, DataMapper::Property::DateTime, {:primitive=>DateTime}
+end
+ RUBY
+ end
+
+ should 'match Abstract::Comment' do
+ Abstract::Comment.to_source.should.eq <<-RUBY
+class ::Abstract::Comment < Object
+ include DataMapper::Resource
+ property :id, DataMapper::Property::Serial, {:primitive=>Integer, :min=>1, :serial=>true}
+property :title, DataMapper::Property::String, {:primitive=>String, :length=>50, :default=>"default title", :allow_nil=>false}
+property :body, DataMapper::Property::Text, {:primitive=>String, :lazy=>true, :length=>65535}
+end
+ RUBY
+ end
+end
|
maravillas/linq-to-delicious
|
601ea0ba38aadf296f72f1ff0ccad8fa5f4f048e
|
Add ListBox to window with some default data
|
diff --git a/tasty/MainWindow.xaml b/tasty/MainWindow.xaml
index c86ef9d..0ab8939 100644
--- a/tasty/MainWindow.xaml
+++ b/tasty/MainWindow.xaml
@@ -1,23 +1,35 @@
<Window x:Class="tasty.MainWindow"
xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
- xmlns:src="clr-namespace:tasty.Commands"
- Title="Tasty" Height="404" Width="715">
+ xmlns:commands="clr-namespace:tasty.Commands"
+ xmlns:linq="clr-namespace:LinqToDelicious;assembly=LinqToDelicious"
+ Title="Tasty" Height="400" Width="700">
<Window.CommandBindings>
- <CommandBinding Command="src:FileCommands.Exit" Executed="Exit_Executed" CanExecute="Exit_CanExecute"/>
+ <CommandBinding Command="commands:FileCommands.Exit" Executed="Exit_Executed" CanExecute="Exit_CanExecute"/>
</Window.CommandBindings>
+
+ <Window.Resources>
+ <DataTemplate DataType="{x:Type linq:Post}">
+ <StackPanel>
+ <TextBlock Text="{Binding Path=Description}"/>
+ <TextBlock Text="{Binding Path=Extended}"/>
+ </StackPanel>
+ </DataTemplate>
+ </Window.Resources>
+
<Grid>
<StatusBar Height="23" Name="statusBar" VerticalAlignment="Bottom" />
- <Menu Height="22" Name="menu1" VerticalAlignment="Top">
+ <Menu Height="22" Name="menu" VerticalAlignment="Top">
<MenuItem Header="_File">
- <MenuItem Command="src:FileCommands.Exit"/>
+ <MenuItem Command="commands:FileCommands.Exit"/>
</MenuItem>
<MenuItem Header="_Edit">
<MenuItem Command="Cut" Header="Cu_t" />
<MenuItem Command="Copy" Header="_Copy" />
<MenuItem Command="Paste" Header="_Paste" IsEnabled="True" />
</MenuItem>
</Menu>
+ <ListBox Margin="0,22,0,23" Name="postListBox"/>
</Grid>
</Window>
diff --git a/tasty/MainWindow.xaml.cs b/tasty/MainWindow.xaml.cs
index a9c2405..dfc9e14 100644
--- a/tasty/MainWindow.xaml.cs
+++ b/tasty/MainWindow.xaml.cs
@@ -1,54 +1,74 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
using LinqToDelicious;
using System.Net;
using System.Diagnostics;
namespace tasty
{
/// <summary>
/// Interaction logic for Window1.xaml
/// </summary>
public partial class MainWindow : Window
{
public IEnumerable<Post> Posts { get; private set; }
public MainWindow()
{
InitializeComponent();
-
+
Posts = new List<Post>();
+ }
+ protected override void OnInitialized(EventArgs e)
+ {
+ base.OnInitialized(e);
/*
var query = from post in new Delicious("username", "password").Posts
- where post.Date == new DateTime(2009, 1, 16)
- select post;
-
- foreach (var p in query)
+ where post.Date == new DateTime(2009, 1, 12)
+ select post;*/
+ var posts = new List<Post>()
{
- Debug.WriteLine(p);
- }*/
+ new Post(
+ "http://example.com/",
+ "a5a6f3d28d8dd549f3cad39fb0b34104",
+ "Example domain",
+ "example domain",
+ "An example site.",
+ "2008-12-12T07:45:52Z",
+ "762ee1d713648596931f798a7ba987e0"),
+ new Post(
+ "http://second-example.com/",
+ "ce67c6fbe4f79a521481060e2447001b",
+ "Another example domain",
+ "example domain another",
+ "Another example site.",
+ "2008-12-12T04:04:24Z",
+ "fa2a46d239ad4f089c3ce7961d958b2e")
+ };
+
+ postListBox.ItemsSource = posts;
}
private void Exit_Executed(object sender, ExecutedRoutedEventArgs e)
{
Close();
}
private void Exit_CanExecute(object sender, CanExecuteRoutedEventArgs e)
{
e.CanExecute = true;
}
}
}
|
maravillas/linq-to-delicious
|
80c9e96a1ccd6f0dfa82175da73ae93f857c94ec
|
Added menu bar to window and renamed
|
diff --git a/tasty.sln b/tasty.sln
index c9191bc..803a3fe 100644
--- a/tasty.sln
+++ b/tasty.sln
@@ -1,53 +1,54 @@

Microsoft Visual Studio Solution File, Format Version 10.00
# Visual Studio 2008
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "tasty", "tasty\tasty.csproj", "{69B0D2E5-8BC8-4747-9225-7F7450153B86}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{A3A51F38-2E9A-4573-BC1D-C76771C569E9}"
ProjectSection(SolutionItems) = preProject
LocalTestRun.testrunconfig = LocalTestRun.testrunconfig
tasty.vsmdi = tasty.vsmdi
tasty1.vsmdi = tasty1.vsmdi
tasty2.vsmdi = tasty2.vsmdi
+ tasty3.vsmdi = tasty3.vsmdi
EndProjectSection
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LinqToDelicious", "LinqToDelicious\LinqToDelicious.csproj", "{C29DC32C-1DB9-4186-B707-E31E8C4C9209}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LinqToDeliciousTest", "LinqToDeliciousTest\LinqToDeliciousTest.csproj", "{600738E5-AEAD-4C7C-BD53-F91A82E4C0FE}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "IQToolkit", "IQToolkit\IQToolkit.csproj", "{680B0226-FF3E-447B-BE5A-CB99698D4555}"
EndProject
Global
GlobalSection(SubversionScc) = preSolution
Svn-Managed = True
Manager = AnkhSVN - Subversion Support for Visual Studio
EndGlobalSection
GlobalSection(TestCaseManagementSettings) = postSolution
- CategoryFile = tasty2.vsmdi
+ CategoryFile = tasty3.vsmdi
EndGlobalSection
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{69B0D2E5-8BC8-4747-9225-7F7450153B86}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{69B0D2E5-8BC8-4747-9225-7F7450153B86}.Debug|Any CPU.Build.0 = Debug|Any CPU
{69B0D2E5-8BC8-4747-9225-7F7450153B86}.Release|Any CPU.ActiveCfg = Release|Any CPU
{69B0D2E5-8BC8-4747-9225-7F7450153B86}.Release|Any CPU.Build.0 = Release|Any CPU
{C29DC32C-1DB9-4186-B707-E31E8C4C9209}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C29DC32C-1DB9-4186-B707-E31E8C4C9209}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C29DC32C-1DB9-4186-B707-E31E8C4C9209}.Release|Any CPU.ActiveCfg = Release|Any CPU
{C29DC32C-1DB9-4186-B707-E31E8C4C9209}.Release|Any CPU.Build.0 = Release|Any CPU
{600738E5-AEAD-4C7C-BD53-F91A82E4C0FE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{600738E5-AEAD-4C7C-BD53-F91A82E4C0FE}.Debug|Any CPU.Build.0 = Debug|Any CPU
{600738E5-AEAD-4C7C-BD53-F91A82E4C0FE}.Release|Any CPU.ActiveCfg = Release|Any CPU
{600738E5-AEAD-4C7C-BD53-F91A82E4C0FE}.Release|Any CPU.Build.0 = Release|Any CPU
{680B0226-FF3E-447B-BE5A-CB99698D4555}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{680B0226-FF3E-447B-BE5A-CB99698D4555}.Debug|Any CPU.Build.0 = Debug|Any CPU
{680B0226-FF3E-447B-BE5A-CB99698D4555}.Release|Any CPU.ActiveCfg = Release|Any CPU
{680B0226-FF3E-447B-BE5A-CB99698D4555}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
EndGlobal
diff --git a/tasty/App.xaml b/tasty/App.xaml
index 64a0d58..8b669a5 100644
--- a/tasty/App.xaml
+++ b/tasty/App.xaml
@@ -1,8 +1,8 @@
<Application x:Class="tasty.App"
xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
- StartupUri="Window1.xaml">
+ StartupUri="MainWindow.xaml">
<Application.Resources>
</Application.Resources>
</Application>
diff --git a/tasty/Commands/FileCommands.cs b/tasty/Commands/FileCommands.cs
new file mode 100644
index 0000000..0a04597
--- /dev/null
+++ b/tasty/Commands/FileCommands.cs
@@ -0,0 +1,21 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Windows.Input;
+
+namespace tasty.Commands
+{
+ public class FileCommands
+ {
+ public static RoutedUICommand Exit { get; private set; }
+
+ static FileCommands()
+ {
+ Exit = new RoutedUICommand(
+ "E_xit",
+ "Exit",
+ typeof(FileCommands));
+ }
+ }
+}
diff --git a/tasty/MainWindow.xaml b/tasty/MainWindow.xaml
new file mode 100644
index 0000000..c86ef9d
--- /dev/null
+++ b/tasty/MainWindow.xaml
@@ -0,0 +1,23 @@
+<Window x:Class="tasty.MainWindow"
+ xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
+ xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
+ xmlns:src="clr-namespace:tasty.Commands"
+ Title="Tasty" Height="404" Width="715">
+
+ <Window.CommandBindings>
+ <CommandBinding Command="src:FileCommands.Exit" Executed="Exit_Executed" CanExecute="Exit_CanExecute"/>
+ </Window.CommandBindings>
+ <Grid>
+ <StatusBar Height="23" Name="statusBar" VerticalAlignment="Bottom" />
+ <Menu Height="22" Name="menu1" VerticalAlignment="Top">
+ <MenuItem Header="_File">
+ <MenuItem Command="src:FileCommands.Exit"/>
+ </MenuItem>
+ <MenuItem Header="_Edit">
+ <MenuItem Command="Cut" Header="Cu_t" />
+ <MenuItem Command="Copy" Header="_Copy" />
+ <MenuItem Command="Paste" Header="_Paste" IsEnabled="True" />
+ </MenuItem>
+ </Menu>
+ </Grid>
+</Window>
diff --git a/tasty/Window1.xaml.cs b/tasty/MainWindow.xaml.cs
similarity index 59%
rename from tasty/Window1.xaml.cs
rename to tasty/MainWindow.xaml.cs
index 55bb3ff..a9c2405 100644
--- a/tasty/Window1.xaml.cs
+++ b/tasty/MainWindow.xaml.cs
@@ -1,39 +1,54 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
using LinqToDelicious;
using System.Net;
using System.Diagnostics;
namespace tasty
{
/// <summary>
/// Interaction logic for Window1.xaml
/// </summary>
- public partial class Window1 : Window
+ public partial class MainWindow : Window
{
- public Window1()
+ public IEnumerable<Post> Posts { get; private set; }
+
+ public MainWindow()
{
InitializeComponent();
+
+ Posts = new List<Post>();
+ /*
var query = from post in new Delicious("username", "password").Posts
- where post.Date == new DateTime(2008, 1, 1)
+ where post.Date == new DateTime(2009, 1, 16)
select post;
foreach (var p in query)
{
Debug.WriteLine(p);
- }
+ }*/
+ }
+
+ private void Exit_Executed(object sender, ExecutedRoutedEventArgs e)
+ {
+ Close();
+ }
+
+ private void Exit_CanExecute(object sender, CanExecuteRoutedEventArgs e)
+ {
+ e.CanExecute = true;
}
}
}
diff --git a/tasty/Window1.xaml b/tasty/Window1.xaml
deleted file mode 100644
index 02f2172..0000000
--- a/tasty/Window1.xaml
+++ /dev/null
@@ -1,8 +0,0 @@
-<Window x:Class="tasty.Window1"
- xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
- xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
- Title="Window1" Height="300" Width="300">
- <Grid>
-
- </Grid>
-</Window>
diff --git a/tasty/tasty.csproj b/tasty/tasty.csproj
index 6273919..e849ffe 100644
--- a/tasty/tasty.csproj
+++ b/tasty/tasty.csproj
@@ -1,113 +1,114 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="3.5" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProductVersion>9.0.21022</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{69B0D2E5-8BC8-4747-9225-7F7450153B86}</ProjectGuid>
<OutputType>WinExe</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>tasty</RootNamespace>
<AssemblyName>tasty</AssemblyName>
<TargetFrameworkVersion>v3.5</TargetFrameworkVersion>
<FileAlignment>512</FileAlignment>
<ProjectTypeGuids>{60dc8134-eba5-43b8-bcc9-bb4bc16c2548};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}</ProjectTypeGuids>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<ItemGroup>
<Reference Include="System" />
<Reference Include="System.Core">
<RequiredTargetFramework>3.5</RequiredTargetFramework>
</Reference>
<Reference Include="System.Xml.Linq">
<RequiredTargetFramework>3.5</RequiredTargetFramework>
</Reference>
<Reference Include="System.Data.DataSetExtensions">
<RequiredTargetFramework>3.5</RequiredTargetFramework>
</Reference>
<Reference Include="System.Data" />
<Reference Include="System.Xml" />
<Reference Include="WindowsBase" />
<Reference Include="PresentationCore" />
<Reference Include="PresentationFramework" />
</ItemGroup>
<ItemGroup>
<ApplicationDefinition Include="App.xaml">
<Generator>MSBuild:Compile</Generator>
<SubType>Designer</SubType>
</ApplicationDefinition>
- <Page Include="Window1.xaml">
+ <Page Include="MainWindow.xaml">
<Generator>MSBuild:Compile</Generator>
<SubType>Designer</SubType>
</Page>
<Compile Include="App.xaml.cs">
<DependentUpon>App.xaml</DependentUpon>
<SubType>Code</SubType>
</Compile>
- <Compile Include="Window1.xaml.cs">
- <DependentUpon>Window1.xaml</DependentUpon>
+ <Compile Include="MainWindow.xaml.cs">
+ <DependentUpon>MainWindow.xaml</DependentUpon>
<SubType>Code</SubType>
</Compile>
</ItemGroup>
<ItemGroup>
+ <Compile Include="Commands\FileCommands.cs" />
<Compile Include="Properties\AssemblyInfo.cs">
<SubType>Code</SubType>
</Compile>
<Compile Include="Properties\Resources.Designer.cs">
<AutoGen>True</AutoGen>
<DesignTime>True</DesignTime>
<DependentUpon>Resources.resx</DependentUpon>
</Compile>
<Compile Include="Properties\Settings.Designer.cs">
<AutoGen>True</AutoGen>
<DependentUpon>Settings.settings</DependentUpon>
<DesignTimeSharedInput>True</DesignTimeSharedInput>
</Compile>
<EmbeddedResource Include="Properties\Resources.resx">
<Generator>ResXFileCodeGenerator</Generator>
<LastGenOutput>Resources.Designer.cs</LastGenOutput>
<SubType>Designer</SubType>
</EmbeddedResource>
<None Include="Properties\Settings.settings">
<Generator>SettingsSingleFileGenerator</Generator>
<LastGenOutput>Settings.Designer.cs</LastGenOutput>
</None>
<AppDesigner Include="Properties\" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\IQToolkit\IQToolkit.csproj">
<Project>{680B0226-FF3E-447B-BE5A-CB99698D4555}</Project>
<Name>IQToolkit</Name>
</ProjectReference>
<ProjectReference Include="..\LinqToDelicious\LinqToDelicious.csproj">
<Project>{C29DC32C-1DB9-4186-B707-E31E8C4C9209}</Project>
<Name>LinqToDelicious</Name>
</ProjectReference>
</ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>
\ No newline at end of file
|
maravillas/linq-to-delicious
|
1b2db33c36bed04dedebbe64c1b11929a582f001
|
Fix date format in URLs
|
diff --git a/LinqToDelicious/QueryTranslator.cs b/LinqToDelicious/QueryTranslator.cs
index d36cba2..43bf684 100644
--- a/LinqToDelicious/QueryTranslator.cs
+++ b/LinqToDelicious/QueryTranslator.cs
@@ -1,203 +1,203 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Linq.Expressions;
using System.Diagnostics;
using IQ;
namespace LinqToDelicious
{
class QueryTranslator : ExpressionVisitor, LinqToDelicious.IQueryTranslator
{
public Expression Expression { get; private set; }
private StringBuilder mBuilder;
private Stack<Object> mStack;
private const String TAG_TOKEN = "tags";
private const String DATE_TOKEN = "date";
public QueryTranslator(Expression expression)
{
Expression = expression;
mStack = new Stack<Object>();
}
public String Translate()
{
if (mBuilder == null)
{
mBuilder = new StringBuilder("http://www.example.com/delicious.xml?");
Visit(Expression);
}
return mBuilder.ToString();
}
private static Expression StripQuotes(Expression expression)
{
while (expression.NodeType == ExpressionType.Quote)
{
expression = ((UnaryExpression)expression).Operand;
}
return expression;
}
protected override Expression VisitMethodCall(MethodCallExpression methodCall)
{
Debug.WriteLine("Visiting method " + methodCall);
if (methodCall.Method.DeclaringType == typeof(Queryable) && methodCall.Method.Name == "Where")
{
//mBuilder.Append(string.Format("Where {0}, {1}", methodCall.Arguments[0], methodCall.Arguments[1]));
Debug.WriteLine("Type: " + ((ConstantExpression)methodCall.Arguments[0]).Value);
LambdaExpression lambda = (LambdaExpression)StripQuotes(methodCall.Arguments[1]);
Visit(lambda.Body);
return methodCall;
}
else if (methodCall.Method.Name == "Contains")
{
Visit(methodCall.Object);
String token = (String)mStack.Pop();
if (token.Equals(TAG_TOKEN) &&
methodCall.Method.DeclaringType == typeof(List<string>))
{
// Would it be reasonable to assume these conditions are true?
if (methodCall.Arguments.Count == 1 &&
methodCall.Arguments[0].NodeType == ExpressionType.Constant)
{
mBuilder.Append("&tag=");
Visit(methodCall.Arguments[0]);
mBuilder.Append(mStack.Pop());
}
else
{
throw new Exception("Missing or invalid argument to method Contains");
}
}
return methodCall;
}
// Where Query(LinqToDelicious.Post), post => (post.Date > new DateTime(2008, 1, 1))
throw new NotSupportedException(string.Format("The method '{0}' is not supported", methodCall.Method.Name));
}
protected override Expression VisitUnary(UnaryExpression u)
{
Debug.WriteLine("Visiting unary expression " + u);
return u;
}
protected override Expression VisitBinary(BinaryExpression binaryExpression)
{
Debug.WriteLine("Visiting binary expression " + binaryExpression);
if (binaryExpression.NodeType == ExpressionType.And)
{
Visit(binaryExpression.Left);
Visit(binaryExpression.Right);
}
else if (binaryExpression.NodeType == ExpressionType.Equal ||
binaryExpression.NodeType == ExpressionType.LessThan ||
binaryExpression.NodeType == ExpressionType.LessThanOrEqual ||
binaryExpression.NodeType == ExpressionType.GreaterThan ||
binaryExpression.NodeType == ExpressionType.GreaterThanOrEqual)
{
Visit(binaryExpression.Left);
Debug.Assert(mStack.Peek().GetType() == typeof(String), "Expected String on the stack, was " + mStack.Peek().GetType());
String token = (String)mStack.Pop();
if (token.Equals(DATE_TOKEN))
{
Visit(binaryExpression.Right);
Debug.Assert(mStack.Peek().GetType() == typeof(DateTime), "Expected DateTime on the stack, was " + mStack.Peek().GetType());
DateTime date = (DateTime)mStack.Pop();
switch (binaryExpression.NodeType)
{
case ExpressionType.Equal:
- mBuilder.Append(String.Format("&fromdt={0}&todt={0}", date));
+ mBuilder.Append(String.Format("&fromdt={0}Z&todt={1}Z", date.ToString("s"), date.AddDays(1).ToString("s")));
break;
case ExpressionType.LessThan:
- mBuilder.Append(String.Format("&todt={0}", date));
+ mBuilder.Append(String.Format("&todt={0}Z", date.ToString("s")));
break;
case ExpressionType.LessThanOrEqual:
date = date.AddDays(1);
- mBuilder.Append(String.Format("&todt={0}", date));
+ mBuilder.Append(String.Format("&todt={0}Z", date.ToString("s")));
break;
case ExpressionType.GreaterThan:
- mBuilder.Append(String.Format("&fromdt={0}", date));
+ mBuilder.Append(String.Format("&fromdt={0}Z", date.ToString("s")));
break;
case ExpressionType.GreaterThanOrEqual:
date = date.AddDays(-1);
- mBuilder.Append(String.Format("&fromdt={0}", date));
+ mBuilder.Append(String.Format("&fromdt={0}Z", date.ToString("s")));
break;
default:
throw new NotSupportedException(string.Format("The binary operator '{0}' is not supported for date comparisons", binaryExpression.NodeType));
}
}
}
else
{
throw new NotSupportedException(string.Format("The operator '{0}' is not supported", binaryExpression.NodeType));
}
return binaryExpression;
}
protected override Expression VisitConstant(ConstantExpression constant)
{
Debug.WriteLine("Visiting constant " + constant);
mStack.Push(constant.Value);
return constant;
}
protected override Expression VisitMemberAccess(MemberExpression member)
{
Debug.WriteLine("Visiting member " + member);
if (member.Expression != null &&
member.Expression.NodeType == ExpressionType.Parameter)
{
mStack.Push(member.Member.Name.ToLower());
return member;
}
throw new NotSupportedException(string.Format("The member '{0}' is not supported", member.Member.Name));
}
}
}
diff --git a/LinqToDeliciousTest/QueryTranslatorTest.cs b/LinqToDeliciousTest/QueryTranslatorTest.cs
index 86390e2..238e4f7 100644
--- a/LinqToDeliciousTest/QueryTranslatorTest.cs
+++ b/LinqToDeliciousTest/QueryTranslatorTest.cs
@@ -1,319 +1,319 @@
using LinqToDelicious;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System.Linq.Expressions;
using IQ;
using Rhino.Mocks;
using System.Linq;
using System;
using System.Diagnostics;
using System.Collections.Generic;
namespace LinqToDeliciousTest
{
/// <summary>
///This is a test class for QueryTranslatorTest and is intended
///to contain all QueryTranslatorTest Unit Tests
///</summary>
[TestClass()]
public class QueryTranslatorTest
{
private MockRepository mMocks = new MockRepository();
private ParameterExpression mParam = Expression.Parameter(typeof(Post), "post");
private TestContext testContextInstance;
/// <summary>
///Gets or sets the test context which provides
///information about and functionality for the current test run.
///</summary>
public TestContext TestContext
{
get
{
return testContextInstance;
}
set
{
testContextInstance = value;
}
}
#region Additional test attributes
//
//You can use the following additional attributes as you write your tests:
//
//Use ClassInitialize to run code before running the first test in the class
//[ClassInitialize()]
//public static void MyClassInitialize(TestContext testContext)
//{
//}
//
//Use ClassCleanup to run code after all tests in a class have run
//[ClassCleanup()]
//public static void MyClassCleanup()
//{
//}
//
//Use TestInitialize to run code before running each test
//[TestInitialize()]
//public void MyTestInitialize()
//{
//}
//
//Use TestCleanup to run code after each test has run
//[TestCleanup()]
//public void MyTestCleanup()
//{
//}
//
#endregion
/// <summary>
/// A test for date equality clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date = 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateEqual()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.Equal(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
- Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
- Assert.IsTrue(url.Contains("todt=1/1/2008 12:00:00 AM"));
+ Assert.IsTrue(url.Contains("fromdt=2008-01-01T00:00:00Z"));
+ Assert.IsTrue(url.Contains("todt=2008-01-02T00:00:00Z"));
}
/// <summary>
/// A test for date less than clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date < 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateLessThan()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.LessThan(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
- Assert.IsTrue(url.Contains("todt=1/1/2008 12:00:00 AM"));
+ Assert.IsTrue(url.Contains("todt=2008-01-01T00:00:00Z"));
Assert.IsFalse(url.Contains("fromdt="));
}
/// <summary>
/// A test for date less than/equal clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date <= 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateLessThanOrEqual()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.LessThanOrEqual(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
- Assert.IsTrue(url.Contains("todt=1/2/2008 12:00:00 AM"));
+ Assert.IsTrue(url.Contains("todt=2008-01-02T00:00:00Z"));
Assert.IsFalse(url.Contains("fromdt="));
}
/// <summary>
/// A test for date greater than clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date > 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateGreaterThan()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.GreaterThan(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
- Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
+ Assert.IsTrue(url.Contains("fromdt=2008-01-01T00:00:00Z"));
Assert.IsFalse(url.Contains("todt="));
}
/// <summary>
/// A test for date greater than/equal clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date >= 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateGreaterOrThanEqual()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.GreaterThanOrEqual(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
-
- Assert.IsTrue(url.Contains("fromdt=12/31/2007 12:00:00 AM"));
+
+ Assert.IsTrue(url.Contains("fromdt=2007-12-31T00:00:00Z"));
Assert.IsFalse(url.Contains("todt="));
}
/// <summary>
/// A test for tag clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example")))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereTags()
{
Expression tagClause = BuildTagClause(mParam, "example");
string url = TranslateQuery(mParam, tagClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("tag=example"));
}
/// <summary>
/// A test for date and tag clauses together.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example") && post.Date > 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereTagsAndDateGreaterThan()
{
Expression tagClause = BuildTagClause(mParam, "example");
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.GreaterThan(left, right));
Expression tagAndDateClauses = Expression.And(tagClause, dateClause);
string url = TranslateQuery(mParam, tagAndDateClauses);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("tag=example"));
- Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
+ Assert.IsTrue(url.Contains("fromdt=2008-01-01T00:00:00Z"));
Assert.IsFalse(url.Contains("todt="));
}
/// <summary>
/// A test for date and tag clauses together.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example") && post.Date = 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereTagsAndDateEquals()
{
Expression tagClause = BuildTagClause(mParam, "example");
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.Equal(left, right));
Expression tagAndDateClauses = Expression.And(tagClause, dateClause);
string url = TranslateQuery(mParam, tagAndDateClauses);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("tag=example"));
- Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
- Assert.IsTrue(url.Contains("todt=1/1/2008 12:00:00 AM"));
+ Assert.IsTrue(url.Contains("fromdt=2008-01-01T00:00:00Z"));
+ Assert.IsTrue(url.Contains("todt=2008-01-02T00:00:00Z"));
}
/// <summary>
/// A test for an NotSupportedException when trying to OR in the where clause.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example") || post.Date = 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
[ExpectedException(typeof(NotSupportedException))]
public void UnsupportedWhereClause()
{
Expression tagClause = BuildTagClause(mParam, "example");
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.Equal(left, right));
Expression tagAndDateClauses = Expression.Or(tagClause, dateClause);
TranslateQuery(mParam, tagAndDateClauses);
}
/// <summary>
/// A test for an NotSupportedException when trying to compare dates with !=.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date != 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
[ExpectedException(typeof(NotSupportedException))]
public void UnsupportedDateComparison()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.NotEqual(left, right));
TranslateQuery(mParam, dateClause);
}
private Expression BuildTagClause(ParameterExpression lambdaParameter, String tag)
{
// post.Tags
MemberExpression member = Expression.Property(lambdaParameter, "Tags");
// "example"
ConstantExpression tagExpression = Expression.Constant(tag);
// post.Tags.Contains("example")
MethodCallExpression containsCall = Expression.Call(
member,
typeof(List<String>).GetMethod("Contains", new Type[] { typeof(String) }),
tagExpression);
return containsCall;
}
private Expression BuildDateClause(ParameterExpression lambdaParameter, DateTime date, Func<Expression, Expression, Expression> comparator)
{
//IQueryable<Post> queryable = mocks.StrictMock<IQueryable<Post>>();
// ConstantExpression queryableExpression = Expression.Constant(queryable);
// SetupResult.For(queryable.Expression).Return(queryableExpression);
// Debug.WriteLine(queryable.Expression);
// post.Date
Expression left = Expression.Property(lambdaParameter, "Date");
// <evaluated comparedDate>
Expression right = Expression.Constant(date);
// post.Date == <evaluated comparedDate>
Expression comparison = comparator(left, right);
return comparison;
}
private string TranslateQuery(ParameterExpression lambdaParameter, Expression clauses)
{
// from post in queryable
// where post.Date == <evaluated comparedDate>
// i.e. queryable.Where(post => post.Date == <evaluated comparedDate>)
// TODO: This should probably be mocked instead of using a meaningless array.
// See the TODO below - it probably relates.
IQueryable<Post> queryable = new Post[] { }.AsQueryable<Post>();
// queryable.Where( ... )
MethodCallExpression whereCall = Expression.Call(
typeof(Queryable),
"Where",
new Type[] { typeof(Post) },
queryable.Expression,
Expression.Lambda<Func<Post, bool>>(clauses, new ParameterExpression[] { lambdaParameter }));
// TODO: Double check that queryable.Expression gives us what we want above
// Debug output says it results in a Post[], but I thought it should be an IQueryable<Post>
// This argument isn't used in the translator at the moment.
QueryTranslator translator = new QueryTranslator(whereCall);
return translator.Translate();
}
}
}
|
maravillas/linq-to-delicious
|
a450c91ee00754498115ac8018b55bdc1ab965ac
|
Modify request factory to set credentials and User-Agent
|
diff --git a/LinqToDelicious/Delicious.cs b/LinqToDelicious/Delicious.cs
index a787ab7..2ca997f 100644
--- a/LinqToDelicious/Delicious.cs
+++ b/LinqToDelicious/Delicious.cs
@@ -1,26 +1,26 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Text;
using System.Net;
using IQ;
namespace LinqToDelicious
{
public class Delicious
{
public Query<Post> Posts { get; private set; }
- public Delicious()
+ public Delicious(string username, string password)
{
Delayer delayer = new Delayer(1000);
QueryTranslatorFactory translatorFactory = new QueryTranslatorFactory();
- IHttpWebRequestFactory requestFactory = new HttpWebRequestFactory();
+ IHttpWebRequestFactory requestFactory = new HttpWebRequestFactory(username, password);
QueryProvider provider = new DeliciousQueryProvider(requestFactory, delayer, translatorFactory);
Posts = new Query<Post>(provider);
}
}
}
diff --git a/LinqToDelicious/HttpWebRequestFactory.cs b/LinqToDelicious/HttpWebRequestFactory.cs
index 84f6223..805ec6f 100644
--- a/LinqToDelicious/HttpWebRequestFactory.cs
+++ b/LinqToDelicious/HttpWebRequestFactory.cs
@@ -1,24 +1,40 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Net;
+using System.Reflection;
namespace LinqToDelicious
{
/// <summary>
/// Creates HttpWebRequest objects from URIs.
/// </summary>
class HttpWebRequestFactory : IHttpWebRequestFactory
{
+ private string mUsername;
+ private string mPassword;
+
+ public HttpWebRequestFactory(string username, string password)
+ {
+ mUsername = username;
+ mPassword = password;
+ }
+
/// <summary>
/// Create a new HttpWebRequest.
/// </summary>
/// <param name="uri">The URI To request.</param>
/// <returns>The request.</returns>
public HttpWebRequest Create(string uri)
{
- return (HttpWebRequest)WebRequest.Create(uri);
+ HttpWebRequest request = (HttpWebRequest)WebRequest.Create(uri);
+
+ AssemblyName name = System.Reflection.Assembly.GetExecutingAssembly().GetName();
+ request.UserAgent = name.Name + " " + name.Version.ToString();
+ request.Credentials = new NetworkCredential(mUsername, mPassword);
+
+ return request;
}
}
}
diff --git a/LinqToDeliciousTest/HttpWebRequestFactoryTest.cs b/LinqToDeliciousTest/HttpWebRequestFactoryTest.cs
new file mode 100644
index 0000000..41e65f3
--- /dev/null
+++ b/LinqToDeliciousTest/HttpWebRequestFactoryTest.cs
@@ -0,0 +1,83 @@
+using System;
+using System.Text;
+using System.Collections.Generic;
+using System.Linq;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using LinqToDelicious;
+using System.Net;
+
+namespace LinqToDeliciousTest
+{
+ /// <summary>
+ /// Summary description for HttpWebRequestFactoryTest
+ /// </summary>
+ [TestClass]
+ public class HttpWebRequestFactoryTest
+ {
+ public HttpWebRequestFactoryTest()
+ {
+ //
+ // TODO: Add constructor logic here
+ //
+ }
+
+ private TestContext testContextInstance;
+
+ /// <summary>
+ ///Gets or sets the test context which provides
+ ///information about and functionality for the current test run.
+ ///</summary>
+ public TestContext TestContext
+ {
+ get
+ {
+ return testContextInstance;
+ }
+ set
+ {
+ testContextInstance = value;
+ }
+ }
+
+ #region Additional test attributes
+ //
+ // You can use the following additional attributes as you write your tests:
+ //
+ // Use ClassInitialize to run code before running the first test in the class
+ // [ClassInitialize()]
+ // public static void MyClassInitialize(TestContext testContext) { }
+ //
+ // Use ClassCleanup to run code after all tests in a class have run
+ // [ClassCleanup()]
+ // public static void MyClassCleanup() { }
+ //
+ // Use TestInitialize to run code before running each test
+ // [TestInitialize()]
+ // public void MyTestInitialize() { }
+ //
+ // Use TestCleanup to run code after each test has run
+ // [TestCleanup()]
+ // public void MyTestCleanup() { }
+ //
+ #endregion
+
+ [TestMethod]
+ public void Create()
+ {
+ string username = "user";
+ string password = "password";
+ string uri = "http://www.example.com";
+
+ HttpWebRequestFactory factory = new HttpWebRequestFactory(username, password);
+
+ HttpWebRequest request = factory.Create(uri);
+
+ NetworkCredential credentials = request.Credentials.GetCredential(new Uri(uri), "Basic");
+
+ Assert.AreEqual(credentials.UserName, username);
+ Assert.AreEqual(credentials.Password, password);
+
+ Assert.IsTrue(request.UserAgent.Contains("LinqToDelicious"));
+ }
+ }
+}
diff --git a/LinqToDeliciousTest/LinqToDeliciousTest.csproj b/LinqToDeliciousTest/LinqToDeliciousTest.csproj
index 916a35c..f7aa16e 100644
--- a/LinqToDeliciousTest/LinqToDeliciousTest.csproj
+++ b/LinqToDeliciousTest/LinqToDeliciousTest.csproj
@@ -1,77 +1,78 @@
<Project ToolsVersion="3.5" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProductVersion>9.0.21022</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{600738E5-AEAD-4C7C-BD53-F91A82E4C0FE}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>LinqToDeliciousTest</RootNamespace>
<AssemblyName>LinqToDeliciousTest</AssemblyName>
<TargetFrameworkVersion>v3.5</TargetFrameworkVersion>
<FileAlignment>512</FileAlignment>
<ProjectTypeGuids>{3AC096D0-A1C2-E12C-1390-A8335801FDAB};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}</ProjectTypeGuids>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<ItemGroup>
<Reference Include="Microsoft.VisualStudio.QualityTools.UnitTestFramework, Version=9.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a, processorArchitecture=MSIL" />
<Reference Include="Rhino.Mocks, Version=3.5.0.1337, Culture=neutral, PublicKeyToken=0b3305902db7183f, processorArchitecture=MSIL">
<SpecificVersion>False</SpecificVersion>
<HintPath>..\Rhino.Mocks.dll</HintPath>
</Reference>
<Reference Include="System" />
<Reference Include="System.Core">
<RequiredTargetFramework>3.5</RequiredTargetFramework>
</Reference>
<Reference Include="System.Data" />
<Reference Include="System.Data.DataSetExtensions">
<RequiredTargetFramework>3.5</RequiredTargetFramework>
</Reference>
<Reference Include="System.Xml" />
<Reference Include="System.Xml.Linq">
<RequiredTargetFramework>3.5</RequiredTargetFramework>
</Reference>
</ItemGroup>
<ItemGroup>
<Compile Include="DelayerTest.cs" />
<Compile Include="DeliciousQueryProviderTest.cs" />
+ <Compile Include="HttpWebRequestFactoryTest.cs" />
<Compile Include="PostTest.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="QueryTranslatorTest.cs" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\IQToolkit\IQToolkit.csproj">
<Project>{680B0226-FF3E-447B-BE5A-CB99698D4555}</Project>
<Name>IQToolkit</Name>
</ProjectReference>
<ProjectReference Include="..\LinqToDelicious\LinqToDelicious.csproj">
<Project>{C29DC32C-1DB9-4186-B707-E31E8C4C9209}</Project>
<Name>LinqToDelicious</Name>
</ProjectReference>
</ItemGroup>
<Import Project="$(MSBuildBinPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>
\ No newline at end of file
diff --git a/tasty.sln b/tasty.sln
index eda5968..c9191bc 100644
--- a/tasty.sln
+++ b/tasty.sln
@@ -1,51 +1,53 @@

Microsoft Visual Studio Solution File, Format Version 10.00
# Visual Studio 2008
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "tasty", "tasty\tasty.csproj", "{69B0D2E5-8BC8-4747-9225-7F7450153B86}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{A3A51F38-2E9A-4573-BC1D-C76771C569E9}"
ProjectSection(SolutionItems) = preProject
LocalTestRun.testrunconfig = LocalTestRun.testrunconfig
tasty.vsmdi = tasty.vsmdi
+ tasty1.vsmdi = tasty1.vsmdi
+ tasty2.vsmdi = tasty2.vsmdi
EndProjectSection
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LinqToDelicious", "LinqToDelicious\LinqToDelicious.csproj", "{C29DC32C-1DB9-4186-B707-E31E8C4C9209}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LinqToDeliciousTest", "LinqToDeliciousTest\LinqToDeliciousTest.csproj", "{600738E5-AEAD-4C7C-BD53-F91A82E4C0FE}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "IQToolkit", "IQToolkit\IQToolkit.csproj", "{680B0226-FF3E-447B-BE5A-CB99698D4555}"
EndProject
Global
GlobalSection(SubversionScc) = preSolution
Svn-Managed = True
Manager = AnkhSVN - Subversion Support for Visual Studio
EndGlobalSection
GlobalSection(TestCaseManagementSettings) = postSolution
- CategoryFile = tasty.vsmdi
+ CategoryFile = tasty2.vsmdi
EndGlobalSection
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{69B0D2E5-8BC8-4747-9225-7F7450153B86}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{69B0D2E5-8BC8-4747-9225-7F7450153B86}.Debug|Any CPU.Build.0 = Debug|Any CPU
{69B0D2E5-8BC8-4747-9225-7F7450153B86}.Release|Any CPU.ActiveCfg = Release|Any CPU
{69B0D2E5-8BC8-4747-9225-7F7450153B86}.Release|Any CPU.Build.0 = Release|Any CPU
{C29DC32C-1DB9-4186-B707-E31E8C4C9209}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C29DC32C-1DB9-4186-B707-E31E8C4C9209}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C29DC32C-1DB9-4186-B707-E31E8C4C9209}.Release|Any CPU.ActiveCfg = Release|Any CPU
{C29DC32C-1DB9-4186-B707-E31E8C4C9209}.Release|Any CPU.Build.0 = Release|Any CPU
{600738E5-AEAD-4C7C-BD53-F91A82E4C0FE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{600738E5-AEAD-4C7C-BD53-F91A82E4C0FE}.Debug|Any CPU.Build.0 = Debug|Any CPU
{600738E5-AEAD-4C7C-BD53-F91A82E4C0FE}.Release|Any CPU.ActiveCfg = Release|Any CPU
{600738E5-AEAD-4C7C-BD53-F91A82E4C0FE}.Release|Any CPU.Build.0 = Release|Any CPU
{680B0226-FF3E-447B-BE5A-CB99698D4555}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{680B0226-FF3E-447B-BE5A-CB99698D4555}.Debug|Any CPU.Build.0 = Debug|Any CPU
{680B0226-FF3E-447B-BE5A-CB99698D4555}.Release|Any CPU.ActiveCfg = Release|Any CPU
{680B0226-FF3E-447B-BE5A-CB99698D4555}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
EndGlobal
diff --git a/tasty/Window1.xaml.cs b/tasty/Window1.xaml.cs
index a85dea9..55bb3ff 100644
--- a/tasty/Window1.xaml.cs
+++ b/tasty/Window1.xaml.cs
@@ -1,39 +1,39 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
using LinqToDelicious;
using System.Net;
using System.Diagnostics;
namespace tasty
{
/// <summary>
/// Interaction logic for Window1.xaml
/// </summary>
public partial class Window1 : Window
{
public Window1()
{
InitializeComponent();
- var query = from post in new Delicious().Posts
+ var query = from post in new Delicious("username", "password").Posts
where post.Date == new DateTime(2008, 1, 1)
select post;
foreach (var p in query)
{
Debug.WriteLine(p);
}
}
}
}
|
maravillas/linq-to-delicious
|
baef3833fd6aed41b9907221c1fe90ca224d8376
|
Change assembly version numbering
|
diff --git a/LinqToDelicious/Properties/AssemblyInfo.cs b/LinqToDelicious/Properties/AssemblyInfo.cs
index 0afce6f..3af7df5 100644
--- a/LinqToDelicious/Properties/AssemblyInfo.cs
+++ b/LinqToDelicious/Properties/AssemblyInfo.cs
@@ -1,41 +1,40 @@
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("LinqToDelicious")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("LinqToDelicious")]
-[assembly: AssemblyCopyright("Copyright © 2008")]
+[assembly: AssemblyCopyright("Copyright © 2009")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("85ae1aae-5c2e-47eb-b9b0-020ab41f14bc")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
-[assembly: AssemblyVersion("1.0.0.0")]
-[assembly: AssemblyFileVersion("1.0.0.0")]
+[assembly: AssemblyVersion("0.1.*")]
[assembly: System.Runtime.CompilerServices.InternalsVisibleTo("LinqToDeliciousTest")]
// For Rhino.Mock
[assembly: InternalsVisibleTo("DynamicProxyGenAssembly2")]
\ No newline at end of file
diff --git a/LinqToDeliciousTest/Properties/AssemblyInfo.cs b/LinqToDeliciousTest/Properties/AssemblyInfo.cs
index b4bed46..ecc3f09 100644
--- a/LinqToDeliciousTest/Properties/AssemblyInfo.cs
+++ b/LinqToDeliciousTest/Properties/AssemblyInfo.cs
@@ -1,35 +1,34 @@
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("LinqToDeliciousTest")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("LinqToDeliciousTest")]
[assembly: AssemblyCopyright("Copyright © 2008")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM componenets. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("000519b5-6051-4df3-9447-d80646e9d461")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Revision and Build Numbers
// by using the '*' as shown below:
-[assembly: AssemblyVersion("1.0.0.0")]
-[assembly: AssemblyFileVersion("1.0.0.0")]
+[assembly: AssemblyVersion("0.1.*")]
|
maravillas/linq-to-delicious
|
65e912416f28feba588986718cf256c017b853bf
|
Reduce tested delay from 1000 msec to 500 msec
|
diff --git a/LinqToDeliciousTest/DelayerTest.cs b/LinqToDeliciousTest/DelayerTest.cs
index 88db417..8bc7214 100644
--- a/LinqToDeliciousTest/DelayerTest.cs
+++ b/LinqToDeliciousTest/DelayerTest.cs
@@ -1,191 +1,191 @@
using System;
using System.Text;
using System.Collections.Generic;
using System.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System.Threading;
using LinqToDelicious;
namespace LinqToDeliciousTest
{
/// <summary>
/// Summary description for DelayerTest
/// </summary>
[TestClass]
public class DelayerTest
{
// Acceptable error in measured call times
private const double TIME_DIFFERENCE = 0.1;
// Acceptable error in measured call times for "immediate" results
private const double IMMEDIATE_TIME_DIFFERENCE = 0.01;
public DelayerTest()
{
}
[TestMethod]
- public void Immediate1000MillisecondDelay()
+ public void Immediate500MillisecondDelay()
{
- TestImmediateDelay(new Delayer(1000), 1000);
+ TestImmediateDelay(new Delayer(500), 500);
}
[TestMethod]
public void Immediate100MillisecondDelay()
{
TestImmediateDelay(new Delayer(100), 100);
}
[TestMethod]
- public void Subsequent1000MillisecondDelay()
+ public void Subsequent500MillisecondDelay()
{
- TestSubsequentDelay(new Delayer(1000), 1000);
+ TestSubsequentDelay(new Delayer(500), 500);
}
[TestMethod]
public void Subsequent100MillisecondDelay()
{
TestSubsequentDelay(new Delayer(100), 100);
}
[TestMethod]
public void AdditionalImmediateDelay()
{
Delayer delayer = new Delayer(100);
delayer.AdditionalDelay = 100;
TestImmediateDelay(delayer, 200);
}
[TestMethod]
public void AdditionalSubsequentDelay()
{
Delayer delayer = new Delayer(100);
delayer.AdditionalDelay = 100;
TestSubsequentDelay(delayer, 100);
}
private void TestImmediateDelay(Delayer delayer, int delay)
{
ManualResetEvent manualEvent = new ManualResetEvent(false);
bool called = false;
DateTime timeCalled = DateTime.MaxValue;
Object obj = new Object();
Callback callback = delegate()
{
called = true;
timeCalled = DateTime.Now;
manualEvent.Set();
return obj;
};
DateTime timeStarted = DateTime.Now;
Object result = delayer.Delay(callback);
manualEvent.WaitOne(delay * 2, false);
int timeElapsed = (int)(timeCalled - timeStarted).TotalMilliseconds;
Assert.IsTrue(called);
Assert.AreSame(obj, result);
if (timeElapsed > delay * IMMEDIATE_TIME_DIFFERENCE)
{
Assert.Inconclusive("Test failed, or delay was too small. Time elapsed was " + timeElapsed + " msec; expected <= " + (delay * IMMEDIATE_TIME_DIFFERENCE));
}
}
private void TestSubsequentDelay(Delayer delayer, int delay)
{
ManualResetEvent manualEvent = new ManualResetEvent(false);
bool called = false;
DateTime timeCalled = DateTime.MaxValue;
Object obj = new Object();
Callback nop = delegate()
{
return null;
};
Callback callback = delegate()
{
called = true;
timeCalled = DateTime.Now;
manualEvent.Set();
return obj;
};
// This is the difference between this method and TestImmediateDelay.
// This method ignores the first call to Delay and tests the delay of the
// second call to Delay.
delayer.Delay(nop);
DateTime timeStarted = DateTime.Now;
Object result = delayer.Delay(callback);
manualEvent.WaitOne(delay * 2, false);
int timeElapsed = (int)(timeCalled - timeStarted).TotalMilliseconds;
double elapsedError = delay * TIME_DIFFERENCE;
Assert.IsTrue(called);
Assert.AreSame(obj, result);
Assert.IsTrue(timeElapsed >= delay - elapsedError,
"Delay was not at least (" + delay + " - " + elapsedError + ") msec; total time elapsed was " + timeElapsed);
if (timeElapsed > delay + elapsedError)
{
Assert.Inconclusive("Test failed, or specified delay was too small to be supported. Time elapsed was " + timeElapsed + " msec; expected <= " + elapsedError);
}
}
private TestContext testContextInstance;
/// <summary>
///Gets or sets the test context which provides
///information about and functionality for the current test run.
///</summary>
public TestContext TestContext
{
get
{
return testContextInstance;
}
set
{
testContextInstance = value;
}
}
#region Additional test attributes
//
// You can use the following additional attributes as you write your tests:
//
// Use ClassInitialize to run code before running the first test in the class
// [ClassInitialize()]
// public static void MyClassInitialize(TestContext testContext) { }
//
// Use ClassCleanup to run code after all tests in a class have run
// [ClassCleanup()]
// public static void MyClassCleanup() { }
//
// Use TestInitialize to run code before running each test
// [TestInitialize()]
// public void MyTestInitialize() { }
//
// Use TestCleanup to run code after each test has run
// [TestCleanup()]
// public void MyTestCleanup() { }
//
#endregion
}
}
|
maravillas/linq-to-delicious
|
dd1398448b37373842ce72d2ff6b33b105ec5e13
|
Clarify naming of Delayer tests
|
diff --git a/LinqToDeliciousTest/DelayerTest.cs b/LinqToDeliciousTest/DelayerTest.cs
index c842052..88db417 100644
--- a/LinqToDeliciousTest/DelayerTest.cs
+++ b/LinqToDeliciousTest/DelayerTest.cs
@@ -1,188 +1,191 @@
using System;
using System.Text;
using System.Collections.Generic;
using System.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System.Threading;
using LinqToDelicious;
namespace LinqToDeliciousTest
{
/// <summary>
/// Summary description for DelayerTest
/// </summary>
[TestClass]
public class DelayerTest
{
// Acceptable error in measured call times
private const double TIME_DIFFERENCE = 0.1;
// Acceptable error in measured call times for "immediate" results
private const double IMMEDIATE_TIME_DIFFERENCE = 0.01;
public DelayerTest()
{
}
[TestMethod]
- public void DelayImmediate1000Milliseconds()
+ public void Immediate1000MillisecondDelay()
{
TestImmediateDelay(new Delayer(1000), 1000);
}
[TestMethod]
- public void DelayImmediate100Milliseconds()
+ public void Immediate100MillisecondDelay()
{
TestImmediateDelay(new Delayer(100), 100);
}
[TestMethod]
- public void Delay1000Milliseconds()
+ public void Subsequent1000MillisecondDelay()
{
- TestDelay(new Delayer(1000), 1000);
+ TestSubsequentDelay(new Delayer(1000), 1000);
}
[TestMethod]
- public void Delay100Milliseconds()
+ public void Subsequent100MillisecondDelay()
{
- TestDelay(new Delayer(100), 100);
+ TestSubsequentDelay(new Delayer(100), 100);
}
[TestMethod]
public void AdditionalImmediateDelay()
{
Delayer delayer = new Delayer(100);
delayer.AdditionalDelay = 100;
TestImmediateDelay(delayer, 200);
}
[TestMethod]
- public void AdditionalRepeatedDelay()
+ public void AdditionalSubsequentDelay()
{
Delayer delayer = new Delayer(100);
delayer.AdditionalDelay = 100;
- TestDelay(delayer, 100);
+ TestSubsequentDelay(delayer, 100);
}
private void TestImmediateDelay(Delayer delayer, int delay)
{
ManualResetEvent manualEvent = new ManualResetEvent(false);
bool called = false;
DateTime timeCalled = DateTime.MaxValue;
Object obj = new Object();
Callback callback = delegate()
{
called = true;
timeCalled = DateTime.Now;
manualEvent.Set();
return obj;
};
DateTime timeStarted = DateTime.Now;
Object result = delayer.Delay(callback);
manualEvent.WaitOne(delay * 2, false);
int timeElapsed = (int)(timeCalled - timeStarted).TotalMilliseconds;
Assert.IsTrue(called);
Assert.AreSame(obj, result);
if (timeElapsed > delay * IMMEDIATE_TIME_DIFFERENCE)
{
Assert.Inconclusive("Test failed, or delay was too small. Time elapsed was " + timeElapsed + " msec; expected <= " + (delay * IMMEDIATE_TIME_DIFFERENCE));
}
}
- private void TestDelay(Delayer delayer, int delay)
+ private void TestSubsequentDelay(Delayer delayer, int delay)
{
ManualResetEvent manualEvent = new ManualResetEvent(false);
bool called = false;
DateTime timeCalled = DateTime.MaxValue;
Object obj = new Object();
Callback nop = delegate()
{
return null;
};
Callback callback = delegate()
{
called = true;
timeCalled = DateTime.Now;
manualEvent.Set();
return obj;
};
+ // This is the difference between this method and TestImmediateDelay.
+ // This method ignores the first call to Delay and tests the delay of the
+ // second call to Delay.
delayer.Delay(nop);
DateTime timeStarted = DateTime.Now;
Object result = delayer.Delay(callback);
manualEvent.WaitOne(delay * 2, false);
int timeElapsed = (int)(timeCalled - timeStarted).TotalMilliseconds;
double elapsedError = delay * TIME_DIFFERENCE;
Assert.IsTrue(called);
Assert.AreSame(obj, result);
Assert.IsTrue(timeElapsed >= delay - elapsedError,
"Delay was not at least (" + delay + " - " + elapsedError + ") msec; total time elapsed was " + timeElapsed);
if (timeElapsed > delay + elapsedError)
{
Assert.Inconclusive("Test failed, or specified delay was too small to be supported. Time elapsed was " + timeElapsed + " msec; expected <= " + elapsedError);
}
}
private TestContext testContextInstance;
/// <summary>
///Gets or sets the test context which provides
///information about and functionality for the current test run.
///</summary>
public TestContext TestContext
{
get
{
return testContextInstance;
}
set
{
testContextInstance = value;
}
}
#region Additional test attributes
//
// You can use the following additional attributes as you write your tests:
//
// Use ClassInitialize to run code before running the first test in the class
// [ClassInitialize()]
// public static void MyClassInitialize(TestContext testContext) { }
//
// Use ClassCleanup to run code after all tests in a class have run
// [ClassCleanup()]
// public static void MyClassCleanup() { }
//
// Use TestInitialize to run code before running each test
// [TestInitialize()]
// public void MyTestInitialize() { }
//
// Use TestCleanup to run code after each test has run
// [TestCleanup()]
// public void MyTestCleanup() { }
//
#endregion
}
}
|
maravillas/linq-to-delicious
|
d16bc517c4694650681b71b3be646e75591746e7
|
Fix uppercase in a comment
|
diff --git a/LinqToDeliciousTest/QueryTranslatorTest.cs b/LinqToDeliciousTest/QueryTranslatorTest.cs
index ea4f06f..86390e2 100644
--- a/LinqToDeliciousTest/QueryTranslatorTest.cs
+++ b/LinqToDeliciousTest/QueryTranslatorTest.cs
@@ -1,319 +1,319 @@
using LinqToDelicious;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System.Linq.Expressions;
using IQ;
using Rhino.Mocks;
using System.Linq;
using System;
using System.Diagnostics;
using System.Collections.Generic;
namespace LinqToDeliciousTest
{
/// <summary>
///This is a test class for QueryTranslatorTest and is intended
///to contain all QueryTranslatorTest Unit Tests
///</summary>
[TestClass()]
public class QueryTranslatorTest
{
private MockRepository mMocks = new MockRepository();
private ParameterExpression mParam = Expression.Parameter(typeof(Post), "post");
private TestContext testContextInstance;
/// <summary>
///Gets or sets the test context which provides
///information about and functionality for the current test run.
///</summary>
public TestContext TestContext
{
get
{
return testContextInstance;
}
set
{
testContextInstance = value;
}
}
#region Additional test attributes
//
//You can use the following additional attributes as you write your tests:
//
//Use ClassInitialize to run code before running the first test in the class
//[ClassInitialize()]
//public static void MyClassInitialize(TestContext testContext)
//{
//}
//
//Use ClassCleanup to run code after all tests in a class have run
//[ClassCleanup()]
//public static void MyClassCleanup()
//{
//}
//
//Use TestInitialize to run code before running each test
//[TestInitialize()]
//public void MyTestInitialize()
//{
//}
//
//Use TestCleanup to run code after each test has run
//[TestCleanup()]
//public void MyTestCleanup()
//{
//}
//
#endregion
/// <summary>
/// A test for date equality clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date = 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateEqual()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.Equal(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
Assert.IsTrue(url.Contains("todt=1/1/2008 12:00:00 AM"));
}
/// <summary>
/// A test for date less than clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date < 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateLessThan()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.LessThan(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("todt=1/1/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("fromdt="));
}
/// <summary>
/// A test for date less than/equal clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date <= 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateLessThanOrEqual()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.LessThanOrEqual(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("todt=1/2/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("fromdt="));
}
/// <summary>
/// A test for date greater than clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date > 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateGreaterThan()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.GreaterThan(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("todt="));
}
/// <summary>
/// A test for date greater than/equal clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date >= 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateGreaterOrThanEqual()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.GreaterThanOrEqual(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("fromdt=12/31/2007 12:00:00 AM"));
Assert.IsFalse(url.Contains("todt="));
}
/// <summary>
/// A test for tag clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example")))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereTags()
{
Expression tagClause = BuildTagClause(mParam, "example");
string url = TranslateQuery(mParam, tagClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("tag=example"));
}
/// <summary>
/// A test for date and tag clauses together.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example") && post.Date > 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereTagsAndDateGreaterThan()
{
Expression tagClause = BuildTagClause(mParam, "example");
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.GreaterThan(left, right));
Expression tagAndDateClauses = Expression.And(tagClause, dateClause);
string url = TranslateQuery(mParam, tagAndDateClauses);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("tag=example"));
Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("todt="));
}
/// <summary>
/// A test for date and tag clauses together.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example") && post.Date = 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereTagsAndDateEquals()
{
Expression tagClause = BuildTagClause(mParam, "example");
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.Equal(left, right));
Expression tagAndDateClauses = Expression.And(tagClause, dateClause);
string url = TranslateQuery(mParam, tagAndDateClauses);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("tag=example"));
Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
Assert.IsTrue(url.Contains("todt=1/1/2008 12:00:00 AM"));
}
/// <summary>
/// A test for an NotSupportedException when trying to OR in the where clause.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example") || post.Date = 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
[ExpectedException(typeof(NotSupportedException))]
public void UnsupportedWhereClause()
{
Expression tagClause = BuildTagClause(mParam, "example");
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.Equal(left, right));
Expression tagAndDateClauses = Expression.Or(tagClause, dateClause);
TranslateQuery(mParam, tagAndDateClauses);
}
/// <summary>
/// A test for an NotSupportedException when trying to compare dates with !=.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date != 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
[ExpectedException(typeof(NotSupportedException))]
public void UnsupportedDateComparison()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.NotEqual(left, right));
TranslateQuery(mParam, dateClause);
}
private Expression BuildTagClause(ParameterExpression lambdaParameter, String tag)
{
// post.Tags
MemberExpression member = Expression.Property(lambdaParameter, "Tags");
// "example"
ConstantExpression tagExpression = Expression.Constant(tag);
- // post.Tags.Contains("Example")
+ // post.Tags.Contains("example")
MethodCallExpression containsCall = Expression.Call(
member,
typeof(List<String>).GetMethod("Contains", new Type[] { typeof(String) }),
tagExpression);
return containsCall;
}
private Expression BuildDateClause(ParameterExpression lambdaParameter, DateTime date, Func<Expression, Expression, Expression> comparator)
{
//IQueryable<Post> queryable = mocks.StrictMock<IQueryable<Post>>();
// ConstantExpression queryableExpression = Expression.Constant(queryable);
// SetupResult.For(queryable.Expression).Return(queryableExpression);
// Debug.WriteLine(queryable.Expression);
// post.Date
Expression left = Expression.Property(lambdaParameter, "Date");
// <evaluated comparedDate>
Expression right = Expression.Constant(date);
// post.Date == <evaluated comparedDate>
Expression comparison = comparator(left, right);
return comparison;
}
private string TranslateQuery(ParameterExpression lambdaParameter, Expression clauses)
{
// from post in queryable
// where post.Date == <evaluated comparedDate>
// i.e. queryable.Where(post => post.Date == <evaluated comparedDate>)
// TODO: This should probably be mocked instead of using a meaningless array.
// See the TODO below - it probably relates.
IQueryable<Post> queryable = new Post[] { }.AsQueryable<Post>();
// queryable.Where( ... )
MethodCallExpression whereCall = Expression.Call(
typeof(Queryable),
"Where",
new Type[] { typeof(Post) },
queryable.Expression,
Expression.Lambda<Func<Post, bool>>(clauses, new ParameterExpression[] { lambdaParameter }));
// TODO: Double check that queryable.Expression gives us what we want above
// Debug output says it results in a Post[], but I thought it should be an IQueryable<Post>
// This argument isn't used in the translator at the moment.
QueryTranslator translator = new QueryTranslator(whereCall);
return translator.Translate();
}
}
}
|
maravillas/linq-to-delicious
|
e1f88181719cb3ca50615f51ae7656bb2d5a4ea0
|
Add tests for additional delay
|
diff --git a/LinqToDeliciousTest/DelayerTest.cs b/LinqToDeliciousTest/DelayerTest.cs
index 4dad614..c842052 100644
--- a/LinqToDeliciousTest/DelayerTest.cs
+++ b/LinqToDeliciousTest/DelayerTest.cs
@@ -1,174 +1,188 @@
using System;
using System.Text;
using System.Collections.Generic;
using System.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System.Threading;
using LinqToDelicious;
namespace LinqToDeliciousTest
{
/// <summary>
/// Summary description for DelayerTest
/// </summary>
[TestClass]
public class DelayerTest
{
// Acceptable error in measured call times
private const double TIME_DIFFERENCE = 0.1;
// Acceptable error in measured call times for "immediate" results
private const double IMMEDIATE_TIME_DIFFERENCE = 0.01;
public DelayerTest()
{
}
[TestMethod]
public void DelayImmediate1000Milliseconds()
{
- TestImmediateDelay(1000);
+ TestImmediateDelay(new Delayer(1000), 1000);
}
[TestMethod]
public void DelayImmediate100Milliseconds()
{
- TestImmediateDelay(100);
+ TestImmediateDelay(new Delayer(100), 100);
}
[TestMethod]
public void Delay1000Milliseconds()
{
- TestDelay(1000);
+ TestDelay(new Delayer(1000), 1000);
}
[TestMethod]
public void Delay100Milliseconds()
{
- TestDelay(100);
+ TestDelay(new Delayer(100), 100);
}
- private void TestImmediateDelay(int delay)
+ [TestMethod]
+ public void AdditionalImmediateDelay()
+ {
+ Delayer delayer = new Delayer(100);
+ delayer.AdditionalDelay = 100;
+
+ TestImmediateDelay(delayer, 200);
+ }
+
+ [TestMethod]
+ public void AdditionalRepeatedDelay()
+ {
+ Delayer delayer = new Delayer(100);
+ delayer.AdditionalDelay = 100;
+
+ TestDelay(delayer, 100);
+ }
+
+ private void TestImmediateDelay(Delayer delayer, int delay)
{
ManualResetEvent manualEvent = new ManualResetEvent(false);
bool called = false;
DateTime timeCalled = DateTime.MaxValue;
Object obj = new Object();
- Delayer delayer = new Delayer(delay);
-
Callback callback = delegate()
{
called = true;
timeCalled = DateTime.Now;
manualEvent.Set();
return obj;
};
DateTime timeStarted = DateTime.Now;
Object result = delayer.Delay(callback);
manualEvent.WaitOne(delay * 2, false);
int timeElapsed = (int)(timeCalled - timeStarted).TotalMilliseconds;
Assert.IsTrue(called);
Assert.AreSame(obj, result);
if (timeElapsed > delay * IMMEDIATE_TIME_DIFFERENCE)
{
Assert.Inconclusive("Test failed, or delay was too small. Time elapsed was " + timeElapsed + " msec; expected <= " + (delay * IMMEDIATE_TIME_DIFFERENCE));
}
}
- private void TestDelay(int delay)
+ private void TestDelay(Delayer delayer, int delay)
{
ManualResetEvent manualEvent = new ManualResetEvent(false);
bool called = false;
DateTime timeCalled = DateTime.MaxValue;
Object obj = new Object();
- Delayer delayer = new Delayer(delay);
-
Callback nop = delegate()
{
return null;
};
Callback callback = delegate()
{
called = true;
timeCalled = DateTime.Now;
manualEvent.Set();
return obj;
};
delayer.Delay(nop);
DateTime timeStarted = DateTime.Now;
Object result = delayer.Delay(callback);
manualEvent.WaitOne(delay * 2, false);
int timeElapsed = (int)(timeCalled - timeStarted).TotalMilliseconds;
double elapsedError = delay * TIME_DIFFERENCE;
Assert.IsTrue(called);
Assert.AreSame(obj, result);
- Assert.IsTrue(timeElapsed >= delay - elapsedError,
+ Assert.IsTrue(timeElapsed >= delay - elapsedError,
"Delay was not at least (" + delay + " - " + elapsedError + ") msec; total time elapsed was " + timeElapsed);
if (timeElapsed > delay + elapsedError)
{
Assert.Inconclusive("Test failed, or specified delay was too small to be supported. Time elapsed was " + timeElapsed + " msec; expected <= " + elapsedError);
}
}
private TestContext testContextInstance;
/// <summary>
///Gets or sets the test context which provides
///information about and functionality for the current test run.
///</summary>
public TestContext TestContext
{
get
{
return testContextInstance;
}
set
{
testContextInstance = value;
}
}
#region Additional test attributes
//
// You can use the following additional attributes as you write your tests:
//
// Use ClassInitialize to run code before running the first test in the class
// [ClassInitialize()]
// public static void MyClassInitialize(TestContext testContext) { }
//
// Use ClassCleanup to run code after all tests in a class have run
// [ClassCleanup()]
// public static void MyClassCleanup() { }
//
// Use TestInitialize to run code before running each test
// [TestInitialize()]
// public void MyTestInitialize() { }
//
// Use TestCleanup to run code after each test has run
// [TestCleanup()]
// public void MyTestCleanup() { }
//
#endregion
}
}
|
maravillas/linq-to-delicious
|
3864321221bb013ecdc63c23a430cf07ed7eac78
|
Add several comments to methods and classes.
|
diff --git a/LinqToDelicious/Delayer.cs b/LinqToDelicious/Delayer.cs
index 7d089cf..14a8fb2 100644
--- a/LinqToDelicious/Delayer.cs
+++ b/LinqToDelicious/Delayer.cs
@@ -1,46 +1,62 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Timers;
using System.Diagnostics;
using System.Threading;
namespace LinqToDelicious
{
-
class Delayer : IDelayer
{
+ /// <summary>
+ /// The number of milliseconds to delay each time Delay is called.
+ /// </summary>
public int DelayLength { get; private set; }
+
+ /// <summary>
+ /// The number of milliseconds to delay the only next time Delay is called.
+ /// </summary>
public int AdditionalDelay { get; set; }
private Callback mCallback;
private DateTime mLastCall;
+ /// <summary>
+ /// Creates a new Delayer.
+ /// </summary>
+ /// <param name="delay"></param>
public Delayer(int delay)
{
DelayLength = delay;
mLastCall = DateTime.MaxValue;
}
+ /// <summary>
+ /// Suspends the current thread by the previously specified length of time,
+ /// and executes the specified callback when finished.
+ /// </summary>
+ /// <param name="callback">The Callback to be executed when the delay is complete.</param>
+ /// <returns>The result of the callback.</returns>
public object Delay(Callback callback)
{
int timeDifference = (int)(DateTime.Now - mLastCall).TotalMilliseconds;
int delay = Math.Max((DelayLength + Math.Max(AdditionalDelay, 0)) - timeDifference, 0);
mCallback = callback;
Debug.WriteLine("Delaying for " + delay + " msec");
if (delay > 0)
{
Thread.Sleep(delay);
}
mLastCall = DateTime.Now;
AdditionalDelay = 0;
return mCallback();
}
}
}
diff --git a/LinqToDelicious/DeliciousQueryProvider.cs b/LinqToDelicious/DeliciousQueryProvider.cs
index e14d600..c572f4b 100644
--- a/LinqToDelicious/DeliciousQueryProvider.cs
+++ b/LinqToDelicious/DeliciousQueryProvider.cs
@@ -1,97 +1,105 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Linq.Expressions;
using System.Net;
using System.Text;
using System.Xml.Linq;
using IQ;
using System.Diagnostics;
namespace LinqToDelicious
{
+ /// <summary>
+ /// Evaluates query expressions and translates them into collections of Posts.
+ /// </summary>
class DeliciousQueryProvider : QueryProvider
{
private const int BACKOFF_DELAY = 10000;
private IHttpWebRequestFactory mRequestFactory;
private IDelayer mDelayer;
private IQueryTranslatorFactory mTranslatorFactory;
+ /// <summary>
+ /// Creates a new DeliciousQueryProvider.
+ /// </summary>
+ /// <param name="requestFactory">The factory for creating HttpWebRequest objects.</param>
+ /// <param name="delayer">The IDelayer responsible for timing HTTP requests.</param>
+ /// <param name="translatorFactory">The factory for creating IQueryTranslator objects.</param>
public DeliciousQueryProvider(IHttpWebRequestFactory requestFactory, IDelayer delayer, IQueryTranslatorFactory translatorFactory)
{
mRequestFactory = requestFactory;
mDelayer = delayer;
mTranslatorFactory = translatorFactory;
}
public override string GetQueryText(Expression expression)
{
return mTranslatorFactory.Create(expression).Translate();
}
public override object Execute(Expression expression)
{
return mDelayer.Delay(delegate()
{
expression = PartialEvaluator.Eval(expression);
String uri = GetQueryText(expression);
Debug.WriteLine("Requesting " + uri);
HttpWebRequest request = mRequestFactory.Create(uri);
HttpWebResponse response;
try
{
response = (HttpWebResponse)request.GetResponse();
}
catch (WebException ex)
{
throw new RequestException("The request to " + uri + " timed out", ex);
}
try
{
if (response.StatusCode == HttpStatusCode.ServiceUnavailable)
{
// Simple backoff, for now.
mDelayer.AdditionalDelay = BACKOFF_DELAY;
throw new RequestException("Could not read " + uri);
}
// Is this too strict?
else if (response.StatusCode != HttpStatusCode.OK)
{
- // TODO: Define an exception
throw new RequestException("Could not read " + uri);
}
else
{
Stream stream = response.GetResponseStream();
XDocument document = XDocument.Load(new StreamReader(stream));
var posts = from post in document.Descendants("post")
select new Post(post.Attribute("href").Value,
post.Attribute("hash").Value,
post.Attribute("description").Value,
post.Attribute("tag").Value,
post.Attribute("extended").Value,
post.Attribute("time").Value,
post.Attribute("meta").Value);
return posts;
}
}
finally
{
response.Close();
}
});
}
}
}
diff --git a/LinqToDelicious/HttpWebRequestFactory.cs b/LinqToDelicious/HttpWebRequestFactory.cs
index 3ab14b9..84f6223 100644
--- a/LinqToDelicious/HttpWebRequestFactory.cs
+++ b/LinqToDelicious/HttpWebRequestFactory.cs
@@ -1,16 +1,24 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Net;
namespace LinqToDelicious
{
+ /// <summary>
+ /// Creates HttpWebRequest objects from URIs.
+ /// </summary>
class HttpWebRequestFactory : IHttpWebRequestFactory
{
+ /// <summary>
+ /// Create a new HttpWebRequest.
+ /// </summary>
+ /// <param name="uri">The URI To request.</param>
+ /// <returns>The request.</returns>
public HttpWebRequest Create(string uri)
{
return (HttpWebRequest)WebRequest.Create(uri);
}
}
}
diff --git a/LinqToDelicious/IDelayer.cs b/LinqToDelicious/IDelayer.cs
index 5dbc827..bb2fcf6 100644
--- a/LinqToDelicious/IDelayer.cs
+++ b/LinqToDelicious/IDelayer.cs
@@ -1,16 +1,30 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace LinqToDelicious
{
internal delegate object Callback();
internal interface IDelayer
{
+ /// <summary>
+ /// The number of milliseconds to delay each time Delay is called.
+ /// </summary>
+ int DelayLength { get; }
+
+ /// <summary>
+ /// The number of milliseconds to delay the only next time Delay is called.
+ /// </summary>
int AdditionalDelay { get; set; }
+ /// <summary>
+ /// Suspends the current thread by the previously specified length of time,
+ /// and executes the specified callback when finished.
+ /// </summary>
+ /// <param name="callback">The Callback to be executed when the delay is complete.</param>
+ /// <returns>The result of the callback.</returns>
object Delay(Callback callback);
}
}
diff --git a/LinqToDelicious/IHttpWebRequestFactory.cs b/LinqToDelicious/IHttpWebRequestFactory.cs
index ab20b65..2322ba7 100644
--- a/LinqToDelicious/IHttpWebRequestFactory.cs
+++ b/LinqToDelicious/IHttpWebRequestFactory.cs
@@ -1,13 +1,21 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Net;
namespace LinqToDelicious
{
+ /// <summary>
+ /// Creates HttpWebRequest objects from URIs.
+ /// </summary>
interface IHttpWebRequestFactory
{
+ /// <summary>
+ /// Create a new HttpWebRequest.
+ /// </summary>
+ /// <param name="uri">The URI To request.</param>
+ /// <returns>The request.</returns>
HttpWebRequest Create(string uri);
}
}
diff --git a/LinqToDelicious/IQueryTranslatorFactory.cs b/LinqToDelicious/IQueryTranslatorFactory.cs
index 5f88b7c..358971c 100644
--- a/LinqToDelicious/IQueryTranslatorFactory.cs
+++ b/LinqToDelicious/IQueryTranslatorFactory.cs
@@ -1,13 +1,21 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Text;
namespace LinqToDelicious
{
+ /// <summary>
+ /// A factory for creating IQueryTranslator objects.
+ /// </summary>
interface IQueryTranslatorFactory
{
+ /// <summary>
+ /// Create a new IQueryTranslator.
+ /// </summary>
+ /// <param name="expression">The expression to be translated.</param>
+ /// <returns>The translator.</returns>
IQueryTranslator Create(Expression expression);
}
}
diff --git a/LinqToDelicious/Post.cs b/LinqToDelicious/Post.cs
index 527e213..91af4c6 100644
--- a/LinqToDelicious/Post.cs
+++ b/LinqToDelicious/Post.cs
@@ -1,77 +1,126 @@
using System;
using System.Linq;
using System.Text;
using System.Collections.Generic;
using System.Globalization;
using System.Diagnostics;
namespace LinqToDelicious
{
+ /// <summary>
+ /// A post made to Delicious.
+ /// </summary>
public class Post
{
+ /// <summary>
+ /// The address for this post.
+ /// </summary>
public String Address { get; private set; }
+
+ /// <summary>
+ /// The Delicious-generated hash for this post.
+ /// </summary>
public String Hash { get; private set; }
+
+ /// <summary>
+ /// The description for this post.
+ /// </summary>
public String Description { get; private set; }
+
+ /// <summary>
+ /// The list of tags for this post.
+ /// </summary>
public List<String> Tags { get; private set; }
+
+ /// <summary>
+ /// The extended text for this post.
+ /// </summary>
public String Extended { get; private set; }
+
+ /// <summary>
+ /// The date this post was saved.
+ /// </summary>
public DateTime Date { get; private set; }
+
+ /// <summary>
+ /// The signature that indicates when values have changed in this post.
+ /// </summary>
public String Meta { get; private set; }
+ /// <summary>
+ /// Create a new post.
+ /// </summary>
+ /// <param name="address">The address for this post.</param>
+ /// <param name="hash">The Delicious-generated hash for this post.</param>
+ /// <param name="description">The description for this post.</param>
+ /// <param name="tags">The list of tags for this post.</param>
+ /// <param name="extended">The extended text for this post.</param>
+ /// <param name="date">The date this post was saved.</param>
public Post(String address, String hash, String description, String tags, String extended, String date) :
this(address, hash, description, tags, extended, date, "")
{
}
+ /// <summary>
+ /// Create a new post.
+ /// </summary>
+ /// <param name="address">The address for this post.</param>
+ /// <param name="hash">The Delicious-generated hash for this post.</param>
+ /// <param name="description">The description for this post.</param>
+ /// <param name="tags">The list of tags for this post.</param>
+ /// <param name="extended">The extended text for this post.</param>
+ /// <param name="date">The date this post was saved.</param>
+ /// <param name="meta">The signature that indicates when values have changed in this post.</param>
public Post(String address, String hash, String description, String tags, String extended, String date, String meta)
{
Address = address;
Hash = hash;
Description = description;
Tags = new List<String>(tags.Split(' '));
Extended = extended;
Date = DateTime.ParseExact(date, "yyyy'-'MM'-'dd'T'HH':'mm':'ss'Z'", new CultureInfo("en-US"));
Meta = meta;
}
public override string ToString()
{
return String.Format("Post [address={0} hash={1} description={2} tags={3} extended={4} date={5} meta={6}]",
Address, Hash, Description, Tags, Extended, Date, Meta);
}
public override bool Equals(object obj)
{
Post post = obj as Post;
if ((System.Object)post == null)
{
return false;
}
if (post == this)
{
return true;
}
return Address.Equals(post.Address) &&
Hash.Equals(post.Hash) &&
Description.Equals(post.Description) &&
Tags.SequenceEqual(post.Tags) &&
Extended.Equals(post.Extended) &&
Date.Equals(post.Date) &&
Meta.Equals(post.Meta);
}
public override int GetHashCode()
{
return Address.GetHashCode() ^
Hash.GetHashCode() ^
Description.GetHashCode() ^
string.Join(" ", Tags.ToArray()).GetHashCode() ^
Extended.GetHashCode() ^
Date.GetHashCode() ^
Meta.GetHashCode();
}
}
}
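A note on the Post constructor shown above: the tag attribute is split on single spaces into the Tags list, and the time attribute is parsed with an exact UTC-style pattern under the en-US culture. A minimal sketch of what that means for a caller (class name and values are illustrative, not taken from the repository):

    // Illustrative only: builds a Post the same way DeliciousQueryProvider does,
    // from the attribute strings the Delicious API returns.
    using System;
    using LinqToDelicious;

    class PostSketch
    {
        static void Main()
        {
            Post post = new Post(
                "http://example.com/",
                "a5a6f3d28d8dd549f3cad39fb0b34104",
                "Example domain",
                "example domain",               // two space-separated tags -> Tags.Count == 2
                "An example site.",
                "2008-12-12T07:45:52Z");        // matches "yyyy'-'MM'-'dd'T'HH':'mm':'ss'Z'"

            Console.WriteLine(post.Tags.Count); // 2
            Console.WriteLine(post.Date);       // 12/12/2008 7:45:52 AM under en-US formatting
            Console.WriteLine(post.Meta);       // "" (the six-argument overload defaults meta)
        }
    }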
diff --git a/LinqToDelicious/QueryTranslatorFactory.cs b/LinqToDelicious/QueryTranslatorFactory.cs
index f629efd..0a15ee5 100644
--- a/LinqToDelicious/QueryTranslatorFactory.cs
+++ b/LinqToDelicious/QueryTranslatorFactory.cs
@@ -1,16 +1,24 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Text;
namespace LinqToDelicious
{
+ /// <summary>
+ /// A factory for creating IQueryTranslator objects.
+ /// </summary>
class QueryTranslatorFactory : IQueryTranslatorFactory
{
+ /// <summary>
+ /// Create a new IQueryTranslator.
+ /// </summary>
+ /// <param name="expression">The expression to be translated.</param>
+ /// <returns>The translator.</returns>
public IQueryTranslator Create(Expression expression)
{
return new QueryTranslator(expression);
}
}
}
diff --git a/LinqToDelicious/RequestException.cs b/LinqToDelicious/RequestException.cs
index 7070314..761eb1a 100644
--- a/LinqToDelicious/RequestException.cs
+++ b/LinqToDelicious/RequestException.cs
@@ -1,14 +1,17 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace LinqToDelicious
{
+ /// <summary>
+ /// The exception that is thrown when a problem occurs while trying to request a Delicious page.
+ /// </summary>
class RequestException : ApplicationException
{
public RequestException() { }
public RequestException(string message) : base(message) { }
public RequestException(string message, Exception innerException) : base(message, innerException) { }
}
}
|
maravillas/linq-to-delicious
|
bc5c5e0ed8a1462d7844f0397f3e96ccf85136e7
|
Add ServiceUnavailableRequest test
|
diff --git a/LinqToDeliciousTest/DeliciousQueryProviderTest.cs b/LinqToDeliciousTest/DeliciousQueryProviderTest.cs
index 70da8b1..6d6006b 100644
--- a/LinqToDeliciousTest/DeliciousQueryProviderTest.cs
+++ b/LinqToDeliciousTest/DeliciousQueryProviderTest.cs
@@ -1,189 +1,216 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Linq.Expressions;
using System.Net;
using System.Text;
using LinqToDelicious;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Rhino.Mocks;
namespace LinqToDeliciousTest
{
/// <summary>
/// Summary description for DeliciousQueryProviderTest
/// </summary>
[TestClass]
public class DeliciousQueryProviderTest
{
delegate object CallbackDelegate(Callback callback);
private const string DOCUMENT = @"<?xml version='1.0' encoding='UTF-8'?>
<posts user='example' dt='2008-12-11T08:00:00Z' tag=''>
<post href='http://example.com/'
hash='a5a6f3d28d8dd549f3cad39fb0b34104'
description='Example domain'
tag='example domain'
time='2008-12-12T07:45:52Z'
extended='An example site.'
meta='762ee1d713648596931f798a7ba987e0'/>
<post href='http://second-example.com/'
hash='ce67c6fbe4f79a521481060e2447001b'
description='Another example domain'
tag='example domain another'
time='2008-12-12T04:04:24Z'
extended='Another example site.'
meta='fa2a46d239ad4f089c3ce7961d958b2e'/>
</posts>
<!-- fe03.api.del.ac4.yahoo.net uncompressed/chunked Sun Dec 14 20:20:32 PST 2008 -->";
private Post EXAMPLE_POST = new Post(
"http://example.com/",
"a5a6f3d28d8dd549f3cad39fb0b34104",
"Example domain",
"example domain",
"An example site.",
"2008-12-12T07:45:52Z",
"762ee1d713648596931f798a7ba987e0");
private Post ANOTHER_EXAMPLE_POST = new Post(
"http://second-example.com/",
"ce67c6fbe4f79a521481060e2447001b",
"Another example domain",
"example domain another",
"Another example site.",
"2008-12-12T04:04:24Z",
"fa2a46d239ad4f089c3ce7961d958b2e");
MockRepository mocks = new MockRepository();
IDelayer delayer;
IQueryTranslatorFactory translatorFactory;
IQueryTranslator translator;
IHttpWebRequestFactory requestFactory;
HttpWebRequest request;
HttpWebResponse response;
Expression expression = Expression.Constant(new Object());
string uri = "http://www.example.com";
Byte[] documentBytes = new UTF8Encoding().GetBytes(DOCUMENT);
Stream stream = new MemoryStream();
DeliciousQueryProvider provider;
public DeliciousQueryProviderTest()
{
delayer = mocks.StrictMock<IDelayer>();
translatorFactory = mocks.StrictMock<IQueryTranslatorFactory>();
translator = mocks.StrictMock<IQueryTranslator>();
requestFactory = mocks.StrictMock<IHttpWebRequestFactory>();
request = mocks.StrictMock<HttpWebRequest>();
response = mocks.StrictMock<HttpWebResponse>();
stream.Write(documentBytes, 0, documentBytes.Length);
stream.Seek(0, SeekOrigin.Begin);
provider = new DeliciousQueryProvider(requestFactory, delayer, translatorFactory);
}
[TestMethod]
public void FoundRequest()
{
// Set up the mocked call to Delay to actually execute the callback
Expect.Call(delayer.Delay(null)).IgnoreArguments().Do((CallbackDelegate)delegate(Callback callback)
{
return callback();
});
Expect.Call(translatorFactory.Create(expression)).Return(translator);
Expect.Call(requestFactory.Create(uri)).Return(request);
Expect.Call(translator.Translate()).Return(uri);
Expect.Call(request.GetResponse()).Return(response);
Expect.Call(response.GetResponseStream()).Return(stream);
Expect.Call(response.StatusCode).Return(HttpStatusCode.OK);
Expect.Call(response.StatusCode).Return(HttpStatusCode.OK);
Expect.Call(delegate { response.Close(); });
mocks.ReplayAll();
object result = provider.Execute(expression);
Assert.IsInstanceOfType(result, typeof(IEnumerable<Post>));
IEnumerable<Post> posts = (IEnumerable<Post>)result;
Assert.IsTrue(posts.Contains(EXAMPLE_POST), "Missing post for example.com.");
Assert.IsTrue(posts.Contains(ANOTHER_EXAMPLE_POST), "Missing post for another-example.com.");
mocks.VerifyAll();
}
[TestMethod]
[ExpectedException(typeof(RequestException))]
public void NotFoundRequest()
{
// Set up the mocked call to Delay to actually execute the callback
Expect.Call(delayer.Delay(null)).IgnoreArguments().Do((CallbackDelegate)delegate(Callback callback)
{
return callback();
});
Expect.Call(translatorFactory.Create(expression)).Return(translator);
Expect.Call(requestFactory.Create(uri)).Return(request);
Expect.Call(translator.Translate()).Return(uri);
Expect.Call(request.GetResponse()).Return(response);
Expect.Call(response.GetResponseStream()).Return(stream);
Expect.Call(response.StatusCode).Return(HttpStatusCode.NotFound);
Expect.Call(response.StatusCode).Return(HttpStatusCode.NotFound);
Expect.Call(delegate { response.Close(); });
mocks.ReplayAll();
object result = provider.Execute(expression);
mocks.VerifyAll();
}
+ [TestMethod]
+ [ExpectedException(typeof(RequestException))]
+ public void ServiceUnavailableRequest()
+ {
+ // Set up the mocked call to Delay to actually execute the callback
+ Expect.Call(delayer.Delay(null)).IgnoreArguments().Do((CallbackDelegate)delegate(Callback callback)
+ {
+ return callback();
+ });
+
+ delayer.AdditionalDelay = 10000;
+ Expect.Call(translatorFactory.Create(expression)).Return(translator);
+ Expect.Call(requestFactory.Create(uri)).Return(request);
+ Expect.Call(translator.Translate()).Return(uri);
+ Expect.Call(request.GetResponse()).Return(response);
+ Expect.Call(response.GetResponseStream()).Return(stream);
+ Expect.Call(response.StatusCode).Return(HttpStatusCode.ServiceUnavailable);
+ Expect.Call(response.StatusCode).Return(HttpStatusCode.ServiceUnavailable);
+ Expect.Call(delegate { response.Close(); });
+
+ mocks.ReplayAll();
+
+ object result = provider.Execute(expression);
+
+ mocks.VerifyAll();
+ }
+
private TestContext testContextInstance;
/// <summary>
///Gets or sets the test context which provides
///information about and functionality for the current test run.
///</summary>
public TestContext TestContext
{
get
{
return testContextInstance;
}
set
{
testContextInstance = value;
}
}
#region Additional test attributes
//
// You can use the following additional attributes as you write your tests:
//
// Use ClassInitialize to run code before running the first test in the class
// [ClassInitialize()]
// public static void MyClassInitialize(TestContext testContext) { }
//
// Use ClassCleanup to run code after all tests in a class have run
// [ClassCleanup()]
// public static void MyClassCleanup() { }
//
// Use TestInitialize to run code before running each test
// [TestInitialize()]
// public void MyTestInitialize() { }
//
// Use TestCleanup to run code after each test has run
// [TestCleanup()]
// public void MyTestCleanup() { }
//
#endregion
}
}
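The Expect.Call(delayer.Delay(null)).IgnoreArguments().Do(...) setup used by every test above is what keeps the provider synchronous under test: the mocked IDelayer never sleeps, it simply invokes whatever callback Execute hands it. A stripped-down sketch of that one pattern, assuming (as LinqToDeliciousTest does) that the test assembly can see the internal IDelayer and Callback types; the class name is illustrative:

    // Sketch of the delegate-forwarding stub used in these tests.
    using LinqToDelicious;
    using Rhino.Mocks;

    class DelayerStubSketch
    {
        // Same shape as the CallbackDelegate declared in DeliciousQueryProviderTest.
        delegate object CallbackDelegate(Callback callback);

        static void Demo()
        {
            MockRepository mocks = new MockRepository();
            IDelayer delayer = mocks.StrictMock<IDelayer>();

            // IgnoreArguments(): the real callback is created inside the code under
            // test, so the expectation cannot name it. Do(...) forwards straight to it.
            Expect.Call(delayer.Delay(null))
                  .IgnoreArguments()
                  .Do((CallbackDelegate)delegate(Callback callback) { return callback(); });

            mocks.ReplayAll();

            object result = delayer.Delay(delegate() { return 42; }); // runs immediately, no sleep
            System.Console.WriteLine(result);                         // 42

            mocks.VerifyAll();
        }
    }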
diff --git a/tasty.sln b/tasty.sln
index 123c028..eda5968 100644
--- a/tasty.sln
+++ b/tasty.sln
@@ -1,54 +1,51 @@

Microsoft Visual Studio Solution File, Format Version 10.00
# Visual Studio 2008
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "tasty", "tasty\tasty.csproj", "{69B0D2E5-8BC8-4747-9225-7F7450153B86}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{A3A51F38-2E9A-4573-BC1D-C76771C569E9}"
ProjectSection(SolutionItems) = preProject
LocalTestRun.testrunconfig = LocalTestRun.testrunconfig
tasty.vsmdi = tasty.vsmdi
- tasty1.vsmdi = tasty1.vsmdi
- tasty2.vsmdi = tasty2.vsmdi
- tasty4.vsmdi = tasty4.vsmdi
EndProjectSection
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LinqToDelicious", "LinqToDelicious\LinqToDelicious.csproj", "{C29DC32C-1DB9-4186-B707-E31E8C4C9209}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LinqToDeliciousTest", "LinqToDeliciousTest\LinqToDeliciousTest.csproj", "{600738E5-AEAD-4C7C-BD53-F91A82E4C0FE}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "IQToolkit", "IQToolkit\IQToolkit.csproj", "{680B0226-FF3E-447B-BE5A-CB99698D4555}"
EndProject
Global
GlobalSection(SubversionScc) = preSolution
Svn-Managed = True
Manager = AnkhSVN - Subversion Support for Visual Studio
EndGlobalSection
GlobalSection(TestCaseManagementSettings) = postSolution
- CategoryFile = tasty4.vsmdi
+ CategoryFile = tasty.vsmdi
EndGlobalSection
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{69B0D2E5-8BC8-4747-9225-7F7450153B86}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{69B0D2E5-8BC8-4747-9225-7F7450153B86}.Debug|Any CPU.Build.0 = Debug|Any CPU
{69B0D2E5-8BC8-4747-9225-7F7450153B86}.Release|Any CPU.ActiveCfg = Release|Any CPU
{69B0D2E5-8BC8-4747-9225-7F7450153B86}.Release|Any CPU.Build.0 = Release|Any CPU
{C29DC32C-1DB9-4186-B707-E31E8C4C9209}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C29DC32C-1DB9-4186-B707-E31E8C4C9209}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C29DC32C-1DB9-4186-B707-E31E8C4C9209}.Release|Any CPU.ActiveCfg = Release|Any CPU
{C29DC32C-1DB9-4186-B707-E31E8C4C9209}.Release|Any CPU.Build.0 = Release|Any CPU
{600738E5-AEAD-4C7C-BD53-F91A82E4C0FE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{600738E5-AEAD-4C7C-BD53-F91A82E4C0FE}.Debug|Any CPU.Build.0 = Debug|Any CPU
{600738E5-AEAD-4C7C-BD53-F91A82E4C0FE}.Release|Any CPU.ActiveCfg = Release|Any CPU
{600738E5-AEAD-4C7C-BD53-F91A82E4C0FE}.Release|Any CPU.Build.0 = Release|Any CPU
{680B0226-FF3E-447B-BE5A-CB99698D4555}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{680B0226-FF3E-447B-BE5A-CB99698D4555}.Debug|Any CPU.Build.0 = Debug|Any CPU
{680B0226-FF3E-447B-BE5A-CB99698D4555}.Release|Any CPU.ActiveCfg = Release|Any CPU
{680B0226-FF3E-447B-BE5A-CB99698D4555}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
EndGlobal
|
maravillas/linq-to-delicious
|
b159a71e1c30f02e080e9a7231e88c16c71830e3
|
Add NotFoundRequest test; rename Execute to FoundRequest
|
diff --git a/LinqToDeliciousTest/DeliciousQueryProviderTest.cs b/LinqToDeliciousTest/DeliciousQueryProviderTest.cs
index d9a7e7c..70da8b1 100644
--- a/LinqToDeliciousTest/DeliciousQueryProviderTest.cs
+++ b/LinqToDeliciousTest/DeliciousQueryProviderTest.cs
@@ -1,163 +1,189 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Linq.Expressions;
using System.Net;
using System.Text;
using LinqToDelicious;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Rhino.Mocks;
namespace LinqToDeliciousTest
{
/// <summary>
/// Summary description for DeliciousQueryProviderTest
/// </summary>
[TestClass]
public class DeliciousQueryProviderTest
{
delegate object CallbackDelegate(Callback callback);
private const string DOCUMENT = @"<?xml version='1.0' encoding='UTF-8'?>
<posts user='example' dt='2008-12-11T08:00:00Z' tag=''>
<post href='http://example.com/'
hash='a5a6f3d28d8dd549f3cad39fb0b34104'
description='Example domain'
tag='example domain'
time='2008-12-12T07:45:52Z'
extended='An example site.'
meta='762ee1d713648596931f798a7ba987e0'/>
<post href='http://second-example.com/'
hash='ce67c6fbe4f79a521481060e2447001b'
description='Another example domain'
tag='example domain another'
time='2008-12-12T04:04:24Z'
extended='Another example site.'
meta='fa2a46d239ad4f089c3ce7961d958b2e'/>
</posts>
<!-- fe03.api.del.ac4.yahoo.net uncompressed/chunked Sun Dec 14 20:20:32 PST 2008 -->";
private Post EXAMPLE_POST = new Post(
"http://example.com/",
"a5a6f3d28d8dd549f3cad39fb0b34104",
"Example domain",
"example domain",
"An example site.",
"2008-12-12T07:45:52Z",
"762ee1d713648596931f798a7ba987e0");
private Post ANOTHER_EXAMPLE_POST = new Post(
"http://second-example.com/",
"ce67c6fbe4f79a521481060e2447001b",
"Another example domain",
"example domain another",
"Another example site.",
"2008-12-12T04:04:24Z",
"fa2a46d239ad4f089c3ce7961d958b2e");
MockRepository mocks = new MockRepository();
IDelayer delayer;
IQueryTranslatorFactory translatorFactory;
IQueryTranslator translator;
IHttpWebRequestFactory requestFactory;
HttpWebRequest request;
HttpWebResponse response;
Expression expression = Expression.Constant(new Object());
string uri = "http://www.example.com";
Byte[] documentBytes = new UTF8Encoding().GetBytes(DOCUMENT);
Stream stream = new MemoryStream();
DeliciousQueryProvider provider;
public DeliciousQueryProviderTest()
{
delayer = mocks.StrictMock<IDelayer>();
translatorFactory = mocks.StrictMock<IQueryTranslatorFactory>();
translator = mocks.StrictMock<IQueryTranslator>();
requestFactory = mocks.StrictMock<IHttpWebRequestFactory>();
request = mocks.StrictMock<HttpWebRequest>();
response = mocks.StrictMock<HttpWebResponse>();
stream.Write(documentBytes, 0, documentBytes.Length);
stream.Seek(0, SeekOrigin.Begin);
provider = new DeliciousQueryProvider(requestFactory, delayer, translatorFactory);
}
[TestMethod]
- public void Execute()
+ public void FoundRequest()
{
// Set up the mocked call to Delay to actually execute the callback
Expect.Call(delayer.Delay(null)).IgnoreArguments().Do((CallbackDelegate)delegate(Callback callback)
{
return callback();
});
Expect.Call(translatorFactory.Create(expression)).Return(translator);
Expect.Call(requestFactory.Create(uri)).Return(request);
Expect.Call(translator.Translate()).Return(uri);
Expect.Call(request.GetResponse()).Return(response);
Expect.Call(response.GetResponseStream()).Return(stream);
Expect.Call(response.StatusCode).Return(HttpStatusCode.OK);
Expect.Call(response.StatusCode).Return(HttpStatusCode.OK);
Expect.Call(delegate { response.Close(); });
mocks.ReplayAll();
object result = provider.Execute(expression);
Assert.IsInstanceOfType(result, typeof(IEnumerable<Post>));
IEnumerable<Post> posts = (IEnumerable<Post>)result;
Assert.IsTrue(posts.Contains(EXAMPLE_POST), "Missing post for example.com.");
Assert.IsTrue(posts.Contains(ANOTHER_EXAMPLE_POST), "Missing post for another-example.com.");
mocks.VerifyAll();
}
+ [TestMethod]
+ [ExpectedException(typeof(RequestException))]
+ public void NotFoundRequest()
+ {
+ // Set up the mocked call to Delay to actually execute the callback
+ Expect.Call(delayer.Delay(null)).IgnoreArguments().Do((CallbackDelegate)delegate(Callback callback)
+ {
+ return callback();
+ });
+
+ Expect.Call(translatorFactory.Create(expression)).Return(translator);
+ Expect.Call(requestFactory.Create(uri)).Return(request);
+ Expect.Call(translator.Translate()).Return(uri);
+ Expect.Call(request.GetResponse()).Return(response);
+ Expect.Call(response.GetResponseStream()).Return(stream);
+ Expect.Call(response.StatusCode).Return(HttpStatusCode.NotFound);
+ Expect.Call(response.StatusCode).Return(HttpStatusCode.NotFound);
+ Expect.Call(delegate { response.Close(); });
+
+ mocks.ReplayAll();
+
+ object result = provider.Execute(expression);
+
+ mocks.VerifyAll();
+ }
+
private TestContext testContextInstance;
/// <summary>
///Gets or sets the test context which provides
///information about and functionality for the current test run.
///</summary>
public TestContext TestContext
{
get
{
return testContextInstance;
}
set
{
testContextInstance = value;
}
}
#region Additional test attributes
//
// You can use the following additional attributes as you write your tests:
//
// Use ClassInitialize to run code before running the first test in the class
// [ClassInitialize()]
// public static void MyClassInitialize(TestContext testContext) { }
//
// Use ClassCleanup to run code after all tests in a class have run
// [ClassCleanup()]
// public static void MyClassCleanup() { }
//
// Use TestInitialize to run code before running each test
// [TestInitialize()]
// public void MyTestInitialize() { }
//
// Use TestCleanup to run code after each test has run
// [TestCleanup()]
// public void MyTestCleanup() { }
//
#endregion
}
}
|
maravillas/linq-to-delicious
|
fddf416f3f99ca1f7aebbef39728273f7cd4160e
|
Use RequestException instead of Exception
|
diff --git a/LinqToDelicious/DeliciousQueryProvider.cs b/LinqToDelicious/DeliciousQueryProvider.cs
index b7bea93..e14d600 100644
--- a/LinqToDelicious/DeliciousQueryProvider.cs
+++ b/LinqToDelicious/DeliciousQueryProvider.cs
@@ -1,98 +1,97 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Linq.Expressions;
using System.Net;
using System.Text;
using System.Xml.Linq;
using IQ;
using System.Diagnostics;
namespace LinqToDelicious
{
class DeliciousQueryProvider : QueryProvider
{
private const int BACKOFF_DELAY = 10000;
private IHttpWebRequestFactory mRequestFactory;
private IDelayer mDelayer;
private IQueryTranslatorFactory mTranslatorFactory;
public DeliciousQueryProvider(IHttpWebRequestFactory requestFactory, IDelayer delayer, IQueryTranslatorFactory translatorFactory)
{
mRequestFactory = requestFactory;
mDelayer = delayer;
mTranslatorFactory = translatorFactory;
}
public override string GetQueryText(Expression expression)
{
return mTranslatorFactory.Create(expression).Translate();
}
public override object Execute(Expression expression)
{
return mDelayer.Delay(delegate()
{
expression = PartialEvaluator.Eval(expression);
String uri = GetQueryText(expression);
Debug.WriteLine("Requesting " + uri);
HttpWebRequest request = mRequestFactory.Create(uri);
-
+
HttpWebResponse response;
try
{
response = (HttpWebResponse)request.GetResponse();
}
catch (WebException ex)
{
- // TODO: Wrap this exception
- throw ex;
+ throw new RequestException("The request to " + uri + " timed out", ex);
}
try
{
if (response.StatusCode == HttpStatusCode.ServiceUnavailable)
{
// Simple backoff, for now.
mDelayer.AdditionalDelay = BACKOFF_DELAY;
- throw new Exception("Could not read " + uri);
+ throw new RequestException("Could not read " + uri);
}
// Is this too strict?
else if (response.StatusCode != HttpStatusCode.OK)
{
// TODO: Define an exception
- throw new Exception("Could not read " + uri);
+ throw new RequestException("Could not read " + uri);
}
else
{
Stream stream = response.GetResponseStream();
XDocument document = XDocument.Load(new StreamReader(stream));
var posts = from post in document.Descendants("post")
select new Post(post.Attribute("href").Value,
post.Attribute("hash").Value,
post.Attribute("description").Value,
post.Attribute("tag").Value,
post.Attribute("extended").Value,
post.Attribute("time").Value,
post.Attribute("meta").Value);
return posts;
}
}
finally
{
response.Close();
}
});
}
}
}
|
maravillas/linq-to-delicious
|
d8b674da8acaed3ed48c1c7525b43b18c1272924
|
Add RequestException
|
diff --git a/LinqToDelicious/LinqToDelicious.csproj b/LinqToDelicious/LinqToDelicious.csproj
index ab8cc43..f60b86c 100644
--- a/LinqToDelicious/LinqToDelicious.csproj
+++ b/LinqToDelicious/LinqToDelicious.csproj
@@ -1,78 +1,79 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="3.5" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProductVersion>9.0.21022</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{C29DC32C-1DB9-4186-B707-E31E8C4C9209}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>LinqToDelicious</RootNamespace>
<AssemblyName>LinqToDelicious</AssemblyName>
<TargetFrameworkVersion>v3.5</TargetFrameworkVersion>
<FileAlignment>512</FileAlignment>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<ItemGroup>
<Reference Include="System" />
<Reference Include="System.Core">
<RequiredTargetFramework>3.5</RequiredTargetFramework>
</Reference>
<Reference Include="System.Xml.Linq">
<RequiredTargetFramework>3.5</RequiredTargetFramework>
</Reference>
<Reference Include="System.Data.DataSetExtensions">
<RequiredTargetFramework>3.5</RequiredTargetFramework>
</Reference>
<Reference Include="System.Data" />
<Reference Include="System.Xml" />
</ItemGroup>
<ItemGroup>
<Compile Include="Delayer.cs" />
<Compile Include="Delicious.cs" />
<Compile Include="DeliciousQueryProvider.cs" />
<Compile Include="HttpWebRequestFactory.cs" />
<Compile Include="IHttpWebRequestFactory.cs" />
<Compile Include="IDelayer.cs" />
<Compile Include="IQueryTranslator.cs" />
<Compile Include="IQueryTranslatorFactory.cs" />
<Compile Include="Post.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="QueryTranslator.cs" />
<Compile Include="QueryTranslatorFactory.cs" />
+ <Compile Include="RequestException.cs" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\IQToolkit\IQToolkit.csproj">
<Project>{680B0226-FF3E-447B-BE5A-CB99698D4555}</Project>
<Name>IQToolkit</Name>
</ProjectReference>
</ItemGroup>
<ItemGroup>
<Content Include="queries.txt" />
</ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>
\ No newline at end of file
diff --git a/LinqToDelicious/RequestException.cs b/LinqToDelicious/RequestException.cs
new file mode 100644
index 0000000..7070314
--- /dev/null
+++ b/LinqToDelicious/RequestException.cs
@@ -0,0 +1,14 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace LinqToDelicious
+{
+ class RequestException : ApplicationException
+ {
+ public RequestException() { }
+ public RequestException(string message) : base(message) { }
+ public RequestException(string message, Exception innerException) : base(message, innerException) { }
+ }
+}
|
maravillas/linq-to-delicious
|
09cbbae870d7f59a72a87dfc9d899710733c35a7
|
Refactor DeliciousQueryProviderTest#Execute in prep for other tests.
|
diff --git a/LinqToDeliciousTest/DeliciousQueryProviderTest.cs b/LinqToDeliciousTest/DeliciousQueryProviderTest.cs
index cfa14ab..d9a7e7c 100644
--- a/LinqToDeliciousTest/DeliciousQueryProviderTest.cs
+++ b/LinqToDeliciousTest/DeliciousQueryProviderTest.cs
@@ -1,156 +1,163 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Linq.Expressions;
using System.Net;
using System.Text;
using LinqToDelicious;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Rhino.Mocks;
namespace LinqToDeliciousTest
{
/// <summary>
/// Summary description for DeliciousQueryProviderTest
/// </summary>
[TestClass]
public class DeliciousQueryProviderTest
{
delegate object CallbackDelegate(Callback callback);
private const string DOCUMENT = @"<?xml version='1.0' encoding='UTF-8'?>
<posts user='example' dt='2008-12-11T08:00:00Z' tag=''>
<post href='http://example.com/'
hash='a5a6f3d28d8dd549f3cad39fb0b34104'
description='Example domain'
tag='example domain'
time='2008-12-12T07:45:52Z'
extended='An example site.'
meta='762ee1d713648596931f798a7ba987e0'/>
<post href='http://second-example.com/'
hash='ce67c6fbe4f79a521481060e2447001b'
description='Another example domain'
tag='example domain another'
time='2008-12-12T04:04:24Z'
extended='Another example site.'
meta='fa2a46d239ad4f089c3ce7961d958b2e'/>
</posts>
<!-- fe03.api.del.ac4.yahoo.net uncompressed/chunked Sun Dec 14 20:20:32 PST 2008 -->";
private Post EXAMPLE_POST = new Post(
"http://example.com/",
"a5a6f3d28d8dd549f3cad39fb0b34104",
"Example domain",
"example domain",
"An example site.",
"2008-12-12T07:45:52Z",
"762ee1d713648596931f798a7ba987e0");
private Post ANOTHER_EXAMPLE_POST = new Post(
"http://second-example.com/",
"ce67c6fbe4f79a521481060e2447001b",
"Another example domain",
"example domain another",
"Another example site.",
"2008-12-12T04:04:24Z",
"fa2a46d239ad4f089c3ce7961d958b2e");
+ MockRepository mocks = new MockRepository();
- public DeliciousQueryProviderTest()
- {
+ IDelayer delayer;
+ IQueryTranslatorFactory translatorFactory;
+ IQueryTranslator translator;
+ IHttpWebRequestFactory requestFactory;
+ HttpWebRequest request;
+ HttpWebResponse response;
- }
+ Expression expression = Expression.Constant(new Object());
- [TestMethod]
- public void Execute()
- {
- MockRepository mocks = new MockRepository();
+ string uri = "http://www.example.com";
- IDelayer delayer = mocks.StrictMock<IDelayer>();
- IQueryTranslatorFactory translatorFactory = mocks.StrictMock<IQueryTranslatorFactory>();
- IQueryTranslator translator = mocks.StrictMock<IQueryTranslator>();
- IHttpWebRequestFactory requestFactory = mocks.StrictMock<IHttpWebRequestFactory>();
- HttpWebRequest request = mocks.StrictMock<HttpWebRequest>();
- HttpWebResponse response = mocks.StrictMock<HttpWebResponse>();
+ Byte[] documentBytes = new UTF8Encoding().GetBytes(DOCUMENT);
+ Stream stream = new MemoryStream();
- Expression expression = Expression.Constant(new Object());
+ DeliciousQueryProvider provider;
- string uri = "http://www.example.com";
+ public DeliciousQueryProviderTest()
+ {
+ delayer = mocks.StrictMock<IDelayer>();
+ translatorFactory = mocks.StrictMock<IQueryTranslatorFactory>();
+ translator = mocks.StrictMock<IQueryTranslator>();
+ requestFactory = mocks.StrictMock<IHttpWebRequestFactory>();
+ request = mocks.StrictMock<HttpWebRequest>();
+ response = mocks.StrictMock<HttpWebResponse>();
- Byte[] documentBytes = new UTF8Encoding().GetBytes(DOCUMENT);
- Stream stream = new MemoryStream();
stream.Write(documentBytes, 0, documentBytes.Length);
stream.Seek(0, SeekOrigin.Begin);
- DeliciousQueryProvider provider = new DeliciousQueryProvider(requestFactory, delayer, translatorFactory);
+ provider = new DeliciousQueryProvider(requestFactory, delayer, translatorFactory);
+ }
+ [TestMethod]
+ public void Execute()
+ {
// Set up the mocked call to Delay to actually execute the callback
Expect.Call(delayer.Delay(null)).IgnoreArguments().Do((CallbackDelegate)delegate(Callback callback)
{
return callback();
});
Expect.Call(translatorFactory.Create(expression)).Return(translator);
Expect.Call(requestFactory.Create(uri)).Return(request);
Expect.Call(translator.Translate()).Return(uri);
Expect.Call(request.GetResponse()).Return(response);
Expect.Call(response.GetResponseStream()).Return(stream);
Expect.Call(response.StatusCode).Return(HttpStatusCode.OK);
Expect.Call(response.StatusCode).Return(HttpStatusCode.OK);
Expect.Call(delegate { response.Close(); });
mocks.ReplayAll();
object result = provider.Execute(expression);
Assert.IsInstanceOfType(result, typeof(IEnumerable<Post>));
-
+
IEnumerable<Post> posts = (IEnumerable<Post>)result;
Assert.IsTrue(posts.Contains(EXAMPLE_POST), "Missing post for example.com.");
Assert.IsTrue(posts.Contains(ANOTHER_EXAMPLE_POST), "Missing post for another-example.com.");
mocks.VerifyAll();
}
private TestContext testContextInstance;
/// <summary>
///Gets or sets the test context which provides
///information about and functionality for the current test run.
///</summary>
public TestContext TestContext
{
get
{
return testContextInstance;
}
set
{
testContextInstance = value;
}
}
#region Additional test attributes
//
// You can use the following additional attributes as you write your tests:
//
// Use ClassInitialize to run code before running the first test in the class
// [ClassInitialize()]
// public static void MyClassInitialize(TestContext testContext) { }
//
// Use ClassCleanup to run code after all tests in a class have run
// [ClassCleanup()]
// public static void MyClassCleanup() { }
//
// Use TestInitialize to run code before running each test
// [TestInitialize()]
// public void MyTestInitialize() { }
//
// Use TestCleanup to run code after each test has run
// [TestCleanup()]
// public void MyTestCleanup() { }
//
#endregion
}
}
|
maravillas/linq-to-delicious
|
c73b91c96763ce7aad60cef663e6a87088dca860
|
Change DeliciousQueryProvider to use HttpWebRequest/HttpWebResponse
|
diff --git a/LinqToDelicious/Delayer.cs b/LinqToDelicious/Delayer.cs
index ee65599..7d089cf 100644
--- a/LinqToDelicious/Delayer.cs
+++ b/LinqToDelicious/Delayer.cs
@@ -1,44 +1,46 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Timers;
using System.Diagnostics;
using System.Threading;
namespace LinqToDelicious
{
class Delayer : IDelayer
{
public int DelayLength { get; private set; }
+ public int AdditionalDelay { get; set; }
private Callback mCallback;
private DateTime mLastCall;
public Delayer(int delay)
{
DelayLength = delay;
mLastCall = DateTime.MaxValue;
}
public object Delay(Callback callback)
{
int timeDifference = (int)(DateTime.Now - mLastCall).TotalMilliseconds;
- int delay = Math.Max(DelayLength - timeDifference, 0);
+ int delay = Math.Max((DelayLength + Math.Max(AdditionalDelay, 0)) - timeDifference, 0);
mCallback = callback;
Debug.WriteLine("Delaying for " + delay + " msec");
if (delay > 0)
{
Thread.Sleep(delay);
}
mLastCall = DateTime.Now;
+ AdditionalDelay = 0;
return mCallback();
}
}
}
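The two lines added to Delayer here give it a one-shot penalty: each call sleeps until at least DelayLength + AdditionalDelay milliseconds have elapsed since the previous call's timestamp, and the penalty is cleared as soon as it has been applied. A rough worked trace, assuming the 1000 ms base delay that Delicious.cs configures (all times illustrative):

    // Trace of delay = Math.Max((DelayLength + Math.Max(AdditionalDelay, 0)) - timeDifference, 0)
    //
    //   previous call stamped mLastCall at t = 0, AdditionalDelay = 0
    //   Delay() arrives at t = 300     -> timeDifference = 300
    //                                     delay = max((1000 + 0) - 300, 0)      = 700 ms sleep
    //
    //   a 503 response sets AdditionalDelay = 10000 (see DeliciousQueryProvider)
    //   Delay() arrives 1000 ms later  -> timeDifference = 1000
    //                                     delay = max((1000 + 10000) - 1000, 0) = 10000 ms sleep
    //   AdditionalDelay is reset to 0 afterwards, so the penalty applies exactly once.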
diff --git a/LinqToDelicious/Delicious.cs b/LinqToDelicious/Delicious.cs
index 6b3d5e3..a787ab7 100644
--- a/LinqToDelicious/Delicious.cs
+++ b/LinqToDelicious/Delicious.cs
@@ -1,25 +1,26 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Text;
using System.Net;
using IQ;
namespace LinqToDelicious
{
public class Delicious
{
public Query<Post> Posts { get; private set; }
- public Delicious(WebClient webClient)
+ public Delicious()
{
Delayer delayer = new Delayer(1000);
- QueryTranslatorFactory factory = new QueryTranslatorFactory();
+ QueryTranslatorFactory translatorFactory = new QueryTranslatorFactory();
+ IHttpWebRequestFactory requestFactory = new HttpWebRequestFactory();
- QueryProvider provider = new DeliciousQueryProvider(webClient, delayer, factory);
+ QueryProvider provider = new DeliciousQueryProvider(requestFactory, delayer, translatorFactory);
Posts = new Query<Post>(provider);
}
}
}
diff --git a/LinqToDelicious/DeliciousQueryProvider.cs b/LinqToDelicious/DeliciousQueryProvider.cs
index 39fe8de..b7bea93 100644
--- a/LinqToDelicious/DeliciousQueryProvider.cs
+++ b/LinqToDelicious/DeliciousQueryProvider.cs
@@ -1,59 +1,98 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Linq.Expressions;
using System.Net;
using System.Text;
using System.Xml.Linq;
using IQ;
using System.Diagnostics;
namespace LinqToDelicious
{
class DeliciousQueryProvider : QueryProvider
{
- private WebClient mWebClient;
+ private const int BACKOFF_DELAY = 10000;
+
+ private IHttpWebRequestFactory mRequestFactory;
private IDelayer mDelayer;
private IQueryTranslatorFactory mTranslatorFactory;
- public DeliciousQueryProvider(WebClient webClient, IDelayer delayer, IQueryTranslatorFactory translatorFactory)
+ public DeliciousQueryProvider(IHttpWebRequestFactory requestFactory, IDelayer delayer, IQueryTranslatorFactory translatorFactory)
{
- mWebClient = webClient;
+ mRequestFactory = requestFactory;
mDelayer = delayer;
mTranslatorFactory = translatorFactory;
}
public override string GetQueryText(Expression expression)
{
return mTranslatorFactory.Create(expression).Translate();
}
public override object Execute(Expression expression)
{
return mDelayer.Delay(delegate()
{
expression = PartialEvaluator.Eval(expression);
String uri = GetQueryText(expression);
Debug.WriteLine("Requesting " + uri);
- Stream stream = mWebClient.OpenRead(uri);
+ HttpWebRequest request = mRequestFactory.Create(uri);
+
+ HttpWebResponse response;
+
+ try
+ {
+ response = (HttpWebResponse)request.GetResponse();
+ }
+ catch (WebException ex)
+ {
+ // TODO: Wrap this exception
+ throw ex;
+ }
+
+ try
+ {
+ if (response.StatusCode == HttpStatusCode.ServiceUnavailable)
+ {
+ // Simple backoff, for now.
+ mDelayer.AdditionalDelay = BACKOFF_DELAY;
+
+ throw new Exception("Could not read " + uri);
+ }
+ // Is this too strict?
+ else if (response.StatusCode != HttpStatusCode.OK)
+ {
+ // TODO: Define an exception
+ throw new Exception("Could not read " + uri);
+ }
+ else
+ {
+ Stream stream = response.GetResponseStream();
- XDocument document = XDocument.Load(new StreamReader(stream));
+ XDocument document = XDocument.Load(new StreamReader(stream));
- var posts = from post in document.Descendants("post")
- select new Post(post.Attribute("href").Value,
- post.Attribute("hash").Value,
- post.Attribute("description").Value,
- post.Attribute("tag").Value,
- post.Attribute("extended").Value,
- post.Attribute("time").Value,
- post.Attribute("meta").Value);
+ var posts = from post in document.Descendants("post")
+ select new Post(post.Attribute("href").Value,
+ post.Attribute("hash").Value,
+ post.Attribute("description").Value,
+ post.Attribute("tag").Value,
+ post.Attribute("extended").Value,
+ post.Attribute("time").Value,
+ post.Attribute("meta").Value);
- return posts;
+ return posts;
+ }
+ }
+ finally
+ {
+ response.Close();
+ }
});
}
}
}
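Worth noting about the ServiceUnavailable branch above: it only arms the backoff and then throws; the longer wait happens on the next query, because Execute always runs inside mDelayer.Delay and both requests share the same Delayer instance. A minimal sketch of how that looks from the calling side at this commit (the retry shape and class name are illustrative; here a 503 still surfaces as a plain Exception, which a later commit narrows to RequestException):

    // Illustrative only: enumerate, hit a 503, retry; the second enumeration goes
    // through the same Delayer and therefore waits the base delay plus the 10 s backoff.
    using System;
    using System.Linq;
    using LinqToDelicious;

    class BackoffSketch
    {
        static void Main()
        {
            var query = from post in new Delicious().Posts
                        where post.Date == new DateTime(2008, 1, 1)
                        select post;

            try
            {
                foreach (Post post in query)       // enumeration triggers Execute()
                    Console.WriteLine(post);
            }
            catch (Exception)                      // 503 -> AdditionalDelay = 10000 on the Delayer
            {
                foreach (Post post in query)       // same provider, so this request is delayed
                    Console.WriteLine(post);
            }
        }
    }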
diff --git a/LinqToDelicious/IDelayer.cs b/LinqToDelicious/IDelayer.cs
index 797e6a3..5dbc827 100644
--- a/LinqToDelicious/IDelayer.cs
+++ b/LinqToDelicious/IDelayer.cs
@@ -1,14 +1,16 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace LinqToDelicious
{
internal delegate object Callback();
internal interface IDelayer
{
+ int AdditionalDelay { get; set; }
+
object Delay(Callback callback);
}
}
diff --git a/LinqToDelicious/LinqToDelicious.csproj b/LinqToDelicious/LinqToDelicious.csproj
index 0cb2f95..ab8cc43 100644
--- a/LinqToDelicious/LinqToDelicious.csproj
+++ b/LinqToDelicious/LinqToDelicious.csproj
@@ -1,76 +1,78 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="3.5" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProductVersion>9.0.21022</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{C29DC32C-1DB9-4186-B707-E31E8C4C9209}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>LinqToDelicious</RootNamespace>
<AssemblyName>LinqToDelicious</AssemblyName>
<TargetFrameworkVersion>v3.5</TargetFrameworkVersion>
<FileAlignment>512</FileAlignment>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<ItemGroup>
<Reference Include="System" />
<Reference Include="System.Core">
<RequiredTargetFramework>3.5</RequiredTargetFramework>
</Reference>
<Reference Include="System.Xml.Linq">
<RequiredTargetFramework>3.5</RequiredTargetFramework>
</Reference>
<Reference Include="System.Data.DataSetExtensions">
<RequiredTargetFramework>3.5</RequiredTargetFramework>
</Reference>
<Reference Include="System.Data" />
<Reference Include="System.Xml" />
</ItemGroup>
<ItemGroup>
<Compile Include="Delayer.cs" />
<Compile Include="Delicious.cs" />
<Compile Include="DeliciousQueryProvider.cs" />
+ <Compile Include="HttpWebRequestFactory.cs" />
+ <Compile Include="IHttpWebRequestFactory.cs" />
<Compile Include="IDelayer.cs" />
<Compile Include="IQueryTranslator.cs" />
<Compile Include="IQueryTranslatorFactory.cs" />
<Compile Include="Post.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="QueryTranslator.cs" />
<Compile Include="QueryTranslatorFactory.cs" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\IQToolkit\IQToolkit.csproj">
<Project>{680B0226-FF3E-447B-BE5A-CB99698D4555}</Project>
<Name>IQToolkit</Name>
</ProjectReference>
</ItemGroup>
<ItemGroup>
<Content Include="queries.txt" />
</ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>
\ No newline at end of file
diff --git a/LinqToDeliciousTest/DeliciousQueryProviderTest.cs b/LinqToDeliciousTest/DeliciousQueryProviderTest.cs
index 772928d..cfa14ab 100644
--- a/LinqToDeliciousTest/DeliciousQueryProviderTest.cs
+++ b/LinqToDeliciousTest/DeliciousQueryProviderTest.cs
@@ -1,149 +1,156 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Linq.Expressions;
using System.Net;
using System.Text;
using LinqToDelicious;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Rhino.Mocks;
namespace LinqToDeliciousTest
{
/// <summary>
/// Summary description for DeliciousQueryProviderTest
/// </summary>
[TestClass]
public class DeliciousQueryProviderTest
{
delegate object CallbackDelegate(Callback callback);
private const string DOCUMENT = @"<?xml version='1.0' encoding='UTF-8'?>
<posts user='example' dt='2008-12-11T08:00:00Z' tag=''>
<post href='http://example.com/'
hash='a5a6f3d28d8dd549f3cad39fb0b34104'
description='Example domain'
tag='example domain'
time='2008-12-12T07:45:52Z'
extended='An example site.'
meta='762ee1d713648596931f798a7ba987e0'/>
<post href='http://second-example.com/'
hash='ce67c6fbe4f79a521481060e2447001b'
description='Another example domain'
tag='example domain another'
time='2008-12-12T04:04:24Z'
extended='Another example site.'
meta='fa2a46d239ad4f089c3ce7961d958b2e'/>
</posts>
<!-- fe03.api.del.ac4.yahoo.net uncompressed/chunked Sun Dec 14 20:20:32 PST 2008 -->";
private Post EXAMPLE_POST = new Post(
"http://example.com/",
"a5a6f3d28d8dd549f3cad39fb0b34104",
"Example domain",
"example domain",
"An example site.",
"2008-12-12T07:45:52Z",
"762ee1d713648596931f798a7ba987e0");
private Post ANOTHER_EXAMPLE_POST = new Post(
"http://second-example.com/",
"ce67c6fbe4f79a521481060e2447001b",
"Another example domain",
"example domain another",
"Another example site.",
"2008-12-12T04:04:24Z",
"fa2a46d239ad4f089c3ce7961d958b2e");
public DeliciousQueryProviderTest()
{
}
[TestMethod]
public void Execute()
{
MockRepository mocks = new MockRepository();
- WebClient client = mocks.StrictMock<WebClient>();
IDelayer delayer = mocks.StrictMock<IDelayer>();
- IQueryTranslatorFactory factory = mocks.StrictMock<IQueryTranslatorFactory>();
+ IQueryTranslatorFactory translatorFactory = mocks.StrictMock<IQueryTranslatorFactory>();
IQueryTranslator translator = mocks.StrictMock<IQueryTranslator>();
+ IHttpWebRequestFactory requestFactory = mocks.StrictMock<IHttpWebRequestFactory>();
+ HttpWebRequest request = mocks.StrictMock<HttpWebRequest>();
+ HttpWebResponse response = mocks.StrictMock<HttpWebResponse>();
Expression expression = Expression.Constant(new Object());
string uri = "http://www.example.com";
Byte[] documentBytes = new UTF8Encoding().GetBytes(DOCUMENT);
Stream stream = new MemoryStream();
stream.Write(documentBytes, 0, documentBytes.Length);
stream.Seek(0, SeekOrigin.Begin);
- DeliciousQueryProvider provider = new DeliciousQueryProvider(client, delayer, factory);
+ DeliciousQueryProvider provider = new DeliciousQueryProvider(requestFactory, delayer, translatorFactory);
// Set up the mocked call to Delay to actually execute the callback
Expect.Call(delayer.Delay(null)).IgnoreArguments().Do((CallbackDelegate)delegate(Callback callback)
{
return callback();
});
- Expect.Call(factory.Create(expression)).Return(translator);
+ Expect.Call(translatorFactory.Create(expression)).Return(translator);
+ Expect.Call(requestFactory.Create(uri)).Return(request);
Expect.Call(translator.Translate()).Return(uri);
- Expect.Call(client.OpenRead(uri)).Return(stream);
+ Expect.Call(request.GetResponse()).Return(response);
+ Expect.Call(response.GetResponseStream()).Return(stream);
+ Expect.Call(response.StatusCode).Return(HttpStatusCode.OK);
+ Expect.Call(response.StatusCode).Return(HttpStatusCode.OK);
+ Expect.Call(delegate { response.Close(); });
mocks.ReplayAll();
object result = provider.Execute(expression);
Assert.IsInstanceOfType(result, typeof(IEnumerable<Post>));
IEnumerable<Post> posts = (IEnumerable<Post>)result;
Assert.IsTrue(posts.Contains(EXAMPLE_POST), "Missing post for example.com.");
Assert.IsTrue(posts.Contains(ANOTHER_EXAMPLE_POST), "Missing post for another-example.com.");
mocks.VerifyAll();
}
private TestContext testContextInstance;
/// <summary>
///Gets or sets the test context which provides
///information about and functionality for the current test run.
///</summary>
public TestContext TestContext
{
get
{
return testContextInstance;
}
set
{
testContextInstance = value;
}
}
#region Additional test attributes
//
// You can use the following additional attributes as you write your tests:
//
// Use ClassInitialize to run code before running the first test in the class
// [ClassInitialize()]
// public static void MyClassInitialize(TestContext testContext) { }
//
// Use ClassCleanup to run code after all tests in a class have run
// [ClassCleanup()]
// public static void MyClassCleanup() { }
//
// Use TestInitialize to run code before running each test
// [TestInitialize()]
// public void MyTestInitialize() { }
//
// Use TestCleanup to run code after each test has run
// [TestCleanup()]
// public void MyTestCleanup() { }
//
#endregion
}
}
diff --git a/tasty.sln b/tasty.sln
index c9191bc..123c028 100644
--- a/tasty.sln
+++ b/tasty.sln
@@ -1,53 +1,54 @@

Microsoft Visual Studio Solution File, Format Version 10.00
# Visual Studio 2008
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "tasty", "tasty\tasty.csproj", "{69B0D2E5-8BC8-4747-9225-7F7450153B86}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{A3A51F38-2E9A-4573-BC1D-C76771C569E9}"
ProjectSection(SolutionItems) = preProject
LocalTestRun.testrunconfig = LocalTestRun.testrunconfig
tasty.vsmdi = tasty.vsmdi
tasty1.vsmdi = tasty1.vsmdi
tasty2.vsmdi = tasty2.vsmdi
+ tasty4.vsmdi = tasty4.vsmdi
EndProjectSection
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LinqToDelicious", "LinqToDelicious\LinqToDelicious.csproj", "{C29DC32C-1DB9-4186-B707-E31E8C4C9209}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LinqToDeliciousTest", "LinqToDeliciousTest\LinqToDeliciousTest.csproj", "{600738E5-AEAD-4C7C-BD53-F91A82E4C0FE}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "IQToolkit", "IQToolkit\IQToolkit.csproj", "{680B0226-FF3E-447B-BE5A-CB99698D4555}"
EndProject
Global
GlobalSection(SubversionScc) = preSolution
Svn-Managed = True
Manager = AnkhSVN - Subversion Support for Visual Studio
EndGlobalSection
GlobalSection(TestCaseManagementSettings) = postSolution
- CategoryFile = tasty2.vsmdi
+ CategoryFile = tasty4.vsmdi
EndGlobalSection
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{69B0D2E5-8BC8-4747-9225-7F7450153B86}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{69B0D2E5-8BC8-4747-9225-7F7450153B86}.Debug|Any CPU.Build.0 = Debug|Any CPU
{69B0D2E5-8BC8-4747-9225-7F7450153B86}.Release|Any CPU.ActiveCfg = Release|Any CPU
{69B0D2E5-8BC8-4747-9225-7F7450153B86}.Release|Any CPU.Build.0 = Release|Any CPU
{C29DC32C-1DB9-4186-B707-E31E8C4C9209}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C29DC32C-1DB9-4186-B707-E31E8C4C9209}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C29DC32C-1DB9-4186-B707-E31E8C4C9209}.Release|Any CPU.ActiveCfg = Release|Any CPU
{C29DC32C-1DB9-4186-B707-E31E8C4C9209}.Release|Any CPU.Build.0 = Release|Any CPU
{600738E5-AEAD-4C7C-BD53-F91A82E4C0FE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{600738E5-AEAD-4C7C-BD53-F91A82E4C0FE}.Debug|Any CPU.Build.0 = Debug|Any CPU
{600738E5-AEAD-4C7C-BD53-F91A82E4C0FE}.Release|Any CPU.ActiveCfg = Release|Any CPU
{600738E5-AEAD-4C7C-BD53-F91A82E4C0FE}.Release|Any CPU.Build.0 = Release|Any CPU
{680B0226-FF3E-447B-BE5A-CB99698D4555}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{680B0226-FF3E-447B-BE5A-CB99698D4555}.Debug|Any CPU.Build.0 = Debug|Any CPU
{680B0226-FF3E-447B-BE5A-CB99698D4555}.Release|Any CPU.ActiveCfg = Release|Any CPU
{680B0226-FF3E-447B-BE5A-CB99698D4555}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
EndGlobal
diff --git a/tasty/Window1.xaml.cs b/tasty/Window1.xaml.cs
index e011f6d..a85dea9 100644
--- a/tasty/Window1.xaml.cs
+++ b/tasty/Window1.xaml.cs
@@ -1,39 +1,39 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
using LinqToDelicious;
using System.Net;
using System.Diagnostics;
namespace tasty
{
/// <summary>
/// Interaction logic for Window1.xaml
/// </summary>
public partial class Window1 : Window
{
public Window1()
{
InitializeComponent();
- var query = from post in new Delicious(new WebClient()).Posts
+ var query = from post in new Delicious().Posts
where post.Date == new DateTime(2008, 1, 1)
select post;
foreach (var p in query)
{
Debug.WriteLine(p);
}
}
}
}
|
maravillas/linq-to-delicious
|
21177d2db8d35b151709afbe29e9d6f7d99ca2c8
|
Rename CreateRequest to Create
|
diff --git a/LinqToDelicious/HttpWebRequestFactory.cs b/LinqToDelicious/HttpWebRequestFactory.cs
index 9035420..3ab14b9 100644
--- a/LinqToDelicious/HttpWebRequestFactory.cs
+++ b/LinqToDelicious/HttpWebRequestFactory.cs
@@ -1,16 +1,16 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Net;
namespace LinqToDelicious
{
class HttpWebRequestFactory : IHttpWebRequestFactory
{
- public HttpWebRequest CreateRequest(string uri)
+ public HttpWebRequest Create(string uri)
{
return (HttpWebRequest)WebRequest.Create(uri);
}
}
}
diff --git a/LinqToDelicious/IHttpWebRequestFactory.cs b/LinqToDelicious/IHttpWebRequestFactory.cs
index 2a8beaa..ab20b65 100644
--- a/LinqToDelicious/IHttpWebRequestFactory.cs
+++ b/LinqToDelicious/IHttpWebRequestFactory.cs
@@ -1,13 +1,13 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Net;
namespace LinqToDelicious
{
interface IHttpWebRequestFactory
{
- HttpWebRequest CreateRequest(string uri);
+ HttpWebRequest Create(string uri);
}
}
|
maravillas/linq-to-delicious
|
420432f0136ca357ae733375e2c2c0d18477d7a0
|
Add HttpWebRequestFactory as a wrapper around WebRequest#Create()
|
diff --git a/LinqToDelicious/HttpWebRequestFactory.cs b/LinqToDelicious/HttpWebRequestFactory.cs
new file mode 100644
index 0000000..9035420
--- /dev/null
+++ b/LinqToDelicious/HttpWebRequestFactory.cs
@@ -0,0 +1,16 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Net;
+
+namespace LinqToDelicious
+{
+ class HttpWebRequestFactory : IHttpWebRequestFactory
+ {
+ public HttpWebRequest CreateRequest(string uri)
+ {
+ return (HttpWebRequest)WebRequest.Create(uri);
+ }
+ }
+}
diff --git a/LinqToDelicious/IHttpWebRequestFactory.cs b/LinqToDelicious/IHttpWebRequestFactory.cs
new file mode 100644
index 0000000..2a8beaa
--- /dev/null
+++ b/LinqToDelicious/IHttpWebRequestFactory.cs
@@ -0,0 +1,13 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Net;
+
+namespace LinqToDelicious
+{
+ interface IHttpWebRequestFactory
+ {
+ HttpWebRequest CreateRequest(string uri);
+ }
+}
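The point of this pair of files is testability: WebRequest.Create is static and cannot be substituted, so routing it through a one-method factory gives DeliciousQueryProviderTest a seam through which to hand the provider a mocked HttpWebRequest. A small sketch of that seam as it exists at this commit (the method is still named CreateRequest; the class name is illustrative, and it assumes, as the test project does, that the internal types are visible to the tests):

    // Illustrative only: the factory interface is the mockable seam in front of
    // the static WebRequest.Create call.
    using System.Net;
    using LinqToDelicious;
    using Rhino.Mocks;

    class FactorySeamSketch
    {
        static void Demo()
        {
            MockRepository mocks = new MockRepository();

            IHttpWebRequestFactory factory = mocks.StrictMock<IHttpWebRequestFactory>();
            HttpWebRequest request = mocks.StrictMock<HttpWebRequest>();

            // Production code only ever asks the factory; the test decides what it gets back.
            Expect.Call(factory.CreateRequest("http://www.example.com")).Return(request);

            mocks.ReplayAll();
            // ... exercise code that depends on IHttpWebRequestFactory ...
            mocks.VerifyAll();
        }
    }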
|
maravillas/linq-to-delicious
|
3b338b8b5b8cc7925faca622b4543a95f8b90533
|
Add UnsupportedDateComparison test
|
diff --git a/LinqToDeliciousTest/QueryTranslatorTest.cs b/LinqToDeliciousTest/QueryTranslatorTest.cs
index 09c0cfb..ea4f06f 100644
--- a/LinqToDeliciousTest/QueryTranslatorTest.cs
+++ b/LinqToDeliciousTest/QueryTranslatorTest.cs
@@ -1,304 +1,319 @@
using LinqToDelicious;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System.Linq.Expressions;
using IQ;
using Rhino.Mocks;
using System.Linq;
using System;
using System.Diagnostics;
using System.Collections.Generic;
namespace LinqToDeliciousTest
{
/// <summary>
///This is a test class for QueryTranslatorTest and is intended
///to contain all QueryTranslatorTest Unit Tests
///</summary>
[TestClass()]
public class QueryTranslatorTest
{
private MockRepository mMocks = new MockRepository();
private ParameterExpression mParam = Expression.Parameter(typeof(Post), "post");
private TestContext testContextInstance;
/// <summary>
///Gets or sets the test context which provides
///information about and functionality for the current test run.
///</summary>
public TestContext TestContext
{
get
{
return testContextInstance;
}
set
{
testContextInstance = value;
}
}
#region Additional test attributes
//
//You can use the following additional attributes as you write your tests:
//
//Use ClassInitialize to run code before running the first test in the class
//[ClassInitialize()]
//public static void MyClassInitialize(TestContext testContext)
//{
//}
//
//Use ClassCleanup to run code after all tests in a class have run
//[ClassCleanup()]
//public static void MyClassCleanup()
//{
//}
//
//Use TestInitialize to run code before running each test
//[TestInitialize()]
//public void MyTestInitialize()
//{
//}
//
//Use TestCleanup to run code after each test has run
//[TestCleanup()]
//public void MyTestCleanup()
//{
//}
//
#endregion
/// <summary>
/// A test for date equality clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date = 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateEqual()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.Equal(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
Assert.IsTrue(url.Contains("todt=1/1/2008 12:00:00 AM"));
}
/// <summary>
/// A test for date less than clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date < 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateLessThan()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.LessThan(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("todt=1/1/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("fromdt="));
}
/// <summary>
/// A test for date less than/equal clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date <= 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateLessThanOrEqual()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.LessThanOrEqual(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("todt=1/2/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("fromdt="));
}
/// <summary>
/// A test for date greater than clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date > 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateGreaterThan()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.GreaterThan(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("todt="));
}
/// <summary>
/// A test for date greater than/equal clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date >= 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateGreaterOrThanEqual()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.GreaterThanOrEqual(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("fromdt=12/31/2007 12:00:00 AM"));
Assert.IsFalse(url.Contains("todt="));
}
/// <summary>
/// A test for tag clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example")))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereTags()
{
Expression tagClause = BuildTagClause(mParam, "example");
string url = TranslateQuery(mParam, tagClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("tag=example"));
}
/// <summary>
/// A test for date and tag clauses together.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example") && post.Date > 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereTagsAndDateGreaterThan()
{
Expression tagClause = BuildTagClause(mParam, "example");
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.GreaterThan(left, right));
Expression tagAndDateClauses = Expression.And(tagClause, dateClause);
string url = TranslateQuery(mParam, tagAndDateClauses);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("tag=example"));
Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("todt="));
}
/// <summary>
/// A test for date and tag clauses together.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example") && post.Date = 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereTagsAndDateEquals()
{
Expression tagClause = BuildTagClause(mParam, "example");
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.Equal(left, right));
Expression tagAndDateClauses = Expression.And(tagClause, dateClause);
string url = TranslateQuery(mParam, tagAndDateClauses);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("tag=example"));
Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
Assert.IsTrue(url.Contains("todt=1/1/2008 12:00:00 AM"));
}
/// <summary>
/// A test for a NotSupportedException when trying to OR in the where clause.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example") || post.Date = 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
[ExpectedException(typeof(NotSupportedException))]
public void UnsupportedWhereClause()
{
Expression tagClause = BuildTagClause(mParam, "example");
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.Equal(left, right));
Expression tagAndDateClauses = Expression.Or(tagClause, dateClause);
TranslateQuery(mParam, tagAndDateClauses);
}
+ /// <summary>
+ /// A test for a NotSupportedException when trying to compare dates with !=.
+ ///
+ /// Query(LinqToDelicious.Post).Where(post => (post.Date != 1/1/2008 12:00:00 AM))
+ ///</summary>
+ [TestMethod()]
+ [DeploymentItem("LinqToDelicious.dll")]
+ [ExpectedException(typeof(NotSupportedException))]
+ public void UnsupportedDateComparison()
+ {
+ Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.NotEqual(left, right));
+
+ TranslateQuery(mParam, dateClause);
+ }
+
private Expression BuildTagClause(ParameterExpression lambdaParameter, String tag)
{
// post.Tags
MemberExpression member = Expression.Property(lambdaParameter, "Tags");
// "example"
ConstantExpression tagExpression = Expression.Constant(tag);
// post.Tags.Contains("Example")
MethodCallExpression containsCall = Expression.Call(
member,
typeof(List<String>).GetMethod("Contains", new Type[] { typeof(String) }),
tagExpression);
return containsCall;
}
private Expression BuildDateClause(ParameterExpression lambdaParameter, DateTime date, Func<Expression, Expression, Expression> comparator)
{
//IQueryable<Post> queryable = mocks.StrictMock<IQueryable<Post>>();
// ConstantExpression queryableExpression = Expression.Constant(queryable);
// SetupResult.For(queryable.Expression).Return(queryableExpression);
// Debug.WriteLine(queryable.Expression);
// post.Date
Expression left = Expression.Property(lambdaParameter, "Date");
// <evaluated comparedDate>
Expression right = Expression.Constant(date);
// post.Date == <evaluated comparedDate>
Expression comparison = comparator(left, right);
return comparison;
}
private string TranslateQuery(ParameterExpression lambdaParameter, Expression clauses)
{
// from post in queryable
// where post.Date == <evaluated comparedDate>
// i.e. queryable.Where(post => post.Date == <evaluated comparedDate>)
// TODO: This should probably be mocked instead of using a meaningless array.
// See the TODO below - it probably relates.
IQueryable<Post> queryable = new Post[] { }.AsQueryable<Post>();
// queryable.Where( ... )
MethodCallExpression whereCall = Expression.Call(
typeof(Queryable),
"Where",
new Type[] { typeof(Post) },
queryable.Expression,
Expression.Lambda<Func<Post, bool>>(clauses, new ParameterExpression[] { lambdaParameter }));
// TODO: Double check that queryable.Expression gives us what we want above
// Debug output says it results in a Post[], but I thought it should be an IQueryable<Post>
// This argument isn't used in the translator at the moment.
QueryTranslator translator = new QueryTranslator(whereCall);
return translator.Translate();
}
}
}
|
maravillas/linq-to-delicious
|
d9c700fd963a93bbacc77c3e5620513331143334
|
Remove outdated, commented code
|
diff --git a/LinqToDelicious/QueryTranslator.cs b/LinqToDelicious/QueryTranslator.cs
index d3154e6..d36cba2 100644
--- a/LinqToDelicious/QueryTranslator.cs
+++ b/LinqToDelicious/QueryTranslator.cs
@@ -1,220 +1,203 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Linq.Expressions;
using System.Diagnostics;
using IQ;
namespace LinqToDelicious
{
class QueryTranslator : ExpressionVisitor, LinqToDelicious.IQueryTranslator
{
public Expression Expression { get; private set; }
private StringBuilder mBuilder;
private Stack<Object> mStack;
private const String TAG_TOKEN = "tags";
private const String DATE_TOKEN = "date";
public QueryTranslator(Expression expression)
{
Expression = expression;
mStack = new Stack<Object>();
}
public String Translate()
{
if (mBuilder == null)
{
mBuilder = new StringBuilder("http://www.example.com/delicious.xml?");
Visit(Expression);
}
return mBuilder.ToString();
}
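// Overview (inferred from this file and its unit tests; variable names below are
// illustrative only): the visitor methods translate a LINQ Where() call into
// delicious.xml query parameters. VisitMemberAccess pushes a lower-cased member
// name ("date" / "tags") onto mStack, VisitConstant pushes raw values, and
// VisitBinary / VisitMethodCall pop them to emit fromdt/todt/tag parameters.
// A rough sketch of the supported query shape:
//
//   // var q = posts.Where(post => post.Tags.Contains("example")
//   //                             && post.Date > new DateTime(2008, 1, 1));
//   // => http://www.example.com/delicious.xml?&tag=example&fromdt=1/1/2008 12:00:00 AM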
private static Expression StripQuotes(Expression expression)
{
while (expression.NodeType == ExpressionType.Quote)
{
expression = ((UnaryExpression)expression).Operand;
}
return expression;
}
protected override Expression VisitMethodCall(MethodCallExpression methodCall)
{
Debug.WriteLine("Visiting method " + methodCall);
if (methodCall.Method.DeclaringType == typeof(Queryable) && methodCall.Method.Name == "Where")
{
//mBuilder.Append(string.Format("Where {0}, {1}", methodCall.Arguments[0], methodCall.Arguments[1]));
Debug.WriteLine("Type: " + ((ConstantExpression)methodCall.Arguments[0]).Value);
LambdaExpression lambda = (LambdaExpression)StripQuotes(methodCall.Arguments[1]);
Visit(lambda.Body);
return methodCall;
}
else if (methodCall.Method.Name == "Contains")
{
- /*
- if (token.Equals(TAG_TOKEN))
- {
- if (binaryExpression.NodeType == ExpressionType.Equal)
- {
- mBuilder.Append("&tag=");
-
- Visit(binaryExpression.Right);
-
- mBuilder.Append(mStack.Pop());
- }
- else
- {
- throw new NotSupportedException(string.Format("The binary operator '{0}' is not supported for tag comparisons", binaryExpression.NodeType));
- }
- }
- */
Visit(methodCall.Object);
String token = (String)mStack.Pop();
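// The Visit() above ran VisitMemberAccess on methodCall.Object (e.g. post.Tags),
// which pushed the lower-cased member name; popping it here identifies which
// property the Contains() call was made against.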
if (token.Equals(TAG_TOKEN) &&
methodCall.Method.DeclaringType == typeof(List<string>))
{
// Would it be reasonable to assume these conditions are true?
if (methodCall.Arguments.Count == 1 &&
methodCall.Arguments[0].NodeType == ExpressionType.Constant)
{
mBuilder.Append("&tag=");
Visit(methodCall.Arguments[0]);
mBuilder.Append(mStack.Pop());
}
else
{
throw new Exception("Missing or invalid argument to method Contains");
}
}
return methodCall;
}
// Where Query(LinqToDelicious.Post), post => (post.Date > new DateTime(2008, 1, 1))
throw new NotSupportedException(string.Format("The method '{0}' is not supported", methodCall.Method.Name));
}
protected override Expression VisitUnary(UnaryExpression u)
{
Debug.WriteLine("Visiting unary expression " + u);
return u;
}
protected override Expression VisitBinary(BinaryExpression binaryExpression)
{
Debug.WriteLine("Visiting binary expression " + binaryExpression);
if (binaryExpression.NodeType == ExpressionType.And)
{
Visit(binaryExpression.Left);
Visit(binaryExpression.Right);
}
else if (binaryExpression.NodeType == ExpressionType.Equal ||
binaryExpression.NodeType == ExpressionType.LessThan ||
binaryExpression.NodeType == ExpressionType.LessThanOrEqual ||
binaryExpression.NodeType == ExpressionType.GreaterThan ||
binaryExpression.NodeType == ExpressionType.GreaterThanOrEqual)
{
Visit(binaryExpression.Left);
Debug.Assert(mStack.Peek().GetType() == typeof(String), "Expected String on the stack, was " + mStack.Peek().GetType());
String token = (String)mStack.Pop();
if (token.Equals(DATE_TOKEN))
{
Visit(binaryExpression.Right);
Debug.Assert(mStack.Peek().GetType() == typeof(DateTime), "Expected DateTime on the stack, was " + mStack.Peek().GetType());
DateTime date = (DateTime)mStack.Pop();
switch (binaryExpression.NodeType)
{
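// Each comparison maps onto the fromdt/todt range parameters. The inclusive
// operators (<=, >=) widen the date by one day via AddDays(+/-1) before emitting
// it, which appears to assume fromdt/todt act as exclusive bounds; the
// WhereDateLessThanOrEqual and WhereDateGreaterOrThanEqual tests encode that
// expectation (todt=1/2/2008, fromdt=12/31/2007 for a 1/1/2008 comparison).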
case ExpressionType.Equal:
mBuilder.Append(String.Format("&fromdt={0}&todt={0}", date));
break;
case ExpressionType.LessThan:
mBuilder.Append(String.Format("&todt={0}", date));
break;
case ExpressionType.LessThanOrEqual:
date = date.AddDays(1);
mBuilder.Append(String.Format("&todt={0}", date));
break;
case ExpressionType.GreaterThan:
mBuilder.Append(String.Format("&fromdt={0}", date));
break;
case ExpressionType.GreaterThanOrEqual:
date = date.AddDays(-1);
mBuilder.Append(String.Format("&fromdt={0}", date));
break;
default:
throw new NotSupportedException(string.Format("The binary operator '{0}' is not supported for date comparisons", binaryExpression.NodeType));
}
}
}
else
{
throw new NotSupportedException(string.Format("The operator '{0}' is not supported", binaryExpression.NodeType));
}
return binaryExpression;
}
protected override Expression VisitConstant(ConstantExpression constant)
{
Debug.WriteLine("Visiting constant " + constant);
mStack.Push(constant.Value);
return constant;
}
protected override Expression VisitMemberAccess(MemberExpression member)
{
Debug.WriteLine("Visiting member " + member);
if (member.Expression != null &&
member.Expression.NodeType == ExpressionType.Parameter)
{
mStack.Push(member.Member.Name.ToLower());
return member;
}
throw new NotSupportedException(string.Format("The member '{0}' is not supported", member.Member.Name));
}
}
}
|
maravillas/linq-to-delicious
|
9b092ecf7558ae10b20444fd115578fd229dd9c8
|
Add UnsupportedWhereClause test
|
diff --git a/LinqToDeliciousTest/QueryTranslatorTest.cs b/LinqToDeliciousTest/QueryTranslatorTest.cs
index 172f3b8..09c0cfb 100644
--- a/LinqToDeliciousTest/QueryTranslatorTest.cs
+++ b/LinqToDeliciousTest/QueryTranslatorTest.cs
@@ -1,287 +1,304 @@
using LinqToDelicious;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System.Linq.Expressions;
using IQ;
using Rhino.Mocks;
using System.Linq;
using System;
using System.Diagnostics;
using System.Collections.Generic;
namespace LinqToDeliciousTest
{
/// <summary>
///This is a test class for QueryTranslatorTest and is intended
///to contain all QueryTranslatorTest Unit Tests
///</summary>
[TestClass()]
public class QueryTranslatorTest
{
private MockRepository mMocks = new MockRepository();
private ParameterExpression mParam = Expression.Parameter(typeof(Post), "post");
private TestContext testContextInstance;
/// <summary>
///Gets or sets the test context which provides
///information about and functionality for the current test run.
///</summary>
public TestContext TestContext
{
get
{
return testContextInstance;
}
set
{
testContextInstance = value;
}
}
#region Additional test attributes
//
//You can use the following additional attributes as you write your tests:
//
//Use ClassInitialize to run code before running the first test in the class
//[ClassInitialize()]
//public static void MyClassInitialize(TestContext testContext)
//{
//}
//
//Use ClassCleanup to run code after all tests in a class have run
//[ClassCleanup()]
//public static void MyClassCleanup()
//{
//}
//
//Use TestInitialize to run code before running each test
//[TestInitialize()]
//public void MyTestInitialize()
//{
//}
//
//Use TestCleanup to run code after each test has run
//[TestCleanup()]
//public void MyTestCleanup()
//{
//}
//
#endregion
/// <summary>
/// A test for date equality clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date = 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateEqual()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.Equal(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
Assert.IsTrue(url.Contains("todt=1/1/2008 12:00:00 AM"));
}
/// <summary>
/// A test for date less than clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date < 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateLessThan()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.LessThan(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("todt=1/1/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("fromdt="));
}
/// <summary>
/// A test for date less than/equal clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date <= 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateLessThanOrEqual()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.LessThanOrEqual(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("todt=1/2/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("fromdt="));
}
/// <summary>
/// A test for date greater than clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date > 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateGreaterThan()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.GreaterThan(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("todt="));
}
/// <summary>
/// A test for date greater than/equal clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date >= 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateGreaterOrThanEqual()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.GreaterThanOrEqual(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("fromdt=12/31/2007 12:00:00 AM"));
Assert.IsFalse(url.Contains("todt="));
}
/// <summary>
/// A test for tag clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example")))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereTags()
{
Expression tagClause = BuildTagClause(mParam, "example");
string url = TranslateQuery(mParam, tagClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("tag=example"));
}
/// <summary>
/// A test for date and tag clauses together.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example") && post.Date > 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereTagsAndDateGreaterThan()
{
Expression tagClause = BuildTagClause(mParam, "example");
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.GreaterThan(left, right));
Expression tagAndDateClauses = Expression.And(tagClause, dateClause);
string url = TranslateQuery(mParam, tagAndDateClauses);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("tag=example"));
Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("todt="));
}
/// <summary>
/// A test for date and tag clauses together.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example") && post.Date = 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereTagsAndDateEquals()
{
Expression tagClause = BuildTagClause(mParam, "example");
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.Equal(left, right));
Expression tagAndDateClauses = Expression.And(tagClause, dateClause);
string url = TranslateQuery(mParam, tagAndDateClauses);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("tag=example"));
Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
Assert.IsTrue(url.Contains("todt=1/1/2008 12:00:00 AM"));
}
+ /// <summary>
+ /// A test for a NotSupportedException when trying to OR in the where clause.
+ ///
+ /// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example") || post.Date = 1/1/2008 12:00:00 AM))
+ ///</summary>
+ [TestMethod()]
+ [DeploymentItem("LinqToDelicious.dll")]
+ [ExpectedException(typeof(NotSupportedException))]
+ public void UnsupportedWhereClause()
+ {
+ Expression tagClause = BuildTagClause(mParam, "example");
+ Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.Equal(left, right));
+ Expression tagAndDateClauses = Expression.Or(tagClause, dateClause);
+
+ TranslateQuery(mParam, tagAndDateClauses);
+ }
+
private Expression BuildTagClause(ParameterExpression lambdaParameter, String tag)
{
// post.Tags
MemberExpression member = Expression.Property(lambdaParameter, "Tags");
// "example"
ConstantExpression tagExpression = Expression.Constant(tag);
// post.Tags.Contains("Example")
MethodCallExpression containsCall = Expression.Call(
member,
typeof(List<String>).GetMethod("Contains", new Type[] { typeof(String) }),
tagExpression);
return containsCall;
}
private Expression BuildDateClause(ParameterExpression lambdaParameter, DateTime date, Func<Expression, Expression, Expression> comparator)
{
//IQueryable<Post> queryable = mocks.StrictMock<IQueryable<Post>>();
// ConstantExpression queryableExpression = Expression.Constant(queryable);
// SetupResult.For(queryable.Expression).Return(queryableExpression);
// Debug.WriteLine(queryable.Expression);
// post.Date
Expression left = Expression.Property(lambdaParameter, "Date");
// <evaluated comparedDate>
Expression right = Expression.Constant(date);
// post.Date == <evaluated comparedDate>
Expression comparison = comparator(left, right);
return comparison;
}
private string TranslateQuery(ParameterExpression lambdaParameter, Expression clauses)
{
// from post in queryable
// where post.Date == <evaluated comparedDate>
// i.e. queryable.Where(post => post.Date == <evaluated comparedDate>)
// TODO: This should probably be mocked instead of using a meaningless array.
// See the TODO below - it probably relates.
IQueryable<Post> queryable = new Post[] { }.AsQueryable<Post>();
// queryable.Where( ... )
MethodCallExpression whereCall = Expression.Call(
typeof(Queryable),
"Where",
new Type[] { typeof(Post) },
queryable.Expression,
Expression.Lambda<Func<Post, bool>>(clauses, new ParameterExpression[] { lambdaParameter }));
// TODO: Double check that queryable.Expression gives us what we want above
// Debug output says it results in a Post[], but I thought it should be an IQueryable<Post>
// This argument isn't used in the translator at the moment.
QueryTranslator translator = new QueryTranslator(whereCall);
return translator.Translate();
}
}
}
|
maravillas/linq-to-delicious
|
1599136ca1d23a3767ca3b18dd16dae2cb9b21a9
|
Fix handling of ANDed where clauses
|
diff --git a/LinqToDelicious/QueryTranslator.cs b/LinqToDelicious/QueryTranslator.cs
index 0a3823e..d3154e6 100644
--- a/LinqToDelicious/QueryTranslator.cs
+++ b/LinqToDelicious/QueryTranslator.cs
@@ -1,206 +1,220 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Linq.Expressions;
using System.Diagnostics;
using IQ;
namespace LinqToDelicious
{
class QueryTranslator : ExpressionVisitor, LinqToDelicious.IQueryTranslator
{
public Expression Expression { get; private set; }
private StringBuilder mBuilder;
private Stack<Object> mStack;
private const String TAG_TOKEN = "tags";
private const String DATE_TOKEN = "date";
public QueryTranslator(Expression expression)
{
Expression = expression;
mStack = new Stack<Object>();
}
public String Translate()
{
if (mBuilder == null)
{
mBuilder = new StringBuilder("http://www.example.com/delicious.xml?");
Visit(Expression);
}
return mBuilder.ToString();
}
private static Expression StripQuotes(Expression expression)
{
while (expression.NodeType == ExpressionType.Quote)
{
expression = ((UnaryExpression)expression).Operand;
}
return expression;
}
protected override Expression VisitMethodCall(MethodCallExpression methodCall)
{
Debug.WriteLine("Visiting method " + methodCall);
if (methodCall.Method.DeclaringType == typeof(Queryable) && methodCall.Method.Name == "Where")
{
//mBuilder.Append(string.Format("Where {0}, {1}", methodCall.Arguments[0], methodCall.Arguments[1]));
Debug.WriteLine("Type: " + ((ConstantExpression)methodCall.Arguments[0]).Value);
LambdaExpression lambda = (LambdaExpression)StripQuotes(methodCall.Arguments[1]);
Visit(lambda.Body);
return methodCall;
}
else if (methodCall.Method.Name == "Contains")
{
/*
if (token.Equals(TAG_TOKEN))
{
if (binaryExpression.NodeType == ExpressionType.Equal)
{
mBuilder.Append("&tag=");
Visit(binaryExpression.Right);
mBuilder.Append(mStack.Pop());
}
else
{
throw new NotSupportedException(string.Format("The binary operator '{0}' is not supported for tag comparisons", binaryExpression.NodeType));
}
}
*/
Visit(methodCall.Object);
String token = (String)mStack.Pop();
if (token.Equals(TAG_TOKEN) &&
methodCall.Method.DeclaringType == typeof(List<string>))
{
// Would it be reasonable to assume these conditions are true?
if (methodCall.Arguments.Count == 1 &&
methodCall.Arguments[0].NodeType == ExpressionType.Constant)
{
mBuilder.Append("&tag=");
Visit(methodCall.Arguments[0]);
mBuilder.Append(mStack.Pop());
}
else
{
throw new Exception("Missing or invalid argument to method Contains");
}
}
return methodCall;
}
// Where Query(LinqToDelicious.Post), post => (post.Date > new DateTime(2008, 1, 1))
throw new NotSupportedException(string.Format("The method '{0}' is not supported", methodCall.Method.Name));
}
protected override Expression VisitUnary(UnaryExpression u)
{
Debug.WriteLine("Visiting unary expression " + u);
return u;
}
protected override Expression VisitBinary(BinaryExpression binaryExpression)
{
Debug.WriteLine("Visiting binary expression " + binaryExpression);
- Visit(binaryExpression.Left);
-
- Debug.Assert(mStack.Peek().GetType() == typeof(String), "Expected String on the stack, was " + mStack.Peek().GetType());
-
- String token = (String)mStack.Pop();
-
- if (token.Equals(DATE_TOKEN))
+ if (binaryExpression.NodeType == ExpressionType.And)
{
+ Visit(binaryExpression.Left);
Visit(binaryExpression.Right);
+ }
+ else if (binaryExpression.NodeType == ExpressionType.Equal ||
+ binaryExpression.NodeType == ExpressionType.LessThan ||
+ binaryExpression.NodeType == ExpressionType.LessThanOrEqual ||
+ binaryExpression.NodeType == ExpressionType.GreaterThan ||
+ binaryExpression.NodeType == ExpressionType.GreaterThanOrEqual)
+ {
+ Visit(binaryExpression.Left);
- Debug.Assert(mStack.Peek().GetType() == typeof(DateTime), "Expected DateTime on the stack, was " + mStack.Peek().GetType());
+ Debug.Assert(mStack.Peek().GetType() == typeof(String), "Expected String on the stack, was " + mStack.Peek().GetType());
- DateTime date = (DateTime)mStack.Pop();
+ String token = (String)mStack.Pop();
- switch (binaryExpression.NodeType)
+ if (token.Equals(DATE_TOKEN))
{
- case ExpressionType.Equal:
- mBuilder.Append(String.Format("&fromdt={0}&todt={0}", date));
+ Visit(binaryExpression.Right);
+
+ Debug.Assert(mStack.Peek().GetType() == typeof(DateTime), "Expected DateTime on the stack, was " + mStack.Peek().GetType());
- break;
+ DateTime date = (DateTime)mStack.Pop();
- case ExpressionType.LessThan:
- mBuilder.Append(String.Format("&todt={0}", date));
+ switch (binaryExpression.NodeType)
+ {
+ case ExpressionType.Equal:
+ mBuilder.Append(String.Format("&fromdt={0}&todt={0}", date));
+
+ break;
- break;
+ case ExpressionType.LessThan:
+ mBuilder.Append(String.Format("&todt={0}", date));
- case ExpressionType.LessThanOrEqual:
- date = date.AddDays(1);
+ break;
- mBuilder.Append(String.Format("&todt={0}", date));
+ case ExpressionType.LessThanOrEqual:
+ date = date.AddDays(1);
- break;
+ mBuilder.Append(String.Format("&todt={0}", date));
- case ExpressionType.GreaterThan:
- mBuilder.Append(String.Format("&fromdt={0}", date));
+ break;
- break;
+ case ExpressionType.GreaterThan:
+ mBuilder.Append(String.Format("&fromdt={0}", date));
- case ExpressionType.GreaterThanOrEqual:
- date = date.AddDays(-1);
+ break;
- mBuilder.Append(String.Format("&fromdt={0}", date));
+ case ExpressionType.GreaterThanOrEqual:
+ date = date.AddDays(-1);
- break;
+ mBuilder.Append(String.Format("&fromdt={0}", date));
- default:
- throw new NotSupportedException(string.Format("The binary operator '{0}' is not supported for date comparisons", binaryExpression.NodeType));
+ break;
+
+ default:
+ throw new NotSupportedException(string.Format("The binary operator '{0}' is not supported for date comparisons", binaryExpression.NodeType));
+ }
}
}
+ else
+ {
+ throw new NotSupportedException(string.Format("The operator '{0}' is not supported", binaryExpression.NodeType));
+ }
return binaryExpression;
}
protected override Expression VisitConstant(ConstantExpression constant)
{
Debug.WriteLine("Visiting constant " + constant);
mStack.Push(constant.Value);
return constant;
}
protected override Expression VisitMemberAccess(MemberExpression member)
{
Debug.WriteLine("Visiting member " + member);
if (member.Expression != null &&
member.Expression.NodeType == ExpressionType.Parameter)
{
- Debug.WriteLine("Pushing \"" + member.Member.Name.ToLower() + "\"");
-
mStack.Push(member.Member.Name.ToLower());
return member;
}
throw new NotSupportedException(string.Format("The member '{0}' is not supported", member.Member.Name));
}
}
}
diff --git a/LinqToDeliciousTest/QueryTranslatorTest.cs b/LinqToDeliciousTest/QueryTranslatorTest.cs
index c97846f..172f3b8 100644
--- a/LinqToDeliciousTest/QueryTranslatorTest.cs
+++ b/LinqToDeliciousTest/QueryTranslatorTest.cs
@@ -1,242 +1,287 @@
using LinqToDelicious;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System.Linq.Expressions;
using IQ;
using Rhino.Mocks;
using System.Linq;
using System;
using System.Diagnostics;
using System.Collections.Generic;
namespace LinqToDeliciousTest
{
/// <summary>
///This is a test class for QueryTranslatorTest and is intended
///to contain all QueryTranslatorTest Unit Tests
///</summary>
[TestClass()]
public class QueryTranslatorTest
{
private MockRepository mMocks = new MockRepository();
private ParameterExpression mParam = Expression.Parameter(typeof(Post), "post");
private TestContext testContextInstance;
/// <summary>
///Gets or sets the test context which provides
///information about and functionality for the current test run.
///</summary>
public TestContext TestContext
{
get
{
return testContextInstance;
}
set
{
testContextInstance = value;
}
}
#region Additional test attributes
//
//You can use the following additional attributes as you write your tests:
//
//Use ClassInitialize to run code before running the first test in the class
//[ClassInitialize()]
//public static void MyClassInitialize(TestContext testContext)
//{
//}
//
//Use ClassCleanup to run code after all tests in a class have run
//[ClassCleanup()]
//public static void MyClassCleanup()
//{
//}
//
//Use TestInitialize to run code before running each test
//[TestInitialize()]
//public void MyTestInitialize()
//{
//}
//
//Use TestCleanup to run code after each test has run
//[TestCleanup()]
//public void MyTestCleanup()
//{
//}
//
#endregion
/// <summary>
/// A test for date equality clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date = 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateEqual()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.Equal(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
Assert.IsTrue(url.Contains("todt=1/1/2008 12:00:00 AM"));
}
/// <summary>
/// A test for date less than clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date < 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateLessThan()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.LessThan(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("todt=1/1/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("fromdt="));
}
/// <summary>
/// A test for date less than/equal clauses.
///
- /// Query(LinqToDelicious.Post).Where(post => (post.Date <e;= 1/1/2008 12:00:00 AM))
+ /// Query(LinqToDelicious.Post).Where(post => (post.Date <= 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateLessThanOrEqual()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.LessThanOrEqual(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("todt=1/2/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("fromdt="));
}
/// <summary>
/// A test for date greater than clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date > 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateGreaterThan()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.GreaterThan(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("todt="));
}
/// <summary>
/// A test for date greater than/equal clauses.
///
- /// Query(LinqToDelicious.Post).Where(post => (post.Date >e;= 1/1/2008 12:00:00 AM))
+ /// Query(LinqToDelicious.Post).Where(post => (post.Date >= 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateGreaterOrThanEqual()
{
Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.GreaterThanOrEqual(left, right));
string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("fromdt=12/31/2007 12:00:00 AM"));
Assert.IsFalse(url.Contains("todt="));
}
/// <summary>
/// A test for tag clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example")))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereTags()
{
Expression tagClause = BuildTagClause(mParam, "example");
string url = TranslateQuery(mParam, tagClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("tag=example"));
}
+ /// <summary>
+ /// A test for date and tag clauses together.
+ ///
+ /// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example") && post.Date > 1/1/2008 12:00:00 AM))
+ ///</summary>
+ [TestMethod()]
+ [DeploymentItem("LinqToDelicious.dll")]
+ public void WhereTagsAndDateGreaterThan()
+ {
+ Expression tagClause = BuildTagClause(mParam, "example");
+ Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.GreaterThan(left, right));
+ Expression tagAndDateClauses = Expression.And(tagClause, dateClause);
+
+ string url = TranslateQuery(mParam, tagAndDateClauses);
+
+ Debug.WriteLine("url: " + url);
+
+ Assert.IsTrue(url.Contains("tag=example"));
+ Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
+ Assert.IsFalse(url.Contains("todt="));
+ }
+
+ /// <summary>
+ /// A test for date and tag clauses together.
+ ///
+ /// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example") && post.Date = 1/1/2008 12:00:00 AM))
+ ///</summary>
+ [TestMethod()]
+ [DeploymentItem("LinqToDelicious.dll")]
+ public void WhereTagsAndDateEquals()
+ {
+ Expression tagClause = BuildTagClause(mParam, "example");
+ Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.Equal(left, right));
+ Expression tagAndDateClauses = Expression.And(tagClause, dateClause);
+
+ string url = TranslateQuery(mParam, tagAndDateClauses);
+
+ Debug.WriteLine("url: " + url);
+
+ Assert.IsTrue(url.Contains("tag=example"));
+
+ Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
+ Assert.IsTrue(url.Contains("todt=1/1/2008 12:00:00 AM"));
+ }
+
private Expression BuildTagClause(ParameterExpression lambdaParameter, String tag)
{
// post.Tags
MemberExpression member = Expression.Property(lambdaParameter, "Tags");
// "example"
ConstantExpression tagExpression = Expression.Constant(tag);
// post.Tags.Contains("Example")
MethodCallExpression containsCall = Expression.Call(
member,
typeof(List<String>).GetMethod("Contains", new Type[] { typeof(String) }),
tagExpression);
return containsCall;
}
private Expression BuildDateClause(ParameterExpression lambdaParameter, DateTime date, Func<Expression, Expression, Expression> comparator)
{
//IQueryable<Post> queryable = mocks.StrictMock<IQueryable<Post>>();
// ConstantExpression queryableExpression = Expression.Constant(queryable);
// SetupResult.For(queryable.Expression).Return(queryableExpression);
// Debug.WriteLine(queryable.Expression);
// post.Date
Expression left = Expression.Property(lambdaParameter, "Date");
// <evaluated comparedDate>
Expression right = Expression.Constant(date);
// post.Date == <evaluated comparedDate>
Expression comparison = comparator(left, right);
return comparison;
}
private string TranslateQuery(ParameterExpression lambdaParameter, Expression clauses)
{
// from post in queryable
// where post.Date == <evaluated comparedDate>
// i.e. queryable.Where(post => post.Date == <evaluated comparedDate>)
// TODO: This should probably be mocked instead of using a meaningless array.
// See the TODO below - it probably relates.
IQueryable<Post> queryable = new Post[] { }.AsQueryable<Post>();
// queryable.Where( ... )
MethodCallExpression whereCall = Expression.Call(
typeof(Queryable),
"Where",
new Type[] { typeof(Post) },
queryable.Expression,
Expression.Lambda<Func<Post, bool>>(clauses, new ParameterExpression[] { lambdaParameter }));
// TODO: Double check that queryable.Expression gives us what we want above
// Debug output says it results in a Post[], but I thought it should be an IQueryable<Post>
// This argument isn't used in the translator at the moment.
QueryTranslator translator = new QueryTranslator(whereCall);
return translator.Translate();
}
}
}
|
maravillas/linq-to-delicious
|
4135839d5403ed5959bcded1fc10de550a8ea9f3
|
Refactor QueryTranslator tests
|
diff --git a/LinqToDeliciousTest/QueryTranslatorTest.cs b/LinqToDeliciousTest/QueryTranslatorTest.cs
index 708b83f..c97846f 100644
--- a/LinqToDeliciousTest/QueryTranslatorTest.cs
+++ b/LinqToDeliciousTest/QueryTranslatorTest.cs
@@ -1,256 +1,242 @@
using LinqToDelicious;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System.Linq.Expressions;
using IQ;
using Rhino.Mocks;
using System.Linq;
using System;
using System.Diagnostics;
using System.Collections.Generic;
namespace LinqToDeliciousTest
{
/// <summary>
///This is a test class for QueryTranslatorTest and is intended
///to contain all QueryTranslatorTest Unit Tests
///</summary>
[TestClass()]
public class QueryTranslatorTest
{
- private MockRepository mocks = new MockRepository();
+ private MockRepository mMocks = new MockRepository();
+ private ParameterExpression mParam = Expression.Parameter(typeof(Post), "post");
private TestContext testContextInstance;
/// <summary>
///Gets or sets the test context which provides
///information about and functionality for the current test run.
///</summary>
public TestContext TestContext
{
get
{
return testContextInstance;
}
set
{
testContextInstance = value;
}
}
#region Additional test attributes
//
//You can use the following additional attributes as you write your tests:
//
//Use ClassInitialize to run code before running the first test in the class
//[ClassInitialize()]
//public static void MyClassInitialize(TestContext testContext)
//{
//}
//
//Use ClassCleanup to run code after all tests in a class have run
//[ClassCleanup()]
//public static void MyClassCleanup()
//{
//}
//
//Use TestInitialize to run code before running each test
//[TestInitialize()]
//public void MyTestInitialize()
//{
//}
//
//Use TestCleanup to run code after each test has run
//[TestCleanup()]
//public void MyTestCleanup()
//{
//}
//
#endregion
/// <summary>
/// A test for date equality clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date = 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateEqual()
{
- string url = TranslateComparison((left, right) => Expression.Equal(left, right));
+ Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.Equal(left, right));
+ string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
Assert.IsTrue(url.Contains("todt=1/1/2008 12:00:00 AM"));
}
/// <summary>
/// A test for date less than clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date < 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateLessThan()
{
- string url = TranslateComparison((left, right) => Expression.LessThan(left, right));
+ Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.LessThan(left, right));
+ string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("todt=1/1/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("fromdt="));
}
/// <summary>
/// A test for date less than/equal clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date <e;= 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateLessThanOrEqual()
{
- string url = TranslateComparison((left, right) => Expression.LessThanOrEqual(left, right));
+ Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.LessThanOrEqual(left, right));
+ string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("todt=1/2/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("fromdt="));
}
/// <summary>
/// A test for date greater than clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date > 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateGreaterThan()
{
- string url = TranslateComparison((left, right) => Expression.GreaterThan(left, right));
+ Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.GreaterThan(left, right));
+ string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("todt="));
}
/// <summary>
/// A test for date greater than/equal clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date >e;= 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateGreaterOrThanEqual()
{
- string url = TranslateComparison((left, right) => Expression.GreaterThanOrEqual(left, right));
+ Expression dateClause = BuildDateClause(mParam, new DateTime(2008, 1, 1), (left, right) => Expression.GreaterThanOrEqual(left, right));
+ string url = TranslateQuery(mParam, dateClause);
Debug.WriteLine("url: " + url);
-
+
Assert.IsTrue(url.Contains("fromdt=12/31/2007 12:00:00 AM"));
Assert.IsFalse(url.Contains("todt="));
}
/// <summary>
/// A test for tag clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example")))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereTags()
{
- String tagName = "example";
-
- // from post in queryable
- // where post.Tags.Contains("example")
+ Expression tagClause = BuildTagClause(mParam, "example");
+ string url = TranslateQuery(mParam, tagClause);
- // i.e. queryable.Where(post => post.Tags.Contains("Example"))
-
- // TODO: This should probably be mocked instead of using a meaningless array.
- // See the TODO below - it probably relates.
- IQueryable<Post> queryable = new Post[] { }.AsQueryable<Post>();
+ Debug.WriteLine("url: " + url);
- // param represents the parameter to the lambda expression
- ParameterExpression param = Expression.Parameter(typeof(Post), "post");
+ Assert.IsTrue(url.Contains("tag=example"));
+ }
+ private Expression BuildTagClause(ParameterExpression lambdaParameter, String tag)
+ {
// post.Tags
- MemberExpression member = Expression.Property(param, "Tags");
+ MemberExpression member = Expression.Property(lambdaParameter, "Tags");
// "example"
- ConstantExpression tag = Expression.Constant(tagName);
+ ConstantExpression tagExpression = Expression.Constant(tag);
// post.Tags.Contains("Example")
MethodCallExpression containsCall = Expression.Call(
member,
typeof(List<String>).GetMethod("Contains", new Type[] { typeof(String) }),
- tag);
-
- // queryable.Where( ... )
- MethodCallExpression whereCall = Expression.Call(
- typeof(Queryable),
- "Where",
- new Type[] { typeof(Post) },
- queryable.Expression,
- Expression.Lambda<Func<Post, bool>>(containsCall, new ParameterExpression[] { param }));
-
- // TODO: Double check that queryable.Expression gives us what we want above
- // Debug output says it results in a Post[], but I thought it should be an IQueryable<Post>
- // This argument isn't used in the translator at the moment.
-
- string url = new QueryTranslator(whereCall).Translate();
-
- Debug.WriteLine("url: " + url);
+ tagExpression);
- Assert.IsTrue(url.Contains("tag=example"));
+ return containsCall;
}
- private string TranslateComparison(Func<Expression, Expression, Expression> comparator)
+ private Expression BuildDateClause(ParameterExpression lambdaParameter, DateTime date, Func<Expression, Expression, Expression> comparator)
{
- DateTime comparedDate = new DateTime(2008, 1, 1);
-
- // from post in queryable
- // where post.Date == <evaluated comparedDate>
-
- // i.e. queryable.Where(post => post.Date == <evaluated comparedDate>)
-
- // TODO: This should probably be mocked instead of using a meaningless array.
- // See the TODO below - it probably relates.
- IQueryable<Post> queryable = new Post[] { }.AsQueryable<Post>();
-
//IQueryable<Post> queryable = mocks.StrictMock<IQueryable<Post>>();
// ConstantExpression queryableExpression = Expression.Constant(queryable);
// SetupResult.For(queryable.Expression).Return(queryableExpression);
// Debug.WriteLine(queryable.Expression);
- // param represents the parameter to the lambda expression
- ParameterExpression param = Expression.Parameter(typeof(Post), "post");
-
// post.Date
- Expression left = Expression.Property(param, "Date");
+ Expression left = Expression.Property(lambdaParameter, "Date");
// <evaluated comparedDate>
- Expression right = Expression.Constant(comparedDate);
+ Expression right = Expression.Constant(date);
// post.Date == <evaluated comparedDate>
Expression comparison = comparator(left, right);
+ return comparison;
+ }
+
+ private string TranslateQuery(ParameterExpression lambdaParameter, Expression clauses)
+ {
+ // from post in queryable
+ // where post.Date == <evaluated comparedDate>
+
+ // i.e. queryable.Where(post => post.Date == <evaluated comparedDate>)
+
+ // TODO: This should probably be mocked instead of using a meaningless array.
+ // See the TODO below - it probably relates.
+ IQueryable<Post> queryable = new Post[] { }.AsQueryable<Post>();
+
// queryable.Where( ... )
MethodCallExpression whereCall = Expression.Call(
typeof(Queryable),
"Where",
new Type[] { typeof(Post) },
queryable.Expression,
- Expression.Lambda<Func<Post, bool>>(comparison, new ParameterExpression[] { param }));
+ Expression.Lambda<Func<Post, bool>>(clauses, new ParameterExpression[] { lambdaParameter }));
// TODO: Double check that queryable.Expression gives us what we want above
// Debug output says it results in a Post[], but I thought it should be an IQueryable<Post>
// This argument isn't used in the translator at the moment.
QueryTranslator translator = new QueryTranslator(whereCall);
return translator.Translate();
}
}
}
|
maravillas/linq-to-delicious
|
016079e7d1a7f4af3d2c1fa3714157d5689e6411
|
Fix some test comments
|
diff --git a/LinqToDeliciousTest/QueryTranslatorTest.cs b/LinqToDeliciousTest/QueryTranslatorTest.cs
index 052dde5..708b83f 100644
--- a/LinqToDeliciousTest/QueryTranslatorTest.cs
+++ b/LinqToDeliciousTest/QueryTranslatorTest.cs
@@ -1,256 +1,256 @@
using LinqToDelicious;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System.Linq.Expressions;
using IQ;
using Rhino.Mocks;
using System.Linq;
using System;
using System.Diagnostics;
using System.Collections.Generic;
namespace LinqToDeliciousTest
{
/// <summary>
///This is a test class for QueryTranslatorTest and is intended
///to contain all QueryTranslatorTest Unit Tests
///</summary>
[TestClass()]
public class QueryTranslatorTest
{
private MockRepository mocks = new MockRepository();
private TestContext testContextInstance;
/// <summary>
///Gets or sets the test context which provides
///information about and functionality for the current test run.
///</summary>
public TestContext TestContext
{
get
{
return testContextInstance;
}
set
{
testContextInstance = value;
}
}
#region Additional test attributes
//
//You can use the following additional attributes as you write your tests:
//
//Use ClassInitialize to run code before running the first test in the class
//[ClassInitialize()]
//public static void MyClassInitialize(TestContext testContext)
//{
//}
//
//Use ClassCleanup to run code after all tests in a class have run
//[ClassCleanup()]
//public static void MyClassCleanup()
//{
//}
//
//Use TestInitialize to run code before running each test
//[TestInitialize()]
//public void MyTestInitialize()
//{
//}
//
//Use TestCleanup to run code after each test has run
//[TestCleanup()]
//public void MyTestCleanup()
//{
//}
//
#endregion
/// <summary>
/// A test for date equality clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date = 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateEqual()
{
string url = TranslateComparison((left, right) => Expression.Equal(left, right));
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
Assert.IsTrue(url.Contains("todt=1/1/2008 12:00:00 AM"));
}
/// <summary>
/// A test for date less than clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date < 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateLessThan()
{
string url = TranslateComparison((left, right) => Expression.LessThan(left, right));
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("todt=1/1/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("fromdt="));
}
/// <summary>
/// A test for date less than/equal clauses.
///
- /// Query(LinqToDelicious.Post).Where(post => (post.Date <= 1/1/2008 12:00:00 AM))
+ /// Query(LinqToDelicious.Post).Where(post => (post.Date <e;= 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateLessThanOrEqual()
{
string url = TranslateComparison((left, right) => Expression.LessThanOrEqual(left, right));
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("todt=1/2/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("fromdt="));
}
/// <summary>
/// A test for date greater than clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date > 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateGreaterThan()
{
string url = TranslateComparison((left, right) => Expression.GreaterThan(left, right));
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("todt="));
}
/// <summary>
- /// A test for date less than/equal clauses.
+ /// A test for date greater than/equal clauses.
///
- /// Query(LinqToDelicious.Post).Where(post => (post.Date <= 1/1/2008 12:00:00 AM))
+ /// Query(LinqToDelicious.Post).Where(post => (post.Date >e;= 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateGreaterOrThanEqual()
{
string url = TranslateComparison((left, right) => Expression.GreaterThanOrEqual(left, right));
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("fromdt=12/31/2007 12:00:00 AM"));
Assert.IsFalse(url.Contains("todt="));
}
/// <summary>
/// A test for tag clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example")))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereTags()
{
String tagName = "example";
// from post in queryable
// where post.Tags.Contains("example")
// i.e. queryable.Where(post => post.Tags.Contains("Example"))
// TODO: This should probably be mocked instead of using a meaningless array.
// See the TODO below - it probably relates.
IQueryable<Post> queryable = new Post[] { }.AsQueryable<Post>();
// param represents the parameter to the lambda expression
ParameterExpression param = Expression.Parameter(typeof(Post), "post");
// post.Tags
MemberExpression member = Expression.Property(param, "Tags");
// "example"
ConstantExpression tag = Expression.Constant(tagName);
// post.Tags.Contains("Example")
MethodCallExpression containsCall = Expression.Call(
member,
typeof(List<String>).GetMethod("Contains", new Type[] { typeof(String) }),
tag);
// queryable.Where( ... )
MethodCallExpression whereCall = Expression.Call(
typeof(Queryable),
"Where",
new Type[] { typeof(Post) },
queryable.Expression,
Expression.Lambda<Func<Post, bool>>(containsCall, new ParameterExpression[] { param }));
// TODO: Double check that queryable.Expression gives us what we want above
// Debug output says it results in a Post[], but I thought it should be an IQueryable<Post>
// This argument isn't used in the translator at the moment.
string url = new QueryTranslator(whereCall).Translate();
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("tag=example"));
}
private string TranslateComparison(Func<Expression, Expression, Expression> comparator)
{
DateTime comparedDate = new DateTime(2008, 1, 1);
// from post in queryable
// where post.Date == <evaluated comparedDate>
// i.e. queryable.Where(post => post.Date == <evaluated comparedDate>)
// TODO: This should probably be mocked instead of using a meaningless array.
// See the TODO below - it probably relates.
IQueryable<Post> queryable = new Post[] { }.AsQueryable<Post>();
//IQueryable<Post> queryable = mocks.StrictMock<IQueryable<Post>>();
// ConstantExpression queryableExpression = Expression.Constant(queryable);
// SetupResult.For(queryable.Expression).Return(queryableExpression);
// Debug.WriteLine(queryable.Expression);
// param represents the parameter to the lambda expression
ParameterExpression param = Expression.Parameter(typeof(Post), "post");
// post.Date
Expression left = Expression.Property(param, "Date");
// <evaluated comparedDate>
Expression right = Expression.Constant(comparedDate);
// post.Date == <evaluated comparedDate>
Expression comparison = comparator(left, right);
// queryable.Where( ... )
MethodCallExpression whereCall = Expression.Call(
typeof(Queryable),
"Where",
new Type[] { typeof(Post) },
queryable.Expression,
Expression.Lambda<Func<Post, bool>>(comparison, new ParameterExpression[] { param }));
// TODO: Double check that queryable.Expression gives us what we want above
// Debug output says it results in a Post[], but I thought it should be an IQueryable<Post>
// This argument isn't used in the translator at the moment.
QueryTranslator translator = new QueryTranslator(whereCall);
return translator.Translate();
}
}
}
|
maravillas/linq-to-delicious
|
9aa45eae6918a09b875f4e8f869dcae680753122
|
Add support to queries for Tags.Contains()
|
diff --git a/LinqToDelicious/QueryTranslator.cs b/LinqToDelicious/QueryTranslator.cs
index 0024be4..0a3823e 100644
--- a/LinqToDelicious/QueryTranslator.cs
+++ b/LinqToDelicious/QueryTranslator.cs
@@ -1,178 +1,206 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Linq.Expressions;
using System.Diagnostics;
using IQ;
namespace LinqToDelicious
{
class QueryTranslator : ExpressionVisitor, LinqToDelicious.IQueryTranslator
{
public Expression Expression { get; private set; }
private StringBuilder mBuilder;
private Stack<Object> mStack;
- private const String TAG_TOKEN = "tag";
+ private const String TAG_TOKEN = "tags";
private const String DATE_TOKEN = "date";
public QueryTranslator(Expression expression)
{
Expression = expression;
mStack = new Stack<Object>();
}
public String Translate()
{
if (mBuilder == null)
{
mBuilder = new StringBuilder("http://www.example.com/delicious.xml?");
Visit(Expression);
}
return mBuilder.ToString();
}
private static Expression StripQuotes(Expression expression)
{
while (expression.NodeType == ExpressionType.Quote)
{
expression = ((UnaryExpression)expression).Operand;
}
return expression;
}
protected override Expression VisitMethodCall(MethodCallExpression methodCall)
{
Debug.WriteLine("Visiting method " + methodCall);
if (methodCall.Method.DeclaringType == typeof(Queryable) && methodCall.Method.Name == "Where")
{
//mBuilder.Append(string.Format("Where {0}, {1}", methodCall.Arguments[0], methodCall.Arguments[1]));
Debug.WriteLine("Type: " + ((ConstantExpression)methodCall.Arguments[0]).Value);
LambdaExpression lambda = (LambdaExpression)StripQuotes(methodCall.Arguments[1]);
Visit(lambda.Body);
return methodCall;
}
+ else if (methodCall.Method.Name == "Contains")
+ {
+ /*
+ if (token.Equals(TAG_TOKEN))
+ {
+ if (binaryExpression.NodeType == ExpressionType.Equal)
+ {
+ mBuilder.Append("&tag=");
+
+ Visit(binaryExpression.Right);
+
+ mBuilder.Append(mStack.Pop());
+ }
+ else
+ {
+ throw new NotSupportedException(string.Format("The binary operator '{0}' is not supported for tag comparisons", binaryExpression.NodeType));
+ }
+ }
+ */
+ Visit(methodCall.Object);
+ String token = (String)mStack.Pop();
+
+ if (token.Equals(TAG_TOKEN) &&
+ methodCall.Method.DeclaringType == typeof(List<string>))
+ {
+ // Would it be reasonable to assume these conditions are true?
+ if (methodCall.Arguments.Count == 1 &&
+ methodCall.Arguments[0].NodeType == ExpressionType.Constant)
+ {
+ mBuilder.Append("&tag=");
+
+ Visit(methodCall.Arguments[0]);
+
+ mBuilder.Append(mStack.Pop());
+ }
+ else
+ {
+ throw new Exception("Missing or invalid argument to method Contains");
+ }
+ }
+
+ return methodCall;
+ }
+
// Where Query(LinqToDelicious.Post), post => (post.Date > new DateTime(2008, 1, 1))
throw new NotSupportedException(string.Format("The method '{0}' is not supported", methodCall.Method.Name));
}
protected override Expression VisitUnary(UnaryExpression u)
{
Debug.WriteLine("Visiting unary expression " + u);
return u;
}
protected override Expression VisitBinary(BinaryExpression binaryExpression)
{
Debug.WriteLine("Visiting binary expression " + binaryExpression);
Visit(binaryExpression.Left);
Debug.Assert(mStack.Peek().GetType() == typeof(String), "Expected String on the stack, was " + mStack.Peek().GetType());
String token = (String)mStack.Pop();
- if (token.Equals(TAG_TOKEN))
- {
- if (binaryExpression.NodeType == ExpressionType.Equal)
- {
- mBuilder.Append("&tag=");
-
- Visit(binaryExpression.Right);
-
- mBuilder.Append(mStack.Pop());
- }
- else
- {
- throw new NotSupportedException(string.Format("The binary operator '{0}' is not supported for tag comparisons", binaryExpression.NodeType));
- }
- }
-
- else if (token.Equals(DATE_TOKEN))
+ if (token.Equals(DATE_TOKEN))
{
Visit(binaryExpression.Right);
Debug.Assert(mStack.Peek().GetType() == typeof(DateTime), "Expected DateTime on the stack, was " + mStack.Peek().GetType());
DateTime date = (DateTime)mStack.Pop();
switch (binaryExpression.NodeType)
{
case ExpressionType.Equal:
mBuilder.Append(String.Format("&fromdt={0}&todt={0}", date));
break;
case ExpressionType.LessThan:
mBuilder.Append(String.Format("&todt={0}", date));
break;
case ExpressionType.LessThanOrEqual:
date = date.AddDays(1);
mBuilder.Append(String.Format("&todt={0}", date));
break;
case ExpressionType.GreaterThan:
mBuilder.Append(String.Format("&fromdt={0}", date));
break;
case ExpressionType.GreaterThanOrEqual:
date = date.AddDays(-1);
mBuilder.Append(String.Format("&fromdt={0}", date));
break;
default:
throw new NotSupportedException(string.Format("The binary operator '{0}' is not supported for date comparisons", binaryExpression.NodeType));
}
}
return binaryExpression;
}
protected override Expression VisitConstant(ConstantExpression constant)
{
Debug.WriteLine("Visiting constant " + constant);
mStack.Push(constant.Value);
return constant;
}
protected override Expression VisitMemberAccess(MemberExpression member)
{
Debug.WriteLine("Visiting member " + member);
if (member.Expression != null &&
member.Expression.NodeType == ExpressionType.Parameter)
{
Debug.WriteLine("Pushing \"" + member.Member.Name.ToLower() + "\"");
mStack.Push(member.Member.Name.ToLower());
return member;
}
throw new NotSupportedException(string.Format("The member '{0}' is not supported", member.Member.Name));
}
}
}
diff --git a/LinqToDeliciousTest/QueryTranslatorTest.cs b/LinqToDeliciousTest/QueryTranslatorTest.cs
index e7bfbd1..052dde5 100644
--- a/LinqToDeliciousTest/QueryTranslatorTest.cs
+++ b/LinqToDeliciousTest/QueryTranslatorTest.cs
@@ -1,197 +1,256 @@
using LinqToDelicious;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System.Linq.Expressions;
using IQ;
using Rhino.Mocks;
using System.Linq;
using System;
using System.Diagnostics;
+using System.Collections.Generic;
namespace LinqToDeliciousTest
{
/// <summary>
///This is a test class for QueryTranslatorTest and is intended
///to contain all QueryTranslatorTest Unit Tests
///</summary>
[TestClass()]
public class QueryTranslatorTest
{
private MockRepository mocks = new MockRepository();
private TestContext testContextInstance;
/// <summary>
///Gets or sets the test context which provides
///information about and functionality for the current test run.
///</summary>
public TestContext TestContext
{
get
{
return testContextInstance;
}
set
{
testContextInstance = value;
}
}
#region Additional test attributes
//
//You can use the following additional attributes as you write your tests:
//
//Use ClassInitialize to run code before running the first test in the class
//[ClassInitialize()]
//public static void MyClassInitialize(TestContext testContext)
//{
//}
//
//Use ClassCleanup to run code after all tests in a class have run
//[ClassCleanup()]
//public static void MyClassCleanup()
//{
//}
//
//Use TestInitialize to run code before running each test
//[TestInitialize()]
//public void MyTestInitialize()
//{
//}
//
//Use TestCleanup to run code after each test has run
//[TestCleanup()]
//public void MyTestCleanup()
//{
//}
//
#endregion
/// <summary>
/// A test for date equality clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date = 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateEqual()
{
string url = TranslateComparison((left, right) => Expression.Equal(left, right));
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
Assert.IsTrue(url.Contains("todt=1/1/2008 12:00:00 AM"));
}
/// <summary>
/// A test for date less than clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date < 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateLessThan()
{
string url = TranslateComparison((left, right) => Expression.LessThan(left, right));
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("todt=1/1/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("fromdt="));
}
/// <summary>
/// A test for date less than/equal clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date <= 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateLessThanOrEqual()
{
string url = TranslateComparison((left, right) => Expression.LessThanOrEqual(left, right));
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("todt=1/2/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("fromdt="));
}
/// <summary>
/// A test for date greater than clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date > 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateGreaterThan()
{
string url = TranslateComparison((left, right) => Expression.GreaterThan(left, right));
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("fromdt=1/1/2008 12:00:00 AM"));
Assert.IsFalse(url.Contains("todt="));
}
/// <summary>
/// A test for date greater than/equal clauses.
///
/// Query(LinqToDelicious.Post).Where(post => (post.Date >= 1/1/2008 12:00:00 AM))
///</summary>
[TestMethod()]
[DeploymentItem("LinqToDelicious.dll")]
public void WhereDateGreaterThanOrEqual()
{
string url = TranslateComparison((left, right) => Expression.GreaterThanOrEqual(left, right));
Debug.WriteLine("url: " + url);
Assert.IsTrue(url.Contains("fromdt=12/31/2007 12:00:00 AM"));
Assert.IsFalse(url.Contains("todt="));
}
+ /// <summary>
+ /// A test for tag clauses.
+ ///
+ /// Query(LinqToDelicious.Post).Where(post => (post.Tags.Contains("example")))
+ ///</summary>
+ [TestMethod()]
+ [DeploymentItem("LinqToDelicious.dll")]
+ public void WhereTags()
+ {
+ String tagName = "example";
+
+ // from post in queryable
+ // where post.Tags.Contains("example")
+
+ // i.e. queryable.Where(post => post.Tags.Contains("example"))
+
+ // TODO: This should probably be mocked instead of using a meaningless array.
+ // See the TODO below - it probably relates.
+ IQueryable<Post> queryable = new Post[] { }.AsQueryable<Post>();
+
+ // param represents the parameter to the lambda expression
+ ParameterExpression param = Expression.Parameter(typeof(Post), "post");
+
+ // post.Tags
+ MemberExpression member = Expression.Property(param, "Tags");
+
+ // "example"
+ ConstantExpression tag = Expression.Constant(tagName);
+
+ // post.Tags.Contains("example")
+ MethodCallExpression containsCall = Expression.Call(
+ member,
+ typeof(List<String>).GetMethod("Contains", new Type[] { typeof(String) }),
+ tag);
+
+ // queryable.Where( ... )
+ MethodCallExpression whereCall = Expression.Call(
+ typeof(Queryable),
+ "Where",
+ new Type[] { typeof(Post) },
+ queryable.Expression,
+ Expression.Lambda<Func<Post, bool>>(containsCall, new ParameterExpression[] { param }));
+
+ // TODO: Double check that queryable.Expression gives us what we want above
+ // Debug output says it results in a Post[], but I thought it should be an IQueryable<Post>
+ // This argument isn't used in the translator at the moment.
+
+ string url = new QueryTranslator(whereCall).Translate();
+
+ Debug.WriteLine("url: " + url);
+
+ Assert.IsTrue(url.Contains("tag=example"));
+ }
+
private string TranslateComparison(Func<Expression, Expression, Expression> comparator)
{
DateTime comparedDate = new DateTime(2008, 1, 1);
// from post in queryable
// where post.Date == <evaluated comparedDate>
// i.e. queryable.Where(post => post.Date == <evaluated comparedDate>)
// TODO: This should probably be mocked instead of using a meaningless array.
// See the TODO below - it probably relates.
IQueryable<Post> queryable = new Post[] { }.AsQueryable<Post>();
//IQueryable<Post> queryable = mocks.StrictMock<IQueryable<Post>>();
// ConstantExpression queryableExpression = Expression.Constant(queryable);
// SetupResult.For(queryable.Expression).Return(queryableExpression);
// Debug.WriteLine(queryable.Expression);
// param represents the parameter to the lambda expression
ParameterExpression param = Expression.Parameter(typeof(Post), "post");
- // post.Date == <evaluated comparedDate>
+ // post.Date
Expression left = Expression.Property(param, "Date");
+
+ // <evaluated comparedDate>
Expression right = Expression.Constant(comparedDate);
+
+ // post.Date == <evaluated comparedDate>
Expression comparison = comparator(left, right);
// queryable.Where( ... )
MethodCallExpression whereCall = Expression.Call(
typeof(Queryable),
"Where",
new Type[] { typeof(Post) },
queryable.Expression,
Expression.Lambda<Func<Post, bool>>(comparison, new ParameterExpression[] { param }));
// TODO: Double check that queryable.Expression gives us what we want above
// Debug output says it results in a Post[], but I thought it should be an IQueryable<Post>
// This argument isn't used in the translator at the moment.
QueryTranslator translator = new QueryTranslator(whereCall);
return translator.Translate();
}
}
}
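
With the Contains handling added above, a tag filter written as ordinary LINQ translates into a &tag= clause on the generated URL, exactly as the WhereTags test asserts. The sketch below is illustrative only, not part of the commit: it assumes the QueryTranslator type is visible to the caller (as it is to the test project) and builds the expression tree by hand the way the test does; a real query provider would receive the same tree from a "where post.Tags.Contains("example")" clause.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using LinqToDelicious;

class TagQuerySketch
{
    static void Main()
    {
        IQueryable<Post> posts = new Post[] { }.AsQueryable();

        // post => post.Tags.Contains("example")
        ParameterExpression post = Expression.Parameter(typeof(Post), "post");
        MethodCallExpression contains = Expression.Call(
            Expression.Property(post, "Tags"),
            typeof(List<string>).GetMethod("Contains", new[] { typeof(string) }),
            Expression.Constant("example"));

        // posts.Where(post => post.Tags.Contains("example"))
        MethodCallExpression where = Expression.Call(
            typeof(Queryable), "Where", new[] { typeof(Post) },
            posts.Expression,
            Expression.Lambda<Func<Post, bool>>(contains, post));

        // Expected output: http://www.example.com/delicious.xml?&tag=example
        Console.WriteLine(new QueryTranslator(where).Translate());
    }
}

Note that the translated query string begins with ?& because the builder seeds the URL with a trailing ? and every clause prepends its own &; that is harmless against the placeholder endpoint but worth remembering when the real Delicious API URL is substituted.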
|
maravillas/linq-to-delicious
|
6684ee45f5e65660730cf23e2828ee713d361c9a
|
Remove old .vsmdi files and ignore future .vsmdi files
|
diff --git a/.gitignore b/.gitignore
index 2f197a2..cd36eda 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,3 @@
TestResults/*
*.suo
+*.vsmdi
\ No newline at end of file
diff --git a/tasty1.vsmdi b/tasty1.vsmdi
deleted file mode 100644
index 8f470af..0000000
--- a/tasty1.vsmdi
+++ /dev/null
@@ -1,6 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<TestLists xmlns="http://microsoft.com/schemas/VisualStudio/TeamTest/2006">
- <TestList name="Lists of Tests" id="8c43106b-9dc1-4907-a29f-aa66a61bf5b6">
- <RunConfiguration id="d389d6aa-7acc-4b36-8108-0b66b41cc677" name="Local Test Run" storage="localtestrun.testrunconfig" type="Microsoft.VisualStudio.TestTools.Common.TestRunConfiguration, Microsoft.VisualStudio.QualityTools.Common, PublicKeyToken=b03f5f7f11d50a3a" />
- </TestList>
-</TestLists>
\ No newline at end of file
diff --git a/tasty2.vsmdi b/tasty2.vsmdi
deleted file mode 100644
index 8f470af..0000000
--- a/tasty2.vsmdi
+++ /dev/null
@@ -1,6 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<TestLists xmlns="http://microsoft.com/schemas/VisualStudio/TeamTest/2006">
- <TestList name="Lists of Tests" id="8c43106b-9dc1-4907-a29f-aa66a61bf5b6">
- <RunConfiguration id="d389d6aa-7acc-4b36-8108-0b66b41cc677" name="Local Test Run" storage="localtestrun.testrunconfig" type="Microsoft.VisualStudio.TestTools.Common.TestRunConfiguration, Microsoft.VisualStudio.QualityTools.Common, PublicKeyToken=b03f5f7f11d50a3a" />
- </TestList>
-</TestLists>
\ No newline at end of file
|
maravillas/linq-to-delicious
|
c70369792008c992c00403dac849a25360a721ed
|
Removed old .vsmdi files and ignored future .vsmdi files
|
diff --git a/.gitignore b/.gitignore
index 2f197a2..cd36eda 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,3 @@
TestResults/*
*.suo
+*.vsmdi
\ No newline at end of file
diff --git a/tasty1.vsmdi b/tasty1.vsmdi
deleted file mode 100644
index 8f470af..0000000
--- a/tasty1.vsmdi
+++ /dev/null
@@ -1,6 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<TestLists xmlns="http://microsoft.com/schemas/VisualStudio/TeamTest/2006">
- <TestList name="Lists of Tests" id="8c43106b-9dc1-4907-a29f-aa66a61bf5b6">
- <RunConfiguration id="d389d6aa-7acc-4b36-8108-0b66b41cc677" name="Local Test Run" storage="localtestrun.testrunconfig" type="Microsoft.VisualStudio.TestTools.Common.TestRunConfiguration, Microsoft.VisualStudio.QualityTools.Common, PublicKeyToken=b03f5f7f11d50a3a" />
- </TestList>
-</TestLists>
\ No newline at end of file
diff --git a/tasty2.vsmdi b/tasty2.vsmdi
deleted file mode 100644
index 8f470af..0000000
--- a/tasty2.vsmdi
+++ /dev/null
@@ -1,6 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<TestLists xmlns="http://microsoft.com/schemas/VisualStudio/TeamTest/2006">
- <TestList name="Lists of Tests" id="8c43106b-9dc1-4907-a29f-aa66a61bf5b6">
- <RunConfiguration id="d389d6aa-7acc-4b36-8108-0b66b41cc677" name="Local Test Run" storage="localtestrun.testrunconfig" type="Microsoft.VisualStudio.TestTools.Common.TestRunConfiguration, Microsoft.VisualStudio.QualityTools.Common, PublicKeyToken=b03f5f7f11d50a3a" />
- </TestList>
-</TestLists>
\ No newline at end of file
|
maravillas/linq-to-delicious
|
941aa7dbeb2f1b471d548ca78452839a50b3e71d
|
Fix ignoring of TestResults directory
|
diff --git a/.gitignore b/.gitignore
index 380f45e..2f197a2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,2 @@
-TestResults/**/*
+TestResults/*
*.suo
|
maravillas/linq-to-delicious
|
566443579acda8c6c0e5fe864e9b173290008773
|
Add Post#GetHashCode and fix Post#Equals
|
diff --git a/LinqToDelicious/Post.cs b/LinqToDelicious/Post.cs
index 7709aa3..527e213 100644
--- a/LinqToDelicious/Post.cs
+++ b/LinqToDelicious/Post.cs
@@ -1,66 +1,77 @@
using System;
using System.Linq;
using System.Text;
using System.Collections.Generic;
using System.Globalization;
+using System.Diagnostics;
namespace LinqToDelicious
{
public class Post
{
public String Address { get; private set; }
public String Hash { get; private set; }
public String Description { get; private set; }
public List<String> Tags { get; private set; }
public String Extended { get; private set; }
public DateTime Date { get; private set; }
public String Meta { get; private set; }
public Post(String address, String hash, String description, String tags, String extended, String date) :
this(address, hash, description, tags, extended, date, "")
{
}
public Post(String address, String hash, String description, String tags, String extended, String date, String meta)
{
Address = address;
Hash = hash;
Description = description;
Tags = new List<String>(tags.Split(' '));
Extended = extended;
Date = DateTime.ParseExact(date, "yyyy'-'MM'-'dd'T'HH':'mm':'ss'Z'", new CultureInfo("en-US"));
Meta = meta;
}
public override string ToString()
{
return String.Format("Post [address={0} hash={1} description={2} tags={3} extended={4} date={5} meta={6}]",
Address, Hash, Description, Tags, Extended, Date, Meta);
}
public override bool Equals(object obj)
{
Post post = obj as Post;
if ((System.Object)post == null)
{
return false;
}
- if (post == obj)
+ if (post == this)
{
return true;
}
return Address.Equals(post.Address) &&
Hash.Equals(post.Hash) &&
Description.Equals(post.Description) &&
Tags.SequenceEqual(post.Tags) &&
Extended.Equals(post.Extended) &&
Date.Equals(post.Date) &&
Meta.Equals(post.Meta);
+ }
+ public override int GetHashCode()
+ {
+ return Address.GetHashCode() ^
+ Hash.GetHashCode() ^
+ Description.GetHashCode() ^
+ string.Join(" ", Tags.ToArray()).GetHashCode() ^
+ Extended.GetHashCode() ^
+ Date.GetHashCode() ^
+ Meta.GetHashCode();
}
}
}
diff --git a/LinqToDeliciousTest/PostTest.cs b/LinqToDeliciousTest/PostTest.cs
index 40331d8..eafb0b7 100644
--- a/LinqToDeliciousTest/PostTest.cs
+++ b/LinqToDeliciousTest/PostTest.cs
@@ -1,90 +1,166 @@
using System;
using System.Text;
using System.Collections.Generic;
using System.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using LinqToDelicious;
namespace LinqToDeliciousTest
{
/// <summary>
/// Tests for Post
/// </summary>
[TestClass]
public class PostTest
{
public PostTest()
{
}
private TestContext testContextInstance;
/// <summary>
///Gets or sets the test context which provides
///information about and functionality for the current test run.
///</summary>
public TestContext TestContext
{
get
{
return testContextInstance;
}
set
{
testContextInstance = value;
}
}
#region Additional test attributes
//
// You can use the following additional attributes as you write your tests:
//
// Use ClassInitialize to run code before running the first test in the class
// [ClassInitialize()]
// public static void MyClassInitialize(TestContext testContext) { }
//
// Use ClassCleanup to run code after all tests in a class have run
// [ClassCleanup()]
// public static void MyClassCleanup() { }
//
// Use TestInitialize to run code before running each test
// [TestInitialize()]
// public void MyTestInitialize() { }
//
// Use TestCleanup to run code after each test has run
// [TestCleanup()]
// public void MyTestCleanup() { }
//
#endregion
[TestMethod]
public void Create()
{
String address = "http://example.com/";
String hash = "a5a6f3d28d8dd549f3cad39fb0b34104";
String description = "Example web page";
String tags = "example testing domain test";
String date = "2008-12-12T07:45:52Z";
String extended = "These domain names are reserved for use in documentation and are not available for registration. See RFC 2606, Section 3.";
String meta = "fa2a46d239ad4f089c3ce7961d958b2e";
Post post = new Post(address, hash, description, tags, extended, date, meta);
Assert.AreEqual(address, post.Address);
Assert.AreEqual(hash, post.Hash);
Assert.AreEqual(description, post.Description);
Assert.AreEqual(4, post.Tags.Count);
Assert.IsTrue(post.Tags.Contains("example"));
Assert.IsTrue(post.Tags.Contains("testing"));
Assert.IsTrue(post.Tags.Contains("domain"));
Assert.IsTrue(post.Tags.Contains("test"));
Assert.AreEqual(new DateTime(2008, 12, 12, 7, 45, 52), post.Date);
Assert.AreEqual(extended, post.Extended);
Assert.AreEqual(meta, post.Meta);
}
+
+ [TestMethod]
+ public void Equal()
+ {
+ String address = "http://example.com/";
+ String hash = "a5a6f3d28d8dd549f3cad39fb0b34104";
+ String description = "Example web page";
+ String tags = "example testing domain test";
+ String date = "2008-12-12T07:45:52Z";
+ String extended = "These domain names are reserved for use in documentation and are not available for registration. See RFC 2606, Section 3.";
+ String meta = "fa2a46d239ad4f089c3ce7961d958b2e";
+
+ Post post1 = new Post(address, hash, description, tags, extended, date, meta);
+ Post post2 = new Post(address, hash, description, tags, extended, date, meta);
+
+ Assert.AreEqual(post1, post2);
+ }
+
+ [TestMethod]
+ public void NotEqual()
+ {
+ String address = "http://example.com/";
+ String hash = "a5a6f3d28d8dd549f3cad39fb0b34104";
+ String description = "Example web page";
+ String tags = "example testing domain test";
+ String date = "2008-12-12T07:45:52Z";
+ String extended = "These domain names are reserved for use in documentation and are not available for registration. See RFC 2606, Section 3.";
+ String meta = "fa2a46d239ad4f089c3ce7961d958b2e";
+
+ Post post1 = new Post(address, hash, description, tags, extended, date, meta);
+ Post post2 = new Post("address", hash, description, tags, extended, date, meta);
+ Post post3 = new Post(address, "hash", description, tags, extended, date, meta);
+ Post post4 = new Post(address, hash, "description", tags, extended, date, meta);
+ Post post5 = new Post(address, hash, description, "tags", extended, date, meta);
+ Post post6 = new Post(address, hash, description, tags, "extended", date, meta);
+ Post post7 = new Post(address, hash, description, tags, extended, "1999-10-10T01:01:01Z", meta);
+ Post post8 = new Post(address, hash, description, tags, extended, date, "meta");
+
+ Assert.AreNotEqual(post1, post2);
+ Assert.AreNotEqual(post1, post3);
+ Assert.AreNotEqual(post1, post4);
+ Assert.AreNotEqual(post1, post5);
+ Assert.AreNotEqual(post1, post6);
+ Assert.AreNotEqual(post1, post7);
+ Assert.AreNotEqual(post1, post8);
+ }
+
+ [TestMethod]
+ public void HashCode()
+ {
+ String address = "http://example.com/";
+ String hash = "a5a6f3d28d8dd549f3cad39fb0b34104";
+ String description = "Example web page";
+ String tags = "example testing domain test";
+ String date = "2008-12-12T07:45:52Z";
+ String extended = "These domain names are reserved for use in documentation and are not available for registration. See RFC 2606, Section 3.";
+ String meta = "fa2a46d239ad4f089c3ce7961d958b2e";
+
+ Post post1 = new Post(address, hash, description, tags, extended, date, meta);
+ Post post2 = new Post(address, hash, description, tags, extended, date, meta);
+ Post post3 = new Post("address", hash, description, tags, extended, date, meta);
+ Post post4 = new Post(address, "hash", description, tags, extended, date, meta);
+ Post post5 = new Post(address, hash, "description", tags, extended, date, meta);
+ Post post6 = new Post(address, hash, description, "tags", extended, date, meta);
+ Post post7 = new Post(address, hash, description, tags, "extended", date, meta);
+ Post post8 = new Post(address, hash, description, tags, extended, "1999-10-10T01:01:01Z", meta);
+ Post post9 = new Post(address, hash, description, tags, extended, date, "meta");
+
+ Assert.AreEqual(post1.GetHashCode(), post2.GetHashCode());
+ Assert.AreNotEqual(post1.GetHashCode(), post3.GetHashCode());
+ Assert.AreNotEqual(post1.GetHashCode(), post4.GetHashCode());
+ Assert.AreNotEqual(post1.GetHashCode(), post5.GetHashCode());
+ Assert.AreNotEqual(post1.GetHashCode(), post6.GetHashCode());
+ Assert.AreNotEqual(post1.GetHashCode(), post7.GetHashCode());
+ Assert.AreNotEqual(post1.GetHashCode(), post8.GetHashCode());
+ }
}
}
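
The GetHashCode override added above XOR-combines the same members that Equals compares, which is what the Equal and HashCode tests rely on: value-equal posts must report equal hash codes so they behave as a single key in hash-based collections. A minimal sketch using the same fixture values as the tests, assuming only that the LinqToDelicious assembly is referenced:

using System;
using System.Collections.Generic;
using LinqToDelicious;

class PostHashingSketch
{
    static void Main()
    {
        Post a = new Post("http://example.com/", "a5a6f3d28d8dd549f3cad39fb0b34104",
                          "Example web page", "example testing domain test",
                          "extended text", "2008-12-12T07:45:52Z",
                          "fa2a46d239ad4f089c3ce7961d958b2e");
        Post b = new Post("http://example.com/", "a5a6f3d28d8dd549f3cad39fb0b34104",
                          "Example web page", "example testing domain test",
                          "extended text", "2008-12-12T07:45:52Z",
                          "fa2a46d239ad4f089c3ce7961d958b2e");

        HashSet<Post> set = new HashSet<Post> { a, b };

        Console.WriteLine(a.Equals(b));                         // True
        Console.WriteLine(a.GetHashCode() == b.GetHashCode());  // True
        Console.WriteLine(set.Count);                           // 1
    }
}

XOR is a blunt combiner, so two posts that swap equal-hashing field values would collide, but the Equals/GetHashCode contract only requires that equal posts hash equally, which this implementation guarantees.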
|
maravillas/linq-to-delicious
|
073d768602381a631a90b20d2a5e747f288e0317
|
Add *.suo
|
diff --git a/.gitignore b/.gitignore
index 74a0cd4..380f45e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,2 @@
TestResults/**/*
+*.suo
|
maravillas/linq-to-delicious
|
6c1b08b08c1409ca60e42f93f084290ffa318a76
|
Add README
|
diff --git a/README b/README
new file mode 100644
index 0000000..21e4ba7
--- /dev/null
+++ b/README
@@ -0,0 +1,3 @@
+== LINQ to Delicious README
+
+LINQ to Delicious is a C# library that provides a LINQ interface to the Delicious API.
\ No newline at end of file
|
maravillas/linq-to-delicious
|
47a0d720fdd67f0511415c30204fe4071e823d0d
|
Add .gitignore files
|
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..74a0cd4
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+TestResults/**/*
diff --git a/IQToolkit/.gitignore b/IQToolkit/.gitignore
new file mode 100644
index 0000000..69b3799
--- /dev/null
+++ b/IQToolkit/.gitignore
@@ -0,0 +1,2 @@
+bin/**/*
+obj/**/*
diff --git a/LinqToDelicious/.gitignore b/LinqToDelicious/.gitignore
new file mode 100644
index 0000000..69b3799
--- /dev/null
+++ b/LinqToDelicious/.gitignore
@@ -0,0 +1,2 @@
+bin/**/*
+obj/**/*
diff --git a/LinqToDeliciousTest/.gitignore b/LinqToDeliciousTest/.gitignore
new file mode 100644
index 0000000..69b3799
--- /dev/null
+++ b/LinqToDeliciousTest/.gitignore
@@ -0,0 +1,2 @@
+bin/**/*
+obj/**/*
diff --git a/tasty/.gitignore b/tasty/.gitignore
new file mode 100644
index 0000000..69b3799
--- /dev/null
+++ b/tasty/.gitignore
@@ -0,0 +1,2 @@
+bin/**/*
+obj/**/*
|
sinisterchipmunk/tiny_mce
|
39403af28ddba6d9763c78faec39ab315a2987ee
|
Added a "tiny_mce_init" method so you can initialize multiple times (if that's necessary...)
|
diff --git a/app/helpers/application_helper.rb b/app/helpers/application_helper.rb
index 216859b..8074474 100644
--- a/app/helpers/application_helper.rb
+++ b/app/helpers/application_helper.rb
@@ -1,25 +1,29 @@
module ApplicationHelper
def tiny_mce(options = {})
+ r = (@tiny_mce_included ? '' : javascript_include_tag("tiny_mce/tiny_mce")) + "\n" +
+ #javascript_tag("tinyMCE_GZ.init(#{gz.to_json})") + "\n" + # compressor for performance
+ javascript_tag(tiny_mce_init(options))
+ @tiny_mce_included ||= true
+ r
+ end
+
+ def tiny_mce_init(options = {})
config = TinyMce.configuration.merge(options)
# gz = {
# :themes => config[:theme],
# :languages => config[:languages],
# :disk_cache => true,
# :debug => false,
# :plugins => config[:plugins].join(',')
# }
mce = config.dup
mce[:plugins] = mce[:plugins].join(',') if mce[:plugins].kind_of?(Array)
buttons = mce.delete(:buttons)
buttons.length.times do |i|
mce["theme_advanced_buttons#{i+1}"] = buttons[i].join(',') unless mce["theme_advanced_buttons#{i+1}"]
end if buttons
-
- r = (@tiny_mce_included ? '' : javascript_include_tag("tiny_mce/tiny_mce")) + "\n" +
- #javascript_tag("tinyMCE_GZ.init(#{gz.to_json})") + "\n" + # compressor for performance
- javascript_tag("tinyMCE.init(#{mce.to_json});")
- @tiny_mce_included ||= true
- r
+
+ "tinyMCE.init(#{mce.to_json});"
end
end
|
adityavm/general
|
6be06403a766743370625ffb4049334f174dad86
|
sync commit
|
diff --git a/checkInt.py b/checkInt.py
new file mode 100644
index 0000000..dac3be3
--- /dev/null
+++ b/checkInt.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import re, requests, time
+
+def remove_html_tags(data):
+ p = re.compile(r'<.*?>')
+ return p.sub('', data)
+
+url = "http://192.168.1.1/status/status_deviceinfo.htm"
+
+r = requests.get(url, auth=("admin", "password"))
+output = "adsl status:\nconnected" in remove_html_tags(r.text.lower())
+if output:
+    print("✓")
+else:
+    print("✗")
|