Vitalik
10 months ago
2336 changed files with 5 additions and 399907 deletions
@@ -1,27 +0,0 @@
#!/usr/bin/env ruby_executable_hooks
#
# This file was generated by RubyGems.
#
# The application 'fuzzy_match' is installed as part of a gem, and
# this file is here to facilitate running it.
#

require 'rubygems'

version = ">= 0.a"

str = ARGV.first
if str
  str = str.b[/\A_(.*)_\z/, 1]
  if str and Gem::Version.correct?(str)
    version = str
    ARGV.shift
  end
end

if Gem.respond_to?(:activate_bin_path)
  load Gem.activate_bin_path('fuzzy_match', 'fuzzy_match', version)
else
  gem "fuzzy_match", version
  load Gem.bin_path("fuzzy_match", "fuzzy_match", version)
end
@@ -1,27 +0,0 @@
#!/usr/bin/env ruby_executable_hooks
#
# This file was generated by RubyGems.
#
# The application 'httpclient' is installed as part of a gem, and
# this file is here to facilitate running it.
#

require 'rubygems'

version = ">= 0.a"

str = ARGV.first
if str
  str = str.b[/\A_(.*)_\z/, 1]
  if str and Gem::Version.correct?(str)
    version = str
    ARGV.shift
  end
end

if Gem.respond_to?(:activate_bin_path)
  load Gem.activate_bin_path('httpclient', 'httpclient', version)
else
  gem "httpclient", version
  load Gem.bin_path("httpclient", "httpclient", version)
end
@@ -1,27 +0,0 @@
#!/usr/bin/env ruby_executable_hooks
#
# This file was generated by RubyGems.
#
# The application 'cocoapods' is installed as part of a gem, and
# this file is here to facilitate running it.
#

require 'rubygems'

version = ">= 0.a"

str = ARGV.first
if str
  str = str.b[/\A_(.*)_\z/, 1]
  if str and Gem::Version.correct?(str)
    version = str
    ARGV.shift
  end
end

if Gem.respond_to?(:activate_bin_path)
  load Gem.activate_bin_path('cocoapods', 'pod', version)
else
  gem "cocoapods", version
  load Gem.bin_path("cocoapods", "pod", version)
end
@@ -1,23 +0,0 @@
#!/usr/bin/env ruby

title = "ruby #{ARGV*" "}"
$0 = ARGV.shift
Process.setproctitle(title) if Process.methods.include?(:setproctitle)

require 'rubygems'
begin
  require 'executable-hooks/hooks'
  Gem::ExecutableHooks.run($0)
rescue LoadError
  warn "unable to load executable-hooks/hooks" if ENV.key?('ExecutableHooks_DEBUG')
end unless $0.end_with?('/executable-hooks-uninstaller')

content = File.read($0)

if (index = content.index("\n#!ruby\n")) && index > 0
  skipped_content = content.slice!(0..index)
  start_line = skipped_content.count("\n") + 1
  eval content, binding, $0, start_line
else
  eval content, binding, $0
end
@@ -1,27 +0,0 @@
#!/usr/bin/env ruby_executable_hooks
#
# This file was generated by RubyGems.
#
# The application 'cocoapods' is installed as part of a gem, and
# this file is here to facilitate running it.
#

require 'rubygems'

version = ">= 0.a"

str = ARGV.first
if str
  str = str.b[/\A_(.*)_\z/, 1]
  if str and Gem::Version.correct?(str)
    version = str
    ARGV.shift
  end
end

if Gem.respond_to?(:activate_bin_path)
  load Gem.activate_bin_path('cocoapods', 'sandbox-pod', version)
else
  gem "cocoapods", version
  load Gem.bin_path("cocoapods", "sandbox-pod", version)
end
@@ -1,27 +0,0 @@
#!/usr/bin/env ruby_executable_hooks
#
# This file was generated by RubyGems.
#
# The application 'xcodeproj' is installed as part of a gem, and
# this file is here to facilitate running it.
#

require 'rubygems'

version = ">= 0.a"

str = ARGV.first
if str
  str = str.b[/\A_(.*)_\z/, 1]
  if str and Gem::Version.correct?(str)
    version = str
    ARGV.shift
  end
end

if Gem.respond_to?(:activate_bin_path)
  load Gem.activate_bin_path('xcodeproj', 'xcodeproj', version)
else
  gem "xcodeproj", version
  load Gem.bin_path("xcodeproj", "xcodeproj", version)
end
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
File diff suppressed because it is too large
@@ -1,229 +0,0 @@
|
||||
"pkg-config --exists libffi" |
||||
| pkg-config --libs libffi |
||||
=> "-lffi\n" |
||||
"gcc -o conftest -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/x86_64-darwin21 -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/ruby/backward -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0 -I. -I/usr/local/opt/libyaml/include -I/usr/local/opt/libksba/include -I/usr/local/opt/readline/include -I/usr/local/opt/zlib/include -I/usr/local/opt/openssl@1.1/include -D_XOPEN_SOURCE -D_DARWIN_C_SOURCE -D_DARWIN_UNLIMITED_SELECT -D_REENTRANT -g -O2 -fno-common -pipe conftest.c -L. -L/Users/mac/.rvm/rubies/ruby-2.7.4/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -L. -fstack-protector-strong -L/usr/local/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -lruby.2.7 " |
||||
checked program was: |
||||
/* begin */ |
||||
1: #include "ruby.h" |
||||
2: |
||||
3: int main(int argc, char **argv) |
||||
4: { |
||||
5: return !!argv[argc]; |
||||
6: } |
||||
/* end */ |
||||
|
||||
"gcc -o conftest -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/x86_64-darwin21 -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/ruby/backward -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0 -I. -I/usr/local/opt/libyaml/include -I/usr/local/opt/libksba/include -I/usr/local/opt/readline/include -I/usr/local/opt/zlib/include -I/usr/local/opt/openssl@1.1/include -D_XOPEN_SOURCE -D_DARWIN_C_SOURCE -D_DARWIN_UNLIMITED_SELECT -D_REENTRANT -g -O2 -fno-common -pipe conftest.c -L. -L/Users/mac/.rvm/rubies/ruby-2.7.4/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -L. -fstack-protector-strong -L/usr/local/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -lruby.2.7 -lffi " |
||||
checked program was: |
||||
/* begin */ |
||||
1: #include "ruby.h" |
||||
2: |
||||
3: int main(int argc, char **argv) |
||||
4: { |
||||
5: return !!argv[argc]; |
||||
6: } |
||||
/* end */ |
||||
|
||||
| pkg-config --cflags-only-I libffi |
||||
=> "-I/Library/Developer/CommandLineTools/SDKs/MacOSX13.sdk/usr/include/ffi\n" |
||||
| pkg-config --cflags-only-other libffi |
||||
=> "\n" |
||||
| pkg-config --libs-only-l libffi |
||||
=> "-lffi\n" |
||||
package configuration for libffi |
||||
incflags: -I/Library/Developer/CommandLineTools/SDKs/MacOSX13.sdk/usr/include/ffi |
||||
cflags: |
||||
ldflags: |
||||
libs: -lffi |
||||
|
||||
have_library: checking for ffi_prep_closure_loc() in -lffi... -------------------- yes |
||||
|
||||
"gcc -o conftest -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/x86_64-darwin21 -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/ruby/backward -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0 -I. -I/Library/Developer/CommandLineTools/SDKs/MacOSX13.sdk/usr/include/ffi -I/usr/local/opt/libyaml/include -I/usr/local/opt/libksba/include -I/usr/local/opt/readline/include -I/usr/local/opt/zlib/include -I/usr/local/opt/openssl@1.1/include -D_XOPEN_SOURCE -D_DARWIN_C_SOURCE -D_DARWIN_UNLIMITED_SELECT -D_REENTRANT -g -O2 -fno-common -pipe conftest.c -L. -L/Users/mac/.rvm/rubies/ruby-2.7.4/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -L. -fstack-protector-strong -L/usr/local/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -lffi -lruby.2.7 -lffi -lffi " |
||||
checked program was: |
||||
/* begin */ |
||||
1: #include "ruby.h" |
||||
2: |
||||
3: #include <ffi.h> |
||||
4: |
||||
5: /*top*/ |
||||
6: extern int t(void); |
||||
7: int main(int argc, char **argv) |
||||
8: { |
||||
9: if (argc > 1000000) { |
||||
10: int (* volatile tp)(void)=(int (*)(void))&t; |
||||
11: printf("%d", (*tp)()); |
||||
12: } |
||||
13: |
||||
14: return !!argv[argc]; |
||||
15: } |
||||
16: int t(void) { void ((*volatile p)()); p = (void ((*)()))ffi_prep_closure_loc; return !p; } |
||||
/* end */ |
||||
|
||||
-------------------- |
||||
|
||||
have_func: checking for ffi_prep_cif_var()... -------------------- yes |
||||
|
||||
"gcc -o conftest -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/x86_64-darwin21 -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/ruby/backward -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0 -I. -I/Library/Developer/CommandLineTools/SDKs/MacOSX13.sdk/usr/include/ffi -I/usr/local/opt/libyaml/include -I/usr/local/opt/libksba/include -I/usr/local/opt/readline/include -I/usr/local/opt/zlib/include -I/usr/local/opt/openssl@1.1/include -D_XOPEN_SOURCE -D_DARWIN_C_SOURCE -D_DARWIN_UNLIMITED_SELECT -D_REENTRANT -g -O2 -fno-common -pipe conftest.c -L. -L/Users/mac/.rvm/rubies/ruby-2.7.4/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -L. -fstack-protector-strong -L/usr/local/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -lffi -lffi -lruby.2.7 -lffi -lffi " |
||||
conftest.c:14:57: error: use of undeclared identifier 'ffi_prep_cif_var' |
||||
int t(void) { void ((*volatile p)()); p = (void ((*)()))ffi_prep_cif_var; return !p; } |
||||
^ |
||||
1 error generated. |
||||
checked program was: |
||||
/* begin */ |
||||
1: #include "ruby.h" |
||||
2: |
||||
3: /*top*/ |
||||
4: extern int t(void); |
||||
5: int main(int argc, char **argv) |
||||
6: { |
||||
7: if (argc > 1000000) { |
||||
8: int (* volatile tp)(void)=(int (*)(void))&t; |
||||
9: printf("%d", (*tp)()); |
||||
10: } |
||||
11: |
||||
12: return !!argv[argc]; |
||||
13: } |
||||
14: int t(void) { void ((*volatile p)()); p = (void ((*)()))ffi_prep_cif_var; return !p; } |
||||
/* end */ |
||||
|
||||
"gcc -o conftest -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/x86_64-darwin21 -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/ruby/backward -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0 -I. -I/Library/Developer/CommandLineTools/SDKs/MacOSX13.sdk/usr/include/ffi -I/usr/local/opt/libyaml/include -I/usr/local/opt/libksba/include -I/usr/local/opt/readline/include -I/usr/local/opt/zlib/include -I/usr/local/opt/openssl@1.1/include -D_XOPEN_SOURCE -D_DARWIN_C_SOURCE -D_DARWIN_UNLIMITED_SELECT -D_REENTRANT -g -O2 -fno-common -pipe conftest.c -L. -L/Users/mac/.rvm/rubies/ruby-2.7.4/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -L. -fstack-protector-strong -L/usr/local/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -lffi -lffi -lruby.2.7 -lffi -lffi " |
||||
checked program was: |
||||
/* begin */ |
||||
1: #include "ruby.h" |
||||
2: |
||||
3: /*top*/ |
||||
4: extern int t(void); |
||||
5: int main(int argc, char **argv) |
||||
6: { |
||||
7: if (argc > 1000000) { |
||||
8: int (* volatile tp)(void)=(int (*)(void))&t; |
||||
9: printf("%d", (*tp)()); |
||||
10: } |
||||
11: |
||||
12: return !!argv[argc]; |
||||
13: } |
||||
14: extern void ffi_prep_cif_var(); |
||||
15: int t(void) { ffi_prep_cif_var(); return 0; } |
||||
/* end */ |
||||
|
||||
-------------------- |
||||
|
||||
have_func: checking for ffi_raw_call()... -------------------- yes |
||||
|
||||
"gcc -o conftest -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/x86_64-darwin21 -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/ruby/backward -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0 -I. -I/Library/Developer/CommandLineTools/SDKs/MacOSX13.sdk/usr/include/ffi -I/usr/local/opt/libyaml/include -I/usr/local/opt/libksba/include -I/usr/local/opt/readline/include -I/usr/local/opt/zlib/include -I/usr/local/opt/openssl@1.1/include -D_XOPEN_SOURCE -D_DARWIN_C_SOURCE -D_DARWIN_UNLIMITED_SELECT -D_REENTRANT -g -O2 -fno-common -pipe conftest.c -L. -L/Users/mac/.rvm/rubies/ruby-2.7.4/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -L. -fstack-protector-strong -L/usr/local/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -lffi -lffi -lruby.2.7 -lffi -lffi " |
||||
conftest.c:14:57: error: use of undeclared identifier 'ffi_raw_call' |
||||
int t(void) { void ((*volatile p)()); p = (void ((*)()))ffi_raw_call; return !p; } |
||||
^ |
||||
1 error generated. |
||||
checked program was: |
||||
/* begin */ |
||||
1: #include "ruby.h" |
||||
2: |
||||
3: /*top*/ |
||||
4: extern int t(void); |
||||
5: int main(int argc, char **argv) |
||||
6: { |
||||
7: if (argc > 1000000) { |
||||
8: int (* volatile tp)(void)=(int (*)(void))&t; |
||||
9: printf("%d", (*tp)()); |
||||
10: } |
||||
11: |
||||
12: return !!argv[argc]; |
||||
13: } |
||||
14: int t(void) { void ((*volatile p)()); p = (void ((*)()))ffi_raw_call; return !p; } |
||||
/* end */ |
||||
|
||||
"gcc -o conftest -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/x86_64-darwin21 -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/ruby/backward -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0 -I. -I/Library/Developer/CommandLineTools/SDKs/MacOSX13.sdk/usr/include/ffi -I/usr/local/opt/libyaml/include -I/usr/local/opt/libksba/include -I/usr/local/opt/readline/include -I/usr/local/opt/zlib/include -I/usr/local/opt/openssl@1.1/include -D_XOPEN_SOURCE -D_DARWIN_C_SOURCE -D_DARWIN_UNLIMITED_SELECT -D_REENTRANT -g -O2 -fno-common -pipe conftest.c -L. -L/Users/mac/.rvm/rubies/ruby-2.7.4/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -L. -fstack-protector-strong -L/usr/local/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -lffi -lffi -lruby.2.7 -lffi -lffi " |
||||
checked program was: |
||||
/* begin */ |
||||
1: #include "ruby.h" |
||||
2: |
||||
3: /*top*/ |
||||
4: extern int t(void); |
||||
5: int main(int argc, char **argv) |
||||
6: { |
||||
7: if (argc > 1000000) { |
||||
8: int (* volatile tp)(void)=(int (*)(void))&t; |
||||
9: printf("%d", (*tp)()); |
||||
10: } |
||||
11: |
||||
12: return !!argv[argc]; |
||||
13: } |
||||
14: extern void ffi_raw_call(); |
||||
15: int t(void) { ffi_raw_call(); return 0; } |
||||
/* end */ |
||||
|
||||
-------------------- |
||||
|
||||
have_func: checking for ffi_prep_raw_closure()... -------------------- yes |
||||
|
||||
"gcc -o conftest -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/x86_64-darwin21 -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/ruby/backward -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0 -I. -I/Library/Developer/CommandLineTools/SDKs/MacOSX13.sdk/usr/include/ffi -I/usr/local/opt/libyaml/include -I/usr/local/opt/libksba/include -I/usr/local/opt/readline/include -I/usr/local/opt/zlib/include -I/usr/local/opt/openssl@1.1/include -D_XOPEN_SOURCE -D_DARWIN_C_SOURCE -D_DARWIN_UNLIMITED_SELECT -D_REENTRANT -g -O2 -fno-common -pipe conftest.c -L. -L/Users/mac/.rvm/rubies/ruby-2.7.4/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -L. -fstack-protector-strong -L/usr/local/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -lffi -lffi -lruby.2.7 -lffi -lffi " |
||||
conftest.c:14:57: error: use of undeclared identifier 'ffi_prep_raw_closure' |
||||
int t(void) { void ((*volatile p)()); p = (void ((*)()))ffi_prep_raw_closure; return !p; } |
||||
^ |
||||
1 error generated. |
||||
checked program was: |
||||
/* begin */ |
||||
1: #include "ruby.h" |
||||
2: |
||||
3: /*top*/ |
||||
4: extern int t(void); |
||||
5: int main(int argc, char **argv) |
||||
6: { |
||||
7: if (argc > 1000000) { |
||||
8: int (* volatile tp)(void)=(int (*)(void))&t; |
||||
9: printf("%d", (*tp)()); |
||||
10: } |
||||
11: |
||||
12: return !!argv[argc]; |
||||
13: } |
||||
14: int t(void) { void ((*volatile p)()); p = (void ((*)()))ffi_prep_raw_closure; return !p; } |
||||
/* end */ |
||||
|
||||
"gcc -o conftest -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/x86_64-darwin21 -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/ruby/backward -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0 -I. -I/Library/Developer/CommandLineTools/SDKs/MacOSX13.sdk/usr/include/ffi -I/usr/local/opt/libyaml/include -I/usr/local/opt/libksba/include -I/usr/local/opt/readline/include -I/usr/local/opt/zlib/include -I/usr/local/opt/openssl@1.1/include -D_XOPEN_SOURCE -D_DARWIN_C_SOURCE -D_DARWIN_UNLIMITED_SELECT -D_REENTRANT -g -O2 -fno-common -pipe conftest.c -L. -L/Users/mac/.rvm/rubies/ruby-2.7.4/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -L. -fstack-protector-strong -L/usr/local/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -lffi -lffi -lruby.2.7 -lffi -lffi " |
||||
checked program was: |
||||
/* begin */ |
||||
1: #include "ruby.h" |
||||
2: |
||||
3: /*top*/ |
||||
4: extern int t(void); |
||||
5: int main(int argc, char **argv) |
||||
6: { |
||||
7: if (argc > 1000000) { |
||||
8: int (* volatile tp)(void)=(int (*)(void))&t; |
||||
9: printf("%d", (*tp)()); |
||||
10: } |
||||
11: |
||||
12: return !!argv[argc]; |
||||
13: } |
||||
14: extern void ffi_prep_raw_closure(); |
||||
15: int t(void) { ffi_prep_raw_closure(); return 0; } |
||||
/* end */ |
||||
|
||||
-------------------- |
||||
|
||||
block in append_ldflags: checking for whether -pthread is accepted as LDFLAGS... -------------------- yes |
||||
|
||||
"gcc -o conftest -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/x86_64-darwin21 -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/ruby/backward -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0 -I. -I/Library/Developer/CommandLineTools/SDKs/MacOSX13.sdk/usr/include/ffi -I/usr/local/opt/libyaml/include -I/usr/local/opt/libksba/include -I/usr/local/opt/readline/include -I/usr/local/opt/zlib/include -I/usr/local/opt/openssl@1.1/include -D_XOPEN_SOURCE -D_DARWIN_C_SOURCE -D_DARWIN_UNLIMITED_SELECT -D_REENTRANT -g -O2 -fno-common -pipe conftest.c -L. -L/Users/mac/.rvm/rubies/ruby-2.7.4/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -L. -fstack-protector-strong -L/usr/local/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -lffi -lffi -lruby.2.7 -pthread " |
||||
checked program was: |
||||
/* begin */ |
||||
1: #include "ruby.h" |
||||
2: |
||||
3: int main(int argc, char **argv) |
||||
4: { |
||||
5: return !!argv[argc]; |
||||
6: } |
||||
/* end */ |
||||
|
||||
-------------------- |
||||
|
||||
extconf.h is: |
||||
/* begin */ |
||||
1: #ifndef EXTCONF_H |
||||
2: #define EXTCONF_H |
||||
3: #define HAVE_FFI_PREP_CIF_VAR 1 |
||||
4: #define HAVE_FFI_RAW_CALL 1 |
||||
5: #define HAVE_FFI_PREP_RAW_CLOSURE 1 |
||||
6: #define HAVE_RAW_API 1 |
||||
7: #endif |
||||
/* end */ |
||||
|
@@ -1,13 +0,0 @@
current directory: /Users/mac/work/truthordare/vendor/bundle/ruby/2.7.0/gems/json-2.6.1/ext/json
/Users/mac/.rvm/rubies/ruby-2.7.4/bin/ruby -I /Users/mac/.rvm/rubies/ruby-2.7.4/lib/ruby/2.7.0 -r ./siteconf20231116-47994-1k6xh5j.rb extconf.rb
creating Makefile

current directory: /Users/mac/work/truthordare/vendor/bundle/ruby/2.7.0/gems/json-2.6.1/ext/json
make "DESTDIR=" clean

current directory: /Users/mac/work/truthordare/vendor/bundle/ruby/2.7.0/gems/json-2.6.1/ext/json
make "DESTDIR="
make: Nothing to be done for `all'.

current directory: /Users/mac/work/truthordare/vendor/bundle/ruby/2.7.0/gems/json-2.6.1/ext/json
make "DESTDIR=" install
Binary file not shown.
Binary file not shown.
@@ -1,118 +0,0 @@
|
||||
have_func: checking for rb_enc_raise() in ruby.h... -------------------- yes |
||||
|
||||
"gcc -o conftest -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/x86_64-darwin21 -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/ruby/backward -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0 -I. -I/usr/local/opt/libyaml/include -I/usr/local/opt/libksba/include -I/usr/local/opt/readline/include -I/usr/local/opt/zlib/include -I/usr/local/opt/openssl@1.1/include -D_XOPEN_SOURCE -D_DARWIN_C_SOURCE -D_DARWIN_UNLIMITED_SELECT -D_REENTRANT -g -O2 -fno-common -pipe conftest.c -L. -L/Users/mac/.rvm/rubies/ruby-2.7.4/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -L. -fstack-protector-strong -L/usr/local/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -lruby.2.7 " |
||||
checked program was: |
||||
/* begin */ |
||||
1: #include "ruby.h" |
||||
2: |
||||
3: int main(int argc, char **argv) |
||||
4: { |
||||
5: return !!argv[argc]; |
||||
6: } |
||||
/* end */ |
||||
|
||||
"gcc -o conftest -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/x86_64-darwin21 -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/ruby/backward -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0 -I. -I/usr/local/opt/libyaml/include -I/usr/local/opt/libksba/include -I/usr/local/opt/readline/include -I/usr/local/opt/zlib/include -I/usr/local/opt/openssl@1.1/include -D_XOPEN_SOURCE -D_DARWIN_C_SOURCE -D_DARWIN_UNLIMITED_SELECT -D_REENTRANT -g -O2 -fno-common -pipe conftest.c -L. -L/Users/mac/.rvm/rubies/ruby-2.7.4/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -L. -fstack-protector-strong -L/usr/local/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -lruby.2.7 " |
||||
conftest.c:16:57: error: use of undeclared identifier 'rb_enc_raise' |
||||
int t(void) { void ((*volatile p)()); p = (void ((*)()))rb_enc_raise; return !p; } |
||||
^ |
||||
1 error generated. |
||||
checked program was: |
||||
/* begin */ |
||||
1: #include "ruby.h" |
||||
2: |
||||
3: #include <ruby.h> |
||||
4: |
||||
5: /*top*/ |
||||
6: extern int t(void); |
||||
7: int main(int argc, char **argv) |
||||
8: { |
||||
9: if (argc > 1000000) { |
||||
10: int (* volatile tp)(void)=(int (*)(void))&t; |
||||
11: printf("%d", (*tp)()); |
||||
12: } |
||||
13: |
||||
14: return !!argv[argc]; |
||||
15: } |
||||
16: int t(void) { void ((*volatile p)()); p = (void ((*)()))rb_enc_raise; return !p; } |
||||
/* end */ |
||||
|
||||
"gcc -o conftest -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/x86_64-darwin21 -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/ruby/backward -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0 -I. -I/usr/local/opt/libyaml/include -I/usr/local/opt/libksba/include -I/usr/local/opt/readline/include -I/usr/local/opt/zlib/include -I/usr/local/opt/openssl@1.1/include -D_XOPEN_SOURCE -D_DARWIN_C_SOURCE -D_DARWIN_UNLIMITED_SELECT -D_REENTRANT -g -O2 -fno-common -pipe conftest.c -L. -L/Users/mac/.rvm/rubies/ruby-2.7.4/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -L. -fstack-protector-strong -L/usr/local/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -lruby.2.7 " |
||||
checked program was: |
||||
/* begin */ |
||||
1: #include "ruby.h" |
||||
2: |
||||
3: #include <ruby.h> |
||||
4: |
||||
5: /*top*/ |
||||
6: extern int t(void); |
||||
7: int main(int argc, char **argv) |
||||
8: { |
||||
9: if (argc > 1000000) { |
||||
10: int (* volatile tp)(void)=(int (*)(void))&t; |
||||
11: printf("%d", (*tp)()); |
||||
12: } |
||||
13: |
||||
14: return !!argv[argc]; |
||||
15: } |
||||
16: extern void rb_enc_raise(); |
||||
17: int t(void) { rb_enc_raise(); return 0; } |
||||
/* end */ |
||||
|
||||
-------------------- |
||||
|
||||
have_func: checking for rb_enc_interned_str() in ruby.h... -------------------- no |
||||
|
||||
"gcc -o conftest -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/x86_64-darwin21 -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/ruby/backward -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0 -I. -I/usr/local/opt/libyaml/include -I/usr/local/opt/libksba/include -I/usr/local/opt/readline/include -I/usr/local/opt/zlib/include -I/usr/local/opt/openssl@1.1/include -D_XOPEN_SOURCE -D_DARWIN_C_SOURCE -D_DARWIN_UNLIMITED_SELECT -D_REENTRANT -g -O2 -fno-common -pipe conftest.c -L. -L/Users/mac/.rvm/rubies/ruby-2.7.4/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -L. -fstack-protector-strong -L/usr/local/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -lruby.2.7 " |
||||
conftest.c:16:57: error: use of undeclared identifier 'rb_enc_interned_str' |
||||
int t(void) { void ((*volatile p)()); p = (void ((*)()))rb_enc_interned_str; return !p; } |
||||
^ |
||||
1 error generated. |
||||
checked program was: |
||||
/* begin */ |
||||
1: #include "ruby.h" |
||||
2: |
||||
3: #include <ruby.h> |
||||
4: |
||||
5: /*top*/ |
||||
6: extern int t(void); |
||||
7: int main(int argc, char **argv) |
||||
8: { |
||||
9: if (argc > 1000000) { |
||||
10: int (* volatile tp)(void)=(int (*)(void))&t; |
||||
11: printf("%d", (*tp)()); |
||||
12: } |
||||
13: |
||||
14: return !!argv[argc]; |
||||
15: } |
||||
16: int t(void) { void ((*volatile p)()); p = (void ((*)()))rb_enc_interned_str; return !p; } |
||||
/* end */ |
||||
|
||||
"gcc -o conftest -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/x86_64-darwin21 -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0/ruby/backward -I/Users/mac/.rvm/rubies/ruby-2.7.4/include/ruby-2.7.0 -I. -I/usr/local/opt/libyaml/include -I/usr/local/opt/libksba/include -I/usr/local/opt/readline/include -I/usr/local/opt/zlib/include -I/usr/local/opt/openssl@1.1/include -D_XOPEN_SOURCE -D_DARWIN_C_SOURCE -D_DARWIN_UNLIMITED_SELECT -D_REENTRANT -g -O2 -fno-common -pipe conftest.c -L. -L/Users/mac/.rvm/rubies/ruby-2.7.4/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -L. -fstack-protector-strong -L/usr/local/lib -L/usr/local/opt/libyaml/lib -L/usr/local/opt/libksba/lib -L/usr/local/opt/readline/lib -L/usr/local/opt/zlib/lib -L/usr/local/opt/openssl@1.1/lib -lruby.2.7 " |
||||
Undefined symbols for architecture x86_64: |
||||
"_rb_enc_interned_str", referenced from: |
||||
_t in conftest-e5f1ad.o |
||||
ld: symbol(s) not found for architecture x86_64 |
||||
clang: error: linker command failed with exit code 1 (use -v to see invocation) |
||||
checked program was: |
||||
/* begin */ |
||||
1: #include "ruby.h" |
||||
2: |
||||
3: #include <ruby.h> |
||||
4: |
||||
5: /*top*/ |
||||
6: extern int t(void); |
||||
7: int main(int argc, char **argv) |
||||
8: { |
||||
9: if (argc > 1000000) { |
||||
10: int (* volatile tp)(void)=(int (*)(void))&t; |
||||
11: printf("%d", (*tp)()); |
||||
12: } |
||||
13: |
||||
14: return !!argv[argc]; |
||||
15: } |
||||
16: extern void rb_enc_interned_str(); |
||||
17: int t(void) { rb_enc_interned_str(); return 0; } |
||||
/* end */ |
||||
|
||||
-------------------- |
||||
|
@@ -1,19 +0,0 @@
Copyright (c) 2010 Christian Kruse, <cjk@wwwtech.de>

Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -1,79 +0,0 @@
CFPropertyList implementation
class to read, manipulate and write both XML and binary property list
files (plist(5)) as defined by Apple. Have a look at CFPropertyList::List
for more documentation.

# Caution!

In version 3.0.0 we dropped Ruby 1.8 compatibility. If you are using
Ruby 1.8 consider to update Ruby; if you can't upgrade, don't upgrade
CFPropertyList.

# Installation

You could either use ruby gems and install it via

```bash
gem install CFPropertyList
```

or you could clone this repository and place it somewhere in your load path.

Example:
```ruby
require 'cfpropertylist'
```

If you're using Rails, you can add it into your Gemfile

```ruby
gem 'CFPropertyList'
```

# Usage

## create a arbitrary data structure of basic data types

```ruby
data = {
  'name' => 'John Doe',
  'missing' => true,
  'last_seen' => Time.now,
  'friends' => ['Jane Doe','Julian Doe'],
  'likes' => {
    'me' => false
  }
}
```

## create CFPropertyList::List object

```ruby
plist = CFPropertyList::List.new
```

## call CFPropertyList.guess() to create corresponding CFType values

```ruby
plist.value = CFPropertyList.guess(data)
```

## write plist to file
```ruby
plist.save("example.plist", CFPropertyList::List::FORMAT_BINARY)
```

## … later, read it again
```ruby
plist = CFPropertyList::List.new(:file => "example.plist")
data = CFPropertyList.native_types(plist.value)
```

# Author and license

**Author:** Christian Kruse (mailto:cjk@wwwtech.de)

**Copyright:** Copyright (c) 2010

**License:** MIT License
@@ -1,43 +0,0 @@
CFPropertyList implementation
class to read, manipulate and write both XML and binary property list
files (plist(5)) as defined by Apple. Have a look at CFPropertyList::List
for more documentation.

== Installation

You could either use ruby gems and install it via

  gem install CFPropertyList

or you could clone this repository and place it somewhere in your load path.

== Example
  require 'cfpropertylist'

  # create a arbitrary data structure of basic data types
  data = {
    'name' => 'John Doe',
    'missing' => true,
    'last_seen' => Time.now,
    'friends' => ['Jane Doe','Julian Doe'],
    'likes' => {
      'me' => false
    }
  }

  # create CFPropertyList::List object
  plist = CFPropertyList::List.new

  # call CFPropertyList.guess() to create corresponding CFType values
  plist.value = CFPropertyList.guess(data)

  # write plist to file
  plist.save("example.plist", CFPropertyList::List::FORMAT_BINARY)

  # … later, read it again
  plist = CFPropertyList::List.new(:file => "example.plist")
  data = CFPropertyList.native_types(plist.value)

Author:: Christian Kruse (mailto:cjk@wwwtech.de)
Copyright:: Copyright (c) 2010
License:: MIT License
@@ -1,7 +0,0 @@
Special thanks to:

Steve Madsen for providing a lot of performance patches and bugfixes!
Have a look at his Github account: <http://github.com/sjmadsen>
@@ -1,6 +0,0 @@
# -*- coding: utf-8 -*-

require 'cfpropertylist/rbCFPropertyList'


# eof
@@ -1,594 +0,0 @@
|
||||
# -*- coding: utf-8 -*- |
||||
|
||||
require 'stringio' |
||||
|
||||
module CFPropertyList |
||||
# Binary PList parser class |
||||
class Binary |
||||
# Read a binary plist file |
||||
def load(opts) |
||||
@unique_table = {} |
||||
@count_objects = 0 |
||||
@object_refs = 0 |
||||
|
||||
@written_object_count = 0 |
||||
@object_table = [] |
||||
@object_ref_size = 0 |
||||
|
||||
@offsets = [] |
||||
|
||||
fd = nil |
||||
if(opts.has_key?(:file)) |
||||
fd = File.open(opts[:file],"rb") |
||||
file = opts[:file] |
||||
else |
||||
fd = StringIO.new(opts[:data],"rb") |
||||
file = "<string>" |
||||
end |
||||
|
||||
# first, we read the trailer: 32 byte from the end |
||||
fd.seek(-32,IO::SEEK_END) |
||||
buff = fd.read(32) |
||||
|
||||
offset_size, object_ref_size, number_of_objects, top_object, table_offset = buff.unpack "x6CCx4Nx4Nx4N" |
||||
|
||||
# after that, get the offset table |
||||
fd.seek(table_offset, IO::SEEK_SET) |
||||
coded_offset_table = fd.read(number_of_objects * offset_size) |
||||
raise CFFormatError.new("#{file}: Format error!") unless coded_offset_table.bytesize == number_of_objects * offset_size |
||||
|
||||
@count_objects = number_of_objects |
||||
|
||||
# decode offset table |
||||
if(offset_size != 3) |
||||
formats = ["","C*","n*","","N*"] |
||||
@offsets = coded_offset_table.unpack(formats[offset_size]) |
||||
else |
||||
@offsets = coded_offset_table.unpack("C*").each_slice(3).map { |
||||
|x,y,z| (x << 16) | (y << 8) | z |
||||
} |
||||
end |
||||
|
||||
@object_ref_size = object_ref_size |
||||
val = read_binary_object_at(file,fd,top_object) |
||||
|
||||
fd.close |
||||
val |
||||
end |
||||
|
||||
|
||||
# Convert CFPropertyList to binary format; since we have to count our objects we simply unique CFDictionary and CFArray |
||||
def to_str(opts={}) |
||||
@unique_table = {} |
||||
@count_objects = 0 |
||||
@object_refs = 0 |
||||
|
||||
@written_object_count = 0 |
||||
@object_table = [] |
||||
|
||||
@offsets = [] |
||||
|
||||
binary_str = "bplist00" |
||||
|
||||
@object_refs = count_object_refs(opts[:root]) |
||||
|
||||
opts[:root].to_binary(self) |
||||
|
||||
next_offset = 8 |
||||
offsets = @object_table.map do |object| |
||||
offset = next_offset |
||||
next_offset += object.bytesize |
||||
offset |
||||
end |
||||
binary_str << @object_table.join |
||||
|
||||
table_offset = next_offset |
||||
offset_size = Binary.bytes_needed(table_offset) |
||||
|
||||
if offset_size < 8 |
||||
# Fast path: encode the entire offset array at once. |
||||
binary_str << offsets.pack((%w(C n N N)[offset_size - 1]) + '*') |
||||
else |
||||
# Slow path: host may be little or big endian, must pack each offset |
||||
# separately. |
||||
offsets.each do |offset| |
||||
binary_str << "#{Binary.pack_it_with_size(offset_size,offset)}" |
||||
end |
||||
end |
||||
|
||||
binary_str << [offset_size, object_ref_size(@object_refs)].pack("x6CC") |
||||
binary_str << [@object_table.size].pack("x4N") |
||||
binary_str << [0].pack("x4N") |
||||
binary_str << [table_offset].pack("x4N") |
||||
|
||||
binary_str |
||||
end |
||||
|
||||
def object_ref_size object_refs |
||||
Binary.bytes_needed(object_refs) |
||||
end |
||||
|
||||
# read a „null” type (i.e. null byte, marker byte, bool value) |
||||
def read_binary_null_type(length) |
||||
case length |
||||
when 0 then 0 # null byte |
||||
when 8 then CFBoolean.new(false) |
||||
when 9 then CFBoolean.new(true) |
||||
when 15 then 15 # fill type |
||||
else |
||||
raise CFFormatError.new("unknown null type: #{length}") |
||||
end |
||||
end |
||||
protected :read_binary_null_type |
||||
|
||||
# read a binary int value |
||||
def read_binary_int(fname,fd,length) |
||||
if length > 4 |
||||
raise CFFormatError.new("Integer greater than 16 bytes: #{length}") |
||||
end |
||||
|
||||
nbytes = 1 << length |
||||
|
||||
buff = fd.read(nbytes) |
||||
|
||||
CFInteger.new( |
||||
case length |
||||
when 0 then buff.unpack("C")[0] |
||||
when 1 then buff.unpack("n")[0] |
||||
when 2 then buff.unpack("N")[0] |
||||
# 8 byte integers are always signed |
||||
when 3 then buff.unpack("q>")[0] |
||||
# 16 byte integers are used to represent unsigned 8 byte integers |
||||
# where the unsigned value is stored in the lower 8 bytes and the |
||||
# upper 8 bytes are unused. |
||||
when 4 then buff.unpack("Q>Q>")[1] |
||||
end |
||||
) |
||||
end |
||||
protected :read_binary_int |
||||
|
||||
# read a binary real value |
||||
def read_binary_real(fname,fd,length) |
||||
raise CFFormatError.new("Real greater than 8 bytes: #{length}") if length > 3 |
||||
|
||||
nbytes = 1 << length |
||||
buff = fd.read(nbytes) |
||||
|
||||
CFReal.new( |
||||
case length |
||||
when 0 # 1 byte float? must be an error |
||||
raise CFFormatError.new("got #{length+1} byte float, must be an error!") |
||||
when 1 # 2 byte float? must be an error |
||||
raise CFFormatError.new("got #{length+1} byte float, must be an error!") |
||||
when 2 then |
||||
buff.reverse.unpack("e")[0] |
||||
when 3 then |
||||
buff.reverse.unpack("E")[0] |
||||
else |
||||
fail "unexpected length: #{length}" |
||||
end |
||||
) |
||||
end |
||||
protected :read_binary_real |
||||
|
||||
# read a binary date value |
||||
def read_binary_date(fname,fd,length) |
||||
raise CFFormatError.new("Date greater than 8 bytes: #{length}") if length > 3 |
||||
|
||||
nbytes = 1 << length |
||||
buff = fd.read(nbytes) |
||||
|
||||
CFDate.new( |
||||
case length |
||||
when 0 then # 1 byte CFDate is an error |
||||
raise CFFormatError.new("#{length+1} byte CFDate, error") |
||||
when 1 then # 2 byte CFDate is an error |
||||
raise CFFormatError.new("#{length+1} byte CFDate, error") |
||||
when 2 then |
||||
buff.reverse.unpack("e")[0] |
||||
when 3 then |
||||
buff.reverse.unpack("E")[0] |
||||
end, |
||||
CFDate::TIMESTAMP_APPLE |
||||
) |
||||
end |
||||
protected :read_binary_date |
||||
|
||||
# Read a binary data value |
||||
def read_binary_data(fname,fd,length) |
||||
CFData.new(read_fd(fd, length), CFData::DATA_RAW) |
||||
end |
||||
protected :read_binary_data |
||||
|
||||
def read_fd fd, length |
||||
length > 0 ? fd.read(length) : "" |
||||
end |
||||
|
||||
# Read a binary string value |
||||
def read_binary_string(fname,fd,length) |
||||
buff = read_fd fd, length |
||||
@unique_table[buff] = true unless @unique_table.has_key?(buff) |
||||
CFString.new(buff) |
||||
end |
||||
protected :read_binary_string |
||||
|
||||
# Convert the given string from one charset to another |
||||
def Binary.charset_convert(str,from,to="UTF-8") |
||||
return str.dup.force_encoding(from).encode(to) if str.respond_to?("encode") |
||||
Iconv.conv(to,from,str) |
||||
end |
||||
|
||||
# Count characters considering character set |
||||
def Binary.charset_strlen(str,charset="UTF-8") |
||||
if str.respond_to?(:encode) |
||||
size = str.length |
||||
else |
||||
utf8_str = Iconv.conv("UTF-8",charset,str) |
||||
size = utf8_str.scan(/./mu).size |
||||
end |
||||
|
||||
# UTF-16 code units in the range D800-DBFF are the beginning of |
||||
# a surrogate pair, and count as one additional character for |
||||
# length calculation. |
||||
if charset =~ /^UTF-16/ |
||||
if str.respond_to?(:encode) |
||||
str.bytes.to_a.each_slice(2) { |pair| size += 1 if (0xd8..0xdb).include?(pair[0]) } |
||||
else |
||||
str.split('').each_slice(2) { |pair| size += 1 if ("\xd8".."\xdb").include?(pair[0]) } |
||||
end |
||||
end |
||||
|
||||
size |
||||
end |
||||
|
||||
# Read a unicode string value, coded as UTF-16BE |
||||
def read_binary_unicode_string(fname,fd,length) |
||||
# The problem is: we get the length of the string IN CHARACTERS; |
||||
# since a char in UTF-16 can be 16 or 32 bit long, we don't really know |
||||
# how long the string is in bytes |
||||
buff = fd.read(2*length) |
||||
|
||||
@unique_table[buff] = true unless @unique_table.has_key?(buff) |
||||
CFString.new(Binary.charset_convert(buff,"UTF-16BE","UTF-8")) |
||||
end |
||||
protected :read_binary_unicode_string |
||||
|
||||
def unpack_with_size(nbytes, buff) |
||||
format = ["C*", "n*", "N*", "N*"][nbytes - 1]; |
||||
|
||||
if nbytes == 3 |
||||
buff = "\0" + buff.scan(/.{1,3}/).join("\0") |
||||
end |
||||
|
||||
return buff.unpack(format) |
||||
end |
||||
|
||||
# Read an binary array value, including contained objects |
||||
def read_binary_array(fname,fd,length) |
||||
ary = [] |
||||
|
||||
# first: read object refs |
||||
if(length != 0) |
||||
buff = fd.read(length * @object_ref_size) |
||||
objects = unpack_with_size(@object_ref_size, buff) #buff.unpack(@object_ref_size == 1 ? "C*" : "n*") |
||||
|
||||
# now: read objects |
||||
0.upto(length-1) do |i| |
||||
object = read_binary_object_at(fname,fd,objects[i]) |
||||
ary.push object |
||||
end |
||||
end |
||||
|
||||
CFArray.new(ary) |
||||
end |
||||
protected :read_binary_array |
||||
|
||||
# Read a dictionary value, including contained objects |
||||
def read_binary_dict(fname,fd,length) |
||||
dict = {} |
||||
|
||||
# first: read keys |
||||
if(length != 0) then |
||||
buff = fd.read(length * @object_ref_size) |
||||
keys = unpack_with_size(@object_ref_size, buff) |
||||
|
||||
# second: read object refs |
||||
buff = fd.read(length * @object_ref_size) |
||||
objects = unpack_with_size(@object_ref_size, buff) |
||||
|
||||
# read real keys and objects |
||||
0.upto(length-1) do |i| |
||||
key = read_binary_object_at(fname,fd,keys[i]) |
||||
object = read_binary_object_at(fname,fd,objects[i]) |
||||
dict[key.value] = object |
||||
end |
||||
end |
||||
|
||||
CFDictionary.new(dict) |
||||
end |
||||
protected :read_binary_dict |
||||
|
||||
# Read an object type byte, decode it and delegate to the correct |
||||
# reader function |
||||
def read_binary_object(fname,fd) |
||||
# first: read the marker byte |
||||
buff = fd.read(1) |
||||
|
||||
object_length = buff.unpack("C*") |
||||
object_length = object_length[0] & 0xF |
||||
|
||||
buff = buff.unpack("H*") |
||||
object_type = buff[0][0].chr |
||||
|
||||
if(object_type != "0" && object_length == 15) then |
||||
object_length = read_binary_object(fname,fd) |
||||
object_length = object_length.value |
||||
end |
||||
|
||||
case object_type |
||||
when '0' # null, false, true, fillbyte |
||||
read_binary_null_type(object_length) |
||||
when '1' # integer |
||||
read_binary_int(fname,fd,object_length) |
||||
when '2' # real |
||||
read_binary_real(fname,fd,object_length) |
||||
when '3' # date |
||||
read_binary_date(fname,fd,object_length) |
||||
when '4' # data |
||||
read_binary_data(fname,fd,object_length) |
||||
when '5' # byte string, usually utf8 encoded |
||||
read_binary_string(fname,fd,object_length) |
||||
when '6' # unicode string (utf16be) |
||||
read_binary_unicode_string(fname,fd,object_length) |
||||
when '8' |
||||
CFUid.new(read_binary_int(fname, fd, object_length).value) |
||||
when 'a' # array |
||||
read_binary_array(fname,fd,object_length) |
||||
when 'd' # dictionary |
||||
read_binary_dict(fname,fd,object_length) |
||||
end |
||||
end |
||||
protected :read_binary_object |
||||
|
||||
# Read an object type byte at position $pos, decode it and delegate to the correct reader function |
||||
def read_binary_object_at(fname,fd,pos) |
||||
position = @offsets[pos] |
||||
fd.seek(position,IO::SEEK_SET) |
||||
read_binary_object(fname,fd) |
||||
end |
||||
protected :read_binary_object_at |
||||
|
||||
# pack an +int+ of +nbytes+ with size |
||||
def Binary.pack_it_with_size(nbytes,int) |
||||
case nbytes |
||||
when 1 then [int].pack('c') |
||||
when 2 then [int].pack('n') |
||||
when 4 then [int].pack('N') |
||||
when 8 |
||||
[int >> 32, int & 0xFFFFFFFF].pack('NN') |
||||
else |
||||
raise CFFormatError.new("Don't know how to pack #{nbytes} byte integer") |
||||
end |
||||
end |
||||
|
||||
def Binary.pack_int_array_with_size(nbytes, array) |
||||
case nbytes |
||||
when 1 then array.pack('C*') |
||||
when 2 then array.pack('n*') |
||||
when 4 then array.pack('N*') |
||||
when 8 |
||||
array.map { |int| [int >> 32, int & 0xFFFFFFFF].pack('NN') }.join |
||||
else |
||||
raise CFFormatError.new("Don't know how to pack #{nbytes} byte integer") |
||||
end |
||||
end |
||||
|
||||
# calculate how many bytes are needed to save +count+ |
||||
def Binary.bytes_needed(count) |
||||
case |
||||
when count < 2**8 then 1 |
||||
when count < 2**16 then 2 |
||||
when count < 2**32 then 4 |
||||
when count < 2**64 then 8 |
||||
else |
||||
raise CFFormatError.new("Data size too large: #{count}") |
||||
end |
||||
end |
||||
|
||||
# Create a type byte for binary format as defined by apple |
||||
def Binary.type_bytes(type, length) |
||||
if length < 15 |
||||
[(type << 4) | length].pack('C') |
||||
else |
||||
bytes = [(type << 4) | 0xF] |
||||
if length <= 0xFF |
||||
bytes.push(0x10, length).pack('CCC') # 1 byte length |
||||
elsif length <= 0xFFFF |
||||
bytes.push(0x11, length).pack('CCn') # 2 byte length |
||||
elsif length <= 0xFFFFFFFF |
||||
bytes.push(0x12, length).pack('CCN') # 4 byte length |
||||
elsif length <= 0x7FFFFFFFFFFFFFFF |
||||
bytes.push(0x13, length >> 32, length & 0xFFFFFFFF).pack('CCNN') # 8 byte length |
||||
else |
||||
raise CFFormatError.new("Integer too large: #{int}") |
||||
end |
||||
end |
||||
end |
||||
|
||||
def count_object_refs(object) |
||||
case object |
||||
when CFArray |
||||
contained_refs = 0 |
||||
object.value.each do |element| |
||||
if CFArray === element || CFDictionary === element |
||||
contained_refs += count_object_refs(element) |
||||
end |
||||
end |
||||
return object.value.size + contained_refs |
||||
when CFDictionary |
||||
contained_refs = 0 |
||||
object.value.each_value do |value| |
||||
if CFArray === value || CFDictionary === value |
||||
contained_refs += count_object_refs(value) |
||||
end |
||||
end |
||||
return object.value.keys.size * 2 + contained_refs |
||||
else |
||||
return 0 |
||||
end |
||||
end |
||||
|
||||
def Binary.ascii_string?(str) |
||||
if str.respond_to?(:ascii_only?) |
||||
str.ascii_only? |
||||
else |
||||
str !~ /[\x80-\xFF]/mn |
||||
end |
||||
end |
||||
|
||||
# Uniques and transforms a string value to binary format and adds it to the object table |
||||
def string_to_binary(val) |
||||
val = val.to_s |
||||
|
||||
@unique_table[val] ||= begin |
||||
if !Binary.ascii_string?(val) |
||||
val = Binary.charset_convert(val,"UTF-8","UTF-16BE") |
||||
bdata = Binary.type_bytes(0b0110, Binary.charset_strlen(val,"UTF-16BE")) |
||||
|
||||
val.force_encoding("ASCII-8BIT") if val.respond_to?("encode") |
||||
@object_table[@written_object_count] = bdata << val |
||||
else |
||||
bdata = Binary.type_bytes(0b0101,val.bytesize) |
||||
@object_table[@written_object_count] = bdata << val |
||||
end |
||||
|
||||
@written_object_count += 1 |
||||
@written_object_count - 1 |
||||
end |
||||
end |
||||
|
||||
# Codes an integer to binary format |
||||
def int_to_binary(value) |
||||
# Note: nbytes is actually an exponent. number of bytes = 2**nbytes. |
||||
nbytes = 0 |
||||
nbytes = 1 if value > 0xFF # 1 byte unsigned integer |
||||
nbytes += 1 if value > 0xFFFF # 4 byte unsigned integer |
||||
nbytes += 1 if value > 0xFFFFFFFF # 8 byte unsigned integer |
||||
nbytes += 1 if value > 0x7FFFFFFFFFFFFFFF # 8 byte unsigned integer, stored in lower half of 16 bytes |
||||
nbytes = 3 if value < 0 # signed integers always stored in 8 bytes |
||||
|
||||
Binary.type_bytes(0b0001, nbytes) << |
||||
if nbytes < 4 |
||||
[value].pack(["C", "n", "N", "q>"][nbytes]) |
||||
else # nbytes == 4 |
||||
[0,value].pack("Q>Q>") |
||||
end |
||||
end |
||||
|
||||
# Codes a real value to binary format |
||||
def real_to_binary(val) |
||||
Binary.type_bytes(0b0010,3) << [val].pack("E").reverse |
||||
end |
||||
|
||||
# Converts a numeric value to binary and adds it to the object table |
||||
def num_to_binary(value) |
||||
@object_table[@written_object_count] = |
||||
if value.is_a?(CFInteger) |
||||
int_to_binary(value.value) |
||||
else |
||||
real_to_binary(value.value) |
||||
end |
||||
|
||||
@written_object_count += 1 |
||||
@written_object_count - 1 |
||||
end |
||||
|
||||
def uid_to_binary(value) |
||||
nbytes = 0 |
||||
nbytes = 1 if value > 0xFF # 1 byte integer |
||||
nbytes += 1 if value > 0xFFFF # 4 byte integer |
||||
nbytes += 1 if value > 0xFFFFFFFF # 8 byte integer |
||||
nbytes = 3 if value < 0 # 8 byte integer, since signed |
||||
|
||||
@object_table[@written_object_count] = Binary.type_bytes(0b1000, nbytes) << |
||||
if nbytes < 3 |
||||
[value].pack( |
||||
if nbytes == 0 then "C" |
||||
elsif nbytes == 1 then "n" |
||||
else "N" |
||||
end |
||||
) |
||||
else |
||||
# 64 bit signed integer; we need the higher and the lower 32 bit of the value |
||||
high_word = value >> 32 |
||||
low_word = value & 0xFFFFFFFF |
||||
[high_word,low_word].pack("NN") |
||||
end |
||||
|
||||
@written_object_count += 1 |
||||
@written_object_count - 1 |
||||
end |
||||
|
||||
# Convert date value (apple format) to binary and adds it to the object table |
||||
def date_to_binary(val) |
||||
val = val.getutc.to_f - CFDate::DATE_DIFF_APPLE_UNIX # CFDate is a real, number of seconds since 01/01/2001 00:00:00 GMT |
||||
|
||||
@object_table[@written_object_count] = |
||||
(Binary.type_bytes(0b0011, 3) << [val].pack("E").reverse) |
||||
|
||||
@written_object_count += 1 |
||||
@written_object_count - 1 |
||||
end |
||||
|
||||
# Convert a bool value to binary and add it to the object table |
||||
def bool_to_binary(val) |
||||
|
||||
@object_table[@written_object_count] = val ? "\x9" : "\x8" # 0x9 is 1001, type indicator for true; 0x8 is 1000, type indicator for false |
||||
@written_object_count += 1 |
||||
@written_object_count - 1 |
||||
end |
||||
|
||||
# Convert data value to binary format and add it to the object table |
||||
def data_to_binary(val) |
||||
@object_table[@written_object_count] = |
||||
(Binary.type_bytes(0b0100, val.bytesize) << val) |
||||
|
||||
@written_object_count += 1 |
||||
@written_object_count - 1 |
||||
end |
||||
|
||||
# Convert array to binary format and add it to the object table |
||||
def array_to_binary(val) |
||||
saved_object_count = @written_object_count |
||||
@written_object_count += 1 |
||||
#@object_refs += val.value.size |
||||
|
||||
values = val.value.map { |v| v.to_binary(self) } |
||||
bdata = Binary.type_bytes(0b1010, val.value.size) << |
||||
Binary.pack_int_array_with_size(object_ref_size(@object_refs), |
||||
values) |
||||
|
||||
@object_table[saved_object_count] = bdata |
||||
saved_object_count |
||||
end |
||||
|
||||
# Convert dictionary to binary format and add it to the object table |
||||
def dict_to_binary(val) |
||||
saved_object_count = @written_object_count |
||||
@written_object_count += 1 |
||||
|
||||
#@object_refs += val.value.keys.size * 2 |
||||
|
||||
keys_and_values = val.value.keys.map { |k| CFString.new(k).to_binary(self) } |
||||
keys_and_values.concat(val.value.values.map { |v| v.to_binary(self) }) |
||||
|
||||
bdata = Binary.type_bytes(0b1101,val.value.size) << |
||||
Binary.pack_int_array_with_size(object_ref_size(@object_refs), keys_and_values) |
||||
|
||||
@object_table[saved_object_count] = bdata |
||||
return saved_object_count |
||||
end |
||||
end |
||||
end |
||||
|
||||
# eof |
@@ -1,26 +0,0 @@
# -*- coding: utf-8 -*-
#
# Exceptions used:
# CFPlistError:: General base exception
# CFFormatError:: Format error
# CFTypeError:: Type error
#
# Easy and simple :-)
#
# Author:: Christian Kruse (mailto:cjk@wwwtech.de)
# Copyright:: Copyright (c) 2010
# License:: MIT License

# general plist error. All exceptions thrown are derived from this class.
class CFPlistError < StandardError
end

# Exception thrown when format errors occur
class CFFormatError < CFPlistError
end

# Exception thrown when type errors occur
class CFTypeError < CFPlistError
end

# eof
@ -1,449 +0,0 @@
@@ -1,449 +0,0 @@
|
||||
# -*- coding: utf-8 -*- |
||||
|
||||
require 'kconv' |
||||
require 'date' |
||||
require 'time' |
||||
|
||||
# |
||||
# CFPropertyList implementation |
||||
# |
||||
# class to read, manipulate and write both XML and binary property list |
||||
# files (plist(5)) as defined by Apple. Have a look at CFPropertyList::List |
||||
# for more documentation. |
||||
# |
||||
# == Example |
||||
# require 'cfpropertylist' |
||||
# |
||||
# # create a arbitrary data structure of basic data types |
||||
# data = { |
||||
# 'name' => 'John Doe', |
||||
# 'missing' => true, |
||||
# 'last_seen' => Time.now, |
||||
# 'friends' => ['Jane Doe','Julian Doe'], |
||||
# 'likes' => { |
||||
# 'me' => false |
||||
# } |
||||
# } |
||||
# |
||||
# # create CFPropertyList::List object |
||||
# plist = CFPropertyList::List.new |
||||
# |
||||
# # call CFPropertyList.guess() to create corresponding CFType values |
||||
# # pass in optional :convert_unknown_to_string => true to convert things like symbols into strings. |
||||
# plist.value = CFPropertyList.guess(data) |
||||
# |
||||
# # write plist to file |
||||
# plist.save("example.plist", CFPropertyList::List::FORMAT_BINARY) |
||||
# |
||||
# # … later, read it again |
||||
# plist = CFPropertyList::List.new(:file => "example.plist") |
||||
# data = CFPropertyList.native_types(plist.value) |
||||
# |
||||
# Author:: Christian Kruse (mailto:cjk@wwwtech.de) |
||||
# Copyright:: Copyright (c) 2010 |
||||
# License:: MIT License |
||||
module CFPropertyList |
||||
class << self |
||||
attr_accessor :xml_parser_interface |
||||
end |
||||
|
||||
# interface class for PList parsers |
||||
class ParserInterface |
||||
# load a plist |
||||
def load(opts={}) |
||||
return "" |
||||
end |
||||
|
||||
# convert a plist to string |
||||
def to_str(opts={}) |
||||
return true |
||||
end |
||||
end |
||||
|
||||
class XMLParserInterface < ParserInterface |
||||
def new_node(name) |
||||
end |
||||
|
||||
def new_text(val) |
||||
end |
||||
|
||||
def append_node(parent, child) |
||||
end |
||||
end |
||||
end |
||||
|
||||
dirname = File.dirname(__FILE__) |
||||
require dirname + '/rbCFPlistError.rb' |
||||
require dirname + '/rbCFTypes.rb' |
||||
require dirname + '/rbBinaryCFPropertyList.rb' |
||||
require dirname + '/rbPlainCFPropertyList.rb' |
||||
|
||||
begin |
||||
require dirname + '/rbLibXMLParser.rb' |
||||
temp = LibXML::XML::Parser::Options::NOBLANKS # check if we have a version with parser options |
||||
temp = false # avoid a warning |
||||
try_nokogiri = false |
||||
CFPropertyList.xml_parser_interface = CFPropertyList::LibXMLParser |
||||
rescue LoadError, NameError |
||||
try_nokogiri = true |
||||
end |
||||
|
||||
if try_nokogiri then |
||||
begin |
||||
require dirname + '/rbNokogiriParser.rb' |
||||
CFPropertyList.xml_parser_interface = CFPropertyList::NokogiriXMLParser |
||||
rescue LoadError |
||||
require dirname + '/rbREXMLParser.rb' |
||||
CFPropertyList.xml_parser_interface = CFPropertyList::ReXMLParser |
||||
end |
||||
end |
||||
|
||||
|
||||
module CFPropertyList |
||||
# Create CFType hierarchy by guessing the correct CFType, e.g. |
||||
# |
||||
# x = { |
||||
# 'a' => ['b','c','d'] |
||||
# } |
||||
# cftypes = CFPropertyList.guess(x) |
||||
# |
||||
# pass an optional options hash. Currently supported options: |
||||
# +convert_unknown_to_string+:: Convert unknown objects to strings by calling to_s() |
||||
# +converter_method+:: Convert unknown objects to known objects calling +method_name+ |
||||
# |
||||
# cftypes = CFPropertyList.guess(x,:convert_unknown_to_string => true,:converter_method => :to_hash, :converter_with_opts => true) |
||||
def guess(object, options = {}) |
||||
case object |
||||
when Integer then CFInteger.new(object) |
||||
when UidFixnum then CFUid.new(object) |
||||
when Float then CFReal.new(object) |
||||
when TrueClass, FalseClass then CFBoolean.new(object) |
||||
|
||||
when Blob |
||||
CFData.new(object, CFData::DATA_RAW) |
||||
|
||||
when String, Symbol |
||||
CFString.new(object.to_s) |
||||
|
||||
when Time, DateTime, Date |
||||
CFDate.new(object) |
||||
|
||||
when Array, Enumerator |
||||
ary = Array.new |
||||
object.each do |o| |
||||
ary.push CFPropertyList.guess(o, options) |
||||
end |
||||
CFArray.new(ary) |
||||
|
||||
when Hash |
||||
hsh = Hash.new |
||||
object.each_pair do |k,v| |
||||
k = k.to_s if k.is_a?(Symbol) |
||||
hsh[k] = CFPropertyList.guess(v, options) |
||||
end |
||||
CFDictionary.new(hsh) |
||||
else |
||||
case |
||||
when Object.const_defined?('BigDecimal') && object.is_a?(BigDecimal) |
||||
CFReal.new(object) |
||||
when object.respond_to?(:read) |
||||
raw_data = object.read |
||||
# treat the data as a bytestring (ASCII-8BIT) if Ruby supports it. Do this by forcing |
||||
# the encoding, on the assumption that the bytes were read correctly, and just tagged with |
||||
# an inappropriate encoding, rather than transcoding. |
||||
raw_data.force_encoding(Encoding::ASCII_8BIT) if raw_data.respond_to?(:force_encoding) |
||||
CFData.new(raw_data, CFData::DATA_RAW) |
||||
when options[:converter_method] && object.respond_to?(options[:converter_method]) |
||||
if options[:converter_with_opts] |
||||
CFPropertyList.guess(object.send(options[:converter_method],options),options) |
||||
else |
||||
CFPropertyList.guess(object.send(options[:converter_method]),options) |
||||
end |
||||
when options[:convert_unknown_to_string] |
||||
CFString.new(object.to_s) |
||||
else |
||||
raise CFTypeError.new("Unknown class #{object.class.to_s}. Try using :convert_unknown_to_string if you want to use unknown object types!") |
||||
end |
||||
end |
||||
end |
||||
|
||||
# Converts a CFType hierarchy to native Ruby types |
||||
def native_types(object,keys_as_symbols=false) |
||||
return if object.nil? |
||||
|
||||
if(object.is_a?(CFDate) || object.is_a?(CFString) || object.is_a?(CFInteger) || object.is_a?(CFReal) || object.is_a?(CFBoolean)) || object.is_a?(CFUid) then |
||||
return object.value |
||||
elsif(object.is_a?(CFData)) then |
||||
return CFPropertyList::Blob.new(object.decoded_value) |
||||
elsif(object.is_a?(CFArray)) then |
||||
ary = [] |
||||
object.value.each do |
||||
|v| |
||||
ary.push CFPropertyList.native_types(v) |
||||
end |
||||
|
||||
return ary |
||||
elsif(object.is_a?(CFDictionary)) then |
||||
hsh = {} |
||||
object.value.each_pair do |
||||
|k,v| |
||||
k = k.to_sym if keys_as_symbols |
||||
hsh[k] = CFPropertyList.native_types(v) |
||||
end |
||||
|
||||
return hsh |
||||
end |
||||
end |
||||
|
||||
module_function :guess, :native_types |
||||
|
||||
# Class representing a CFPropertyList. Instantiate with #new |
||||
class List |
||||
# Format constant for binary format |
||||
FORMAT_BINARY = 1 |
||||
|
||||
# Format constant for XML format |
||||
FORMAT_XML = 2 |
||||
|
||||
# Format constant for the old plain format |
||||
FORMAT_PLAIN = 3 |
||||
|
||||
# Format constant for automatic format recognizing |
||||
FORMAT_AUTO = 0 |
||||
|
||||
@@parsers = [Binary, CFPropertyList.xml_parser_interface, PlainParser] |
||||
|
||||
# Path of PropertyList |
||||
attr_accessor :filename |
||||
# the original format of the PropertyList |
||||
attr_accessor :format |
||||
# the root value in the plist file |
||||
attr_accessor :value |
||||
# default value for XML generation; if true generate formatted XML |
||||
attr_accessor :formatted |
||||
|
||||
# initialize a new CFPropertyList, arguments are: |
||||
# |
||||
# :file:: Parse a file |
||||
# :format:: Format is one of FORMAT_BINARY or FORMAT_XML. Defaults to FORMAT_AUTO |
||||
# :data:: Parse a string |
||||
# |
||||
# All arguments are optional |
||||
def initialize(opts={}) |
||||
@filename = opts[:file] |
||||
@format = opts[:format] || FORMAT_AUTO |
||||
@data = opts[:data] |
||||
@formatted = opts[:formatted] |
||||
|
||||
load(@filename) unless @filename.nil? |
||||
load_str(@data) unless @data.nil? |
||||
end |
||||
|
||||
# returns a list of registered parsers |
||||
def self.parsers |
||||
@@parsers |
||||
end |
||||
|
||||
# set a list of parsers |
||||
def self.parsers=(val) |
||||
@@parsers = val |
||||
end |
||||
|
||||
# Load an XML PropertyList |
||||
# filename = nil:: The filename to read from; if nil, read from the file defined by instance variable +filename+ |
||||
def load_xml(filename=nil) |
||||
load(filename,List::FORMAT_XML) |
||||
end |
||||
|
||||
# read a binary plist file |
||||
# filename = nil:: The filename to read from; if nil, read from the file defined by instance variable +filename+ |
||||
def load_binary(filename=nil) |
||||
load(filename,List::FORMAT_BINARY) |
||||
end |
||||
|
||||
# read a plain plist file |
||||
# filename = nil:: The filename to read from; if nil, read from the file defined by instance variable +filename+ |
||||
def load_plain(filename=nil) |
||||
load(filename,List::FORMAT_PLAIN) |
||||
end |
||||
|
||||
# load a plist from a XML string |
||||
# str:: The string containing the plist |
||||
def load_xml_str(str=nil) |
||||
load_str(str,List::FORMAT_XML) |
||||
end |
||||
|
||||
# load a plist from a binary string |
||||
# str:: The string containing the plist |
||||
def load_binary_str(str=nil) |
||||
load_str(str,List::FORMAT_BINARY) |
||||
end |
||||
|
||||
# load a plist from a plain string |
||||
# str:: The string containing the plist |
||||
def load_plain_str(str=nil) |
||||
load_str(str,List::FORMAT_PLAIN) |
||||
end |
||||
|
||||
# load a plist from a string |
||||
# str = nil:: The string containing the plist |
||||
# format = nil:: The format of the plist |
||||
def load_str(str=nil,format=nil) |
||||
str = @data if str.nil? |
||||
format = @format if format.nil? |
||||
|
||||
@value = {} |
||||
case format |
||||
when List::FORMAT_BINARY, List::FORMAT_XML, List::FORMAT_PLAIN then |
||||
prsr = @@parsers[format-1].new |
||||
@value = prsr.load({:data => str}) |
||||
|
||||
when List::FORMAT_AUTO then # what we now do is ugly, but necessary to recognize the file format |
||||
filetype = str[0..5] |
||||
version = str[6..7] |
||||
|
||||
prsr = nil |
||||
|
||||
if filetype == "bplist" then |
||||
raise CFFormatError.new("Wrong file version #{version}") unless version == "00" |
||||
prsr = Binary.new |
||||
@format = List::FORMAT_BINARY |
||||
else |
||||
if str =~ /^<(\?xml|!DOCTYPE|plist)/ |
||||
prsr = CFPropertyList.xml_parser_interface.new |
||||
@format = List::FORMAT_XML |
||||
else |
||||
prsr = PlainParser.new |
||||
@format = List::FORMAT_PLAIN |
||||
end |
||||
end |
||||
|
||||
@value = prsr.load({:data => str}) |
||||
end |
||||
end |
||||
|
||||
# Read a plist file |
||||
# file = nil:: The filename of the file to read. If nil, use +filename+ instance variable |
||||
# format = nil:: The format of the plist file. Auto-detect if nil |
||||
def load(file=nil,format=nil) |
||||
file = @filename if file.nil? |
||||
format = @format if format.nil? |
||||
@value = {} |
||||
|
||||
raise IOError.new("File #{file} not readable!") unless File.readable? file |
||||
|
||||
case format |
||||
when List::FORMAT_BINARY, List::FORMAT_XML, List::FORMAT_PLAIN then |
||||
prsr = @@parsers[format-1].new |
||||
@value = prsr.load({:file => file}) |
||||
|
||||
when List::FORMAT_AUTO then # what we now do is ugly, but necessary to recognize the file format |
||||
magic_number = IO.read(file,12) |
||||
raise IOError.new("File #{file} is empty.") unless magic_number |
||||
filetype = magic_number[0..5] |
||||
version = magic_number[6..7] |
||||
|
||||
prsr = nil |
||||
if filetype == "bplist" then |
||||
raise CFFormatError.new("Wrong file version #{version}") unless version == "00" |
||||
prsr = Binary.new |
||||
@format = List::FORMAT_BINARY |
||||
else |
||||
if magic_number =~ /^<(\?xml|!DOCTYPE|plist)/ |
||||
prsr = CFPropertyList.xml_parser_interface.new |
||||
@format = List::FORMAT_XML |
||||
else |
||||
prsr = PlainParser.new |
||||
@format = List::FORMAT_PLAIN |
||||
end |
||||
end |
||||
|
||||
@value = prsr.load({:file => file}) |
||||
end |
||||
|
||||
raise CFFormatError.new("Invalid format or parser error!") if @value.nil? |
||||
end |
||||
|
||||
# Serialize CFPropertyList object to specified format and write it to file |
||||
# file = nil:: The filename of the file to write to. Uses +filename+ instance variable if nil |
||||
# format = nil:: The format to save in. Uses +format+ instance variable if nil |
||||
def save(file=nil,format=nil,opts={}) |
||||
format = @format if format.nil? |
||||
file = @filename if file.nil? |
||||
|
||||
if format != FORMAT_BINARY && format != FORMAT_XML && format != FORMAT_PLAIN |
||||
raise CFFormatError.new("Format #{format} not supported, use List::FORMAT_BINARY or List::FORMAT_XML") |
||||
end |
||||
|
||||
if(!File.exist?(file)) then |
||||
raise IOError.new("File #{file} not writable!") unless File.writable?(File.dirname(file)) |
||||
elsif(!File.writable?(file)) then |
||||
raise IOError.new("File #{file} not writable!") |
||||
end |
||||
|
||||
opts[:root] = @value |
||||
opts[:formatted] = @formatted unless opts.has_key?(:formatted) |
||||
|
||||
prsr = @@parsers[format-1].new |
||||
|
||||
content = prsr.to_str(opts) |
||||
|
||||
File.open(file, 'wb') { |
||||
|fd| |
||||
fd.write content |
||||
} |
||||
end |
||||
|
||||
# convert plist to string |
||||
# format = List::FORMAT_BINARY:: The format to save the plist |
||||
# opts={}:: Pass parser options |
||||
def to_str(format=List::FORMAT_BINARY,opts={}) |
||||
if format != FORMAT_BINARY && format != FORMAT_XML && format != FORMAT_PLAIN |
||||
raise CFFormatError.new("Format #{format} not supported, use List::FORMAT_BINARY or List::FORMAT_XML") |
||||
end |
||||
|
||||
prsr = @@parsers[format-1].new |
||||
|
||||
opts[:root] = @value |
||||
opts[:formatted] = @formatted unless opts.has_key?(:formatted) |
||||
|
||||
return prsr.to_str(opts) |
||||
end |
||||
end |
||||
end |
||||
|
||||
|
||||
class Array |
||||
# convert an array to plist format |
||||
def to_plist(options={}) |
||||
options[:plist_format] ||= CFPropertyList::List::FORMAT_BINARY |
||||
|
||||
plist = CFPropertyList::List.new |
||||
plist.value = CFPropertyList.guess(self, options) |
||||
plist.to_str(options[:plist_format], options) |
||||
end |
||||
end |
||||
|
||||
class Enumerator |
||||
# convert an array to plist format |
||||
def to_plist(options={}) |
||||
options[:plist_format] ||= CFPropertyList::List::FORMAT_BINARY |
||||
|
||||
plist = CFPropertyList::List.new |
||||
plist.value = CFPropertyList.guess(self, options) |
||||
plist.to_str(options[:plist_format], options) |
||||
end |
||||
end |
||||
|
||||
class Hash |
||||
# convert a hash to plist format |
||||
def to_plist(options={}) |
||||
options[:plist_format] ||= CFPropertyList::List::FORMAT_BINARY |
||||
|
||||
plist = CFPropertyList::List.new |
||||
plist.value = CFPropertyList.guess(self, options) |
||||
plist.to_str(options[:plist_format], options) |
||||
end |
||||
end |
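# Illustrative usage of the to_plist core extensions defined above (not part of the original file):
#   { 'name' => 'John Doe' }.to_plist(:plist_format => CFPropertyList::List::FORMAT_XML)  # => XML plist string
#   ['a', 'b', 'c'].to_plist                                                              # => binary plist data (default)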
||||
|
||||
# eof |
@@ -1,349 +0,0 @@
|
||||
# -*- coding: utf-8 -*- |
||||
# |
||||
# CFTypes, e.g. CFString, CFInteger |
||||
# needed to create unambiguous plists |
||||
# |
||||
# Author:: Christian Kruse (mailto:cjk@wwwtech.de) |
||||
# Copyright:: Copyright (c) 2009 |
||||
# License:: MIT License |
||||
|
||||
require 'base64' |
||||
|
||||
module CFPropertyList |
||||
## |
||||
# Blob is intended to distinguish between a Ruby String instance that should |
||||
# be converted to a CFString type and a Ruby String instance that should be |
||||
# converted to a CFData type |
||||
class Blob < String |
||||
end |
||||
|
||||
## |
||||
# UidFixnum is intended to distinguish between a Ruby Integer |
||||
# instance that should be converted to a CFInteger/CFReal type and a |
||||
# Ruby Integer instance that should be converted to a CFUid type. |
||||
class UidFixnum < Integer |
||||
end |
||||
|
||||
# This class defines the base class for all CFType classes |
||||
# |
||||
class CFType |
||||
# value of the type |
||||
attr_accessor :value |
||||
|
||||
def initialize(value=nil) |
||||
@value = value |
||||
end |
||||
|
||||
def to_xml(parser) |
||||
end |
||||
|
||||
def to_binary(bplist) |
||||
end |
||||
|
||||
def to_plain(plist) |
||||
end |
||||
end |
||||
|
||||
# This class holds string values, both UTF-8 and UTF-16BE. |
||||
# It will convert the value to UTF-16BE if necessary (i.e. if a non-ASCII character is contained) |
||||
class CFString < CFType |
||||
# convert to XML |
||||
def to_xml(parser) |
||||
n = parser.new_node('string') |
||||
n = parser.append_node(n, parser.new_text(@value)) unless @value.nil? |
||||
n |
||||
end |
||||
|
||||
# convert to binary |
||||
def to_binary(bplist) |
||||
bplist.string_to_binary(@value); |
||||
end |
||||
|
||||
def to_plain(plist) |
||||
if @value =~ /^\w+$/ |
||||
@value |
||||
else |
||||
quoted |
||||
end |
||||
end |
||||
|
||||
def quoted |
||||
str = '"' |
||||
@value.each_char do |c| |
||||
str << case c |
||||
when '"' |
||||
'\\"' |
||||
when '\\' |
||||
'\\\\' |
||||
when "\a" |
||||
"\\a" |
||||
when "\b" |
||||
"\\b" |
||||
when "\f" |
||||
"\\f" |
||||
when "\n" |
||||
"\\n" |
||||
when "\v" |
||||
"\\v" |
||||
when "\r" |
||||
"\\r" |
||||
when "\t" |
||||
"\\t" |
||||
else |
||||
c |
||||
end |
||||
end |
||||
|
||||
str << '"' |
||||
end |
||||
end |
||||
|
||||
# This class holds integer/fixnum values |
||||
class CFInteger < CFType |
||||
# convert to XML |
||||
def to_xml(parser) |
||||
n = parser.new_node('integer') |
||||
n = parser.append_node(n, parser.new_text(@value.to_s)) |
||||
n |
||||
end |
||||
|
||||
# convert to binary |
||||
def to_binary(bplist) |
||||
bplist.num_to_binary(self) |
||||
end |
||||
|
||||
def to_plain(plist) |
||||
@value.to_s |
||||
end |
||||
end |
||||
|
||||
# This class holds float values |
||||
class CFReal < CFType |
||||
# convert to XML |
||||
def to_xml(parser) |
||||
n = parser.new_node('real') |
||||
n = parser.append_node(n, parser.new_text(@value.to_s)) |
||||
n |
||||
end |
||||
|
||||
# convert to binary |
||||
def to_binary(bplist) |
||||
bplist.num_to_binary(self) |
||||
end |
||||
|
||||
def to_plain(plist) |
||||
@value.to_s |
||||
end |
||||
end |
||||
|
||||
# This class holds Time values. While Apple uses seconds since 2001, |
||||
# the rest of the world uses seconds since 1970. So if you access value |
||||
# directly, you get the Time class. If you access via get_value you either |
||||
# get the UNIX timestamp or the Apple timestamp |
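# For example (illustrative):
#   date = CFDate.new(Time.utc(2001, 1, 1))
#   date.get_value(CFDate::TIMESTAMP_UNIX)   # => 978307200
#   date.get_value(CFDate::TIMESTAMP_APPLE)  # => 0.0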
||||
class CFDate < CFType |
||||
TIMESTAMP_APPLE = 0 |
||||
TIMESTAMP_UNIX = 1 |
||||
DATE_DIFF_APPLE_UNIX = 978307200 |
||||
|
||||
# create an XML date string from a Time object |
||||
def CFDate.date_string(val) |
||||
# 2009-05-13T20:23:43Z |
||||
val.getutc.strftime("%Y-%m-%dT%H:%M:%SZ") |
||||
end |
||||
|
||||
# parse an XML date string |
||||
def CFDate.parse_date(val) |
||||
# 2009-05-13T20:23:43Z |
||||
val =~ %r{^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})Z$} |
||||
year,month,day,hour,min,sec = $1, $2, $3, $4, $5, $6 |
||||
return Time.utc(year,month,day,hour,min,sec).getlocal |
||||
end |
||||
|
||||
# set value to defined state |
||||
def initialize(value = nil,format=CFDate::TIMESTAMP_UNIX) |
||||
if(value.is_a?(Time) || value.nil?) then |
||||
@value = value.nil? ? Time.now : value |
||||
elsif value.instance_of? Date |
||||
@value = Time.utc(value.year, value.month, value.day, 0, 0, 0) |
||||
elsif value.instance_of? DateTime |
||||
@value = value.to_time.utc |
||||
else |
||||
set_value(value,format) |
||||
end |
||||
end |
||||
|
||||
# set value with timestamp, either Apple or UNIX |
||||
def set_value(value,format=CFDate::TIMESTAMP_UNIX) |
||||
if(format == CFDate::TIMESTAMP_UNIX) then |
||||
@value = Time.at(value) |
||||
else |
||||
@value = Time.at(value + CFDate::DATE_DIFF_APPLE_UNIX) |
||||
end |
||||
end |
||||
|
||||
# get timestamp, either UNIX or Apple timestamp |
||||
def get_value(format=CFDate::TIMESTAMP_UNIX) |
||||
if(format == CFDate::TIMESTAMP_UNIX) then |
||||
@value.to_i |
||||
else |
||||
@value.to_f - CFDate::DATE_DIFF_APPLE_UNIX |
||||
end |
||||
end |
||||
|
||||
# convert to XML |
||||
def to_xml(parser) |
||||
n = parser.new_node('date') |
||||
n = parser.append_node(n, parser.new_text(CFDate::date_string(@value))) |
||||
n |
||||
end |
||||
|
||||
# convert to binary |
||||
def to_binary(bplist) |
||||
bplist.date_to_binary(@value) |
||||
end |
||||
|
||||
def to_plain(plist) |
||||
@value.strftime("%Y-%m-%d %H:%M:%S %z") |
||||
end |
||||
end |
||||
|
||||
# This class contains a boolean value |
||||
class CFBoolean < CFType |
||||
# convert to XML |
||||
def to_xml(parser) |
||||
parser.new_node(@value ? 'true' : 'false') |
||||
end |
||||
|
||||
# convert to binary |
||||
def to_binary(bplist) |
||||
bplist.bool_to_binary(@value); |
||||
end |
||||
|
||||
def to_plain(plist) |
||||
@value ? "true" : "false" |
||||
end |
||||
end |
||||
|
||||
# This class contains binary data values |
||||
class CFData < CFType |
||||
# Base64 encoded data |
||||
DATA_BASE64 = 0 |
||||
# Raw data |
||||
DATA_RAW = 1 |
||||
|
||||
# set value to defined state, either base64 encoded or raw |
||||
def initialize(value=nil,format=DATA_BASE64) |
||||
if(format == DATA_RAW) |
||||
@raw_value = value |
||||
else |
||||
@value = value |
||||
end |
||||
end |
||||
|
||||
# get base64 encoded value |
||||
def encoded_value |
||||
@value ||= "\n#{Base64.encode64(@raw_value).gsub("\n", '').scan(/.{1,76}/).join("\n")}\n" |
||||
end |
||||
|
||||
# get base64 decoded value |
||||
def decoded_value |
||||
@raw_value ||= Blob.new(Base64.decode64(@value)) |
||||
end |
||||
|
||||
# convert to XML |
||||
def to_xml(parser) |
||||
n = parser.new_node('data') |
||||
n = parser.append_node(n, parser.new_text(encoded_value())) |
||||
n |
||||
end |
||||
|
||||
# convert to binary |
||||
def to_binary(bplist) |
||||
bplist.data_to_binary(decoded_value()) |
||||
end |
||||
|
||||
def to_plain(plist) |
||||
"<" + decoded_value.unpack("H*").join("") + ">" |
||||
end |
||||
end |
||||
|
||||
# This class contains an array of values |
||||
class CFArray < CFType |
||||
# create a new array CFType |
||||
def initialize(val=[]) |
||||
@value = val |
||||
end |
||||
|
||||
# convert to XML |
||||
def to_xml(parser) |
||||
n = parser.new_node('array') |
||||
@value.each do |v| |
||||
n = parser.append_node(n, v.to_xml(parser)) |
||||
end |
||||
n |
||||
end |
||||
|
||||
# convert to binary |
||||
def to_binary(bplist) |
||||
bplist.array_to_binary(self) |
||||
end |
||||
|
||||
def to_plain(plist) |
||||
ary = @value.map { |v| v.to_plain(plist) } |
||||
"( " + ary.join(", ") + " )" |
||||
end |
||||
end |
||||
|
||||
# this class contains a hash of values |
||||
class CFDictionary < CFType |
||||
# Create a new CFDictionary type. |
||||
def initialize(value={}) |
||||
@value = value |
||||
end |
||||
|
||||
# convert to XML |
||||
def to_xml(parser) |
||||
n = parser.new_node('dict') |
||||
@value.each_pair do |key, value| |
||||
k = parser.append_node(parser.new_node('key'), parser.new_text(key.to_s)) |
||||
n = parser.append_node(n, k) |
||||
n = parser.append_node(n, value.to_xml(parser)) |
||||
end |
||||
n |
||||
end |
||||
|
||||
# convert to binary |
||||
def to_binary(bplist) |
||||
bplist.dict_to_binary(self) |
||||
end |
||||
|
||||
def to_plain(plist) |
||||
str = "{ " |
||||
cfstr = CFString.new() |
||||
|
||||
@value.each do |k,v| |
||||
cfstr.value = k |
||||
str << cfstr.to_plain(plist) + " = " + v.to_plain(plist) + "; " |
||||
end |
||||
|
||||
str << "}" |
||||
end |
||||
end |
||||
|
||||
class CFUid < CFType |
||||
def to_xml(parser) |
||||
CFDictionary.new({'CF$UID' => CFInteger.new(@value)}).to_xml(parser) |
||||
end |
||||
|
||||
# convert to binary |
||||
def to_binary(bplist) |
||||
bplist.uid_to_binary(@value) |
||||
end |
||||
|
||||
def to_plain(plist) |
||||
CFDictionary.new({'CF$UID' => CFInteger.new(@value)}).to_plain(plist) |
||||
end |
||||
end |
||||
end |
||||
|
||||
# eof |
@@ -1,149 +0,0 @@
|
||||
# -*- coding: utf-8 -*- |
||||
|
||||
require 'libxml' |
||||
|
||||
module CFPropertyList |
||||
# XML parser |
||||
class LibXMLParser < XMLParserInterface |
||||
LibXML::XML::Error.set_handler(&LibXML::XML::Error::QUIET_HANDLER) |
||||
PARSER_OPTIONS = LibXML::XML::Parser::Options::NOBLANKS|LibXML::XML::Parser::Options::NONET |
||||
# read an XML file |
||||
# opts:: |
||||
# * :file - The filename of the file to load |
||||
# * :data - The data to parse |
||||
def load(opts) |
||||
doc = nil |
||||
|
||||
if(opts.has_key?(:file)) then |
||||
doc = LibXML::XML::Document.file(opts[:file],:options => PARSER_OPTIONS) |
||||
else |
||||
doc = LibXML::XML::Document.string(opts[:data],:options => PARSER_OPTIONS) |
||||
end |
||||
|
||||
if doc |
||||
root = doc.root.first |
||||
return import_xml(root) |
||||
end |
||||
rescue LibXML::XML::Error => e |
||||
raise CFFormatError.new('invalid XML: ' + e.message) |
||||
end |
||||
|
||||
# serialize CFPropertyList object to XML |
||||
# opts = {}:: Specify options: :formatted - Use indention and line breaks |
||||
def to_str(opts={}) |
||||
doc = LibXML::XML::Document.new |
||||
|
||||
doc.root = LibXML::XML::Node.new('plist') |
||||
doc.encoding = LibXML::XML::Encoding::UTF_8 |
||||
|
||||
doc.root['version'] = '1.0' |
||||
doc.root << opts[:root].to_xml(self) |
||||
|
||||
# ugly hack, but there's no other possibility I know |
||||
str = doc.to_s(:indent => opts[:formatted]) |
||||
str1 = String.new |
||||
first = false |
||||
str.each_line do |line| |
||||
str1 << line |
||||
unless(first) then |
||||
str1 << "<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n" if line =~ /^\s*<\?xml/ |
||||
end |
||||
|
||||
first = true |
||||
end |
||||
|
||||
str1.force_encoding('UTF-8') if str1.respond_to?(:force_encoding) |
||||
return str1 |
||||
end |
||||
|
||||
def new_node(name) |
||||
LibXML::XML::Node.new(name) |
||||
end |
||||
|
||||
def new_text(val) |
||||
LibXML::XML::Node.new_text(val) |
||||
end |
||||
|
||||
def append_node(parent, child) |
||||
parent << child |
||||
end |
||||
|
||||
protected |
||||
|
||||
# get the value of a DOM node |
||||
def get_value(n) |
||||
content = if n.children? |
||||
n.first.content |
||||
else |
||||
n.content |
||||
end |
||||
|
||||
content.force_encoding('UTF-8') if content.respond_to?(:force_encoding) |
||||
content |
||||
end |
||||
|
||||
# import the XML values |
||||
def import_xml(node) |
||||
ret = nil |
||||
|
||||
case node.name |
||||
when 'dict' |
||||
hsh = Hash.new |
||||
key = nil |
||||
|
||||
if node.children? then |
||||
node.children.each do |n| |
||||
next if n.text? # avoid a bug of libxml |
||||
next if n.comment? |
||||
|
||||
if n.name == "key" then |
||||
key = get_value(n) |
||||
else |
||||
raise CFFormatError.new("Format error!") if key.nil? |
||||
hsh[key] = import_xml(n) |
||||
key = nil |
||||
end |
||||
end |
||||
end |
||||
|
||||
if hsh['CF$UID'] and hsh.keys.length == 1 |
||||
ret = CFUid.new(hsh['CF$UID'].value) |
||||
else |
||||
ret = CFDictionary.new(hsh) |
||||
end |
||||
|
||||
when 'array' |
||||
ary = Array.new |
||||
|
||||
if node.children? then |
||||
node.children.each do |n| |
||||
next if n.text? # avoid a bug of libxml |
||||
next if n.comment? |
||||
ary.push import_xml(n) |
||||
end |
||||
end |
||||
|
||||
ret = CFArray.new(ary) |
||||
|
||||
when 'true' |
||||
ret = CFBoolean.new(true) |
||||
when 'false' |
||||
ret = CFBoolean.new(false) |
||||
when 'real' |
||||
ret = CFReal.new(get_value(node).to_f) |
||||
when 'integer' |
||||
ret = CFInteger.new(get_value(node).to_i) |
||||
when 'string' |
||||
ret = CFString.new(get_value(node)) |
||||
when 'data' |
||||
ret = CFData.new(get_value(node)) |
||||
when 'date' |
||||
ret = CFDate.new(CFDate.parse_date(get_value(node))) |
||||
end |
||||
|
||||
return ret |
||||
end |
||||
end |
||||
end |
||||
|
||||
# eof |
@@ -1,152 +0,0 @@
|
||||
# -*- coding: utf-8 -*- |
||||
|
||||
require 'nokogiri' |
||||
|
||||
module CFPropertyList |
||||
# XML parser |
||||
class NokogiriXMLParser < ParserInterface |
||||
PARSER_OPTIONS = Nokogiri::XML::ParseOptions::NOBLANKS|Nokogiri::XML::ParseOptions::NONET |
||||
# read an XML file |
||||
# opts:: |
||||
# * :file - The filename of the file to load |
||||
# * :data - The data to parse |
||||
def load(opts) |
||||
doc = nil |
||||
if(opts.has_key?(:file)) then |
||||
File.open(opts[:file], "rb") { |fd| doc = Nokogiri::XML::Document.parse(fd, nil, nil, PARSER_OPTIONS) } |
||||
else |
||||
doc = Nokogiri::XML::Document.parse(opts[:data], nil, nil, PARSER_OPTIONS) |
||||
end |
||||
|
||||
if doc |
||||
root = doc.root.children.first |
||||
return import_xml(root) |
||||
end |
||||
rescue Nokogiri::XML::SyntaxError => e |
||||
raise CFFormatError.new('invalid XML: ' + e.message) |
||||
end |
||||
|
||||
# serialize CFPropertyList object to XML |
||||
# opts = {}:: Specify options: :formatted - Use indention and line breaks |
||||
def to_str(opts={}) |
||||
doc = Nokogiri::XML::Document.new |
||||
@doc = doc |
||||
|
||||
doc.root = doc.create_element 'plist', :version => '1.0' |
||||
doc.encoding = 'UTF-8' |
||||
|
||||
doc.root << opts[:root].to_xml(self) |
||||
|
||||
# ugly hack, but there's no other possibility I know |
||||
s_opts = Nokogiri::XML::Node::SaveOptions::AS_XML |
||||
s_opts |= Nokogiri::XML::Node::SaveOptions::FORMAT if opts[:formatted] |
||||
|
||||
str = doc.serialize(:save_with => s_opts) |
||||
str1 = String.new |
||||
first = false |
||||
str.each_line do |line| |
||||
str1 << line |
||||
unless(first) then |
||||
str1 << "<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n" if line =~ /^\s*<\?xml/ |
||||
end |
||||
|
||||
first = true |
||||
end |
||||
|
||||
str1.force_encoding('UTF-8') if str1.respond_to?(:force_encoding) |
||||
return str1 |
||||
end |
||||
|
||||
def new_node(name) |
||||
@doc.create_element name |
||||
end |
||||
|
||||
def new_text(val) |
||||
@doc.create_text_node val |
||||
end |
||||
|
||||
def append_node(parent, child) |
||||
parent << child |
||||
end |
||||
|
||||
protected |
||||
|
||||
# get the value of a DOM node |
||||
def get_value(n) |
||||
content = if n.children.empty? |
||||
n.content |
||||
else |
||||
n.children.first.content |
||||
end |
||||
|
||||
content.force_encoding('UTF-8') if content.respond_to?(:force_encoding) |
||||
content |
||||
end |
||||
|
||||
# import the XML values |
||||
def import_xml(node) |
||||
ret = nil |
||||
|
||||
case node.name |
||||
when 'dict' |
||||
hsh = Hash.new |
||||
key = nil |
||||
children = node.children |
||||
|
||||
unless children.empty? then |
||||
children.each do |n| |
||||
next if n.text? # avoid a bug of libxml |
||||
next if n.comment? |
||||
|
||||
if n.name == "key" then |
||||
key = get_value(n) |
||||
else |
||||
raise CFFormatError.new("Format error!") if key.nil? |
||||
hsh[key] = import_xml(n) |
||||
key = nil |
||||
end |
||||
end |
||||
end |
||||
|
||||
if hsh['CF$UID'] and hsh.keys.length == 1 |
||||
ret = CFUid.new(hsh['CF$UID'].value) |
||||
else |
||||
ret = CFDictionary.new(hsh) |
||||
end |
||||
|
||||
when 'array' |
||||
ary = Array.new |
||||
children = node.children |
||||
|
||||
unless children.empty? then |
||||
children.each do |n| |
||||
next if n.text? # avoid a bug of libxml |
||||
next if n.comment? |
||||
ary.push import_xml(n) |
||||
end |
||||
end |
||||
|
||||
ret = CFArray.new(ary) |
||||
|
||||
when 'true' |
||||
ret = CFBoolean.new(true) |
||||
when 'false' |
||||
ret = CFBoolean.new(false) |
||||
when 'real' |
||||
ret = CFReal.new(get_value(node).to_f) |
||||
when 'integer' |
||||
ret = CFInteger.new(get_value(node).to_i) |
||||
when 'string' |
||||
ret = CFString.new(get_value(node)) |
||||
when 'data' |
||||
ret = CFData.new(get_value(node)) |
||||
when 'date' |
||||
ret = CFDate.new(CFDate.parse_date(get_value(node))) |
||||
end |
||||
|
||||
return ret |
||||
end |
||||
end |
||||
end |
||||
|
||||
# eof |
@@ -1,199 +0,0 @@
|
||||
# -*- coding: utf-8 -*- |
||||
|
||||
require 'strscan' |
||||
|
||||
module CFPropertyList |
||||
# plain plist parser |
||||
class PlainParser < XMLParserInterface |
||||
# read a plain plist file |
||||
# opts:: |
||||
# * :file - The filename of the file to load |
||||
# * :data - The data to parse |
||||
def load(opts) |
||||
@doc = nil |
||||
|
||||
if(opts.has_key?(:file)) then |
||||
File.open(opts[:file], :external_encoding => "ASCII") do |fd| |
||||
@doc = StringScanner.new(fd.read) |
||||
end |
||||
else |
||||
@doc = StringScanner.new(opts[:data]) |
||||
end |
||||
|
||||
if @doc |
||||
root = import_plain |
||||
raise CFFormatError.new('content after root object') unless @doc.eos? |
||||
|
||||
return root |
||||
end |
||||
|
||||
raise CFFormatError.new('invalid plist string or file not found') |
||||
end |
||||
|
||||
SPACES_AND_COMMENTS = %r{((?:/\*.*?\*/)|(?://.*?$\n?)|(?:\s*))+}x |
||||
|
||||
# serialize CFPropertyList object to plain format |
||||
# opts = {}:: Specify options: :formatted - Use indention and line breaks |
||||
def to_str(opts={}) |
||||
opts[:root].to_plain(self) |
||||
end |
||||
|
||||
protected |
||||
def skip_whitespaces |
||||
@doc.skip SPACES_AND_COMMENTS |
||||
end |
||||
|
||||
def read_dict |
||||
skip_whitespaces |
||||
hsh = {} |
||||
|
||||
while not @doc.scan(/\}/) |
||||
key = import_plain |
||||
raise CFFormatError.new("invalid dictionary format") if !key |
||||
|
||||
if key.is_a?(CFString) |
||||
key = key.value |
||||
elsif key.is_a?(CFInteger) or key.is_a?(CFReal) |
||||
key = key.value.to_s |
||||
else |
||||
raise CFFormatError.new("invalid key format") |
||||
end |
||||
|
||||
skip_whitespaces |
||||
|
||||
raise CFFormatError.new("invalid dictionary format") unless @doc.scan(/=/) |
||||
|
||||
skip_whitespaces |
||||
val = import_plain |
||||
|
||||
skip_whitespaces |
||||
raise CFFormatError.new("invalid dictionary format") unless @doc.scan(/;/) |
||||
skip_whitespaces |
||||
|
||||
hsh[key] = val |
||||
raise CFFormatError.new("invalid dictionary format") if @doc.eos? |
||||
end |
||||
|
||||
CFDictionary.new(hsh) |
||||
end |
||||
|
||||
def read_array |
||||
skip_whitespaces |
||||
ary = [] |
||||
|
||||
while not @doc.scan(/\)/) |
||||
val = import_plain |
||||
|
||||
return nil if not val or not val.value |
||||
skip_whitespaces |
||||
|
||||
if not @doc.skip(/,\s*/) |
||||
if @doc.scan(/\)/) |
||||
ary << val |
||||
return CFArray.new(ary) |
||||
end |
||||
|
||||
raise CFFormatError.new("invalid array format") |
||||
end |
||||
|
||||
ary << val |
||||
raise CFFormatError.new("invalid array format") if @doc.eos? |
||||
end |
||||
|
||||
CFArray.new(ary) |
||||
end |
||||
|
||||
def escape_char |
||||
case @doc.matched |
||||
when '"' |
||||
'"' |
||||
when '\\' |
||||
'\\' |
||||
when 'a' |
||||
"\a" |
||||
when 'b' |
||||
"\b" |
||||
when 'f' |
||||
"\f" |
||||
when 'n' |
||||
"\n" |
||||
when 'v' |
||||
"\v" |
||||
when 'r' |
||||
"\r" |
||||
when 't' |
||||
"\t" |
||||
when 'U' |
||||
@doc.scan(/.{4}/).hex.chr('utf-8') |
||||
end |
||||
end |
||||
|
||||
def read_quoted |
||||
str = '' |
||||
|
||||
while not @doc.scan(/"/) |
||||
if @doc.scan(/\\/) |
||||
@doc.scan(/./) |
||||
str << escape_char |
||||
|
||||
elsif @doc.eos? |
||||
raise CFFormatError.new("unterminated string") |
||||
|
||||
else @doc.scan(/./) |
||||
str << @doc.matched |
||||
end |
||||
end |
||||
|
||||
CFString.new(str) |
||||
end |
||||
|
||||
def read_unquoted |
||||
raise CFFormatError.new("unexpected end of file") if @doc.eos? |
||||
|
||||
if @doc.scan(/(\d\d\d\d)-(\d\d)-(\d\d)\s+(\d\d):(\d\d):(\d\d)(?:\s+(\+|-)(\d\d)(\d\d))?/) |
||||
year,month,day,hour,min,sec,pl_min,tz_hour, tz_min = @doc[1], @doc[2], @doc[3], @doc[4], @doc[5], @doc[6], @doc[7], @doc[8], @doc[9] |
||||
CFDate.new(Time.new(year, month, day, hour, min, sec, pl_min ? sprintf("%s%s:%s", pl_min, tz_hour, tz_min) : nil)) |
||||
|
||||
elsif @doc.scan(/-?\d+?\.\d+\b/) |
||||
CFReal.new(@doc.matched.to_f) |
||||
|
||||
elsif @doc.scan(/-?\d+\b/) |
||||
CFInteger.new(@doc.matched.to_i) |
||||
|
||||
elsif @doc.scan(/\b(true|false)\b/) |
||||
CFBoolean.new(@doc.matched == 'true') |
||||
else |
||||
CFString.new(@doc.scan(/\w+/)) |
||||
end |
||||
end |
||||
|
||||
def read_binary |
||||
@doc.scan(/(.*?)>/) |
||||
|
||||
hex_str = @doc[1].gsub(/ /, '') |
||||
CFData.new([hex_str].pack("H*"), CFData::DATA_RAW) |
||||
end |
||||
|
||||
# import the plain plist values |
||||
def import_plain |
||||
skip_whitespaces |
||||
ret = nil |
||||
|
||||
if @doc.scan(/\{/) # dict |
||||
ret = read_dict |
||||
elsif @doc.scan(/\(/) # array |
||||
ret = read_array |
||||
elsif @doc.scan(/"/) # string |
||||
ret = read_quoted |
||||
elsif @doc.scan(/</) # binary |
||||
ret = read_binary |
||||
else # string w/o quotes |
||||
ret = read_unquoted |
||||
end |
||||
|
||||
return ret |
||||
end |
||||
end |
||||
end |
||||
|
||||
# eof |
@@ -1,148 +0,0 @@
|
||||
# -*- coding: utf-8 -*- |
||||
|
||||
require 'rexml/document' |
||||
|
||||
module CFPropertyList |
||||
# XML parser |
||||
class ReXMLParser < ParserInterface |
||||
# read an XML file |
||||
# opts:: |
||||
# * :file - The filename of the file to load |
||||
# * :data - The data to parse |
||||
def load(opts) |
||||
|
||||
doc = nil |
||||
if(opts.has_key?(:file)) then |
||||
File.open(opts[:file], "rb") { |fd| doc = REXML::Document.new(fd) } |
||||
else |
||||
doc = REXML::Document.new(opts[:data]) |
||||
end |
||||
|
||||
if doc |
||||
root = doc.root.elements[1] |
||||
return import_xml(root) |
||||
end |
||||
rescue REXML::ParseException => e |
||||
raise CFFormatError.new('invalid XML: ' + e.message) |
||||
end |
||||
|
||||
# serialize CFPropertyList object to XML |
||||
# opts = {}:: Specify options: :formatted - Use indention and line breaks |
||||
def to_str(opts={}) |
||||
doc = REXML::Document.new |
||||
@doc = doc |
||||
|
||||
doc.context[:attribute_quote] = :quote |
||||
|
||||
doc.add_element 'plist', {'version' => '1.0'} |
||||
doc.root << opts[:root].to_xml(self) |
||||
|
||||
formatter = if opts[:formatted] then |
||||
f = REXML::Formatters::Pretty.new(2) |
||||
f.compact = true |
||||
f.width = Float::INFINITY |
||||
f |
||||
else |
||||
REXML::Formatters::Default.new |
||||
end |
||||
|
||||
str = formatter.write(doc.root, "") |
||||
str1 = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n" + str + "\n" |
||||
str1.force_encoding('UTF-8') if str1.respond_to?(:force_encoding) |
||||
|
||||
return str1 |
||||
end |
||||
|
||||
def new_node(name) |
||||
REXML::Element.new(name) |
||||
end |
||||
|
||||
def new_text(val) |
||||
val |
||||
end |
||||
|
||||
def append_node(parent, child) |
||||
if child.is_a?(String) then |
||||
parent.add_text child |
||||
else |
||||
parent.elements << child |
||||
end |
||||
parent |
||||
end |
||||
|
||||
protected |
||||
|
||||
# get the value of a DOM node |
||||
def get_value(n) |
||||
content = n.text |
||||
|
||||
content.force_encoding('UTF-8') if content.respond_to?(:force_encoding) |
||||
content |
||||
end |
||||
|
||||
# import the XML values |
||||
def import_xml(node) |
||||
ret = nil |
||||
|
||||
case node.name |
||||
when 'dict' |
||||
hsh = Hash.new |
||||
key = nil |
||||
|
||||
if node.has_elements? then |
||||
node.elements.each do |n| |
||||
next if n.name == '#text' # avoid a bug of libxml |
||||
next if n.name == '#comment' |
||||
|
||||
if n.name == "key" then |
||||
key = get_value(n) |
||||
key = '' if key.nil? # REXML returns nil if key is empty |
||||
else |
||||
raise CFFormatError.new("Format error!") if key.nil? |
||||
hsh[key] = import_xml(n) |
||||
key = nil |
||||
end |
||||
end |
||||
end |
||||
|
||||
if hsh['CF$UID'] and hsh.keys.length == 1 |
||||
ret = CFUid.new(hsh['CF$UID'].value) |
||||
else |
||||
ret = CFDictionary.new(hsh) |
||||
end |
||||
|
||||
when 'array' |
||||
ary = Array.new |
||||
|
||||
if node.has_elements? then |
||||
node.elements.each do |n| |
||||
next if n.name == '#text' # avoid a bug of libxml |
||||
ary.push import_xml(n) |
||||
end |
||||
end |
||||
|
||||
ret = CFArray.new(ary) |
||||
|
||||
when 'true' |
||||
ret = CFBoolean.new(true) |
||||
when 'false' |
||||
ret = CFBoolean.new(false) |
||||
when 'real' |
||||
ret = CFReal.new(get_value(node).to_f) |
||||
when 'integer' |
||||
ret = CFInteger.new(get_value(node).to_i) |
||||
when 'string' |
||||
ret = CFString.new(get_value(node)) |
||||
ret.value = '' if ret.value.nil? # REXML returns nil for empty elements' .text attribute |
||||
when 'data' |
||||
ret = CFData.new(get_value(node)) |
||||
when 'date' |
||||
ret = CFDate.new(CFDate.parse_date(get_value(node))) |
||||
end |
||||
|
||||
return ret |
||||
end |
||||
end |
||||
end |
||||
|
||||
# eof |
@@ -1,708 +0,0 @@
|
||||
## Rails 6.1.5 (March 09, 2022) ## |
||||
|
||||
* Fix `ActiveSupport::Duration.build` to support negative values. |
||||
|
||||
The algorithm to collect the `parts` of the `ActiveSupport::Duration` |
||||
ignored the sign of the `value` and accumulated incorrect part values. This |
||||
impacted `ActiveSupport::Duration#sum` (which is dependent on `parts`) but |
||||
not `ActiveSupport::Duration#eql?` (which is dependent on `value`). |
||||
|
||||
*Caleb Buxton*, *Braden Staudacher* |
||||
|
||||
* `Time#change` and methods that call it (e.g. `Time#advance`) will now |
||||
return a `Time` with the timezone argument provided, if the caller was |
||||
initialized with a timezone argument. |
||||
|
||||
Fixes [#42467](https://github.com/rails/rails/issues/42467). |
||||
|
||||
*Alex Ghiculescu* |
||||
|
||||
* Clone to keep extended Logger methods for tagged logger. |
||||
|
||||
*Orhan Toy* |
||||
|
||||
* `assert_changes` works when including the `ActiveSupport::Assertions` module. |
||||
|
||||
*Pedro Medeiros* |
||||
|
||||
|
||||
## Rails 6.1.4.7 (March 08, 2022) ## |
||||
|
||||
* No changes. |
||||
|
||||
|
||||
## Rails 6.1.4.6 (February 11, 2022) ## |
||||
|
||||
* Fix `Reloader` method signature to work with the new `Executor` signature. |
||||
|
||||
|
||||
## Rails 6.1.4.5 (February 11, 2022) ## |
||||
|
||||
* No changes. |
||||
|
||||
|
||||
## Rails 6.1.4.4 (December 15, 2021) ## |
||||
|
||||
* No changes. |
||||
|
||||
|
||||
## Rails 6.1.4.3 (December 14, 2021) ## |
||||
|
||||
* No changes. |
||||
|
||||
|
||||
## Rails 6.1.4.2 (December 14, 2021) ## |
||||
|
||||
* No changes. |
||||
|
||||
|
||||
## Rails 6.1.4.1 (August 19, 2021) ## |
||||
|
||||
* No changes. |
||||
|
||||
|
||||
## Rails 6.1.4 (June 24, 2021) ## |
||||
|
||||
* MemCacheStore: convert any underlying value (including `false`) to an `Entry`. |
||||
|
||||
See [#42559](https://github.com/rails/rails/pull/42559). |
||||
|
||||
*Alex Ghiculescu* |
||||
|
||||
* Fix bug in `number_with_precision` when using large `BigDecimal` values. |
||||
|
||||
Fixes #42302. |
||||
|
||||
*Federico Aldunate*, *Zachary Scott* |
||||
|
||||
* Check byte size instead of length on `secure_compare`. |
||||
|
||||
*Tietew* |
||||
|
||||
* Fix `Time.at` to not lose `:in` option. |
||||
|
||||
*Ryuta Kamizono* |
||||
|
||||
* Require a path for `config.cache_store = :file_store`. |
||||
|
||||
*Alex Ghiculescu* |
||||
|
||||
* Avoid having to store complex object in the default translation file. |
||||
|
||||
*Rafael Mendonça França* |
||||
|
||||
|
||||
## Rails 6.1.3.2 (May 05, 2021) ## |
||||
|
||||
* No changes. |
||||
|
||||
|
||||
## Rails 6.1.3.1 (March 26, 2021) ## |
||||
|
||||
* No changes. |
||||
|
||||
|
||||
## Rails 6.1.3 (February 17, 2021) ## |
||||
|
||||
* No changes. |
||||
|
||||
|
||||
## Rails 6.1.2.1 (February 10, 2021) ## |
||||
|
||||
* No changes. |
||||
|
||||
|
||||
## Rails 6.1.2 (February 09, 2021) ## |
||||
|
||||
* `ActiveSupport::Cache::MemCacheStore` now accepts an explicit `nil` for its `addresses` argument. |
||||
|
||||
```ruby |
||||
config.cache_store = :mem_cache_store, nil |
||||
|
||||
# is now equivalent to |
||||
|
||||
config.cache_store = :mem_cache_store |
||||
|
||||
# and is also equivalent to |
||||
|
||||
config.cache_store = :mem_cache_store, ENV["MEMCACHE_SERVERS"] || "localhost:11211" |
||||
|
||||
# which is the fallback behavior of Dalli |
||||
``` |
||||
|
||||
This helps those migrating from `:dalli_store`, where an explicit `nil` was permitted. |
||||
|
||||
*Michael Overmeyer* |
||||
|
||||
|
||||
## Rails 6.1.1 (January 07, 2021) ## |
||||
|
||||
* Change `IPAddr#to_json` to match the behavior of the json gem returning the string representation |
||||
instead of the instance variables of the object. |
||||
|
||||
Before: |
||||
|
||||
```ruby |
||||
IPAddr.new("127.0.0.1").to_json |
||||
# => "{\"addr\":2130706433,\"family\":2,\"mask_addr\":4294967295}" |
||||
``` |
||||
|
||||
After: |
||||
|
||||
```ruby |
||||
IPAddr.new("127.0.0.1").to_json |
||||
# => "\"127.0.0.1\"" |
||||
``` |
||||
|
||||
|
||||
## Rails 6.1.0 (December 09, 2020) ## |
||||
|
||||
* Ensure `MemoryStore` disables compression by default. Reverts behavior of |
||||
`MemoryStore` to its prior rails `5.1` behavior. |
||||
|
||||
*Max Gurewitz* |
||||
|
||||
* Calling `iso8601` on negative durations retains the negative sign on individual |
||||
digits instead of prepending it. |
||||
|
||||
This change is required so we can interoperate with PostgreSQL, which prefers |
||||
negative signs for each component. |
||||
|
||||
Compatibility with other iso8601 parsers which support leading negatives as well |
||||
as negatives per component is still retained. |
||||
|
||||
Before: |
||||
|
||||
(-1.year - 1.day).iso8601 |
||||
# => "-P1Y1D" |
||||
|
||||
After: |
||||
|
||||
(-1.year - 1.day).iso8601 |
||||
# => "P-1Y-1D" |
||||
|
||||
*Vipul A M* |
||||
|
||||
* Remove deprecated `ActiveSupport::Notifications::Instrumenter#end=`. |
||||
|
||||
*Rafael Mendonça França* |
||||
|
||||
* Deprecate `ActiveSupport::Multibyte::Unicode.default_normalization_form`. |
||||
|
||||
*Rafael Mendonça França* |
||||
|
||||
* Remove deprecated `ActiveSupport::Multibyte::Unicode.pack_graphemes`, |
||||
`ActiveSupport::Multibyte::Unicode.unpack_graphemes`, |
||||
`ActiveSupport::Multibyte::Unicode.normalize`, |
||||
`ActiveSupport::Multibyte::Unicode.downcase`, |
||||
`ActiveSupport::Multibyte::Unicode.upcase` and `ActiveSupport::Multibyte::Unicode.swapcase`. |
||||
|
||||
*Rafael Mendonça França* |
||||
|
||||
* Remove deprecated `ActiveSupport::Multibyte::Chars#consumes?` and `ActiveSupport::Multibyte::Chars#normalize`. |
||||
|
||||
*Rafael Mendonça França* |
||||
|
||||
* Remove deprecated file `active_support/core_ext/range/include_range`. |
||||
|
||||
*Rafael Mendonça França* |
||||
|
||||
* Remove deprecated file `active_support/core_ext/hash/transform_values`. |
||||
|
||||
*Rafael Mendonça França* |
||||
|
||||
* Remove deprecated file `active_support/core_ext/hash/compact`. |
||||
|
||||
*Rafael Mendonça França* |
||||
|
||||
* Remove deprecated file `active_support/core_ext/array/prepend_and_append`. |
||||
|
||||
*Rafael Mendonça França* |
||||
|
||||
* Remove deprecated file `active_support/core_ext/numeric/inquiry`. |
||||
|
||||
*Rafael Mendonça França* |
||||
|
||||
* Remove deprecated file `active_support/core_ext/module/reachable`. |
||||
|
||||
*Rafael Mendonça França* |
||||
|
||||
* Remove deprecated `Module#parent_name`, `Module#parent` and `Module#parents`. |
||||
|
||||
*Rafael Mendonça França* |
||||
|
||||
* Remove deprecated `ActiveSupport::LoggerThreadSafeLevel#after_initialize`. |
||||
|
||||
*Rafael Mendonça França* |
||||
|
||||
* Remove deprecated `LoggerSilence` constant. |
||||
|
||||
*Rafael Mendonça França* |
||||
|
||||
* Remove deprecated fallback to `I18n.default_locale` when `config.i18n.fallbacks` is empty. |
||||
|
||||
*Rafael Mendonça França* |
||||
|
||||
* Remove entries from local cache on `RedisCacheStore#delete_matched` |
||||
|
||||
Fixes #38627 |
||||
|
||||
*ojab* |
||||
|
||||
* Speed up `ActiveSupport::SecurityUtils.fixed_length_secure_compare` by using |
||||
`OpenSSL.fixed_length_secure_compare`, if available. |
||||
|
||||
*Nate Matykiewicz* |
||||
|
||||
* `ActiveSupport::Cache::MemCacheStore` now checks `ENV["MEMCACHE_SERVERS"]` before falling back to `"localhost:11211"` if configured without any addresses. |
||||
|
||||
```ruby |
||||
config.cache_store = :mem_cache_store |
||||
|
||||
# is now equivalent to |
||||
|
||||
config.cache_store = :mem_cache_store, ENV["MEMCACHE_SERVERS"] || "localhost:11211" |
||||
|
||||
# instead of |
||||
|
||||
config.cache_store = :mem_cache_store, "localhost:11211" # ignores ENV["MEMCACHE_SERVERS"] |
||||
``` |
||||
|
||||
*Sam Bostock* |
||||
|
||||
* `ActiveSupport::Subscriber#attach_to` now accepts an `inherit_all:` argument. When set to true, |
||||
it allows a subscriber to receive events for methods defined in the subscriber's ancestor class(es). |
||||
|
||||
```ruby |
||||
class ActionControllerSubscriber < ActiveSupport::Subscriber |
||||
attach_to :action_controller |
||||
|
||||
def start_processing(event) |
||||
info "Processing by #{event.payload[:controller]}##{event.payload[:action]} as #{format}" |
||||
end |
||||
|
||||
def redirect_to(event) |
||||
info { "Redirected to #{event.payload[:location]}" } |
||||
end |
||||
end |
||||
|
||||
# We detach ActionControllerSubscriber from the :action_controller namespace so that our CustomActionControllerSubscriber |
||||
# can provide its own instrumentation for certain events in the namespace |
||||
ActionControllerSubscriber.detach_from(:action_controller) |
||||
|
||||
class CustomActionControllerSubscriber < ActionControllerSubscriber |
||||
attach_to :action_controller, inherit_all: true |
||||
|
||||
def start_processing(event) |
||||
info "A custom response to start_processing events" |
||||
end |
||||
|
||||
# => CustomActionControllerSubscriber will process events for "start_processing.action_controller" notifications |
||||
# using its own #start_processing implementation, while retaining ActionControllerSubscriber's instrumentation |
||||
# for "redirect_to.action_controller" notifications |
||||
end |
||||
``` |
||||
|
||||
*Adrianna Chang* |
||||
|
||||
* Allow the digest class used to generate non-sensitive digests to be configured with `config.active_support.hash_digest_class`. |
||||
|
||||
`config.active_support.use_sha1_digests` is deprecated in favour of `config.active_support.hash_digest_class = ::Digest::SHA1`. |
||||
|
||||
*Dirkjan Bussink* |
||||
|
||||
* Fix bug to make memcached write_entry expire correctly with unless_exist |
||||
|
||||
*Jye Lee* |
||||
|
||||
* Add `ActiveSupport::Duration` conversion methods |
||||
|
||||
`in_seconds`, `in_minutes`, `in_hours`, `in_days`, `in_weeks`, `in_months`, and `in_years` return the respective duration covered. |
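An illustrative sketch of the new methods (durations use exact 60-second minutes and 24-hour days):

```ruby
90.minutes.in_hours  # => 1.5
2.weeks.in_days      # => 14.0
```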
||||
|
||||
*Jason York* |
||||
|
||||
* Fixed issue in `ActiveSupport::Cache::RedisCacheStore` not passing options |
||||
to `read_multi` causing `fetch_multi` to not work properly |
||||
|
||||
*Rajesh Sharma* |
||||
|
||||
* Fixed issue in `ActiveSupport::Cache::MemCacheStore` which caused duplicate compression, |
||||
and caused the provided `compression_threshold` to not be respected. |
||||
|
||||
*Max Gurewitz* |
||||
|
||||
* Prevent `RedisCacheStore` and `MemCacheStore` from performing compression |
||||
when reading entries written with `raw: true`. |
||||
|
||||
*Max Gurewitz* |
||||
|
||||
* `URI.parser` is deprecated and will be removed in Rails 7.0. Use |
||||
`URI::DEFAULT_PARSER` instead. |
||||
|
||||
*Jean Boussier* |
||||
|
||||
* `require_dependency` has been documented to be _obsolete_ in `:zeitwerk` |
||||
mode. The method is not deprecated as such (yet), but applications are |
||||
encouraged to not use it. |
||||
|
||||
In `:zeitwerk` mode, semantics match Ruby's and you do not need to be |
||||
defensive with load order. Just refer to classes and modules normally. If |
||||
the constant name is dynamic, camelize if needed, and constantize. |
||||
|
||||
*Xavier Noria* |
||||
|
||||
* Add 3rd person aliases of `Symbol#start_with?` and `Symbol#end_with?`. |
||||
|
||||
```ruby |
||||
:foo.starts_with?("f") # => true |
||||
:foo.ends_with?("o") # => true |
||||
``` |
||||
|
||||
*Ryuta Kamizono* |
||||
|
||||
* Add override of unary plus for `ActiveSupport::Duration`. |
||||
|
||||
`+ 1.second` is now identical to `+1.second` to prevent errors |
||||
where a seemingly innocent change of formatting leads to a change in the code behavior. |
||||
|
||||
Before: |
||||
```ruby |
||||
+1.second.class |
||||
# => ActiveSupport::Duration |
||||
(+ 1.second).class |
||||
# => Integer |
||||
``` |
||||
|
||||
After: |
||||
```ruby |
||||
+1.second.class |
||||
# => ActiveSupport::Duration |
||||
(+ 1.second).class |
||||
# => ActiveSupport::Duration |
||||
``` |
||||
|
||||
Fixes #39079. |
||||
|
||||
*Roman Kushnir* |
||||
|
||||
* Add subsec to `ActiveSupport::TimeWithZone#inspect`. |
||||
|
||||
Before: |
||||
|
||||
Time.at(1498099140).in_time_zone.inspect |
||||
# => "Thu, 22 Jun 2017 02:39:00 UTC +00:00" |
||||
Time.at(1498099140, 123456780, :nsec).in_time_zone.inspect |
||||
# => "Thu, 22 Jun 2017 02:39:00 UTC +00:00" |
||||
Time.at(1498099140 + Rational("1/3")).in_time_zone.inspect |
||||
# => "Thu, 22 Jun 2017 02:39:00 UTC +00:00" |
||||
|
||||
After: |
||||
|
||||
Time.at(1498099140).in_time_zone.inspect |
||||
# => "Thu, 22 Jun 2017 02:39:00.000000000 UTC +00:00" |
||||
Time.at(1498099140, 123456780, :nsec).in_time_zone.inspect |
||||
# => "Thu, 22 Jun 2017 02:39:00.123456780 UTC +00:00" |
||||
Time.at(1498099140 + Rational("1/3")).in_time_zone.inspect |
||||
# => "Thu, 22 Jun 2017 02:39:00.333333333 UTC +00:00" |
||||
|
||||
*akinomaeni* |
||||
|
||||
* Calling `ActiveSupport::TaggedLogging#tagged` without a block now returns a tagged logger. |
||||
|
||||
```ruby |
||||
logger.tagged("BCX").info("Funky time!") # => [BCX] Funky time! |
||||
``` |
||||
|
||||
*Eugene Kenny* |
||||
|
||||
* Align `Range#cover?` extension behavior with Ruby behavior for backwards ranges. |
||||
|
||||
`(1..10).cover?(5..3)` now returns `false`, as it does in plain Ruby. |
||||
|
||||
Also update `#include?` and `#===` behavior to match. |
||||
|
||||
*Michael Groeneman* |
||||
|
||||
* Update to TZInfo v2.0.0. |
||||
|
||||
This changes the output of `ActiveSupport::TimeZone.utc_to_local`, but |
||||
can be controlled with the |
||||
`ActiveSupport.utc_to_local_returns_utc_offset_times` config. |
||||
|
||||
New Rails 6.1 apps have it enabled by default, existing apps can upgrade |
||||
via the config in config/initializers/new_framework_defaults_6_1.rb |
||||
|
||||
See the `utc_to_local_returns_utc_offset_times` documentation for details. |
||||
|
||||
*Phil Ross*, *Jared Beck* |
||||
|
||||
* Add Date and Time `#yesterday?` and `#tomorrow?` alongside `#today?`. |
||||
|
||||
Aliased to `#prev_day?` and `#next_day?` to match the existing `#prev/next_day` methods. |
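For example (illustrative):

```ruby
Date.current.tomorrow.tomorrow?   # => true
Time.current.yesterday.prev_day?  # => true
```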
||||
|
||||
*Jatin Dhankhar* |
||||
|
||||
* Add `Enumerable#pick` to complement `ActiveRecord::Relation#pick`. |
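An illustrative sketch; like `ActiveRecord::Relation#pick`, it reads the given key(s) from the first element only:

```ruby
[{ id: 1, name: "foo" }, { id: 2, name: "bar" }].pick(:name)       # => "foo"
[{ id: 1, name: "foo" }, { id: 2, name: "bar" }].pick(:id, :name)  # => [1, "foo"]
```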
||||
|
||||
*Eugene Kenny* |
||||
|
||||
* [Breaking change] `ActiveSupport::Callbacks#halted_callback_hook` now receives a second argument: |
||||
|
||||
`ActiveSupport::Callbacks#halted_callback_hook` now receives the name of the callback |
||||
being halted as second argument. |
||||
This change will allow you to differentiate which callbacks halted the chain |
||||
and act accordingly. |
||||
|
||||
```ruby |
||||
class Book < ApplicationRecord |
||||
before_save { throw(:abort) } |
||||
before_create { throw(:abort) } |
||||
|
||||
def halted_callback_hook(filter, callback_name) |
||||
Rails.logger.info("Book couldn't be #{callback_name}d") |
||||
end |
||||
|
||||
Book.create # => "Book couldn't be created" |
||||
book.save # => "Book couldn't be saved" |
||||
end |
||||
``` |
||||
|
||||
*Edouard Chin* |
||||
|
||||
* Support `prepend` with `ActiveSupport::Concern`. |
||||
|
||||
Allows a module with `extend ActiveSupport::Concern` to be prepended. |
||||
|
||||
module Imposter |
||||
extend ActiveSupport::Concern |
||||
|
||||
# Same as `included`, except only run when prepended. |
||||
prepended do |
||||
end |
||||
end |
||||
|
||||
class Person |
||||
prepend Imposter |
||||
end |
||||
|
||||
Class methods are prepended to the base class; `concerning` is also |
||||
updated: `concerning :Imposter, prepend: true do`. |
||||
|
||||
*Jason Karns*, *Elia Schito* |
||||
|
||||
* Deprecate using `Range#include?` method to check the inclusion of a value |
||||
in a date time range. It is recommended to use `Range#cover?` method |
||||
instead of `Range#include?` to check the inclusion of a value |
||||
in a date time range. |

    *Vishal Telangre*

*   Add support for a `round_mode` parameter in all number helpers (see `BigDecimal::mode`).

    ```ruby
    number_to_currency(1234567890.50, precision: 0, round_mode: :half_down) # => "$1,234,567,890"
    number_to_percentage(302.24398923423, precision: 5, round_mode: :down) # => "302.24398%"
    number_to_rounded(389.32314, precision: 0, round_mode: :ceil) # => "390"
    number_to_human_size(483989, precision: 2, round_mode: :up) # => "480 KB"
    number_to_human(489939, precision: 2, round_mode: :floor) # => "480 Thousand"

    485000.to_s(:human, precision: 2, round_mode: :half_even) # => "480 Thousand"
    ```

    *Tom Lord*

*   `Array#to_sentence` no longer returns a frozen string.

    Before:

        ['one', 'two'].to_sentence.frozen?
        # => true

    After:

        ['one', 'two'].to_sentence.frozen?
        # => false

    *Nicolas Dular*

*   When an instance of `ActiveSupport::Duration` is converted to an `iso8601` duration string,
    if `weeks` are mixed with `date` parts, the `week` part will be converted to days.
    This keeps the parser and serializer on the same page.

    ```ruby
    duration = ActiveSupport::Duration.build(1000000)
    # 1 week, 4 days, 13 hours, 46 minutes, and 40.0 seconds

    duration_iso = duration.iso8601
    # P11DT13H46M40S

    ActiveSupport::Duration.parse(duration_iso)
    # 11 days, 13 hours, 46 minutes, and 40 seconds

    duration = ActiveSupport::Duration.build(604800)
    # 1 week

    duration_iso = duration.iso8601
    # P1W

    ActiveSupport::Duration.parse(duration_iso)
    # 1 week
    ```

    *Abhishek Sarkar*

*   Add block support to `ActiveSupport::Testing::TimeHelpers#travel_back`.
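
    A rough sketch of the block form (assuming the previously stubbed time is
    reinstated once the block exits):

        travel_to Time.zone.local(2004, 11, 24, 1, 4, 44)
        Time.current # => Wed, 24 Nov 2004 01:04:44

        travel_back do
          Time.current # => the real, unstubbed time inside the block
        end

        Time.current # => Wed, 24 Nov 2004 01:04:44 again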

    *Tim Masliuchenko*

*   Update `ActiveSupport::Messages::Metadata#fresh?` to work for cookies with expiry set when
    `ActiveSupport.parse_json_times = true`.

    *Christian Gregg*

*   Support symbolic links for `content_path` in `ActiveSupport::EncryptedFile`.

    *Takumi Shotoku*

*   Improve `Range#===`, `Range#include?`, and `Range#cover?` to work with beginless (startless)
    and endless range targets.

    *Allen Hsu*, *Andrew Hodgkinson*

*   Don't use `Process#clock_gettime(CLOCK_THREAD_CPUTIME_ID)` on Solaris.

    *Iain Beeston*

*   Prevent `ActiveSupport::Duration.build(value)` from creating instances of
    `ActiveSupport::Duration` unless `value` is of type `Numeric`.

    Addresses the errant set of behaviours described in #37012 where
    `ActiveSupport::Duration` comparisons would fail confusingly
    or return unexpected results when comparing durations built from instances of `String`.

    Before:

        small_duration_from_string = ActiveSupport::Duration.build('9')
        large_duration_from_string = ActiveSupport::Duration.build('100000000000000')
        small_duration_from_int = ActiveSupport::Duration.build(9)

        large_duration_from_string > small_duration_from_string
        # => false

        small_duration_from_string == small_duration_from_int
        # => false

        small_duration_from_int < large_duration_from_string
        # => ArgumentError (comparison of ActiveSupport::Duration::Scalar with ActiveSupport::Duration failed)

        large_duration_from_string > small_duration_from_int
        # => ArgumentError (comparison of String with ActiveSupport::Duration failed)

    After:

        small_duration_from_string = ActiveSupport::Duration.build('9')
        # => TypeError (can't build an ActiveSupport::Duration from a String)

    *Alexei Emam*

*   Add `ActiveSupport::Cache::Store#delete_multi` method to delete multiple keys from the cache store.
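
    A quick sketch (the return value is the number of entries deleted):

        cache = ActiveSupport::Cache::MemoryStore.new
        cache.write("city", "Duckburgh")
        cache.write("mascot", "Huey")

        cache.delete_multi(["city", "mascot"]) # => 2
        cache.read("city")                     # => nil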

    *Peter Zhu*

*   Support multiple arguments in `HashWithIndifferentAccess` for `merge` and `update` methods, to
    follow the Ruby 2.6 addition.
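
    Sketch of the multi-argument form (mirrors Ruby 2.6's `Hash#merge`):

        hash = ActiveSupport::HashWithIndifferentAccess.new(a: 1)

        hash.merge({ b: 2 }, { c: 3 })  # => { "a" => 1, "b" => 2, "c" => 3 }
        hash.update({ b: 2 }, { c: 3 }) # same result, but mutates `hash`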

    *Wojciech Wnętrzak*

*   Allow initializing `thread_mattr_*` attributes via `:default` option.

        class Scraper
          thread_mattr_reader :client, default: Api::Client.new
        end

    *Guilherme Mansur*

*   Add `compact_blank` for those times when you want to remove `#blank?` values from
    an Enumerable (also `compact_blank!` on Hash, Array, ActionController::Parameters).
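
    For example (a sketch; anything `#blank?` is dropped):

        [1, "", nil, 2, " ", [], {}, false, true].compact_blank
        # => [1, 2, true]

        { a: "", b: 1, c: nil }.compact_blank!
        # => { b: 1 }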

    *Dana Sherson*

*   Make `ActiveSupport::Logger` Fiber-safe.

    Use `Fiber.current.__id__` in `ActiveSupport::Logger#local_level=` in order
    to make the log level local to Ruby Fibers in addition to Threads.

    Example:

        logger = ActiveSupport::Logger.new(STDOUT)
        logger.level = 1
        puts "Main is debug? #{logger.debug?}"

        Fiber.new {
          logger.local_level = 0
          puts "Thread is debug? #{logger.debug?}"
        }.resume

        puts "Main is debug? #{logger.debug?}"

    Before:

        Main is debug? false
        Thread is debug? true
        Main is debug? true

    After:

        Main is debug? false
        Thread is debug? true
        Main is debug? false

    Fixes #36752.

    *Alexander Varnin*

*   Allow the `on_rotation` proc used when decrypting/verifying a message to be
    passed at the constructor level.

    Before:

        crypt = ActiveSupport::MessageEncryptor.new('long_secret')
        crypt.decrypt_and_verify(encrypted_message, on_rotation: proc { ... })
        crypt.decrypt_and_verify(another_encrypted_message, on_rotation: proc { ... })

    After:

        crypt = ActiveSupport::MessageEncryptor.new('long_secret', on_rotation: proc { ... })
        crypt.decrypt_and_verify(encrypted_message)
        crypt.decrypt_and_verify(another_encrypted_message)

    *Edouard Chin*

*   `delegate_missing_to` would raise a `DelegationError` if the object
    delegated to was `nil`. An `allow_nil` option has been added so you can
    specify that `nil` should be returned in this case.
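
    A sketch (the `Partition` class and the `name` call are illustrative only):

        class Partition
          delegate_missing_to :object, allow_nil: true

          attr_reader :object

          def initialize(object)
            @object = object
          end
        end

        Partition.new(nil).name # => nil (previously raised DelegationError)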

    *Matthew Tanous*

*   `truncate` would return the original string if it was too short to be truncated
    and a frozen string if it was long enough to be truncated. Now `truncate` will
    consistently return an unfrozen string regardless. This behavior is consistent
    with `gsub` and `strip`.

    Before:

        'foobar'.truncate(5).frozen?
        # => true
        'foobar'.truncate(6).frozen?
        # => false

    After:

        'foobar'.truncate(5).frozen?
        # => false
        'foobar'.truncate(6).frozen?
        # => false

    *Jordan Thomas*


Please check [6-0-stable](https://github.com/rails/rails/blob/6-0-stable/activesupport/CHANGELOG.md) for previous changes.
@@ -1,20 +0,0 @@
|
||||
Copyright (c) 2005-2022 David Heinemeier Hansson |
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining |
||||
a copy of this software and associated documentation files (the |
||||
"Software"), to deal in the Software without restriction, including |
||||
without limitation the rights to use, copy, modify, merge, publish, |
||||
distribute, sublicense, and/or sell copies of the Software, and to |
||||
permit persons to whom the Software is furnished to do so, subject to |
||||
the following conditions: |
||||
|
||||
The above copyright notice and this permission notice shall be |
||||
included in all copies or substantial portions of the Software. |
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, |
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND |
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE |
||||
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION |
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION |
||||
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
@@ -1,40 +0,0 @@
|
||||
= Active Support -- Utility classes and Ruby extensions from Rails |
||||
|
||||
Active Support is a collection of utility classes and standard library |
||||
extensions that were found useful for the Rails framework. These additions |
||||
reside in this package so they can be loaded as needed in Ruby projects |
||||
outside of Rails. |
||||
|
||||
You can read more about the extensions in the {Active Support Core Extensions}[https://edgeguides.rubyonrails.org/active_support_core_extensions.html] guide. |
||||
|
||||
== Download and installation |
||||
|
||||
The latest version of Active Support can be installed with RubyGems: |
||||
|
||||
$ gem install activesupport |
||||
|
||||
Source code can be downloaded as part of the Rails project on GitHub: |
||||
|
||||
* https://github.com/rails/rails/tree/main/activesupport |
||||
|
||||
|
||||
== License |
||||
|
||||
Active Support is released under the MIT license: |
||||
|
||||
* https://opensource.org/licenses/MIT |
||||
|
||||
|
||||
== Support |
||||
|
||||
API documentation is at: |
||||
|
||||
* https://api.rubyonrails.org |
||||
|
||||
Bug reports for the Ruby on Rails project can be filed here: |
||||
|
||||
* https://github.com/rails/rails/issues |
||||
|
||||
Feature requests should be discussed on the rails-core mailing list here: |
||||
|
||||
* https://discuss.rubyonrails.org/c/rubyonrails-core |
@@ -1,108 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
#-- |
||||
# Copyright (c) 2005-2022 David Heinemeier Hansson |
||||
# |
||||
# Permission is hereby granted, free of charge, to any person obtaining |
||||
# a copy of this software and associated documentation files (the |
||||
# "Software"), to deal in the Software without restriction, including |
||||
# without limitation the rights to use, copy, modify, merge, publish, |
||||
# distribute, sublicense, and/or sell copies of the Software, and to |
||||
# permit persons to whom the Software is furnished to do so, subject to |
||||
# the following conditions: |
||||
# |
||||
# The above copyright notice and this permission notice shall be |
||||
# included in all copies or substantial portions of the Software. |
||||
# |
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, |
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND |
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE |
||||
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION |
||||
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION |
||||
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
||||
#++ |
||||
|
||||
require "securerandom" |
||||
require "active_support/dependencies/autoload" |
||||
require "active_support/version" |
||||
require "active_support/logger" |
||||
require "active_support/lazy_load_hooks" |
||||
require "active_support/core_ext/date_and_time/compatibility" |
||||
|
||||
module ActiveSupport |
||||
extend ActiveSupport::Autoload |
||||
|
||||
autoload :Concern |
||||
autoload :ActionableError |
||||
autoload :ConfigurationFile |
||||
autoload :CurrentAttributes |
||||
autoload :Dependencies |
||||
autoload :DescendantsTracker |
||||
autoload :ExecutionWrapper |
||||
autoload :Executor |
||||
autoload :FileUpdateChecker |
||||
autoload :EventedFileUpdateChecker |
||||
autoload :ForkTracker |
||||
autoload :LogSubscriber |
||||
autoload :Notifications |
||||
autoload :Reloader |
||||
autoload :SecureCompareRotator |
||||
|
||||
eager_autoload do |
||||
autoload :BacktraceCleaner |
||||
autoload :ProxyObject |
||||
autoload :Benchmarkable |
||||
autoload :Cache |
||||
autoload :Callbacks |
||||
autoload :Configurable |
||||
autoload :Deprecation |
||||
autoload :Digest |
||||
autoload :Gzip |
||||
autoload :Inflector |
||||
autoload :JSON |
||||
autoload :KeyGenerator |
||||
autoload :MessageEncryptor |
||||
autoload :MessageVerifier |
||||
autoload :Multibyte |
||||
autoload :NumberHelper |
||||
autoload :OptionMerger |
||||
autoload :OrderedHash |
||||
autoload :OrderedOptions |
||||
autoload :StringInquirer |
||||
autoload :EnvironmentInquirer |
||||
autoload :TaggedLogging |
||||
autoload :XmlMini |
||||
autoload :ArrayInquirer |
||||
end |
||||
|
||||
autoload :Rescuable |
||||
autoload :SafeBuffer, "active_support/core_ext/string/output_safety" |
||||
autoload :TestCase |
||||
|
||||
def self.eager_load! |
||||
super |
||||
|
||||
NumberHelper.eager_load! |
||||
end |
||||
|
||||
cattr_accessor :test_order # :nodoc: |
||||
|
||||
def self.to_time_preserves_timezone |
||||
DateAndTime::Compatibility.preserve_timezone |
||||
end |
||||
|
||||
def self.to_time_preserves_timezone=(value) |
||||
DateAndTime::Compatibility.preserve_timezone = value |
||||
end |
||||
|
||||
def self.utc_to_local_returns_utc_offset_times |
||||
DateAndTime::Compatibility.utc_to_local_returns_utc_offset_times |
||||
end |
||||
|
||||
def self.utc_to_local_returns_utc_offset_times=(value) |
||||
DateAndTime::Compatibility.utc_to_local_returns_utc_offset_times = value |
||||
end |
||||
end |
||||
|
||||
autoload :I18n, "active_support/i18n" |
@@ -1,48 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
module ActiveSupport |
||||
# Actionable errors let you define actions to resolve an error.
||||
# |
||||
# To make an error actionable, include the <tt>ActiveSupport::ActionableError</tt> |
||||
# module and invoke the +action+ class macro to define the action. An action |
||||
# needs a name and a block to execute. |
||||
module ActionableError |
||||
extend Concern |
||||
|
||||
class NonActionable < StandardError; end |
||||
|
||||
included do |
||||
class_attribute :_actions, default: {} |
||||
end |
||||
|
||||
def self.actions(error) # :nodoc: |
||||
case error |
||||
when ActionableError, -> it { Class === it && it < ActionableError } |
||||
error._actions |
||||
else |
||||
{} |
||||
end |
||||
end |
||||
|
||||
def self.dispatch(error, name) # :nodoc: |
||||
actions(error).fetch(name).call |
||||
rescue KeyError |
||||
raise NonActionable, "Cannot find action \"#{name}\"" |
||||
end |
||||
|
||||
module ClassMethods |
||||
# Defines an action that can resolve the error. |
||||
# |
||||
# class PendingMigrationError < MigrationError |
||||
# include ActiveSupport::ActionableError |
||||
# |
||||
# action "Run pending migrations" do |
||||
# ActiveRecord::Tasks::DatabaseTasks.migrate |
||||
# end |
||||
# end |
||||
def action(name, &block) |
||||
_actions[name] = block |
||||
end |
||||
end |
||||
end |
||||
end |
@@ -1,5 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
require "active_support" |
||||
require "active_support/time" |
||||
require "active_support/core_ext" |
@@ -1,50 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
require "active_support/core_ext/symbol/starts_ends_with" |
||||
|
||||
module ActiveSupport |
||||
# Wrapping an array in an +ArrayInquirer+ gives a friendlier way to check |
||||
# its string-like contents: |
||||
# |
||||
# variants = ActiveSupport::ArrayInquirer.new([:phone, :tablet]) |
||||
# |
||||
# variants.phone? # => true |
||||
# variants.tablet? # => true |
||||
# variants.desktop? # => false |
||||
class ArrayInquirer < Array |
||||
# Passes each element of +candidates+ collection to ArrayInquirer collection. |
||||
# The method returns true if any element from the ArrayInquirer collection |
||||
# is equal to the stringified or symbolized form of any element in the +candidates+ collection. |
||||
# |
||||
# If the +candidates+ collection is not given, the method returns true.
||||
# |
||||
# variants = ActiveSupport::ArrayInquirer.new([:phone, :tablet]) |
||||
# |
||||
# variants.any? # => true |
||||
# variants.any?(:phone, :tablet) # => true |
||||
# variants.any?('phone', 'desktop') # => true |
||||
# variants.any?(:desktop, :watch) # => false |
||||
def any?(*candidates) |
||||
if candidates.none? |
||||
super |
||||
else |
||||
candidates.any? do |candidate| |
||||
include?(candidate.to_sym) || include?(candidate.to_s) |
||||
end |
||||
end |
||||
end |
||||
|
||||
private |
||||
def respond_to_missing?(name, include_private = false) |
||||
name.end_with?("?") || super |
||||
end |
||||
|
||||
def method_missing(name, *args) |
||||
if name.end_with?("?") |
||||
any?(name[0..-2]) |
||||
else |
||||
super |
||||
end |
||||
end |
||||
end |
||||
end |
@@ -1,131 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
module ActiveSupport |
||||
# Backtraces often include many lines that are not relevant for the context |
||||
# under review. This makes it hard to find the signal amongst the backtrace |
||||
# noise, and adds debugging time. With a BacktraceCleaner, filters and |
||||
# silencers are used to remove the noisy lines, so that only the most relevant |
||||
# lines remain. |
||||
# |
||||
# Filters are used to modify lines of data, while silencers are used to remove |
||||
# lines entirely. The typical filter use case is to remove lengthy path |
||||
# information from the start of each line, and view file paths relevant to the |
||||
# app directory instead of the file system root. The typical silencer use case |
||||
# is to exclude the output of a noisy library from the backtrace, so that you |
||||
# can focus on the rest. |
||||
# |
||||
# bc = ActiveSupport::BacktraceCleaner.new |
||||
# bc.add_filter { |line| line.gsub(Rails.root.to_s, '') } # strip the Rails.root prefix |
||||
# bc.add_silencer { |line| /puma|rubygems/.match?(line) } # skip any lines from puma or rubygems |
||||
# bc.clean(exception.backtrace) # perform the cleanup |
||||
# |
||||
# To reconfigure an existing BacktraceCleaner (like the default one in Rails) |
||||
# and show as much data as possible, you can always call |
||||
# <tt>BacktraceCleaner#remove_silencers!</tt>, which will restore the |
||||
# backtrace to a pristine state. If you need to reconfigure an existing |
||||
# BacktraceCleaner so that it does not filter or modify the paths of any lines |
||||
# of the backtrace, you can call <tt>BacktraceCleaner#remove_filters!</tt> |
||||
# These two methods will give you a completely untouched backtrace. |
||||
# |
||||
# Inspired by the Quiet Backtrace gem by thoughtbot. |
||||
class BacktraceCleaner |
||||
def initialize |
||||
@filters, @silencers = [], [] |
||||
add_gem_filter |
||||
add_gem_silencer |
||||
add_stdlib_silencer |
||||
end |
||||
|
||||
# Returns the backtrace after all filters and silencers have been run |
||||
# against it. Filters run first, then silencers. |
||||
def clean(backtrace, kind = :silent) |
||||
filtered = filter_backtrace(backtrace) |
||||
|
||||
case kind |
||||
when :silent |
||||
silence(filtered) |
||||
when :noise |
||||
noise(filtered) |
||||
else |
||||
filtered |
||||
end |
||||
end |
||||
alias :filter :clean |
||||
|
||||
# Adds a filter from the block provided. Each line in the backtrace will be |
||||
# mapped against this filter. |
||||
# |
||||
# # Will turn "/my/rails/root/app/models/person.rb" into "/app/models/person.rb" |
||||
# backtrace_cleaner.add_filter { |line| line.gsub(Rails.root, '') } |
||||
def add_filter(&block) |
||||
@filters << block |
||||
end |
||||
|
||||
# Adds a silencer from the block provided. If the silencer returns +true+ |
||||
# for a given line, it will be excluded from the clean backtrace. |
||||
# |
||||
# # Will reject all lines that include the word "puma", like "/gems/puma/server.rb" or "/app/my_puma_server/rb" |
||||
# backtrace_cleaner.add_silencer { |line| /puma/.match?(line) } |
||||
def add_silencer(&block) |
||||
@silencers << block |
||||
end |
||||
|
||||
# Removes all silencers, but leaves in the filters. Useful if your |
||||
# context of debugging suddenly expands as you suspect a bug in one of |
||||
# the libraries you use. |
||||
def remove_silencers! |
||||
@silencers = [] |
||||
end |
||||
|
||||
# Removes all filters, but leaves in the silencers. Useful if you suddenly |
||||
# need to see entire filepaths in the backtrace that you had already |
||||
# filtered out. |
||||
def remove_filters! |
||||
@filters = [] |
||||
end |
||||
|
||||
private |
||||
FORMATTED_GEMS_PATTERN = /\A[^\/]+ \([\w.]+\) / |
||||
|
||||
def add_gem_filter |
||||
gems_paths = (Gem.path | [Gem.default_dir]).map { |p| Regexp.escape(p) } |
||||
return if gems_paths.empty? |
||||
|
||||
gems_regexp = %r{\A(#{gems_paths.join('|')})/(bundler/)?gems/([^/]+)-([\w.]+)/(.*)} |
||||
gems_result = '\3 (\4) \5' |
||||
add_filter { |line| line.sub(gems_regexp, gems_result) } |
||||
end |
||||
|
||||
def add_gem_silencer |
||||
add_silencer { |line| FORMATTED_GEMS_PATTERN.match?(line) } |
||||
end |
||||
|
||||
def add_stdlib_silencer |
||||
add_silencer { |line| line.start_with?(RbConfig::CONFIG["rubylibdir"]) } |
||||
end |
||||
|
||||
def filter_backtrace(backtrace) |
||||
@filters.each do |f| |
||||
backtrace = backtrace.map { |line| f.call(line) } |
||||
end |
||||
|
||||
backtrace |
||||
end |
||||
|
||||
def silence(backtrace) |
||||
@silencers.each do |s| |
||||
backtrace = backtrace.reject { |line| s.call(line) } |
||||
end |
||||
|
||||
backtrace |
||||
end |
||||
|
||||
def noise(backtrace) |
||||
backtrace.select do |line| |
||||
@silencers.any? do |s| |
||||
s.call(line) |
||||
end |
||||
end |
||||
end |
||||
end |
||||
end |
@@ -1,51 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
require "active_support/core_ext/benchmark" |
||||
require "active_support/core_ext/hash/keys" |
||||
|
||||
module ActiveSupport |
||||
module Benchmarkable |
||||
# Allows you to measure the execution time of a block in a template and |
||||
# records the result to the log. Wrap this block around expensive operations |
||||
# or possible bottlenecks to get a time reading for the operation. For |
||||
# example, let's say you thought your file processing method was taking too |
||||
# long; you could wrap it in a benchmark block. |
||||
# |
||||
# <% benchmark 'Process data files' do %> |
||||
# <%= expensive_files_operation %> |
||||
# <% end %> |
||||
# |
||||
# That would add something like "Process data files (345.2ms)" to the log, |
||||
# which you can then use to compare timings when optimizing your code. |
||||
# |
||||
# You may give an optional logger level (<tt>:debug</tt>, <tt>:info</tt>, |
||||
# <tt>:warn</tt>, <tt>:error</tt>) as the <tt>:level</tt> option. The |
||||
# default logger level value is <tt>:info</tt>. |
||||
# |
||||
# <% benchmark 'Low-level files', level: :debug do %> |
||||
# <%= lowlevel_files_operation %> |
||||
# <% end %> |
||||
# |
||||
# Finally, you can pass true as the third argument to silence all log |
||||
# activity (other than the timing information) from inside the block. This |
||||
# is great for boiling down a noisy block to just a single statement that |
||||
# produces one log line: |
||||
# |
||||
# <% benchmark 'Process data files', level: :info, silence: true do %> |
||||
# <%= expensive_and_chatty_files_operation %> |
||||
# <% end %> |
||||
def benchmark(message = "Benchmarking", options = {}) |
||||
if logger |
||||
options.assert_valid_keys(:level, :silence) |
||||
options[:level] ||= :info |
||||
|
||||
result = nil |
||||
ms = Benchmark.ms { result = options[:silence] ? logger.silence { yield } : yield } |
||||
logger.public_send(options[:level], "%s (%.1fms)" % [ message, ms ]) |
||||
result |
||||
else |
||||
yield |
||||
end |
||||
end |
||||
end |
||||
end |
@@ -1,8 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
begin |
||||
require "builder" |
||||
rescue LoadError => e |
||||
$stderr.puts "You don't have builder installed in your application. Please add it to your Gemfile and run bundle install" |
||||
raise e |
||||
end |
@@ -1,878 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
require "zlib" |
||||
require "active_support/core_ext/array/extract_options" |
||||
require "active_support/core_ext/array/wrap" |
||||
require "active_support/core_ext/enumerable" |
||||
require "active_support/core_ext/module/attribute_accessors" |
||||
require "active_support/core_ext/numeric/bytes" |
||||
require "active_support/core_ext/numeric/time" |
||||
require "active_support/core_ext/object/to_param" |
||||
require "active_support/core_ext/object/try" |
||||
require "active_support/core_ext/string/inflections" |
||||
|
||||
module ActiveSupport |
||||
# See ActiveSupport::Cache::Store for documentation. |
||||
module Cache |
||||
autoload :FileStore, "active_support/cache/file_store" |
||||
autoload :MemoryStore, "active_support/cache/memory_store" |
||||
autoload :MemCacheStore, "active_support/cache/mem_cache_store" |
||||
autoload :NullStore, "active_support/cache/null_store" |
||||
autoload :RedisCacheStore, "active_support/cache/redis_cache_store" |
||||
|
||||
# These options mean something to all cache implementations. Individual cache |
||||
# implementations may support additional options. |
||||
UNIVERSAL_OPTIONS = [:namespace, :compress, :compress_threshold, :expires_in, :race_condition_ttl, :coder] |
||||
|
||||
module Strategy |
||||
autoload :LocalCache, "active_support/cache/strategy/local_cache" |
||||
end |
||||
|
||||
class << self |
||||
# Creates a new Store object according to the given options. |
||||
# |
||||
# If no arguments are passed to this method, then a new |
||||
# ActiveSupport::Cache::MemoryStore object will be returned. |
||||
# |
||||
# If you pass a Symbol as the first argument, then a corresponding cache |
||||
# store class under the ActiveSupport::Cache namespace will be created. |
||||
# For example: |
||||
# |
||||
# ActiveSupport::Cache.lookup_store(:memory_store) |
||||
# # => returns a new ActiveSupport::Cache::MemoryStore object |
||||
# |
||||
# ActiveSupport::Cache.lookup_store(:mem_cache_store) |
||||
# # => returns a new ActiveSupport::Cache::MemCacheStore object |
||||
# |
||||
# Any additional arguments will be passed to the corresponding cache store |
||||
# class's constructor: |
||||
# |
||||
# ActiveSupport::Cache.lookup_store(:file_store, '/tmp/cache') |
||||
# # => same as: ActiveSupport::Cache::FileStore.new('/tmp/cache') |
||||
# |
||||
# If the first argument is not a Symbol, then it will simply be returned: |
||||
# |
||||
# ActiveSupport::Cache.lookup_store(MyOwnCacheStore.new) |
||||
# # => returns MyOwnCacheStore.new |
||||
def lookup_store(store = nil, *parameters) |
||||
case store |
||||
when Symbol |
||||
options = parameters.extract_options! |
||||
# clean this up once Ruby 2.7 support is dropped |
||||
# see https://github.com/rails/rails/pull/41522#discussion_r581186602 |
||||
if options.empty? |
||||
retrieve_store_class(store).new(*parameters) |
||||
else |
||||
retrieve_store_class(store).new(*parameters, **options) |
||||
end |
||||
when Array |
||||
lookup_store(*store) |
||||
when nil |
||||
ActiveSupport::Cache::MemoryStore.new |
||||
else |
||||
store |
||||
end |
||||
end |
||||
|
||||
# Expands out the +key+ argument into a key that can be used for the |
||||
# cache store. Optionally accepts a namespace, and all keys will be |
||||
# scoped within that namespace. |
||||
# |
||||
# If the +key+ argument provided is an array, or responds to +to_a+, then |
||||
# each of the elements in the array will be turned into parameters/keys and
||||
# concatenated into a single key. For example: |
||||
# |
||||
# ActiveSupport::Cache.expand_cache_key([:foo, :bar]) # => "foo/bar" |
||||
# ActiveSupport::Cache.expand_cache_key([:foo, :bar], "namespace") # => "namespace/foo/bar" |
||||
# |
||||
# The +key+ argument can also respond to +cache_key+ or +to_param+. |
||||
def expand_cache_key(key, namespace = nil) |
||||
expanded_cache_key = namespace ? +"#{namespace}/" : +"" |
||||
|
||||
if prefix = ENV["RAILS_CACHE_ID"] || ENV["RAILS_APP_VERSION"] |
||||
expanded_cache_key << "#{prefix}/" |
||||
end |
||||
|
||||
expanded_cache_key << retrieve_cache_key(key) |
||||
expanded_cache_key |
||||
end |
||||
|
||||
private |
||||
def retrieve_cache_key(key) |
||||
case |
||||
when key.respond_to?(:cache_key_with_version) then key.cache_key_with_version |
||||
when key.respond_to?(:cache_key) then key.cache_key |
||||
when key.is_a?(Array) then key.map { |element| retrieve_cache_key(element) }.to_param |
||||
when key.respond_to?(:to_a) then retrieve_cache_key(key.to_a) |
||||
else key.to_param |
||||
end.to_s |
||||
end |
||||
|
||||
# Obtains the specified cache store class, given the name of the +store+. |
||||
# Raises an error when the store class cannot be found. |
||||
def retrieve_store_class(store) |
||||
# require_relative cannot be used here because the class might be |
||||
# provided by another gem, like redis-activesupport for example. |
||||
require "active_support/cache/#{store}" |
||||
rescue LoadError => e |
||||
raise "Could not find cache store adapter for #{store} (#{e})" |
||||
else |
||||
ActiveSupport::Cache.const_get(store.to_s.camelize) |
||||
end |
||||
end |
||||
|
||||
# An abstract cache store class. There are multiple cache store |
||||
# implementations, each having its own additional features. See the classes |
||||
# under the ActiveSupport::Cache module, e.g. |
||||
# ActiveSupport::Cache::MemCacheStore. MemCacheStore is currently the most |
||||
# popular cache store for large production websites. |
||||
# |
||||
# Some implementations may not support all methods beyond the basic cache |
||||
# methods of +fetch+, +write+, +read+, +exist?+, and +delete+. |
||||
# |
||||
# ActiveSupport::Cache::Store can store any serializable Ruby object. |
||||
# |
||||
# cache = ActiveSupport::Cache::MemoryStore.new |
||||
# |
||||
# cache.read('city') # => nil |
||||
# cache.write('city', "Duckburgh") |
||||
# cache.read('city') # => "Duckburgh" |
||||
# |
||||
# Keys are always translated into Strings and are case sensitive. When an |
||||
# object is specified as a key and has a +cache_key+ method defined, this |
||||
# method will be called to define the key. Otherwise, the +to_param+ |
||||
# method will be called. Hashes and Arrays can also be used as keys. The |
||||
# elements will be delimited by slashes, and the elements within a Hash |
||||
# will be sorted by key so they are consistent. |
||||
# |
||||
# cache.read('city') == cache.read(:city) # => true |
||||
# |
||||
# Nil values can be cached. |
||||
# |
||||
# If your cache is on a shared infrastructure, you can define a namespace |
||||
# for your cache entries. If a namespace is defined, it will be prefixed on |
||||
# to every key. The namespace can be either a static value or a Proc. If it |
||||
# is a Proc, it will be invoked when each key is evaluated so that you can |
||||
# use application logic to invalidate keys. |
||||
# |
||||
# cache.namespace = -> { @last_mod_time } # Set the namespace to a variable |
||||
# @last_mod_time = Time.now # Invalidate the entire cache by changing namespace |
||||
# |
||||
# Cached data larger than 1kB are compressed by default. To turn off |
||||
# compression, pass <tt>compress: false</tt> to the initializer or to |
||||
# individual +fetch+ or +write+ method calls. The 1kB compression |
||||
# threshold is configurable with the <tt>:compress_threshold</tt> option, |
||||
# specified in bytes. |
||||
class Store |
||||
DEFAULT_CODER = Marshal |
||||
|
||||
cattr_accessor :logger, instance_writer: true |
||||
|
||||
attr_reader :silence, :options |
||||
alias :silence? :silence |
||||
|
||||
class << self |
||||
private |
||||
def retrieve_pool_options(options) |
||||
{}.tap do |pool_options| |
||||
pool_options[:size] = options.delete(:pool_size) if options[:pool_size] |
||||
pool_options[:timeout] = options.delete(:pool_timeout) if options[:pool_timeout] |
||||
end |
||||
end |
||||
|
||||
def ensure_connection_pool_added! |
||||
require "connection_pool" |
||||
rescue LoadError => e |
||||
$stderr.puts "You don't have connection_pool installed in your application. Please add it to your Gemfile and run bundle install" |
||||
raise e |
||||
end |
||||
end |
||||
|
||||
# Creates a new cache. The options will be passed to any write method calls |
||||
# except for <tt>:namespace</tt> which can be used to set the global |
||||
# namespace for the cache. |
||||
def initialize(options = nil) |
||||
@options = options ? options.dup : {} |
||||
@coder = @options.delete(:coder) { self.class::DEFAULT_CODER } || NullCoder |
||||
end |
||||
|
||||
# Silences the logger. |
||||
def silence! |
||||
@silence = true |
||||
self |
||||
end |
||||
|
||||
# Silences the logger within a block. |
||||
def mute |
||||
previous_silence, @silence = defined?(@silence) && @silence, true |
||||
yield |
||||
ensure |
||||
@silence = previous_silence |
||||
end |
||||
|
||||
# Fetches data from the cache, using the given key. If there is data in |
||||
# the cache with the given key, then that data is returned. |
||||
# |
||||
# If there is no such data in the cache (a cache miss), then +nil+ will be |
||||
# returned. However, if a block has been passed, that block will be passed |
||||
# the key and executed in the event of a cache miss. The return value of the |
||||
# block will be written to the cache under the given cache key, and that |
||||
# return value will be returned. |
||||
# |
||||
# cache.write('today', 'Monday') |
||||
# cache.fetch('today') # => "Monday" |
||||
# |
||||
# cache.fetch('city') # => nil |
||||
# cache.fetch('city') do |
||||
# 'Duckburgh' |
||||
# end |
||||
# cache.fetch('city') # => "Duckburgh" |
||||
# |
||||
# You may also specify additional options via the +options+ argument. |
||||
# Setting <tt>force: true</tt> forces a cache "miss," meaning we treat |
||||
# the cache value as missing even if it's present. Passing a block is |
||||
# required when +force+ is true so this always results in a cache write. |
||||
# |
||||
# cache.write('today', 'Monday') |
||||
# cache.fetch('today', force: true) { 'Tuesday' } # => 'Tuesday' |
||||
# cache.fetch('today', force: true) # => ArgumentError |
||||
# |
||||
# The +:force+ option is useful when you're calling some other method to |
||||
# ask whether you should force a cache write. Otherwise, it's clearer to |
||||
# just call <tt>Cache#write</tt>. |
||||
# |
||||
# Setting <tt>skip_nil: true</tt> will not cache a nil result:
||||
# |
||||
# cache.fetch('foo') { nil } |
||||
# cache.fetch('bar', skip_nil: true) { nil } |
||||
# cache.exist?('foo') # => true |
||||
# cache.exist?('bar') # => false |
||||
# |
||||
# |
||||
# Setting <tt>compress: false</tt> disables compression of the cache entry. |
||||
# |
||||
# Setting <tt>:expires_in</tt> will set an expiration time on the cache. |
||||
# All caches support auto-expiring content after a specified number of |
||||
# seconds. This value can be specified as an option to the constructor |
||||
# (in which case all entries will be affected), or it can be supplied to |
||||
# the +fetch+ or +write+ method to effect just one entry. |
||||
# |
||||
# cache = ActiveSupport::Cache::MemoryStore.new(expires_in: 5.minutes) |
||||
# cache.write(key, value, expires_in: 1.minute) # Set a lower value for one entry |
||||
# |
||||
# Setting <tt>:version</tt> verifies the cache stored under <tt>name</tt> |
||||
# is of the same version. nil is returned on mismatches despite contents. |
||||
# This feature is used to support recyclable cache keys. |
||||
# |
||||
# Setting <tt>:race_condition_ttl</tt> is very useful in situations where
# a cache entry is used very frequently and is under heavy load. If a
# cache entry expires, under heavy load several different processes will try
# to read the data natively and then they will all try to write to the cache.
# To avoid that case, the first process to find an expired cache entry will
# bump the cache expiration time by the value set in <tt>:race_condition_ttl</tt>.
# Yes, this process is extending the time for a stale value by another few
# seconds. Because of the extended life of the previous cache entry, other
# processes will continue to use slightly stale data for just a bit longer.
# In the meantime the first process will go ahead and write the new value
# into the cache. After that, all processes will start getting the new value.
# The key is to keep <tt>:race_condition_ttl</tt> small.
||||
# |
||||
# If the process regenerating the entry errors out, the entry will be |
||||
# regenerated after the specified number of seconds. Also note that the |
||||
# life of stale cache is extended only if it expired recently. Otherwise |
||||
# a new value is generated and <tt>:race_condition_ttl</tt> does not play |
||||
# any role. |
||||
# |
||||
# # Set all values to expire after one minute. |
||||
# cache = ActiveSupport::Cache::MemoryStore.new(expires_in: 1.minute) |
||||
# |
||||
# cache.write('foo', 'original value') |
||||
# val_1 = nil |
||||
# val_2 = nil |
||||
# sleep 60 |
||||
# |
||||
# Thread.new do |
||||
# val_1 = cache.fetch('foo', race_condition_ttl: 10.seconds) do |
||||
# sleep 1 |
||||
# 'new value 1' |
||||
# end |
||||
# end |
||||
# |
||||
# Thread.new do |
||||
# val_2 = cache.fetch('foo', race_condition_ttl: 10.seconds) do |
||||
# 'new value 2' |
||||
# end |
||||
# end |
||||
# |
||||
# cache.fetch('foo') # => "original value" |
||||
# sleep 10 # First thread extended the life of cache by another 10 seconds |
||||
# cache.fetch('foo') # => "new value 1" |
||||
# val_1 # => "new value 1" |
||||
# val_2 # => "original value" |
||||
# |
||||
# Other options will be handled by the specific cache store implementation. |
||||
# Internally, #fetch calls #read_entry, and calls #write_entry on a cache |
||||
# miss. +options+ will be passed to the #read and #write calls. |
||||
# |
||||
# For example, MemCacheStore's #write method supports the +:raw+ |
||||
# option, which tells the memcached server to store all values as strings. |
||||
# We can use this option with #fetch too: |
||||
# |
||||
# cache = ActiveSupport::Cache::MemCacheStore.new |
||||
# cache.fetch("foo", force: true, raw: true) do |
||||
# :bar |
||||
# end |
||||
# cache.fetch('foo') # => "bar" |
||||
def fetch(name, options = nil, &block) |
||||
if block_given? |
||||
options = merged_options(options) |
||||
key = normalize_key(name, options) |
||||
|
||||
entry = nil |
||||
instrument(:read, name, options) do |payload| |
||||
cached_entry = read_entry(key, **options, event: payload) unless options[:force] |
||||
entry = handle_expired_entry(cached_entry, key, options) |
||||
entry = nil if entry && entry.mismatched?(normalize_version(name, options)) |
||||
payload[:super_operation] = :fetch if payload |
||||
payload[:hit] = !!entry if payload |
||||
end |
||||
|
||||
if entry |
||||
get_entry_value(entry, name, options) |
||||
else |
||||
save_block_result_to_cache(name, options, &block) |
||||
end |
||||
elsif options && options[:force] |
||||
raise ArgumentError, "Missing block: Calling `Cache#fetch` with `force: true` requires a block." |
||||
else |
||||
read(name, options) |
||||
end |
||||
end |
||||
|
||||
# Reads data from the cache, using the given key. If there is data in |
||||
# the cache with the given key, then that data is returned. Otherwise, |
||||
# +nil+ is returned. |
||||
# |
||||
# Note, if data was written with the <tt>:expires_in</tt> or |
||||
# <tt>:version</tt> options, both of these conditions are applied before |
||||
# the data is returned. |
||||
# |
||||
# Options are passed to the underlying cache implementation. |
||||
def read(name, options = nil) |
||||
options = merged_options(options) |
||||
key = normalize_key(name, options) |
||||
version = normalize_version(name, options) |
||||
|
||||
instrument(:read, name, options) do |payload| |
||||
entry = read_entry(key, **options, event: payload) |
||||
|
||||
if entry |
||||
if entry.expired? |
||||
delete_entry(key, **options) |
||||
payload[:hit] = false if payload |
||||
nil |
||||
elsif entry.mismatched?(version) |
||||
payload[:hit] = false if payload |
||||
nil |
||||
else |
||||
payload[:hit] = true if payload |
||||
entry.value |
||||
end |
||||
else |
||||
payload[:hit] = false if payload |
||||
nil |
||||
end |
||||
end |
||||
end |
||||
|
||||
# Reads multiple values at once from the cache. Options can be passed |
||||
# in the last argument. |
||||
# |
||||
# Some cache implementations may optimize this method.
||||
# |
||||
# Returns a hash mapping the names provided to the values found. |
||||
def read_multi(*names) |
||||
options = names.extract_options! |
||||
options = merged_options(options) |
||||
|
||||
instrument :read_multi, names, options do |payload| |
||||
read_multi_entries(names, **options, event: payload).tap do |results| |
||||
payload[:hits] = results.keys |
||||
end |
||||
end |
||||
end |
||||
|
||||
# Cache Storage API to write multiple values at once. |
||||
def write_multi(hash, options = nil) |
||||
options = merged_options(options) |
||||
|
||||
instrument :write_multi, hash, options do |payload| |
||||
entries = hash.each_with_object({}) do |(name, value), memo| |
||||
memo[normalize_key(name, options)] = Entry.new(value, **options.merge(version: normalize_version(name, options))) |
||||
end |
||||
|
||||
write_multi_entries entries, **options |
||||
end |
||||
end |
||||
|
||||
# Fetches data from the cache, using the given keys. If there is data in |
||||
# the cache with the given keys, then that data is returned. Otherwise, |
||||
# the supplied block is called for each key for which there was no data, |
||||
# and the result will be written to the cache and returned. |
||||
# Therefore, you need to pass a block that returns the data to be written |
||||
# to the cache. If you do not want to write the cache when the cache is |
||||
# not found, use #read_multi. |
||||
# |
||||
# Returns a hash with the data for each of the names. For example: |
||||
# |
||||
# cache.write("bim", "bam") |
||||
# cache.fetch_multi("bim", "unknown_key") do |key| |
||||
# "Fallback value for key: #{key}" |
||||
# end |
||||
# # => { "bim" => "bam", |
||||
# # "unknown_key" => "Fallback value for key: unknown_key" } |
||||
# |
||||
# Options are passed to the underlying cache implementation. For example: |
||||
# |
||||
# cache.fetch_multi("fizz", expires_in: 5.seconds) do |key| |
||||
# "buzz" |
||||
# end |
||||
# # => {"fizz"=>"buzz"} |
||||
# cache.read("fizz") |
||||
# # => "buzz" |
||||
# sleep(6) |
||||
# cache.read("fizz") |
||||
# # => nil |
||||
def fetch_multi(*names) |
||||
raise ArgumentError, "Missing block: `Cache#fetch_multi` requires a block." unless block_given? |
||||
|
||||
options = names.extract_options! |
||||
options = merged_options(options) |
||||
|
||||
instrument :read_multi, names, options do |payload| |
||||
reads = read_multi_entries(names, **options) |
||||
writes = {} |
||||
ordered = names.index_with do |name| |
||||
reads.fetch(name) { writes[name] = yield(name) } |
||||
end |
||||
|
||||
payload[:hits] = reads.keys |
||||
payload[:super_operation] = :fetch_multi |
||||
|
||||
write_multi(writes, options) |
||||
|
||||
ordered |
||||
end |
||||
end |
||||
|
||||
# Writes the value to the cache, with the key. |
||||
# |
||||
# Options are passed to the underlying cache implementation. |
||||
def write(name, value, options = nil) |
||||
options = merged_options(options) |
||||
|
||||
instrument(:write, name, options) do |
||||
entry = Entry.new(value, **options.merge(version: normalize_version(name, options))) |
||||
write_entry(normalize_key(name, options), entry, **options) |
||||
end |
||||
end |
||||
|
||||
# Deletes an entry in the cache. Returns +true+ if an entry is deleted. |
||||
# |
||||
# Options are passed to the underlying cache implementation. |
||||
def delete(name, options = nil) |
||||
options = merged_options(options) |
||||
|
||||
instrument(:delete, name) do |
||||
delete_entry(normalize_key(name, options), **options) |
||||
end |
||||
end |
||||
|
||||
# Deletes multiple entries in the cache. |
||||
# |
||||
# Options are passed to the underlying cache implementation. |
||||
def delete_multi(names, options = nil) |
||||
options = merged_options(options) |
||||
names.map! { |key| normalize_key(key, options) } |
||||
|
||||
instrument :delete_multi, names do |
||||
delete_multi_entries(names, **options) |
||||
end |
||||
end |
||||
|
||||
# Returns +true+ if the cache contains an entry for the given key. |
||||
# |
||||
# Options are passed to the underlying cache implementation. |
||||
def exist?(name, options = nil) |
||||
options = merged_options(options) |
||||
|
||||
instrument(:exist?, name) do |payload| |
||||
entry = read_entry(normalize_key(name, options), **options, event: payload) |
||||
(entry && !entry.expired? && !entry.mismatched?(normalize_version(name, options))) || false |
||||
end |
||||
end |
||||
|
||||
# Deletes all entries with keys matching the pattern. |
||||
# |
||||
# Options are passed to the underlying cache implementation. |
||||
# |
||||
# Some implementations may not support this method. |
||||
def delete_matched(matcher, options = nil) |
||||
raise NotImplementedError.new("#{self.class.name} does not support delete_matched") |
||||
end |
||||
|
||||
# Increments an integer value in the cache. |
||||
# |
||||
# Options are passed to the underlying cache implementation. |
||||
# |
||||
# Some implementations may not support this method. |
||||
def increment(name, amount = 1, options = nil) |
||||
raise NotImplementedError.new("#{self.class.name} does not support increment") |
||||
end |
||||
|
||||
# Decrements an integer value in the cache. |
||||
# |
||||
# Options are passed to the underlying cache implementation. |
||||
# |
||||
# Some implementations may not support this method. |
||||
def decrement(name, amount = 1, options = nil) |
||||
raise NotImplementedError.new("#{self.class.name} does not support decrement") |
||||
end |
||||
|
||||
# Cleans up the cache by removing expired entries.
||||
# |
||||
# Options are passed to the underlying cache implementation. |
||||
# |
||||
# Some implementations may not support this method. |
||||
def cleanup(options = nil) |
||||
raise NotImplementedError.new("#{self.class.name} does not support cleanup") |
||||
end |
||||
|
||||
# Clears the entire cache. Be careful with this method since it could |
||||
# affect other processes if shared cache is being used. |
||||
# |
||||
# The options hash is passed to the underlying cache implementation. |
||||
# |
||||
# Some implementations may not support this method. |
||||
def clear(options = nil) |
||||
raise NotImplementedError.new("#{self.class.name} does not support clear") |
||||
end |
||||
|
||||
private |
||||
# Adds the namespace defined in the options to a pattern designed to |
||||
# match keys. Implementations that support delete_matched should call |
||||
# this method to translate a pattern that matches names into one that |
||||
# matches namespaced keys. |
||||
def key_matcher(pattern, options) # :doc: |
||||
prefix = options[:namespace].is_a?(Proc) ? options[:namespace].call : options[:namespace] |
||||
if prefix |
||||
source = pattern.source |
||||
if source.start_with?("^") |
||||
source = source[1, source.length] |
||||
else |
||||
source = ".*#{source[0, source.length]}" |
||||
end |
||||
Regexp.new("^#{Regexp.escape(prefix)}:#{source}", pattern.options) |
||||
else |
||||
pattern |
||||
end |
||||
end |
||||
|
||||
# Reads an entry from the cache implementation. Subclasses must implement |
||||
# this method. |
||||
def read_entry(key, **options) |
||||
raise NotImplementedError.new |
||||
end |
||||
|
||||
# Writes an entry to the cache implementation. Subclasses must implement |
||||
# this method. |
||||
def write_entry(key, entry, **options) |
||||
raise NotImplementedError.new |
||||
end |
||||
|
||||
def serialize_entry(entry) |
||||
@coder.dump(entry) |
||||
end |
||||
|
||||
def deserialize_entry(payload) |
||||
payload.nil? ? nil : @coder.load(payload) |
||||
end |
||||
|
||||
# Reads multiple entries from the cache implementation. Subclasses MAY |
||||
# implement this method. |
||||
def read_multi_entries(names, **options) |
||||
names.each_with_object({}) do |name, results| |
||||
key = normalize_key(name, options) |
||||
entry = read_entry(key, **options) |
||||
|
||||
next unless entry |
||||
|
||||
version = normalize_version(name, options) |
||||
|
||||
if entry.expired? |
||||
delete_entry(key, **options) |
||||
elsif !entry.mismatched?(version) |
||||
results[name] = entry.value |
||||
end |
||||
end |
||||
end |
||||
|
||||
# Writes multiple entries to the cache implementation. Subclasses MAY |
||||
# implement this method. |
||||
def write_multi_entries(hash, **options) |
||||
hash.each do |key, entry| |
||||
write_entry key, entry, **options |
||||
end |
||||
end |
||||
|
||||
# Deletes an entry from the cache implementation. Subclasses must |
||||
# implement this method. |
||||
def delete_entry(key, **options) |
||||
raise NotImplementedError.new |
||||
end |
||||
|
||||
# Deletes multiple entries in the cache implementation. Subclasses MAY
||||
# implement this method. |
||||
def delete_multi_entries(entries, **options) |
||||
entries.count { |key| delete_entry(key, **options) } |
||||
end |
||||
|
||||
# Merges the default options with ones specific to a method call. |
||||
def merged_options(call_options) |
||||
if call_options |
||||
if options.empty? |
||||
call_options |
||||
else |
||||
options.merge(call_options) |
||||
end |
||||
else |
||||
options |
||||
end |
||||
end |
||||
|
||||
# Expands and namespaces the cache key. May be overridden by |
||||
# cache stores to do additional normalization. |
||||
def normalize_key(key, options = nil) |
||||
namespace_key expanded_key(key), options |
||||
end |
||||
|
||||
# Prefix the key with a namespace string: |
||||
# |
||||
# namespace_key 'foo', namespace: 'cache' |
||||
# # => 'cache:foo' |
||||
# |
||||
# With a namespace block: |
||||
# |
||||
# namespace_key 'foo', namespace: -> { 'cache' } |
||||
# # => 'cache:foo' |
||||
def namespace_key(key, options = nil) |
||||
options = merged_options(options) |
||||
namespace = options[:namespace] |
||||
|
||||
if namespace.respond_to?(:call) |
||||
namespace = namespace.call |
||||
end |
||||
|
||||
if key && key.encoding != Encoding::UTF_8 |
||||
key = key.dup.force_encoding(Encoding::UTF_8) |
||||
end |
||||
|
||||
if namespace |
||||
"#{namespace}:#{key}" |
||||
else |
||||
key |
||||
end |
||||
end |
||||
|
||||
# Expands key to be a consistent string value. Invokes +cache_key+ if |
||||
# object responds to +cache_key+. Otherwise, +to_param+ method will be |
||||
# called. If the key is a Hash, then keys will be sorted alphabetically. |
||||
def expanded_key(key) |
||||
return key.cache_key.to_s if key.respond_to?(:cache_key) |
||||
|
||||
case key |
||||
when Array |
||||
if key.size > 1 |
||||
key.collect { |element| expanded_key(element) } |
||||
else |
||||
expanded_key(key.first) |
||||
end |
||||
when Hash |
||||
key.collect { |k, v| "#{k}=#{v}" }.sort! |
||||
else |
||||
key |
||||
end.to_param |
||||
end |
||||
|
||||
def normalize_version(key, options = nil) |
||||
(options && options[:version].try(:to_param)) || expanded_version(key) |
||||
end |
||||
|
||||
def expanded_version(key) |
||||
case |
||||
when key.respond_to?(:cache_version) then key.cache_version.to_param |
||||
when key.is_a?(Array) then key.map { |element| expanded_version(element) }.tap(&:compact!).to_param |
||||
when key.respond_to?(:to_a) then expanded_version(key.to_a) |
||||
end |
||||
end |
||||
|
||||
def instrument(operation, key, options = nil) |
||||
if logger && logger.debug? && !silence? |
||||
logger.debug "Cache #{operation}: #{normalize_key(key, options)}#{options.blank? ? "" : " (#{options.inspect})"}" |
||||
end |
||||
|
||||
payload = { key: key, store: self.class.name } |
||||
payload.merge!(options) if options.is_a?(Hash) |
||||
ActiveSupport::Notifications.instrument("cache_#{operation}.active_support", payload) { yield(payload) } |
||||
end |
||||
|
||||
def handle_expired_entry(entry, key, options) |
||||
if entry && entry.expired? |
||||
race_ttl = options[:race_condition_ttl].to_i |
||||
if (race_ttl > 0) && (Time.now.to_f - entry.expires_at <= race_ttl) |
||||
# When an entry has a positive :race_condition_ttl defined, put the stale entry back into the cache |
||||
# for a brief period while the entry is being recalculated. |
||||
entry.expires_at = Time.now + race_ttl |
||||
write_entry(key, entry, expires_in: race_ttl * 2) |
||||
else |
||||
delete_entry(key, **options) |
||||
end |
||||
entry = nil |
||||
end |
||||
entry |
||||
end |
||||
|
||||
def get_entry_value(entry, name, options) |
||||
instrument(:fetch_hit, name, options) { } |
||||
entry.value |
||||
end |
||||
|
||||
def save_block_result_to_cache(name, options) |
||||
result = instrument(:generate, name, options) do |
||||
yield(name) |
||||
end |
||||
|
||||
write(name, result, options) unless result.nil? && options[:skip_nil] |
||||
result |
||||
end |
||||
end |
||||
|
||||
module NullCoder # :nodoc: |
||||
class << self |
||||
def load(payload) |
||||
payload |
||||
end |
||||
|
||||
def dump(entry) |
||||
entry |
||||
end |
||||
end |
||||
end |
||||
|
||||
# This class is used to represent cache entries. Cache entries have a value, an optional |
||||
# expiration time, and an optional version. The expiration time is used to support the :race_condition_ttl option |
||||
# on the cache. The version is used to support the :version option on the cache for rejecting |
||||
# mismatches. |
||||
# |
||||
# Since cache entries in most instances will be serialized, the internals of this class are highly optimized |
||||
# using short instance variable names that are lazily defined. |
||||
class Entry # :nodoc: |
||||
attr_reader :version |
||||
|
||||
DEFAULT_COMPRESS_LIMIT = 1.kilobyte |
||||
|
||||
# Creates a new cache entry for the specified value. Options supported are |
||||
# +:compress+, +:compress_threshold+, +:version+ and +:expires_in+. |
||||
def initialize(value, compress: true, compress_threshold: DEFAULT_COMPRESS_LIMIT, version: nil, expires_in: nil, **) |
||||
@value = value |
||||
@version = version |
||||
@created_at = Time.now.to_f |
||||
@expires_in = expires_in && expires_in.to_f |
||||
|
||||
compress!(compress_threshold) if compress |
||||
end |
||||
|
||||
def value |
||||
compressed? ? uncompress(@value) : @value |
||||
end |
||||
|
||||
def mismatched?(version) |
||||
@version && version && @version != version |
||||
end |
||||
|
||||
# Checks if the entry is expired. The +expires_in+ parameter can override |
||||
# the value set when the entry was created. |
||||
def expired? |
||||
@expires_in && @created_at + @expires_in <= Time.now.to_f |
||||
end |
||||
|
||||
def expires_at |
||||
@expires_in ? @created_at + @expires_in : nil |
||||
end |
||||
|
||||
def expires_at=(value) |
||||
if value |
||||
@expires_in = value.to_f - @created_at |
||||
else |
||||
@expires_in = nil |
||||
end |
||||
end |
||||
|
||||
# Returns the size of the cached value. This could be less than |
||||
# <tt>value.bytesize</tt> if the data is compressed. |
||||
def bytesize |
||||
case value |
||||
when NilClass |
||||
0 |
||||
when String |
||||
@value.bytesize |
||||
else |
||||
@s ||= Marshal.dump(@value).bytesize |
||||
end |
||||
end |
||||
|
||||
# Duplicates the value in a class. This is used by cache implementations that don't natively |
||||
# serialize entries to protect against accidental cache modifications. |
||||
def dup_value! |
||||
if @value && !compressed? && !(@value.is_a?(Numeric) || @value == true || @value == false) |
||||
if @value.is_a?(String) |
||||
@value = @value.dup |
||||
else |
||||
@value = Marshal.load(Marshal.dump(@value)) |
||||
end |
||||
end |
||||
end |
||||
|
||||
private |
||||
def compress!(compress_threshold) |
||||
case @value |
||||
when nil, true, false, Numeric |
||||
uncompressed_size = 0 |
||||
when String |
||||
uncompressed_size = @value.bytesize |
||||
else |
||||
serialized = Marshal.dump(@value) |
||||
uncompressed_size = serialized.bytesize |
||||
end |
||||
|
||||
if uncompressed_size >= compress_threshold |
||||
serialized ||= Marshal.dump(@value) |
||||
compressed = Zlib::Deflate.deflate(serialized) |
||||
|
||||
if compressed.bytesize < uncompressed_size |
||||
@value = compressed |
||||
@compressed = true |
||||
end |
||||
end |
||||
end |
||||
|
||||
def compressed? |
||||
defined?(@compressed) |
||||
end |
||||
|
||||
def uncompress(value) |
||||
Marshal.load(Zlib::Inflate.inflate(value)) |
||||
end |
||||
end |
||||
end |
||||
end |
@@ -1,196 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
require "active_support/core_ext/marshal" |
||||
require "active_support/core_ext/file/atomic" |
||||
require "active_support/core_ext/string/conversions" |
||||
require "uri/common" |
||||
|
||||
module ActiveSupport |
||||
module Cache |
||||
# A cache store implementation which stores everything on the filesystem. |
||||
# |
||||
# FileStore implements the Strategy::LocalCache strategy which implements |
||||
# an in-memory cache inside of a block. |
||||
class FileStore < Store |
||||
prepend Strategy::LocalCache |
||||
attr_reader :cache_path |
||||
|
||||
DIR_FORMATTER = "%03X" |
||||
FILENAME_MAX_SIZE = 226 # max filename size on file system is 255, minus room for timestamp, pid, and random characters appended by Tempfile (used by atomic write) |
||||
FILEPATH_MAX_SIZE = 900 # max is 1024, leaving some room
||||
GITKEEP_FILES = [".gitkeep", ".keep"].freeze |
||||
|
||||
def initialize(cache_path, **options) |
||||
super(options) |
||||
@cache_path = cache_path.to_s |
||||
end |
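A minimal usage sketch, assuming a writable tmp/cache directory; the path and key names are illustrative only.

cache = ActiveSupport::Cache::FileStore.new("tmp/cache")

cache.write("greeting", "hello", expires_in: 300) # expires_in is in seconds
cache.read("greeting")   # => "hello"
cache.exist?("greeting") # => true
cache.clear              # removes everything under tmp/cache except .keep/.gitkeep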
||||
|
||||
# Advertise cache versioning support. |
||||
def self.supports_cache_versioning? |
||||
true |
||||
end |
||||
|
||||
# Deletes all items from the cache. In this case it deletes all the entries in the specified |
||||
# file store directory except for .keep or .gitkeep. Be careful which directory is specified in your |
||||
# config file when using +FileStore+ because everything in that directory will be deleted. |
||||
def clear(options = nil) |
||||
root_dirs = (Dir.children(cache_path) - GITKEEP_FILES) |
||||
FileUtils.rm_r(root_dirs.collect { |f| File.join(cache_path, f) }) |
||||
rescue Errno::ENOENT, Errno::ENOTEMPTY |
||||
end |
||||
|
||||
# Preemptively iterates through all stored keys and removes the ones which have expired. |
||||
def cleanup(options = nil) |
||||
options = merged_options(options) |
||||
search_dir(cache_path) do |fname| |
||||
entry = read_entry(fname, **options) |
||||
delete_entry(fname, **options) if entry && entry.expired? |
||||
end |
||||
end |
||||
|
||||
# Increments an already existing integer value that is stored in the cache. |
||||
# If the key is not found nothing is done. |
||||
def increment(name, amount = 1, options = nil) |
||||
modify_value(name, amount, options) |
||||
end |
||||
|
||||
# Decrements an already existing integer value that is stored in the cache. |
||||
# If the key is not found nothing is done. |
||||
def decrement(name, amount = 1, options = nil) |
||||
modify_value(name, -amount, options) |
||||
end |
||||
|
||||
def delete_matched(matcher, options = nil) |
||||
options = merged_options(options) |
||||
instrument(:delete_matched, matcher.inspect) do |
||||
matcher = key_matcher(matcher, options) |
||||
search_dir(cache_path) do |path| |
||||
key = file_path_key(path) |
||||
delete_entry(path, **options) if key.match(matcher) |
||||
end |
||||
end |
||||
end |
||||
|
||||
private |
||||
def read_entry(key, **options) |
||||
if File.exist?(key) |
||||
entry = File.open(key) { |f| deserialize_entry(f.read) } |
||||
entry if entry.is_a?(Cache::Entry) |
||||
end |
||||
rescue => e |
||||
logger.error("FileStoreError (#{e}): #{e.message}") if logger |
||||
nil |
||||
end |
||||
|
||||
def write_entry(key, entry, **options) |
||||
return false if options[:unless_exist] && File.exist?(key) |
||||
ensure_cache_path(File.dirname(key)) |
||||
File.atomic_write(key, cache_path) { |f| f.write(serialize_entry(entry)) } |
||||
true |
||||
end |
||||
|
||||
def delete_entry(key, **options) |
||||
if File.exist?(key) |
||||
begin |
||||
File.delete(key) |
||||
delete_empty_directories(File.dirname(key)) |
||||
true |
||||
rescue => e |
||||
# Just in case the error was caused by another process deleting the file first. |
||||
raise e if File.exist?(key) |
||||
false |
||||
end |
||||
end |
||||
end |
||||
|
||||
# Lock a file for a block so only one process can modify it at a time. |
||||
def lock_file(file_name, &block) |
||||
if File.exist?(file_name) |
||||
File.open(file_name, "r+") do |f| |
||||
f.flock File::LOCK_EX |
||||
yield |
||||
ensure |
||||
f.flock File::LOCK_UN |
||||
end |
||||
else |
||||
yield |
||||
end |
||||
end |
||||
|
||||
# Translate a key into a file path. |
||||
def normalize_key(key, options) |
||||
key = super |
||||
fname = URI.encode_www_form_component(key) |
||||
|
||||
if fname.size > FILEPATH_MAX_SIZE |
||||
fname = ActiveSupport::Digest.hexdigest(key) |
||||
end |
||||
|
||||
hash = Zlib.adler32(fname) |
||||
hash, dir_1 = hash.divmod(0x1000) |
||||
dir_2 = hash.modulo(0x1000) |
||||
|
||||
# Make sure file name doesn't exceed file system limits. |
||||
if fname.length < FILENAME_MAX_SIZE |
||||
fname_paths = fname |
||||
else |
||||
fname_paths = [] |
||||
begin |
||||
fname_paths << fname[0, FILENAME_MAX_SIZE] |
||||
fname = fname[FILENAME_MAX_SIZE..-1] |
||||
end until fname.blank? |
||||
end |
||||
|
||||
File.join(cache_path, DIR_FORMATTER % dir_1, DIR_FORMATTER % dir_2, fname_paths) |
||||
end |
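A hypothetical illustration of the resulting on-disk layout; normalize_key is private, and the hex directory names below are made up, since they depend on the Adler-32 checksum of the encoded key.

store = ActiveSupport::Cache::FileStore.new("tmp/cache")
store.send(:normalize_key, "views/home", {})
# => e.g. "tmp/cache/6A3/2B1/views%2Fhome"
#    two 3-hex-digit directories derived from the Adler-32 checksum, then the
#    URL-encoded key, chunked if it exceeds FILENAME_MAX_SIZE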
||||
|
||||
# Translate a file path into a key. |
||||
def file_path_key(path) |
||||
fname = path[cache_path.to_s.size..-1].split(File::SEPARATOR, 4).last |
||||
URI.decode_www_form_component(fname, Encoding::UTF_8) |
||||
end |
||||
|
||||
# Delete empty directories in the cache. |
||||
def delete_empty_directories(dir) |
||||
return if File.realpath(dir) == File.realpath(cache_path) |
||||
if Dir.children(dir).empty? |
||||
Dir.delete(dir) rescue nil |
||||
delete_empty_directories(File.dirname(dir)) |
||||
end |
||||
end |
||||
|
||||
# Make sure a file path's directories exist. |
||||
def ensure_cache_path(path) |
||||
FileUtils.makedirs(path) unless File.exist?(path) |
||||
end |
||||
|
||||
def search_dir(dir, &callback) |
||||
return if !File.exist?(dir) |
||||
Dir.each_child(dir) do |d| |
||||
name = File.join(dir, d) |
||||
if File.directory?(name) |
||||
search_dir(name, &callback) |
||||
else |
||||
callback.call name |
||||
end |
||||
end |
||||
end |
||||
|
||||
# Modifies the amount of an already existing integer value that is stored in the cache. |
||||
# If the key is not found nothing is done. |
||||
def modify_value(name, amount, options) |
||||
file_name = normalize_key(name, options) |
||||
|
||||
lock_file(file_name) do |
||||
options = merged_options(options) |
||||
|
||||
if num = read(name, options) |
||||
num = num.to_i + amount |
||||
write(name, num, options) |
||||
num |
||||
end |
||||
end |
||||
end |
||||
end |
||||
end |
||||
end |
@@ -1,213 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
begin |
||||
require "dalli" |
||||
rescue LoadError => e |
||||
$stderr.puts "You don't have dalli installed in your application. Please add it to your Gemfile and run bundle install" |
||||
raise e |
||||
end |
||||
|
||||
require "active_support/core_ext/enumerable" |
||||
require "active_support/core_ext/marshal" |
||||
require "active_support/core_ext/array/extract_options" |
||||
|
||||
module ActiveSupport |
||||
module Cache |
||||
# A cache store implementation which stores data in Memcached: |
||||
# https://memcached.org |
||||
# |
||||
# This is currently the most popular cache store for production websites. |
||||
# |
||||
# Special features: |
||||
# - Clustering and load balancing. One can specify multiple memcached servers, |
||||
# and MemCacheStore will load balance between all available servers. If a |
||||
# server goes down, then MemCacheStore will ignore it until it comes back up. |
||||
# |
||||
# MemCacheStore implements the Strategy::LocalCache strategy which implements |
||||
# an in-memory cache inside of a block. |
||||
class MemCacheStore < Store |
||||
DEFAULT_CODER = NullCoder # Dalli automatically marshals values
||||
|
||||
# Provide support for raw values in the local cache strategy. |
||||
module LocalCacheWithRaw # :nodoc: |
||||
private |
||||
def write_entry(key, entry, **options) |
||||
if options[:raw] && local_cache |
||||
raw_entry = Entry.new(entry.value.to_s) |
||||
raw_entry.expires_at = entry.expires_at |
||||
super(key, raw_entry, **options) |
||||
else |
||||
super |
||||
end |
||||
end |
||||
end |
||||
|
||||
# Advertise cache versioning support. |
||||
def self.supports_cache_versioning? |
||||
true |
||||
end |
||||
|
||||
prepend Strategy::LocalCache |
||||
prepend LocalCacheWithRaw |
||||
|
||||
ESCAPE_KEY_CHARS = /[\x00-\x20%\x7F-\xFF]/n |
||||
|
||||
# Creates a new Dalli::Client instance with specified addresses and options. |
||||
# If no addresses are provided, we give nil to Dalli::Client, so it uses its fallbacks: |
||||
# - ENV["MEMCACHE_SERVERS"] (if defined) |
||||
# - "127.0.0.1:11211" (otherwise) |
||||
# |
||||
# ActiveSupport::Cache::MemCacheStore.build_mem_cache |
||||
# # => #<Dalli::Client:0x007f98a47d2028 @servers=["127.0.0.1:11211"], @options={}, @ring=nil> |
||||
# ActiveSupport::Cache::MemCacheStore.build_mem_cache('localhost:10290') |
||||
# # => #<Dalli::Client:0x007f98a47b3a60 @servers=["localhost:10290"], @options={}, @ring=nil> |
||||
def self.build_mem_cache(*addresses) # :nodoc: |
||||
addresses = addresses.flatten |
||||
options = addresses.extract_options! |
||||
addresses = nil if addresses.compact.empty? |
||||
pool_options = retrieve_pool_options(options) |
||||
|
||||
if pool_options.empty? |
||||
Dalli::Client.new(addresses, options) |
||||
else |
||||
ensure_connection_pool_added! |
||||
ConnectionPool.new(pool_options) { Dalli::Client.new(addresses, options.merge(threadsafe: false)) } |
||||
end |
||||
end |
||||
|
||||
# Creates a new MemCacheStore object, with the given memcached server |
||||
# addresses. Each address is either a host name, or a host-with-port string |
||||
# in the form of "host_name:port". For example: |
||||
# |
||||
# ActiveSupport::Cache::MemCacheStore.new("localhost", "server-downstairs.localnetwork:8229") |
||||
# |
||||
# If no addresses are provided, but ENV['MEMCACHE_SERVERS'] is defined, it will be used instead. Otherwise, |
||||
# MemCacheStore will connect to localhost:11211 (the default memcached port). |
||||
def initialize(*addresses) |
||||
addresses = addresses.flatten |
||||
options = addresses.extract_options! |
||||
super(options) |
||||
|
||||
unless [String, Dalli::Client, NilClass].include?(addresses.first.class) |
||||
raise ArgumentError, "First argument must be an empty array, an array of hosts or a Dalli::Client instance." |
||||
end |
||||
if addresses.first.is_a?(Dalli::Client) |
||||
@data = addresses.first |
||||
else |
||||
mem_cache_options = options.dup |
||||
UNIVERSAL_OPTIONS.each { |name| mem_cache_options.delete(name) } |
||||
@data = self.class.build_mem_cache(*(addresses + [mem_cache_options])) |
||||
end |
||||
end |
||||
|
||||
# Increment a cached value. This method uses the memcached incr atomic |
||||
# operator and can only be used on values written with the :raw option. |
||||
# Calling it on a value not stored with :raw will initialize that value |
||||
# to zero. |
||||
def increment(name, amount = 1, options = nil) |
||||
options = merged_options(options) |
||||
instrument(:increment, name, amount: amount) do |
||||
rescue_error_with nil do |
||||
@data.with { |c| c.incr(normalize_key(name, options), amount, options[:expires_in]) } |
||||
end |
||||
end |
||||
end |
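A sketch of the raw-value requirement, assuming a memcached server on the default port and the dalli gem bundled; the commented return values are illustrative.

cache = ActiveSupport::Cache::MemCacheStore.new("127.0.0.1:11211")

cache.write("page_hits", 0, raw: true) # stored as the plain string "0", so memcached incr can work
cache.increment("page_hits")           # => 1
cache.increment("page_hits", 10)       # => 11
cache.read("page_hits", raw: true)     # => "11"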
||||
|
||||
# Decrement a cached value. This method uses the memcached decr atomic |
||||
# operator and can only be used on values written with the :raw option. |
||||
# Calling it on a value not stored with :raw will initialize that value |
||||
# to zero. |
||||
def decrement(name, amount = 1, options = nil) |
||||
options = merged_options(options) |
||||
instrument(:decrement, name, amount: amount) do |
||||
rescue_error_with nil do |
||||
@data.with { |c| c.decr(normalize_key(name, options), amount, options[:expires_in]) } |
||||
end |
||||
end |
||||
end |
||||
|
||||
# Clear the entire cache on all memcached servers. This method should |
||||
# be used with care when shared cache is being used. |
||||
def clear(options = nil) |
||||
rescue_error_with(nil) { @data.with { |c| c.flush_all } } |
||||
end |
||||
|
||||
# Get the statistics from the memcached servers. |
||||
def stats |
||||
@data.with { |c| c.stats } |
||||
end |
||||
|
||||
private |
||||
# Read an entry from the cache. |
||||
def read_entry(key, **options) |
||||
rescue_error_with(nil) { deserialize_entry(@data.with { |c| c.get(key, options) }) } |
||||
end |
||||
|
||||
# Write an entry to the cache. |
||||
def write_entry(key, entry, **options) |
||||
method = options[:unless_exist] ? :add : :set |
||||
value = options[:raw] ? entry.value.to_s : serialize_entry(entry) |
||||
expires_in = options[:expires_in].to_i |
||||
if options[:race_condition_ttl] && expires_in > 0 && !options[:raw] |
||||
# Set the memcached expiry a few minutes in the future to support race-condition TTLs on read
||||
expires_in += 5.minutes |
||||
end |
||||
rescue_error_with false do |
||||
# The value "compress: false" prevents duplicate compression within Dalli. |
||||
@data.with { |c| c.send(method, key, value, expires_in, **options, compress: false) } |
||||
end |
||||
end |
||||
|
||||
# Reads multiple entries from the cache implementation. |
||||
def read_multi_entries(names, **options) |
||||
keys_to_names = names.index_by { |name| normalize_key(name, options) } |
||||
|
||||
raw_values = @data.with { |c| c.get_multi(keys_to_names.keys) } |
||||
values = {} |
||||
|
||||
raw_values.each do |key, value| |
||||
entry = deserialize_entry(value) |
||||
|
||||
unless entry.expired? || entry.mismatched?(normalize_version(keys_to_names[key], options)) |
||||
values[keys_to_names[key]] = entry.value |
||||
end |
||||
end |
||||
|
||||
values |
||||
end |
||||
|
||||
# Delete an entry from the cache. |
||||
def delete_entry(key, **options) |
||||
rescue_error_with(false) { @data.with { |c| c.delete(key) } } |
||||
end |
||||
|
||||
# Memcache keys are binaries. So we need to force their encoding to binary |
||||
# before applying the regular expression to ensure we are escaping all |
||||
# characters properly. |
||||
def normalize_key(key, options) |
||||
key = super |
||||
|
||||
if key |
||||
key = key.dup.force_encoding(Encoding::ASCII_8BIT) |
||||
key = key.gsub(ESCAPE_KEY_CHARS) { |match| "%#{match.getbyte(0).to_s(16).upcase}" } |
||||
key = "#{key[0, 213]}:md5:#{ActiveSupport::Digest.hexdigest(key)}" if key.size > 250 |
||||
end |
||||
|
||||
key |
||||
end |
||||
|
||||
def deserialize_entry(payload) |
||||
entry = super |
||||
entry = Entry.new(entry, compress: false) unless entry.nil? || entry.is_a?(Entry) |
||||
entry |
||||
end |
||||
|
||||
def rescue_error_with(fallback) |
||||
yield |
||||
rescue Dalli::DalliError => e |
||||
logger.error("DalliError (#{e}): #{e.message}") if logger |
||||
fallback |
||||
end |
||||
end |
||||
end |
||||
end |
@@ -1,195 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
require "monitor" |
||||
|
||||
module ActiveSupport |
||||
module Cache |
||||
# A cache store implementation which stores everything into memory in the |
||||
# same process. If you're running multiple Ruby on Rails server processes |
||||
# (which is the case if you're using Phusion Passenger or puma clustered mode), |
||||
# then this means that Rails server process instances won't be able |
||||
# to share cache data with each other and this may not be the most |
||||
# appropriate cache in that scenario. |
||||
# |
||||
# This cache has a bounded size specified by the :size option to the
# initializer (default is 32 megabytes). When the cache exceeds the allotted size,
||||
# a cleanup will occur which tries to prune the cache down to three quarters |
||||
# of the maximum size by removing the least recently used entries. |
||||
# |
||||
# Unlike other Cache store implementations, MemoryStore does not compress |
||||
# values by default. MemoryStore does not benefit from compression as much |
||||
# as other Store implementations, as it does not send data over a network. |
||||
# However, when compression is enabled, it still pays the full cost of
# compression in terms of CPU use.
||||
# |
||||
# MemoryStore is thread-safe. |
||||
class MemoryStore < Store |
||||
module DupCoder # :nodoc: |
||||
class << self |
||||
def load(entry) |
||||
entry = entry.dup |
||||
entry.dup_value! |
||||
entry |
||||
end |
||||
|
||||
def dump(entry) |
||||
entry.dup_value! |
||||
entry |
||||
end |
||||
end |
||||
end |
||||
|
||||
DEFAULT_CODER = DupCoder |
||||
|
||||
def initialize(options = nil) |
||||
options ||= {} |
||||
# Disable compression by default. |
||||
options[:compress] ||= false |
||||
super(options) |
||||
@data = {} |
||||
@max_size = options[:size] || 32.megabytes |
||||
@max_prune_time = options[:max_prune_time] || 2 |
||||
@cache_size = 0 |
||||
@monitor = Monitor.new |
||||
@pruning = false |
||||
end |
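A configuration sketch; the cap and keys are illustrative. Once the accumulated payload size crosses the cap, prune evicts least-recently-used entries down to roughly 75% of it.

cache = ActiveSupport::Cache::MemoryStore.new(size: 64 * 1024 * 1024) # 64 MB cap

cache.write("session/42", { user_id: 42 })
cache.read("session/42") # => { user_id: 42 }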
||||
|
||||
# Advertise cache versioning support. |
||||
def self.supports_cache_versioning? |
||||
true |
||||
end |
||||
|
||||
# Delete all data stored in a given cache store. |
||||
def clear(options = nil) |
||||
synchronize do |
||||
@data.clear |
||||
@cache_size = 0 |
||||
end |
||||
end |
||||
|
||||
# Preemptively iterates through all stored keys and removes the ones which have expired. |
||||
def cleanup(options = nil) |
||||
options = merged_options(options) |
||||
instrument(:cleanup, size: @data.size) do |
||||
keys = synchronize { @data.keys } |
||||
keys.each do |key| |
||||
entry = @data[key] |
||||
delete_entry(key, **options) if entry && entry.expired? |
||||
end |
||||
end |
||||
end |
||||
|
||||
# To ensure entries fit within the specified memory, prune the cache by removing the least
# recently accessed entries.
||||
def prune(target_size, max_time = nil) |
||||
return if pruning? |
||||
@pruning = true |
||||
begin |
||||
start_time = Concurrent.monotonic_time |
||||
cleanup |
||||
instrument(:prune, target_size, from: @cache_size) do |
||||
keys = synchronize { @data.keys } |
||||
keys.each do |key| |
||||
delete_entry(key, **options) |
||||
return if @cache_size <= target_size || (max_time && Concurrent.monotonic_time - start_time > max_time) |
||||
end |
||||
end |
||||
ensure |
||||
@pruning = false |
||||
end |
||||
end |
||||
|
||||
# Returns true if the cache is currently being pruned. |
||||
def pruning? |
||||
@pruning |
||||
end |
||||
|
||||
# Increment an integer value in the cache. |
||||
def increment(name, amount = 1, options = nil) |
||||
modify_value(name, amount, options) |
||||
end |
||||
|
||||
# Decrement an integer value in the cache. |
||||
def decrement(name, amount = 1, options = nil) |
||||
modify_value(name, -amount, options) |
||||
end |
||||
|
||||
# Deletes cache entries if the cache key matches a given pattern. |
||||
def delete_matched(matcher, options = nil) |
||||
options = merged_options(options) |
||||
instrument(:delete_matched, matcher.inspect) do |
||||
matcher = key_matcher(matcher, options) |
||||
keys = synchronize { @data.keys } |
||||
keys.each do |key| |
||||
delete_entry(key, **options) if key.match(matcher) |
||||
end |
||||
end |
||||
end |
||||
|
||||
def inspect # :nodoc: |
||||
"#<#{self.class.name} entries=#{@data.size}, size=#{@cache_size}, options=#{@options.inspect}>" |
||||
end |
||||
|
||||
# Synchronize calls to the cache. This should be called wherever the underlying cache implementation |
||||
# is not thread safe. |
||||
def synchronize(&block) # :nodoc: |
||||
@monitor.synchronize(&block) |
||||
end |
||||
|
||||
private |
||||
PER_ENTRY_OVERHEAD = 240 |
||||
|
||||
def cached_size(key, payload) |
||||
key.to_s.bytesize + payload.bytesize + PER_ENTRY_OVERHEAD |
||||
end |
||||
|
||||
def read_entry(key, **options) |
||||
entry = nil |
||||
synchronize do |
||||
payload = @data.delete(key) |
||||
if payload |
||||
@data[key] = payload |
||||
entry = deserialize_entry(payload) |
||||
end |
||||
end |
||||
entry |
||||
end |
||||
|
||||
def write_entry(key, entry, **options) |
||||
payload = serialize_entry(entry) |
||||
synchronize do |
||||
return false if options[:unless_exist] && @data.key?(key) |
||||
|
||||
old_payload = @data[key] |
||||
if old_payload |
||||
@cache_size -= (old_payload.bytesize - payload.bytesize) |
||||
else |
||||
@cache_size += cached_size(key, payload) |
||||
end |
||||
@data[key] = payload |
||||
prune(@max_size * 0.75, @max_prune_time) if @cache_size > @max_size |
||||
true |
||||
end |
||||
end |
||||
|
||||
def delete_entry(key, **options) |
||||
synchronize do |
||||
payload = @data.delete(key) |
||||
@cache_size -= cached_size(key, payload) if payload |
||||
!!payload |
||||
end |
||||
end |
||||
|
||||
def modify_value(name, amount, options) |
||||
options = merged_options(options) |
||||
synchronize do |
||||
if num = read(name, options) |
||||
num = num.to_i + amount |
||||
write(name, num, options) |
||||
num |
||||
end |
||||
end |
||||
end |
||||
end |
||||
end |
||||
end |
@@ -1,48 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
module ActiveSupport |
||||
module Cache |
||||
# A cache store implementation which doesn't actually store anything. Useful in |
||||
# development and test environments where you don't want caching turned on but |
||||
# need to go through the caching interface. |
||||
# |
||||
# This cache does implement the local cache strategy, so values will actually |
||||
# be cached inside blocks that utilize this strategy. See |
||||
# ActiveSupport::Cache::Strategy::LocalCache for more details. |
||||
class NullStore < Store |
||||
prepend Strategy::LocalCache |
||||
|
||||
# Advertise cache versioning support. |
||||
def self.supports_cache_versioning? |
||||
true |
||||
end |
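A sketch of the one case where NullStore still returns data: inside a local-cache block, reads are served by the temporary in-memory LocalStore. The key and values are illustrative.

cache = ActiveSupport::Cache::NullStore.new

cache.write("foo", 1)
cache.read("foo") # => nil, nothing is persisted

cache.with_local_cache do
  cache.write("foo", 1)
  cache.read("foo") # => 1, served by the per-block local cache
end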
||||
|
||||
def clear(options = nil) |
||||
end |
||||
|
||||
def cleanup(options = nil) |
||||
end |
||||
|
||||
def increment(name, amount = 1, options = nil) |
||||
end |
||||
|
||||
def decrement(name, amount = 1, options = nil) |
||||
end |
||||
|
||||
def delete_matched(matcher, options = nil) |
||||
end |
||||
|
||||
private |
||||
def read_entry(key, **options) |
||||
end |
||||
|
||||
def write_entry(key, entry, **options) |
||||
true |
||||
end |
||||
|
||||
def delete_entry(key, **options) |
||||
false |
||||
end |
||||
end |
||||
end |
||||
end |
@@ -1,493 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
begin |
||||
gem "redis", ">= 4.0.1" |
||||
require "redis" |
||||
require "redis/distributed" |
||||
rescue LoadError |
||||
warn "The Redis cache store requires the redis gem, version 4.0.1 or later. Please add it to your Gemfile: `gem \"redis\", \"~> 4.0\"`" |
||||
raise |
||||
end |
||||
|
||||
# Prefer the hiredis driver but don't require it. |
||||
begin |
||||
require "redis/connection/hiredis" |
||||
rescue LoadError |
||||
end |
||||
|
||||
require "digest/sha2" |
||||
require "active_support/core_ext/marshal" |
||||
|
||||
module ActiveSupport |
||||
module Cache |
||||
module ConnectionPoolLike |
||||
def with |
||||
yield self |
||||
end |
||||
end |
||||
|
||||
::Redis.include(ConnectionPoolLike) |
||||
::Redis::Distributed.include(ConnectionPoolLike) |
||||
|
||||
# Redis cache store. |
||||
# |
||||
# Deployment note: Take care to use a *dedicated Redis cache* rather |
||||
# than pointing this at your existing Redis server. It won't cope well |
||||
# with mixed usage patterns and it won't expire cache entries by default. |
||||
# |
||||
# Redis cache server setup guide: https://redis.io/topics/lru-cache |
||||
# |
||||
# * Supports vanilla Redis, hiredis, and Redis::Distributed. |
||||
# * Supports Memcached-like sharding across Redises with Redis::Distributed. |
||||
# * Fault tolerant. If the Redis server is unavailable, no exceptions are |
||||
# raised. Cache fetches are all misses and writes are dropped. |
||||
# * Local cache. Hot in-memory primary cache within block/middleware scope. |
||||
# * +read_multi+ and +write_multi+ support for Redis mget/mset. Use Redis::Distributed |
||||
# 4.0.1+ for distributed mget support. |
||||
# * +delete_matched+ support for Redis KEYS globs. |
||||
class RedisCacheStore < Store |
||||
# Keys are truncated with their own SHA2 digest if they exceed 1kB |
||||
MAX_KEY_BYTESIZE = 1024 |
||||
|
||||
DEFAULT_REDIS_OPTIONS = { |
||||
connect_timeout: 20, |
||||
read_timeout: 1, |
||||
write_timeout: 1, |
||||
reconnect_attempts: 0, |
||||
} |
||||
|
||||
DEFAULT_ERROR_HANDLER = -> (method:, returning:, exception:) do |
||||
if logger |
||||
logger.error { "RedisCacheStore: #{method} failed, returned #{returning.inspect}: #{exception.class}: #{exception.message}" } |
||||
end |
||||
end |
||||
|
||||
# The maximum number of entries to receive per SCAN call. |
||||
SCAN_BATCH_SIZE = 1000 |
||||
private_constant :SCAN_BATCH_SIZE |
||||
|
||||
# Advertise cache versioning support. |
||||
def self.supports_cache_versioning? |
||||
true |
||||
end |
||||
|
||||
# Support raw values in the local cache strategy. |
||||
module LocalCacheWithRaw # :nodoc: |
||||
private |
||||
def write_entry(key, entry, **options) |
||||
if options[:raw] && local_cache |
||||
raw_entry = Entry.new(serialize_entry(entry, raw: true)) |
||||
raw_entry.expires_at = entry.expires_at |
||||
super(key, raw_entry, **options) |
||||
else |
||||
super |
||||
end |
||||
end |
||||
|
||||
def write_multi_entries(entries, **options) |
||||
if options[:raw] && local_cache |
||||
raw_entries = entries.map do |key, entry| |
||||
raw_entry = Entry.new(serialize_entry(entry, raw: true)) |
||||
raw_entry.expires_at = entry.expires_at |
||||
end.to_h |
||||
|
||||
super(raw_entries, **options) |
||||
else |
||||
super |
||||
end |
||||
end |
||||
end |
||||
|
||||
prepend Strategy::LocalCache |
||||
prepend LocalCacheWithRaw |
||||
|
||||
class << self |
||||
# Factory method to create a new Redis instance. |
||||
# |
||||
# Handles four options: :redis block, :redis instance, single :url |
||||
# string, and multiple :url strings. |
||||
# |
||||
# Option Class Result |
||||
# :redis Proc -> options[:redis].call |
||||
# :redis Object -> options[:redis] |
||||
# :url String -> Redis.new(url: …) |
||||
# :url Array -> Redis::Distributed.new([{ url: … }, { url: … }, …]) |
||||
# |
||||
def build_redis(redis: nil, url: nil, **redis_options) #:nodoc: |
||||
urls = Array(url) |
||||
|
||||
if redis.is_a?(Proc) |
||||
redis.call |
||||
elsif redis |
||||
redis |
||||
elsif urls.size > 1 |
||||
build_redis_distributed_client urls: urls, **redis_options |
||||
else |
||||
build_redis_client url: urls.first, **redis_options |
||||
end |
||||
end |
||||
|
||||
private |
||||
def build_redis_distributed_client(urls:, **redis_options) |
||||
::Redis::Distributed.new([], DEFAULT_REDIS_OPTIONS.merge(redis_options)).tap do |dist| |
||||
urls.each { |u| dist.add_node url: u } |
||||
end |
||||
end |
||||
|
||||
def build_redis_client(url:, **redis_options) |
||||
::Redis.new DEFAULT_REDIS_OPTIONS.merge(redis_options.merge(url: url)) |
||||
end |
||||
end |
||||
|
||||
attr_reader :redis_options |
||||
attr_reader :max_key_bytesize |
||||
|
||||
# Creates a new Redis cache store. |
||||
# |
||||
# Handles four options: :redis block, :redis instance, single :url |
||||
# string, and multiple :url strings. |
||||
# |
||||
# Option Class Result |
||||
# :redis Proc -> options[:redis].call |
||||
# :redis Object -> options[:redis] |
||||
# :url String -> Redis.new(url: …) |
||||
# :url Array -> Redis::Distributed.new([{ url: … }, { url: … }, …]) |
||||
# |
||||
# No namespace is set by default. Provide one if the Redis cache |
||||
# server is shared with other apps: <tt>namespace: 'myapp-cache'</tt>. |
||||
# |
||||
# Compression is enabled by default with a 1kB threshold, so cached |
||||
# values larger than 1kB are automatically compressed. Disable by |
||||
# passing <tt>compress: false</tt> or change the threshold by passing |
||||
# <tt>compress_threshold: 4.kilobytes</tt>. |
||||
# |
||||
# No expiry is set on cache entries by default. Redis is expected to |
||||
# be configured with an eviction policy that automatically deletes |
||||
# least-recently or -frequently used keys when it reaches max memory. |
||||
# See https://redis.io/topics/lru-cache for cache server setup. |
||||
# |
||||
# Race condition TTL is not set by default. This can be used to avoid |
||||
# "thundering herd" cache writes when hot cache entries are expired. |
||||
# See <tt>ActiveSupport::Cache::Store#fetch</tt> for more. |
||||
def initialize(namespace: nil, compress: true, compress_threshold: 1.kilobyte, coder: DEFAULT_CODER, expires_in: nil, race_condition_ttl: nil, error_handler: DEFAULT_ERROR_HANDLER, **redis_options) |
||||
@redis_options = redis_options |
||||
|
||||
@max_key_bytesize = MAX_KEY_BYTESIZE |
||||
@error_handler = error_handler |
||||
|
||||
super namespace: namespace, |
||||
compress: compress, compress_threshold: compress_threshold, |
||||
expires_in: expires_in, race_condition_ttl: race_condition_ttl, |
||||
coder: coder |
||||
end |
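A setup sketch; the URL, namespace, and compute_report are assumptions, not part of the original file.

cache = ActiveSupport::Cache::RedisCacheStore.new(
  url: "redis://localhost:6379/0",
  namespace: "myapp-cache",
  expires_in: 3600, # seconds; applied as a PX modifier on SET
  error_handler: ->(method:, returning:, exception:) {
    # report to an error tracker here instead of letting reads/writes raise
  }
)

cache.fetch("expensive/report") { compute_report } # hypothetical block, run only on a cache miss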
||||
|
||||
def redis |
||||
@redis ||= begin |
||||
pool_options = self.class.send(:retrieve_pool_options, redis_options) |
||||
|
||||
if pool_options.any? |
||||
self.class.send(:ensure_connection_pool_added!) |
||||
::ConnectionPool.new(pool_options) { self.class.build_redis(**redis_options) } |
||||
else |
||||
self.class.build_redis(**redis_options) |
||||
end |
||||
end |
||||
end |
||||
|
||||
def inspect |
||||
instance = @redis || @redis_options |
||||
"#<#{self.class} options=#{options.inspect} redis=#{instance.inspect}>" |
||||
end |
||||
|
||||
# Cache Store API implementation. |
||||
# |
||||
# Read multiple values at once. Returns a hash of requested keys -> |
||||
# fetched values. |
||||
def read_multi(*names) |
||||
if mget_capable? |
||||
instrument(:read_multi, names, options) do |payload| |
||||
read_multi_mget(*names).tap do |results| |
||||
payload[:hits] = results.keys |
||||
end |
||||
end |
||||
else |
||||
super |
||||
end |
||||
end |
||||
|
||||
# Cache Store API implementation. |
||||
# |
||||
# Supports Redis KEYS glob patterns: |
||||
# |
||||
# h?llo matches hello, hallo and hxllo |
||||
# h*llo matches hllo and heeeello |
||||
# h[ae]llo matches hello and hallo, but not hillo |
||||
# h[^e]llo matches hallo, hbllo, ... but not hello |
||||
# h[a-b]llo matches hallo and hbllo |
||||
# |
||||
# Use \ to escape special characters if you want to match them verbatim. |
||||
# |
||||
# See https://redis.io/commands/KEYS for more. |
||||
# |
||||
# Failsafe: Raises errors. |
||||
def delete_matched(matcher, options = nil) |
||||
instrument :delete_matched, matcher do |
||||
unless String === matcher |
||||
raise ArgumentError, "Only Redis glob strings are supported: #{matcher.inspect}" |
||||
end |
||||
redis.with do |c| |
||||
pattern = namespace_key(matcher, options) |
||||
cursor = "0" |
||||
# Fetch keys in batches using SCAN to avoid blocking the Redis server. |
||||
nodes = c.respond_to?(:nodes) ? c.nodes : [c] |
||||
|
||||
nodes.each do |node| |
||||
begin |
||||
cursor, keys = node.scan(cursor, match: pattern, count: SCAN_BATCH_SIZE) |
||||
node.del(*keys) unless keys.empty? |
||||
end until cursor == "0" |
||||
end |
||||
end |
||||
end |
||||
end |
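A sketch of the glob restriction; keys and patterns are illustrative.

cache = ActiveSupport::Cache::RedisCacheStore.new(url: "redis://localhost:6379/0")

cache.delete_matched("views/users/*") # SCANs in batches, then DELs the matching keys
cache.delete_matched(/views/)         # raises ArgumentError: only Redis glob strings are supported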
||||
|
||||
# Cache Store API implementation. |
||||
# |
||||
# Increment a cached value. This method uses the Redis incr atomic |
||||
# operator and can only be used on values written with the :raw option. |
||||
# Calling it on a value not stored with :raw will initialize that value |
||||
# to zero. |
||||
# |
||||
# Failsafe: Raises errors. |
||||
def increment(name, amount = 1, options = nil) |
||||
instrument :increment, name, amount: amount do |
||||
failsafe :increment do |
||||
options = merged_options(options) |
||||
key = normalize_key(name, options) |
||||
|
||||
redis.with do |c| |
||||
c.incrby(key, amount).tap do |
||||
write_key_expiry(c, key, options) |
||||
end |
||||
end |
||||
end |
||||
end |
||||
end |
||||
|
||||
# Cache Store API implementation. |
||||
# |
||||
# Decrement a cached value. This method uses the Redis decr atomic |
||||
# operator and can only be used on values written with the :raw option. |
||||
# Calling it on a value not stored with :raw will initialize that value |
||||
# to zero. |
||||
# |
||||
# Failsafe: Raises errors. |
||||
def decrement(name, amount = 1, options = nil) |
||||
instrument :decrement, name, amount: amount do |
||||
failsafe :decrement do |
||||
options = merged_options(options) |
||||
key = normalize_key(name, options) |
||||
|
||||
redis.with do |c| |
||||
c.decrby(key, amount).tap do |
||||
write_key_expiry(c, key, options) |
||||
end |
||||
end |
||||
end |
||||
end |
||||
end |
||||
|
||||
# Cache Store API implementation. |
||||
# |
||||
# Removes expired entries. Handled natively by Redis least-recently-/ |
||||
# least-frequently-used expiry, so manual cleanup is not supported. |
||||
def cleanup(options = nil) |
||||
super |
||||
end |
||||
|
||||
# Clear the entire cache on all Redis servers. Safe to use on |
||||
# shared servers if the cache is namespaced. |
||||
# |
||||
# Failsafe: Raises errors. |
||||
def clear(options = nil) |
||||
failsafe :clear do |
||||
if namespace = merged_options(options)[:namespace] |
||||
delete_matched "*", namespace: namespace |
||||
else |
||||
redis.with { |c| c.flushdb } |
||||
end |
||||
end |
||||
end |
||||
|
||||
def mget_capable? #:nodoc: |
||||
set_redis_capabilities unless defined? @mget_capable |
||||
@mget_capable |
||||
end |
||||
|
||||
def mset_capable? #:nodoc: |
||||
set_redis_capabilities unless defined? @mset_capable |
||||
@mset_capable |
||||
end |
||||
|
||||
private |
||||
def set_redis_capabilities |
||||
case redis |
||||
when Redis::Distributed |
||||
@mget_capable = true |
||||
@mset_capable = false |
||||
else |
||||
@mget_capable = true |
||||
@mset_capable = true |
||||
end |
||||
end |
||||
|
||||
# Store provider interface: |
||||
# Read an entry from the cache. |
||||
def read_entry(key, **options) |
||||
failsafe :read_entry do |
||||
raw = options&.fetch(:raw, false) |
||||
deserialize_entry(redis.with { |c| c.get(key) }, raw: raw) |
||||
end |
||||
end |
||||
|
||||
def read_multi_entries(names, **options) |
||||
if mget_capable? |
||||
read_multi_mget(*names, **options) |
||||
else |
||||
super |
||||
end |
||||
end |
||||
|
||||
def read_multi_mget(*names) |
||||
options = names.extract_options! |
||||
options = merged_options(options) |
||||
return {} if names == [] |
||||
raw = options&.fetch(:raw, false) |
||||
|
||||
keys = names.map { |name| normalize_key(name, options) } |
||||
|
||||
values = failsafe(:read_multi_mget, returning: {}) do |
||||
redis.with { |c| c.mget(*keys) } |
||||
end |
||||
|
||||
names.zip(values).each_with_object({}) do |(name, value), results| |
||||
if value |
||||
entry = deserialize_entry(value, raw: raw) |
||||
unless entry.nil? || entry.expired? || entry.mismatched?(normalize_version(name, options)) |
||||
results[name] = entry.value |
||||
end |
||||
end |
||||
end |
||||
end |
||||
|
||||
# Write an entry to the cache. |
||||
# |
||||
# Requires Redis 2.6.12+ for extended SET options. |
||||
def write_entry(key, entry, unless_exist: false, raw: false, expires_in: nil, race_condition_ttl: nil, **options) |
||||
serialized_entry = serialize_entry(entry, raw: raw) |
||||
|
||||
# If race condition TTL is in use, ensure that cache entries |
||||
# stick around a bit longer after they would have expired |
||||
# so we can purposefully serve stale entries. |
||||
if race_condition_ttl && expires_in && expires_in > 0 && !raw |
||||
expires_in += 5.minutes |
||||
end |
||||
|
||||
failsafe :write_entry, returning: false do |
||||
if unless_exist || expires_in |
||||
modifiers = {} |
||||
modifiers[:nx] = unless_exist |
||||
modifiers[:px] = (1000 * expires_in.to_f).ceil if expires_in |
||||
|
||||
redis.with { |c| c.set key, serialized_entry, **modifiers } |
||||
else |
||||
redis.with { |c| c.set key, serialized_entry } |
||||
end |
||||
end |
||||
end |
||||
|
||||
def write_key_expiry(client, key, options) |
||||
if options[:expires_in] && client.ttl(key).negative? |
||||
client.expire key, options[:expires_in].to_i |
||||
end |
||||
end |
||||
|
||||
# Delete an entry from the cache. |
||||
def delete_entry(key, options) |
||||
failsafe :delete_entry, returning: false do |
||||
redis.with { |c| c.del key } |
||||
end |
||||
end |
||||
|
||||
# Deletes multiple entries in the cache. Returns the number of entries deleted. |
||||
def delete_multi_entries(entries, **_options) |
||||
redis.with { |c| c.del(entries) } |
||||
end |
||||
|
||||
# Nonstandard store provider API to write multiple values at once. |
||||
def write_multi_entries(entries, expires_in: nil, **options) |
||||
if entries.any? |
||||
if mset_capable? && expires_in.nil? |
||||
failsafe :write_multi_entries do |
||||
redis.with { |c| c.mapped_mset(serialize_entries(entries, raw: options[:raw])) } |
||||
end |
||||
else |
||||
super |
||||
end |
||||
end |
||||
end |
||||
|
||||
# Truncate keys that exceed 1kB. |
||||
def normalize_key(key, options) |
||||
truncate_key super&.b |
||||
end |
||||
|
||||
def truncate_key(key) |
||||
if key && key.bytesize > max_key_bytesize |
||||
suffix = ":sha2:#{::Digest::SHA2.hexdigest(key)}" |
||||
truncate_at = max_key_bytesize - suffix.bytesize |
||||
"#{key.byteslice(0, truncate_at)}#{suffix}" |
||||
else |
||||
key |
||||
end |
||||
end |
||||
|
||||
def deserialize_entry(payload, raw:) |
||||
if payload && raw |
||||
Entry.new(payload, compress: false) |
||||
else |
||||
super(payload) |
||||
end |
||||
end |
||||
|
||||
def serialize_entry(entry, raw: false) |
||||
if raw |
||||
entry.value.to_s |
||||
else |
||||
super(entry) |
||||
end |
||||
end |
||||
|
||||
def serialize_entries(entries, raw: false) |
||||
entries.transform_values do |entry| |
||||
serialize_entry entry, raw: raw |
||||
end |
||||
end |
||||
|
||||
def failsafe(method, returning: nil) |
||||
yield |
||||
rescue ::Redis::BaseError => e |
||||
handle_exception exception: e, method: method, returning: returning |
||||
returning |
||||
end |
||||
|
||||
def handle_exception(exception:, method:, returning:) |
||||
if @error_handler |
||||
@error_handler.(method: method, exception: exception, returning: returning) |
||||
end |
||||
rescue => failsafe |
||||
warn "RedisCacheStore ignored exception in handle_exception: #{failsafe.class}: #{failsafe.message}\n #{failsafe.backtrace.join("\n ")}" |
||||
end |
||||
end |
||||
end |
||||
end |
@@ -1,209 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
require "active_support/core_ext/string/inflections" |
||||
require "active_support/per_thread_registry" |
||||
|
||||
module ActiveSupport |
||||
module Cache |
||||
module Strategy |
||||
# Caches that implement LocalCache will be backed by an in-memory cache for the |
||||
# duration of a block. Repeated calls to the cache for the same key will hit the |
||||
# in-memory cache for faster access. |
||||
module LocalCache |
||||
autoload :Middleware, "active_support/cache/strategy/local_cache_middleware" |
||||
|
||||
# Class for storing and registering the local caches. |
||||
class LocalCacheRegistry # :nodoc: |
||||
extend ActiveSupport::PerThreadRegistry |
||||
|
||||
def initialize |
||||
@registry = {} |
||||
end |
||||
|
||||
def cache_for(local_cache_key) |
||||
@registry[local_cache_key] |
||||
end |
||||
|
||||
def set_cache_for(local_cache_key, value) |
||||
@registry[local_cache_key] = value |
||||
end |
||||
|
||||
def self.set_cache_for(l, v); instance.set_cache_for l, v; end |
||||
def self.cache_for(l); instance.cache_for l; end |
||||
end |
||||
|
||||
# Simple memory-backed cache. This cache is not thread-safe and is intended only
# for serving as a temporary memory cache for a single thread.
||||
class LocalStore < Store |
||||
def initialize |
||||
super |
||||
@data = {} |
||||
end |
||||
|
||||
# Don't allow synchronizing since it isn't thread safe. |
||||
def synchronize # :nodoc: |
||||
yield |
||||
end |
||||
|
||||
def clear(options = nil) |
||||
@data.clear |
||||
end |
||||
|
||||
def read_entry(key, **options) |
||||
@data[key] |
||||
end |
||||
|
||||
def read_multi_entries(keys, **options) |
||||
values = {} |
||||
|
||||
keys.each do |name| |
||||
entry = read_entry(name, **options) |
||||
values[name] = entry.value if entry |
||||
end |
||||
|
||||
values |
||||
end |
||||
|
||||
def write_entry(key, entry, **options) |
||||
entry.dup_value! |
||||
@data[key] = entry |
||||
true |
||||
end |
||||
|
||||
def delete_entry(key, **options) |
||||
!!@data.delete(key) |
||||
end |
||||
|
||||
def fetch_entry(key, options = nil) # :nodoc: |
||||
entry = @data.fetch(key) { @data[key] = yield } |
||||
dup_entry = entry.dup |
||||
dup_entry&.dup_value! |
||||
dup_entry |
||||
end |
||||
end |
||||
|
||||
# Use a local cache for the duration of block. |
||||
def with_local_cache |
||||
use_temporary_local_cache(LocalStore.new) { yield } |
||||
end |
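A sketch of the effect: the second read inside the block is answered from the per-thread LocalStore rather than the backing store. The store class and key are illustrative.

cache = ActiveSupport::Cache::MemCacheStore.new("127.0.0.1:11211")

cache.with_local_cache do
  cache.read("config/flags") # first read hits memcached and populates the local cache
  cache.read("config/flags") # second read is served from memory
end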
||||
|
||||
# The Middleware class can be inserted as a Rack middleware to provide a local cache for the
# duration of a request.
||||
def middleware |
||||
@middleware ||= Middleware.new( |
||||
"ActiveSupport::Cache::Strategy::LocalCache", |
||||
local_cache_key) |
||||
end |
||||
|
||||
def clear(**options) # :nodoc: |
||||
return super unless cache = local_cache |
||||
cache.clear(options) |
||||
super |
||||
end |
||||
|
||||
def cleanup(**options) # :nodoc: |
||||
return super unless cache = local_cache |
||||
cache.clear |
||||
super |
||||
end |
||||
|
||||
def delete_matched(matcher, options = nil) # :nodoc: |
||||
return super unless cache = local_cache |
||||
cache.clear |
||||
super |
||||
end |
||||
|
||||
def increment(name, amount = 1, **options) # :nodoc: |
||||
return super unless local_cache |
||||
value = bypass_local_cache { super } |
||||
write_cache_value(name, value, **options) |
||||
value |
||||
end |
||||
|
||||
def decrement(name, amount = 1, **options) # :nodoc: |
||||
return super unless local_cache |
||||
value = bypass_local_cache { super } |
||||
write_cache_value(name, value, **options) |
||||
value |
||||
end |
||||
|
||||
private |
||||
def read_entry(key, **options) |
||||
if cache = local_cache |
||||
hit = true |
||||
value = cache.fetch_entry(key) do |
||||
hit = false |
||||
super |
||||
end |
||||
options[:event][:store] = cache.class.name if hit && options[:event] |
||||
value |
||||
else |
||||
super |
||||
end |
||||
end |
||||
|
||||
def read_multi_entries(keys, **options) |
||||
return super unless local_cache |
||||
|
||||
local_entries = local_cache.read_multi_entries(keys, **options) |
||||
missed_keys = keys - local_entries.keys |
||||
|
||||
if missed_keys.any? |
||||
local_entries.merge!(super(missed_keys, **options)) |
||||
else |
||||
local_entries |
||||
end |
||||
end |
||||
|
||||
def write_entry(key, entry, **options) |
||||
if options[:unless_exist] |
||||
local_cache.delete_entry(key, **options) if local_cache |
||||
else |
||||
local_cache.write_entry(key, entry, **options) if local_cache |
||||
end |
||||
|
||||
super |
||||
end |
||||
|
||||
def delete_entry(key, **options) |
||||
local_cache.delete_entry(key, **options) if local_cache |
||||
super |
||||
end |
||||
|
||||
def write_cache_value(name, value, **options) |
||||
name = normalize_key(name, options) |
||||
cache = local_cache |
||||
cache.mute do |
||||
if value |
||||
cache.write(name, value, options) |
||||
else |
||||
cache.delete(name, **options) |
||||
end |
||||
end |
||||
end |
||||
|
||||
def local_cache_key |
||||
@local_cache_key ||= "#{self.class.name.underscore}_local_cache_#{object_id}".gsub(/[\/-]/, "_").to_sym |
||||
end |
||||
|
||||
def local_cache |
||||
LocalCacheRegistry.cache_for(local_cache_key) |
||||
end |
||||
|
||||
def bypass_local_cache |
||||
use_temporary_local_cache(nil) { yield } |
||||
end |
||||
|
||||
def use_temporary_local_cache(temporary_cache) |
||||
save_cache = LocalCacheRegistry.cache_for(local_cache_key) |
||||
begin |
||||
LocalCacheRegistry.set_cache_for(local_cache_key, temporary_cache) |
||||
yield |
||||
ensure |
||||
LocalCacheRegistry.set_cache_for(local_cache_key, save_cache) |
||||
end |
||||
end |
||||
end |
||||
end |
||||
end |
||||
end |
@@ -1,45 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
require "rack/body_proxy" |
||||
require "rack/utils" |
||||
|
||||
module ActiveSupport |
||||
module Cache |
||||
module Strategy |
||||
module LocalCache |
||||
#-- |
||||
# This class wraps up local storage for middlewares. Only the middleware method should |
||||
# construct them. |
||||
class Middleware # :nodoc: |
||||
attr_reader :name, :local_cache_key |
||||
|
||||
def initialize(name, local_cache_key) |
||||
@name = name |
||||
@local_cache_key = local_cache_key |
||||
@app = nil |
||||
end |
||||
|
||||
def new(app) |
||||
@app = app |
||||
self |
||||
end |
||||
|
||||
def call(env) |
||||
LocalCacheRegistry.set_cache_for(local_cache_key, LocalStore.new) |
||||
response = @app.call(env) |
||||
response[2] = ::Rack::BodyProxy.new(response[2]) do |
||||
LocalCacheRegistry.set_cache_for(local_cache_key, nil) |
||||
end |
||||
cleanup_on_body_close = true |
||||
response |
||||
rescue Rack::Utils::InvalidParameterError |
||||
[400, {}, []] |
||||
ensure |
||||
LocalCacheRegistry.set_cache_for(local_cache_key, nil) unless |
||||
cleanup_on_body_close |
||||
end |
||||
end |
||||
end |
||||
end |
||||
end |
||||
end |
@@ -1,862 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
require "active_support/concern" |
||||
require "active_support/descendants_tracker" |
||||
require "active_support/core_ext/array/extract_options" |
||||
require "active_support/core_ext/class/attribute" |
||||
require "active_support/core_ext/string/filters" |
||||
require "thread" |
||||
|
||||
module ActiveSupport |
||||
# Callbacks are code hooks that are run at key points in an object's life cycle. |
||||
# The typical use case is to have a base class define a set of callbacks |
||||
# relevant to the other functionality it supplies, so that subclasses can |
||||
# install callbacks that enhance or modify the base functionality without |
||||
# needing to override or redefine methods of the base class. |
||||
# |
||||
# Mixing in this module allows you to define the events in the object's |
||||
# life cycle that will support callbacks (via +ClassMethods.define_callbacks+), |
||||
# set the instance methods, procs, or callback objects to be called (via |
||||
# +ClassMethods.set_callback+), and run the installed callbacks at the |
||||
# appropriate times (via +run_callbacks+). |
||||
# |
||||
# By default callbacks are halted by throwing +:abort+. |
||||
# See +ClassMethods.define_callbacks+ for details. |
||||
# |
||||
# Three kinds of callbacks are supported: before callbacks, run before a |
||||
# certain event; after callbacks, run after the event; and around callbacks, |
||||
# blocks that surround the event, triggering it when they yield. Callback code |
||||
# can be contained in instance methods, procs or lambdas, or callback objects |
||||
# that respond to certain predetermined methods. See +ClassMethods.set_callback+ |
||||
# for details. |
||||
# |
||||
# class Record |
||||
# include ActiveSupport::Callbacks |
||||
# define_callbacks :save |
||||
# |
||||
# def save |
||||
# run_callbacks :save do |
||||
# puts "- save" |
||||
# end |
||||
# end |
||||
# end |
||||
# |
||||
# class PersonRecord < Record |
||||
# set_callback :save, :before, :saving_message |
||||
# def saving_message |
||||
# puts "saving..." |
||||
# end |
||||
# |
||||
# set_callback :save, :after do |object| |
||||
# puts "saved" |
||||
# end |
||||
# end |
||||
# |
||||
# person = PersonRecord.new |
||||
# person.save |
||||
# |
||||
# Output: |
||||
# saving... |
||||
# - save |
||||
# saved |
||||
module Callbacks |
||||
extend Concern |
||||
|
||||
included do |
||||
extend ActiveSupport::DescendantsTracker |
||||
class_attribute :__callbacks, instance_writer: false, default: {} |
||||
end |
||||
|
||||
CALLBACK_FILTER_TYPES = [:before, :after, :around] |
||||
|
||||
# Runs the callbacks for the given event. |
||||
# |
||||
# Calls the before and around callbacks in the order they were set, yields |
||||
# the block (if given one), and then runs the after callbacks in reverse |
||||
# order. |
||||
# |
||||
# If the callback chain was halted, returns +false+. Otherwise returns the |
||||
# result of the block, +nil+ if no callbacks have been set, or +true+ |
||||
# if callbacks have been set but no block is given. |
||||
# |
||||
# run_callbacks :save do |
||||
# save |
||||
# end |
||||
# |
||||
#-- |
||||
# |
||||
# As this method is used in many places, and often wraps large portions of |
||||
# user code, it has an additional design goal of minimizing its impact on |
||||
# the visible call stack. An exception from inside a :before or :after |
||||
# callback can be as noisy as it likes -- but when control has passed |
||||
# smoothly through and into the supplied block, we want as little evidence |
||||
# as possible that we were here. |
||||
def run_callbacks(kind) |
||||
callbacks = __callbacks[kind.to_sym] |
||||
|
||||
if callbacks.empty? |
||||
yield if block_given? |
||||
else |
||||
env = Filters::Environment.new(self, false, nil) |
||||
next_sequence = callbacks.compile |
||||
|
||||
# Common case: no 'around' callbacks defined |
||||
if next_sequence.final? |
||||
next_sequence.invoke_before(env) |
||||
env.value = !env.halted && (!block_given? || yield) |
||||
next_sequence.invoke_after(env) |
||||
env.value |
||||
else |
||||
invoke_sequence = Proc.new do |
||||
skipped = nil |
||||
|
||||
while true |
||||
current = next_sequence |
||||
current.invoke_before(env) |
||||
if current.final? |
||||
env.value = !env.halted && (!block_given? || yield) |
||||
elsif current.skip?(env) |
||||
(skipped ||= []) << current |
||||
next_sequence = next_sequence.nested |
||||
next |
||||
else |
||||
next_sequence = next_sequence.nested |
||||
begin |
||||
target, block, method, *arguments = current.expand_call_template(env, invoke_sequence) |
||||
target.send(method, *arguments, &block) |
||||
ensure |
||||
next_sequence = current |
||||
end |
||||
end |
||||
current.invoke_after(env) |
||||
skipped.pop.invoke_after(env) while skipped&.first |
||||
break env.value |
||||
end |
||||
end |
||||
|
||||
invoke_sequence.call |
||||
end |
||||
end |
||||
end |
||||
|
||||
private |
||||
# A hook invoked every time a before callback is halted. |
||||
# This can be overridden in ActiveSupport::Callbacks implementors in order |
||||
# to provide better debugging/logging. |
||||
def halted_callback_hook(filter, name) |
||||
end |
||||
|
||||
module Conditionals # :nodoc: |
||||
class Value |
||||
def initialize(&block) |
||||
@block = block |
||||
end |
||||
def call(target, value); @block.call(value); end |
||||
end |
||||
end |
||||
|
||||
module Filters |
||||
Environment = Struct.new(:target, :halted, :value) |
||||
|
||||
class Before |
||||
def self.build(callback_sequence, user_callback, user_conditions, chain_config, filter, name) |
||||
halted_lambda = chain_config[:terminator] |
||||
|
||||
if user_conditions.any? |
||||
halting_and_conditional(callback_sequence, user_callback, user_conditions, halted_lambda, filter, name) |
||||
else |
||||
halting(callback_sequence, user_callback, halted_lambda, filter, name) |
||||
end |
||||
end |
||||
|
||||
def self.halting_and_conditional(callback_sequence, user_callback, user_conditions, halted_lambda, filter, name) |
||||
callback_sequence.before do |env| |
||||
target = env.target |
||||
value = env.value |
||||
halted = env.halted |
||||
|
||||
if !halted && user_conditions.all? { |c| c.call(target, value) } |
||||
result_lambda = -> { user_callback.call target, value } |
||||
env.halted = halted_lambda.call(target, result_lambda) |
||||
if env.halted |
||||
target.send :halted_callback_hook, filter, name |
||||
end |
||||
end |
||||
|
||||
env |
||||
end |
||||
end |
||||
private_class_method :halting_and_conditional |
||||
|
||||
def self.halting(callback_sequence, user_callback, halted_lambda, filter, name) |
||||
callback_sequence.before do |env| |
||||
target = env.target |
||||
value = env.value |
||||
halted = env.halted |
||||
|
||||
unless halted |
||||
result_lambda = -> { user_callback.call target, value } |
||||
env.halted = halted_lambda.call(target, result_lambda) |
||||
if env.halted |
||||
target.send :halted_callback_hook, filter, name |
||||
end |
||||
end |
||||
|
||||
env |
||||
end |
||||
end |
||||
private_class_method :halting |
||||
end |
||||
|
||||
class After |
||||
def self.build(callback_sequence, user_callback, user_conditions, chain_config) |
||||
if chain_config[:skip_after_callbacks_if_terminated] |
||||
if user_conditions.any? |
||||
halting_and_conditional(callback_sequence, user_callback, user_conditions) |
||||
else |
||||
halting(callback_sequence, user_callback) |
||||
end |
||||
else |
||||
if user_conditions.any? |
||||
conditional callback_sequence, user_callback, user_conditions |
||||
else |
||||
simple callback_sequence, user_callback |
||||
end |
||||
end |
||||
end |
||||
|
||||
def self.halting_and_conditional(callback_sequence, user_callback, user_conditions) |
||||
callback_sequence.after do |env| |
||||
target = env.target |
||||
value = env.value |
||||
halted = env.halted |
||||
|
||||
if !halted && user_conditions.all? { |c| c.call(target, value) } |
||||
user_callback.call target, value |
||||
end |
||||
|
||||
env |
||||
end |
||||
end |
||||
private_class_method :halting_and_conditional |
||||
|
||||
def self.halting(callback_sequence, user_callback) |
||||
callback_sequence.after do |env| |
||||
unless env.halted |
||||
user_callback.call env.target, env.value |
||||
end |
||||
|
||||
env |
||||
end |
||||
end |
||||
private_class_method :halting |
||||
|
||||
def self.conditional(callback_sequence, user_callback, user_conditions) |
||||
callback_sequence.after do |env| |
||||
target = env.target |
||||
value = env.value |
||||
|
||||
if user_conditions.all? { |c| c.call(target, value) } |
||||
user_callback.call target, value |
||||
end |
||||
|
||||
env |
||||
end |
||||
end |
||||
private_class_method :conditional |
||||
|
||||
def self.simple(callback_sequence, user_callback) |
||||
callback_sequence.after do |env| |
||||
user_callback.call env.target, env.value |
||||
|
||||
env |
||||
end |
||||
end |
||||
private_class_method :simple |
||||
end |
||||
end |
||||
|
||||
class Callback #:nodoc:# |
||||
def self.build(chain, filter, kind, options) |
||||
if filter.is_a?(String) |
||||
raise ArgumentError, <<-MSG.squish |
||||
Passing string to define a callback is not supported. See the `.set_callback` |
||||
documentation to see supported values. |
||||
MSG |
||||
end |
||||
|
||||
new chain.name, filter, kind, options, chain.config |
||||
end |
||||
|
||||
attr_accessor :kind, :name |
||||
attr_reader :chain_config |
||||
|
||||
def initialize(name, filter, kind, options, chain_config) |
||||
@chain_config = chain_config |
||||
@name = name |
||||
@kind = kind |
||||
@filter = filter |
||||
@key = compute_identifier filter |
||||
@if = check_conditionals(options[:if]) |
||||
@unless = check_conditionals(options[:unless]) |
||||
end |
||||
|
||||
def filter; @key; end |
||||
def raw_filter; @filter; end |
||||
|
||||
def merge_conditional_options(chain, if_option:, unless_option:) |
||||
options = { |
||||
if: @if.dup, |
||||
unless: @unless.dup |
||||
} |
||||
|
||||
options[:if].concat Array(unless_option) |
||||
options[:unless].concat Array(if_option) |
||||
|
||||
self.class.build chain, @filter, @kind, options |
||||
end |
||||
|
||||
def matches?(_kind, _filter) |
||||
@kind == _kind && filter == _filter |
||||
end |
||||
|
||||
def duplicates?(other) |
||||
case @filter |
||||
when Symbol |
||||
matches?(other.kind, other.filter) |
||||
else |
||||
false |
||||
end |
||||
end |
||||
|
||||
# Wraps code with filter |
||||
def apply(callback_sequence) |
||||
user_conditions = conditions_lambdas |
||||
user_callback = CallTemplate.build(@filter, self) |
||||
|
||||
case kind |
||||
when :before |
||||
Filters::Before.build(callback_sequence, user_callback.make_lambda, user_conditions, chain_config, @filter, name) |
||||
when :after |
||||
Filters::After.build(callback_sequence, user_callback.make_lambda, user_conditions, chain_config) |
||||
when :around |
||||
callback_sequence.around(user_callback, user_conditions) |
||||
end |
||||
end |
||||
|
||||
def current_scopes |
||||
Array(chain_config[:scope]).map { |s| public_send(s) } |
||||
end |
||||
|
||||
private |
||||
EMPTY_ARRAY = [].freeze |
||||
private_constant :EMPTY_ARRAY |
||||
|
||||
def check_conditionals(conditionals) |
||||
return EMPTY_ARRAY if conditionals.blank? |
||||
|
||||
conditionals = Array(conditionals) |
||||
if conditionals.any? { |c| c.is_a?(String) } |
||||
raise ArgumentError, <<-MSG.squish |
||||
Passing string to be evaluated in :if and :unless conditional |
||||
options is not supported. Pass a symbol for an instance method, |
||||
or a lambda, proc or block, instead. |
||||
MSG |
||||
end |
||||
|
||||
conditionals.freeze |
||||
end |
||||
|
||||
def compute_identifier(filter) |
||||
case filter |
||||
when ::Proc |
||||
filter.object_id |
||||
else |
||||
filter |
||||
end |
||||
end |
||||
|
||||
def conditions_lambdas |
||||
@if.map { |c| CallTemplate.build(c, self).make_lambda } + |
||||
@unless.map { |c| CallTemplate.build(c, self).inverted_lambda } |
||||
end |
||||
end |
||||
|
||||
# A future invocation of user-supplied code (either as a callback, |
||||
# or a condition filter). |
||||
class CallTemplate # :nodoc: |
||||
def initialize(target, method, arguments, block) |
||||
@override_target = target |
||||
@method_name = method |
||||
@arguments = arguments |
||||
@override_block = block |
||||
end |
||||
|
||||
# Return the parts needed to make this call, with the given |
||||
# input values. |
||||
# |
||||
# Returns an array of the form: |
||||
# |
||||
# [target, block, method, *arguments] |
||||
# |
||||
# This array can be used as such: |
||||
# |
||||
# target.send(method, *arguments, &block) |
||||
# |
||||
# The actual invocation is left up to the caller to minimize |
||||
# call stack pollution. |
||||
def expand(target, value, block) |
||||
expanded = [@override_target || target, @override_block || block, @method_name] |
||||
|
||||
@arguments.each do |arg| |
||||
case arg |
||||
when :value then expanded << value |
||||
when :target then expanded << target |
||||
when :block then expanded << (block || raise(ArgumentError)) |
||||
end |
||||
end |
||||
|
||||
expanded |
||||
end |
||||
|
||||
# Return a lambda that will make this call when given the input |
||||
# values. |
||||
def make_lambda |
||||
lambda do |target, value, &block| |
||||
target, block, method, *arguments = expand(target, value, block) |
||||
target.send(method, *arguments, &block) |
||||
end |
||||
end |
||||
|
||||
# Return a lambda that will make this call when given the input |
||||
# values, but then return the boolean inverse of that result. |
||||
def inverted_lambda |
||||
lambda do |target, value, &block| |
||||
target, block, method, *arguments = expand(target, value, block) |
||||
! target.send(method, *arguments, &block) |
||||
end |
||||
end |
||||
|
||||
# Filters support: |
||||
# |
||||
# Symbols:: A method to call. |
||||
# Procs:: A proc to call with the object. |
||||
# Objects:: An object with a <tt>before_foo</tt> method on it to call. |
||||
# |
||||
# All of these objects are converted into a CallTemplate and handled |
||||
# the same after this point. |
||||
def self.build(filter, callback) |
||||
case filter |
||||
when Symbol |
||||
new(nil, filter, [], nil) |
||||
when Conditionals::Value |
||||
new(filter, :call, [:target, :value], nil) |
||||
when ::Proc |
||||
if filter.arity > 1 |
||||
new(nil, :instance_exec, [:target, :block], filter) |
||||
elsif filter.arity > 0 |
||||
new(nil, :instance_exec, [:target], filter) |
||||
else |
||||
new(nil, :instance_exec, [], filter) |
||||
end |
||||
else |
||||
method_to_call = callback.current_scopes.join("_") |
||||
|
||||
new(filter, method_to_call, [:target], nil) |
||||
end |
||||
end |
||||
end |
||||
|
||||
# Execute before and after filters in a sequence instead of |
||||
# chaining them with nested lambda calls, see: |
||||
# https://github.com/rails/rails/issues/18011 |
||||
class CallbackSequence # :nodoc: |
||||
def initialize(nested = nil, call_template = nil, user_conditions = nil) |
||||
@nested = nested |
||||
@call_template = call_template |
||||
@user_conditions = user_conditions |
||||
|
||||
@before = [] |
||||
@after = [] |
||||
end |
||||
|
||||
def before(&before) |
||||
@before.unshift(before) |
||||
self |
||||
end |
||||
|
||||
def after(&after) |
||||
@after.push(after) |
||||
self |
||||
end |
||||
|
||||
def around(call_template, user_conditions) |
||||
CallbackSequence.new(self, call_template, user_conditions) |
||||
end |
||||
|
||||
def skip?(arg) |
||||
arg.halted || !@user_conditions.all? { |c| c.call(arg.target, arg.value) } |
||||
end |
||||
|
||||
attr_reader :nested |
||||
|
||||
def final? |
||||
!@call_template |
||||
end |
||||
|
||||
def expand_call_template(arg, block) |
||||
@call_template.expand(arg.target, arg.value, block) |
||||
end |
||||
|
||||
def invoke_before(arg) |
||||
@before.each { |b| b.call(arg) } |
||||
end |
||||
|
||||
def invoke_after(arg) |
||||
@after.each { |a| a.call(arg) } |
||||
end |
||||
end |
||||
|
||||
class CallbackChain #:nodoc:# |
||||
include Enumerable |
||||
|
||||
attr_reader :name, :config |
||||
|
||||
def initialize(name, config) |
||||
@name = name |
||||
@config = { |
||||
scope: [:kind], |
||||
terminator: default_terminator |
||||
}.merge!(config) |
||||
@chain = [] |
||||
@callbacks = nil |
||||
@mutex = Mutex.new |
||||
end |
||||
|
||||
def each(&block); @chain.each(&block); end |
||||
def index(o); @chain.index(o); end |
||||
def empty?; @chain.empty?; end |
||||
|
||||
def insert(index, o) |
||||
@callbacks = nil |
||||
@chain.insert(index, o) |
||||
end |
||||
|
||||
def delete(o) |
||||
@callbacks = nil |
||||
@chain.delete(o) |
||||
end |
||||
|
||||
def clear |
||||
@callbacks = nil |
||||
@chain.clear |
||||
self |
||||
end |
||||
|
||||
def initialize_copy(other) |
||||
@callbacks = nil |
||||
@chain = other.chain.dup |
||||
@mutex = Mutex.new |
||||
end |
||||
|
||||
def compile |
||||
@callbacks || @mutex.synchronize do |
||||
final_sequence = CallbackSequence.new |
||||
@callbacks ||= @chain.reverse.inject(final_sequence) do |callback_sequence, callback| |
||||
callback.apply callback_sequence |
||||
end |
||||
end |
||||
end |
||||
|
||||
def append(*callbacks) |
||||
callbacks.each { |c| append_one(c) } |
||||
end |
||||
|
||||
def prepend(*callbacks) |
||||
callbacks.each { |c| prepend_one(c) } |
||||
end |
||||
|
||||
protected |
||||
attr_reader :chain |
||||
|
||||
private |
||||
def append_one(callback) |
||||
@callbacks = nil |
||||
remove_duplicates(callback) |
||||
@chain.push(callback) |
||||
end |
||||
|
||||
def prepend_one(callback) |
||||
@callbacks = nil |
||||
remove_duplicates(callback) |
||||
@chain.unshift(callback) |
||||
end |
||||
|
||||
def remove_duplicates(callback) |
||||
@callbacks = nil |
||||
@chain.delete_if { |c| callback.duplicates?(c) } |
||||
end |
||||
|
||||
def default_terminator |
||||
Proc.new do |target, result_lambda| |
||||
terminate = true |
||||
catch(:abort) do |
||||
result_lambda.call |
||||
terminate = false |
||||
end |
||||
terminate |
||||
end |
||||
end |
||||
end |
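A minimal usage sketch of how the default terminator defined just above interacts with callbacks (the Order class and :submit event are illustrative, not part of the library): throwing :abort in a before callback halts the chain and makes run_callbacks return false.

    class Order
      include ActiveSupport::Callbacks

      define_callbacks :submit
      set_callback :submit, :before, -> { throw :abort }

      def submit
        run_callbacks :submit do
          puts "submitted"   # never reached: the before callback aborts the chain
        end
      end
    end

    Order.new.submit # => false, nothing is printed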
||||
|
||||
module ClassMethods |
||||
def normalize_callback_params(filters, block) # :nodoc: |
||||
type = CALLBACK_FILTER_TYPES.include?(filters.first) ? filters.shift : :before |
||||
options = filters.extract_options! |
||||
filters.unshift(block) if block |
||||
[type, filters, options.dup] |
||||
end |
||||
|
||||
# This is used internally to append, prepend and skip callbacks to the |
||||
# CallbackChain. |
||||
def __update_callbacks(name) #:nodoc: |
||||
([self] + ActiveSupport::DescendantsTracker.descendants(self)).reverse_each do |target| |
||||
chain = target.get_callbacks name |
||||
yield target, chain.dup |
||||
end |
||||
end |
||||
|
||||
# Install a callback for the given event. |
||||
# |
||||
# set_callback :save, :before, :before_method |
||||
# set_callback :save, :after, :after_method, if: :condition |
||||
# set_callback :save, :around, ->(r, block) { stuff; result = block.call; stuff } |
||||
# |
||||
# The second argument indicates whether the callback is to be run +:before+, |
||||
# +:after+, or +:around+ the event. If omitted, +:before+ is assumed. This |
||||
# means the first example above can also be written as: |
||||
# |
||||
# set_callback :save, :before_method |
||||
# |
||||
# The callback can be specified as a symbol naming an instance method; as a |
||||
# proc, lambda, or block; or as an object that responds to a certain method |
||||
# determined by the <tt>:scope</tt> argument to +define_callbacks+. |
||||
# |
||||
# If a proc, lambda, or block is given, its body is evaluated in the context |
||||
# of the current object. It can also optionally accept the current object as |
||||
# an argument. |
||||
# |
||||
# Before and around callbacks are called in the order that they are set; |
||||
# after callbacks are called in the reverse order. |
||||
# |
||||
# Around callbacks can access the return value from the event, if it |
||||
# wasn't halted, from the +yield+ call. |
||||
# |
||||
# ===== Options |
||||
# |
||||
# * <tt>:if</tt> - A symbol or an array of symbols, each naming an instance |
||||
# method or a proc; the callback will be called only when they all return |
||||
# a true value. |
||||
# |
||||
# If a proc is given, its body is evaluated in the context of the |
||||
# current object. It can also optionally accept the current object as |
||||
# an argument. |
||||
# * <tt>:unless</tt> - A symbol or an array of symbols, each naming an |
||||
# instance method or a proc; the callback will be called only when they |
||||
# all return a false value. |
||||
# |
||||
# If a proc is given, its body is evaluated in the context of the |
||||
# current object. It can also optionally accept the current object as |
||||
# an argument. |
||||
# * <tt>:prepend</tt> - If +true+, the callback will be prepended to the |
||||
# existing chain rather than appended. |
||||
def set_callback(name, *filter_list, &block) |
||||
type, filters, options = normalize_callback_params(filter_list, block) |
||||
|
||||
self_chain = get_callbacks name |
||||
mapped = filters.map do |filter| |
||||
Callback.build(self_chain, filter, type, options) |
||||
end |
||||
|
||||
__update_callbacks(name) do |target, chain| |
||||
options[:prepend] ? chain.prepend(*mapped) : chain.append(*mapped) |
||||
target.set_callbacks name, chain |
||||
end |
||||
end |
||||
|
||||
# Skip a previously set callback. Like +set_callback+, <tt>:if</tt> or |
||||
# <tt>:unless</tt> options may be passed in order to control when the |
||||
# callback is skipped. |
||||
# |
||||
# class Writer < Person |
||||
# skip_callback :validate, :before, :check_membership, if: -> { age > 18 } |
||||
# end |
||||
# |
||||
# An <tt>ArgumentError</tt> will be raised if the callback has not |
||||
# already been set (unless the <tt>:raise</tt> option is set to <tt>false</tt>). |
||||
def skip_callback(name, *filter_list, &block) |
||||
type, filters, options = normalize_callback_params(filter_list, block) |
||||
|
||||
options[:raise] = true unless options.key?(:raise) |
||||
|
||||
__update_callbacks(name) do |target, chain| |
||||
filters.each do |filter| |
||||
callback = chain.find { |c| c.matches?(type, filter) } |
||||
|
||||
if !callback && options[:raise] |
||||
raise ArgumentError, "#{type.to_s.capitalize} #{name} callback #{filter.inspect} has not been defined" |
||||
end |
||||
|
||||
if callback && (options.key?(:if) || options.key?(:unless)) |
||||
new_callback = callback.merge_conditional_options(chain, if_option: options[:if], unless_option: options[:unless]) |
||||
chain.insert(chain.index(callback), new_callback) |
||||
end |
||||
|
||||
chain.delete(callback) |
||||
end |
||||
target.set_callbacks name, chain |
||||
end |
||||
end |
||||
|
||||
# Remove all set callbacks for the given event. |
||||
def reset_callbacks(name) |
||||
callbacks = get_callbacks name |
||||
|
||||
ActiveSupport::DescendantsTracker.descendants(self).each do |target| |
||||
chain = target.get_callbacks(name).dup |
||||
callbacks.each { |c| chain.delete(c) } |
||||
target.set_callbacks name, chain |
||||
end |
||||
|
||||
set_callbacks(name, callbacks.dup.clear) |
||||
end |
||||
|
||||
# Define sets of events in the object life cycle that support callbacks. |
||||
# |
||||
# define_callbacks :validate |
||||
# define_callbacks :initialize, :save, :destroy |
||||
# |
||||
# ===== Options |
||||
# |
||||
# * <tt>:terminator</tt> - Determines when a before filter will halt the |
||||
# callback chain, preventing following before and around callbacks from |
||||
# being called and the event from being triggered. |
||||
# This should be a lambda to be executed. |
||||
# The current object and the result lambda of the callback will be provided |
||||
# to the terminator lambda. |
||||
# |
||||
# define_callbacks :validate, terminator: ->(target, result_lambda) { result_lambda.call == false } |
||||
# |
||||
# In this example, if any before validate callback returns +false+, |
||||
# any successive before and around callback is not executed. |
||||
# |
||||
# The default terminator halts the chain when a callback throws +:abort+. |
||||
# |
||||
# * <tt>:skip_after_callbacks_if_terminated</tt> - Determines if after |
||||
# callbacks should be terminated by the <tt>:terminator</tt> option. By |
||||
# default after callbacks are executed no matter if callback chain was |
||||
# terminated or not. This option has no effect if <tt>:terminator</tt> |
||||
# option is set to +nil+. |
||||
# |
||||
# * <tt>:scope</tt> - Indicates which methods should be executed when an |
||||
# object is used as a callback. |
||||
# |
||||
# class Audit |
||||
# def before(caller) |
||||
# puts 'Audit: before is called' |
||||
# end |
||||
# |
||||
# def before_save(caller) |
||||
# puts 'Audit: before_save is called' |
||||
# end |
||||
# end |
||||
# |
||||
# class Account |
||||
# include ActiveSupport::Callbacks |
||||
# |
||||
# define_callbacks :save |
||||
# set_callback :save, :before, Audit.new |
||||
# |
||||
# def save |
||||
# run_callbacks :save do |
||||
# puts 'save in main' |
||||
# end |
||||
# end |
||||
# end |
||||
# |
||||
# In the above case whenever you save an account the method |
||||
# <tt>Audit#before</tt> will be called. On the other hand |
||||
# |
||||
# define_callbacks :save, scope: [:kind, :name] |
||||
# |
||||
# would trigger <tt>Audit#before_save</tt> instead. That's constructed |
||||
# by calling <tt>#{kind}_#{name}</tt> on the given instance. In this |
||||
# case "kind" is "before" and "name" is "save". In this context +:kind+ |
||||
# and +:name+ have special meanings: +:kind+ refers to the kind of |
||||
# callback (before/after/around) and +:name+ refers to the method on |
||||
# which callbacks are being defined. |
||||
# |
||||
# A declaration like |
||||
# |
||||
# define_callbacks :save, scope: [:name] |
||||
# |
||||
# would call <tt>Audit#save</tt>. |
||||
# |
||||
# ===== Notes |
||||
# |
||||
# +names+ passed to +define_callbacks+ must not end with |
||||
# <tt>!</tt>, <tt>?</tt> or <tt>=</tt>. |
||||
# |
||||
# Calling +define_callbacks+ multiple times with the same +names+ will |
||||
# overwrite previous callbacks registered with +set_callback+. |
||||
def define_callbacks(*names) |
||||
options = names.extract_options! |
||||
|
||||
names.each do |name| |
||||
name = name.to_sym |
||||
|
||||
([self] + ActiveSupport::DescendantsTracker.descendants(self)).each do |target| |
||||
target.set_callbacks name, CallbackChain.new(name, options) |
||||
end |
||||
|
||||
module_eval <<-RUBY, __FILE__, __LINE__ + 1 |
||||
def _run_#{name}_callbacks(&block) |
||||
run_callbacks #{name.inspect}, &block |
||||
end |
||||
|
||||
def self._#{name}_callbacks |
||||
get_callbacks(#{name.inspect}) |
||||
end |
||||
|
||||
def self._#{name}_callbacks=(value) |
||||
set_callbacks(#{name.inspect}, value) |
||||
end |
||||
|
||||
def _#{name}_callbacks |
||||
__callbacks[#{name.inspect}] |
||||
end |
||||
RUBY |
||||
end |
||||
end |
||||
|
||||
protected |
||||
def get_callbacks(name) # :nodoc: |
||||
__callbacks[name.to_sym] |
||||
end |
||||
|
||||
if Module.instance_method(:method_defined?).arity == 1 # Ruby 2.5 and older |
||||
def set_callbacks(name, callbacks) # :nodoc: |
||||
self.__callbacks = __callbacks.merge(name.to_sym => callbacks) |
||||
end |
||||
else # Ruby 2.6 and newer |
||||
def set_callbacks(name, callbacks) # :nodoc: |
||||
unless singleton_class.method_defined?(:__callbacks, false) |
||||
self.__callbacks = __callbacks.dup |
||||
end |
||||
self.__callbacks[name.to_sym] = callbacks |
||||
self.__callbacks |
||||
end |
||||
end |
||||
end |
||||
end |
||||
end |
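A short sketch of the public API documented above (define_callbacks, set_callback, run_callbacks); the Record class, :save event, and method names are illustrative. Before and around callbacks fire in the order they are set, after callbacks in reverse order.

    class Record
      include ActiveSupport::Callbacks

      define_callbacks :save
      set_callback :save, :before, :log_before
      set_callback :save, :around, ->(record, block) { puts "around: start"; block.call; puts "around: end" }
      set_callback :save, :after,  :log_after

      def save
        run_callbacks :save do
          puts "saving"
        end
      end

      private
        def log_before; puts "before"; end
        def log_after;  puts "after";  end
    end

    Record.new.save
    # before
    # around: start
    # saving
    # around: end
    # after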
@@ -1,215 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
module ActiveSupport |
||||
# A typical module looks like this: |
||||
# |
||||
# module M |
||||
# def self.included(base) |
||||
# base.extend ClassMethods |
||||
# base.class_eval do |
||||
# scope :disabled, -> { where(disabled: true) } |
||||
# end |
||||
# end |
||||
# |
||||
# module ClassMethods |
||||
# ... |
||||
# end |
||||
# end |
||||
# |
||||
# By using <tt>ActiveSupport::Concern</tt> the above module could instead be |
||||
# written as: |
||||
# |
||||
# require "active_support/concern" |
||||
# |
||||
# module M |
||||
# extend ActiveSupport::Concern |
||||
# |
||||
# included do |
||||
# scope :disabled, -> { where(disabled: true) } |
||||
# end |
||||
# |
||||
# class_methods do |
||||
# ... |
||||
# end |
||||
# end |
||||
# |
||||
# Moreover, it gracefully handles module dependencies. Given a +Foo+ module |
||||
# and a +Bar+ module which depends on the former, we would typically write the |
||||
# following: |
||||
# |
||||
# module Foo |
||||
# def self.included(base) |
||||
# base.class_eval do |
||||
# def self.method_injected_by_foo |
||||
# ... |
||||
# end |
||||
# end |
||||
# end |
||||
# end |
||||
# |
||||
# module Bar |
||||
# def self.included(base) |
||||
# base.method_injected_by_foo |
||||
# end |
||||
# end |
||||
# |
||||
# class Host |
||||
# include Foo # We need to include this dependency for Bar |
||||
# include Bar # Bar is the module that Host really needs |
||||
# end |
||||
# |
||||
# But why should +Host+ care about +Bar+'s dependencies, namely +Foo+? We |
||||
# could try to hide these from +Host+ directly including +Foo+ in +Bar+: |
||||
# |
||||
# module Bar |
||||
# include Foo |
||||
# def self.included(base) |
||||
# base.method_injected_by_foo |
||||
# end |
||||
# end |
||||
# |
||||
# class Host |
||||
# include Bar |
||||
# end |
||||
# |
||||
# Unfortunately this won't work, since when +Foo+ is included, its <tt>base</tt> |
||||
# is the +Bar+ module, not the +Host+ class. With <tt>ActiveSupport::Concern</tt>, |
||||
# module dependencies are properly resolved: |
||||
# |
||||
# require "active_support/concern" |
||||
# |
||||
# module Foo |
||||
# extend ActiveSupport::Concern |
||||
# included do |
||||
# def self.method_injected_by_foo |
||||
# ... |
||||
# end |
||||
# end |
||||
# end |
||||
# |
||||
# module Bar |
||||
# extend ActiveSupport::Concern |
||||
# include Foo |
||||
# |
||||
# included do |
||||
# self.method_injected_by_foo |
||||
# end |
||||
# end |
||||
# |
||||
# class Host |
||||
# include Bar # It works, now Bar takes care of its dependencies |
||||
# end |
||||
# |
||||
# === Prepending concerns |
||||
# |
||||
# Just like <tt>include</tt>, concerns also support <tt>prepend</tt> with a corresponding |
||||
# <tt>prepended do</tt> callback. <tt>module ClassMethods</tt> or <tt>class_methods do</tt> are |
||||
# prepended as well. |
||||
# |
||||
# <tt>prepend</tt> is also used for any dependencies. |
||||
module Concern |
||||
class MultipleIncludedBlocks < StandardError #:nodoc: |
||||
def initialize |
||||
super "Cannot define multiple 'included' blocks for a Concern" |
||||
end |
||||
end |
||||
|
||||
class MultiplePrependBlocks < StandardError #:nodoc: |
||||
def initialize |
||||
super "Cannot define multiple 'prepended' blocks for a Concern" |
||||
end |
||||
end |
||||
|
||||
def self.extended(base) #:nodoc: |
||||
base.instance_variable_set(:@_dependencies, []) |
||||
end |
||||
|
||||
def append_features(base) #:nodoc: |
||||
if base.instance_variable_defined?(:@_dependencies) |
||||
base.instance_variable_get(:@_dependencies) << self |
||||
false |
||||
else |
||||
return false if base < self |
||||
@_dependencies.each { |dep| base.include(dep) } |
||||
super |
||||
base.extend const_get(:ClassMethods) if const_defined?(:ClassMethods) |
||||
base.class_eval(&@_included_block) if instance_variable_defined?(:@_included_block) |
||||
end |
||||
end |
||||
|
||||
def prepend_features(base) #:nodoc: |
||||
if base.instance_variable_defined?(:@_dependencies) |
||||
base.instance_variable_get(:@_dependencies).unshift self |
||||
false |
||||
else |
||||
return false if base < self |
||||
@_dependencies.each { |dep| base.prepend(dep) } |
||||
super |
||||
base.singleton_class.prepend const_get(:ClassMethods) if const_defined?(:ClassMethods) |
||||
base.class_eval(&@_prepended_block) if instance_variable_defined?(:@_prepended_block) |
||||
end |
||||
end |
||||
|
||||
# Evaluate given block in context of base class, |
||||
# so that you can write class macros here. |
||||
# When you define more than one +included+ block, it raises an exception. |
||||
def included(base = nil, &block) |
||||
if base.nil? |
||||
if instance_variable_defined?(:@_included_block) |
||||
if @_included_block.source_location != block.source_location |
||||
raise MultipleIncludedBlocks |
||||
end |
||||
else |
||||
@_included_block = block |
||||
end |
||||
else |
||||
super |
||||
end |
||||
end |
||||
|
||||
# Evaluate given block in context of base class, |
||||
# so that you can write class macros here. |
||||
# When you define more than one +prepended+ block, it raises an exception. |
||||
def prepended(base = nil, &block) |
||||
if base.nil? |
||||
if instance_variable_defined?(:@_prepended_block) |
||||
if @_prepended_block.source_location != block.source_location |
||||
raise MultiplePrependBlocks |
||||
end |
||||
else |
||||
@_prepended_block = block |
||||
end |
||||
else |
||||
super |
||||
end |
||||
end |
||||
|
||||
# Define class methods from given block. |
||||
# You can define private class methods as well. |
||||
# |
||||
# module Example |
||||
# extend ActiveSupport::Concern |
||||
# |
||||
# class_methods do |
||||
# def foo; puts 'foo'; end |
||||
# |
||||
# private |
||||
# def bar; puts 'bar'; end |
||||
# end |
||||
# end |
||||
# |
||||
# class Buzz |
||||
# include Example |
||||
# end |
||||
# |
||||
# Buzz.foo # => "foo" |
||||
# Buzz.bar # => private method 'bar' called for Buzz:Class(NoMethodError) |
||||
def class_methods(&class_methods_module_definition) |
||||
mod = const_defined?(:ClassMethods, false) ? |
||||
const_get(:ClassMethods) : |
||||
const_set(:ClassMethods, Module.new) |
||||
|
||||
mod.module_eval(&class_methods_module_definition) |
||||
end |
||||
end |
||||
end |
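A minimal sketch of a concern using the hooks defined above (Taggable, Post, and the tag methods are illustrative names): the +included+ block is class_eval'd on the including class, and +class_methods+ defines methods on its singleton class.

    require "active_support/concern"

    module Taggable
      extend ActiveSupport::Concern

      included do
        # evaluated in the context of the including class
        @default_tag = "general"
      end

      class_methods do
        def default_tag
          @default_tag
        end
      end

      def tag
        self.class.default_tag
      end
    end

    class Post
      include Taggable
    end

    Post.default_tag # => "general"
    Post.new.tag     # => "general"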
@@ -1,35 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
require "monitor" |
||||
|
||||
module ActiveSupport |
||||
module Concurrency |
||||
# A monitor that will permit dependency loading while blocked waiting for |
||||
# the lock. |
||||
class LoadInterlockAwareMonitor < Monitor |
||||
EXCEPTION_NEVER = { Exception => :never }.freeze |
||||
EXCEPTION_IMMEDIATE = { Exception => :immediate }.freeze |
||||
private_constant :EXCEPTION_NEVER, :EXCEPTION_IMMEDIATE |
||||
|
||||
# Enters an exclusive section, but allows dependency loading while blocked |
||||
def mon_enter |
||||
mon_try_enter || |
||||
ActiveSupport::Dependencies.interlock.permit_concurrent_loads { super } |
||||
end |
||||
|
||||
def synchronize |
||||
Thread.handle_interrupt(EXCEPTION_NEVER) do |
||||
mon_enter |
||||
|
||||
begin |
||||
Thread.handle_interrupt(EXCEPTION_IMMEDIATE) do |
||||
yield |
||||
end |
||||
ensure |
||||
mon_exit |
||||
end |
||||
end |
||||
end |
||||
end |
||||
end |
||||
end |
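A brief usage sketch, assuming it runs inside a Rails process where ActiveSupport::Dependencies and its interlock are loaded:

    monitor = ActiveSupport::Concurrency::LoadInterlockAwareMonitor.new

    monitor.synchronize do
      # Exclusive section. A thread blocked waiting for this monitor still
      # permits dependency loading to proceed, avoiding deadlocks against
      # the Rails load interlock.
    end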
@@ -1,226 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
require "thread" |
||||
require "monitor" |
||||
|
||||
module ActiveSupport |
||||
module Concurrency |
||||
# A share/exclusive lock, otherwise known as a read/write lock. |
||||
# |
||||
# https://en.wikipedia.org/wiki/Readers%E2%80%93writer_lock |
||||
class ShareLock |
||||
include MonitorMixin |
||||
|
||||
# We track Thread objects, instead of just using counters, because |
||||
# we need exclusive locks to be reentrant, and we need to be able |
||||
# to upgrade share locks to exclusive. |
||||
|
||||
def raw_state # :nodoc: |
||||
synchronize do |
||||
threads = @sleeping.keys | @sharing.keys | @waiting.keys |
||||
threads |= [@exclusive_thread] if @exclusive_thread |
||||
|
||||
data = {} |
||||
|
||||
threads.each do |thread| |
||||
purpose, compatible = @waiting[thread] |
||||
|
||||
data[thread] = { |
||||
thread: thread, |
||||
sharing: @sharing[thread], |
||||
exclusive: @exclusive_thread == thread, |
||||
purpose: purpose, |
||||
compatible: compatible, |
||||
waiting: !!@waiting[thread], |
||||
sleeper: @sleeping[thread], |
||||
} |
||||
end |
||||
|
||||
# NB: Yields while holding our *internal* synchronize lock, |
||||
# which is supposed to be used only for a few instructions at |
||||
# a time. This allows the caller to inspect additional state |
||||
# without things changing out from underneath, but would have |
||||
# disastrous effects upon normal operation. Fortunately, this |
||||
# method is only intended to be called when things have |
||||
# already gone wrong. |
||||
yield data |
||||
end |
||||
end |
||||
|
||||
def initialize |
||||
super() |
||||
|
||||
@cv = new_cond |
||||
|
||||
@sharing = Hash.new(0) |
||||
@waiting = {} |
||||
@sleeping = {} |
||||
@exclusive_thread = nil |
||||
@exclusive_depth = 0 |
||||
end |
||||
|
||||
# Returns false if +no_wait+ is set and the lock is not |
||||
# immediately available. Otherwise, returns true after the lock |
||||
# has been acquired. |
||||
# |
||||
# +purpose+ and +compatible+ work together; while this thread is |
||||
# waiting for the exclusive lock, it will yield its share (if any) |
||||
# to any other attempt whose +purpose+ appears in this attempt's |
||||
# +compatible+ list. This allows a "loose" upgrade, which, being |
||||
# less strict, prevents some classes of deadlocks. |
||||
# |
||||
# For many resources, loose upgrades are sufficient: if a thread |
||||
# is awaiting a lock, it is not running any other code. With |
||||
# +purpose+ matching, it is possible to yield only to other |
||||
# threads whose activity will not interfere. |
||||
def start_exclusive(purpose: nil, compatible: [], no_wait: false) |
||||
synchronize do |
||||
unless @exclusive_thread == Thread.current |
||||
if busy_for_exclusive?(purpose) |
||||
return false if no_wait |
||||
|
||||
yield_shares(purpose: purpose, compatible: compatible, block_share: true) do |
||||
wait_for(:start_exclusive) { busy_for_exclusive?(purpose) } |
||||
end |
||||
end |
||||
@exclusive_thread = Thread.current |
||||
end |
||||
@exclusive_depth += 1 |
||||
|
||||
true |
||||
end |
||||
end |
||||
|
||||
# Relinquish the exclusive lock. Must only be called by the thread |
||||
# that called start_exclusive (and currently holds the lock). |
||||
def stop_exclusive(compatible: []) |
||||
synchronize do |
||||
raise "invalid unlock" if @exclusive_thread != Thread.current |
||||
|
||||
@exclusive_depth -= 1 |
||||
if @exclusive_depth == 0 |
||||
@exclusive_thread = nil |
||||
|
||||
if eligible_waiters?(compatible) |
||||
yield_shares(compatible: compatible, block_share: true) do |
||||
wait_for(:stop_exclusive) { @exclusive_thread || eligible_waiters?(compatible) } |
||||
end |
||||
end |
||||
@cv.broadcast |
||||
end |
||||
end |
||||
end |
||||
|
||||
def start_sharing |
||||
synchronize do |
||||
if @sharing[Thread.current] > 0 || @exclusive_thread == Thread.current |
||||
# We already hold a lock; nothing to wait for |
||||
elsif @waiting[Thread.current] |
||||
# We're nested inside a +yield_shares+ call: we'll resume as |
||||
# soon as there isn't an exclusive lock in our way |
||||
wait_for(:start_sharing) { @exclusive_thread } |
||||
else |
||||
# This is an initial / outermost share call: any outstanding |
||||
# requests for an exclusive lock get to go first |
||||
wait_for(:start_sharing) { busy_for_sharing?(false) } |
||||
end |
||||
@sharing[Thread.current] += 1 |
||||
end |
||||
end |
||||
|
||||
def stop_sharing |
||||
synchronize do |
||||
if @sharing[Thread.current] > 1 |
||||
@sharing[Thread.current] -= 1 |
||||
else |
||||
@sharing.delete Thread.current |
||||
@cv.broadcast |
||||
end |
||||
end |
||||
end |
||||
|
||||
# Execute the supplied block while holding the Exclusive lock. If |
||||
# +no_wait+ is set and the lock is not immediately available, |
||||
# returns +nil+ without yielding. Otherwise, returns the result of |
||||
# the block. |
||||
# |
||||
# See +start_exclusive+ for other options. |
||||
def exclusive(purpose: nil, compatible: [], after_compatible: [], no_wait: false) |
||||
if start_exclusive(purpose: purpose, compatible: compatible, no_wait: no_wait) |
||||
begin |
||||
yield |
||||
ensure |
||||
stop_exclusive(compatible: after_compatible) |
||||
end |
||||
end |
||||
end |
||||
|
||||
# Execute the supplied block while holding the Share lock. |
||||
def sharing |
||||
start_sharing |
||||
begin |
||||
yield |
||||
ensure |
||||
stop_sharing |
||||
end |
||||
end |
||||
|
||||
# Temporarily give up all held Share locks while executing the |
||||
# supplied block, allowing any +compatible+ exclusive lock request |
||||
# to proceed. |
||||
def yield_shares(purpose: nil, compatible: [], block_share: false) |
||||
loose_shares = previous_wait = nil |
||||
synchronize do |
||||
if loose_shares = @sharing.delete(Thread.current) |
||||
if previous_wait = @waiting[Thread.current] |
||||
purpose = nil unless purpose == previous_wait[0] |
||||
compatible &= previous_wait[1] |
||||
end |
||||
compatible |= [false] unless block_share |
||||
@waiting[Thread.current] = [purpose, compatible] |
||||
end |
||||
|
||||
@cv.broadcast |
||||
end |
||||
|
||||
begin |
||||
yield |
||||
ensure |
||||
synchronize do |
||||
wait_for(:yield_shares) { @exclusive_thread && @exclusive_thread != Thread.current } |
||||
|
||||
if previous_wait |
||||
@waiting[Thread.current] = previous_wait |
||||
else |
||||
@waiting.delete Thread.current |
||||
end |
||||
@sharing[Thread.current] = loose_shares if loose_shares |
||||
end |
||||
end |
||||
end |
||||
|
||||
private |
||||
# Must be called within synchronize |
||||
def busy_for_exclusive?(purpose) |
||||
busy_for_sharing?(purpose) || |
||||
@sharing.size > (@sharing[Thread.current] > 0 ? 1 : 0) |
||||
end |
||||
|
||||
def busy_for_sharing?(purpose) |
||||
(@exclusive_thread && @exclusive_thread != Thread.current) || |
||||
@waiting.any? { |t, (_, c)| t != Thread.current && !c.include?(purpose) } |
||||
end |
||||
|
||||
def eligible_waiters?(compatible) |
||||
@waiting.any? { |t, (p, _)| compatible.include?(p) && @waiting.all? { |t2, (_, c2)| t == t2 || c2.include?(p) } } |
||||
end |
||||
|
||||
def wait_for(method) |
||||
@sleeping[Thread.current] = method |
||||
@cv.wait_while { yield } |
||||
ensure |
||||
@sleeping.delete Thread.current |
||||
end |
||||
end |
||||
end |
||||
end |
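A small sketch of the share/exclusive API above (thread bodies and the :unload/:load purposes are illustrative): any number of threads may share the lock at once, while +exclusive+ waits until all shares are released or yielded.

    lock = ActiveSupport::Concurrency::ShareLock.new

    readers = 2.times.map do
      Thread.new do
        lock.sharing do
          # Multiple threads may hold the share lock at the same time.
          sleep 0.05
        end
      end
    end

    lock.exclusive(purpose: :unload, compatible: [:load]) do
      # Runs once all share locks have been released (or yielded by waiters
      # whose own pending purpose is listed as compatible).
    end

    readers.each(&:join)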
@@ -1,146 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
require "active_support/concern" |
||||
require "active_support/ordered_options" |
||||
|
||||
module ActiveSupport |
||||
# Configurable provides a <tt>config</tt> method to store and retrieve |
||||
# configuration options as an <tt>OrderedOptions</tt>. |
||||
module Configurable |
||||
extend ActiveSupport::Concern |
||||
|
||||
class Configuration < ActiveSupport::InheritableOptions |
||||
def compile_methods! |
||||
self.class.compile_methods!(keys) |
||||
end |
||||
|
||||
# Compiles reader methods so we don't have to go through method_missing. |
||||
def self.compile_methods!(keys) |
||||
keys.reject { |m| method_defined?(m) }.each do |key| |
||||
class_eval <<-RUBY, __FILE__, __LINE__ + 1 |
||||
def #{key}; _get(#{key.inspect}); end |
||||
RUBY |
||||
end |
||||
end |
||||
end |
||||
|
||||
module ClassMethods |
||||
def config |
||||
@_config ||= if respond_to?(:superclass) && superclass.respond_to?(:config) |
||||
superclass.config.inheritable_copy |
||||
else |
||||
# create a new "anonymous" class that will host the compiled reader methods |
||||
Class.new(Configuration).new |
||||
end |
||||
end |
||||
|
||||
def configure |
||||
yield config |
||||
end |
||||
|
||||
# Allows you to add a shortcut so that you don't have to refer to the attribute |
||||
# through config. Also look at the example for config to contrast. |
||||
# |
||||
# Defines both class and instance config accessors. |
||||
# |
||||
# class User |
||||
# include ActiveSupport::Configurable |
||||
# config_accessor :allowed_access |
||||
# end |
||||
# |
||||
# User.allowed_access # => nil |
||||
# User.allowed_access = false |
||||
# User.allowed_access # => false |
||||
# |
||||
# user = User.new |
||||
# user.allowed_access # => false |
||||
# user.allowed_access = true |
||||
# user.allowed_access # => true |
||||
# |
||||
# User.allowed_access # => false |
||||
# |
||||
# The attribute name must be a valid method name in Ruby. |
||||
# |
||||
# class User |
||||
# include ActiveSupport::Configurable |
||||
# config_accessor :"1_Badname" |
||||
# end |
||||
# # => NameError: invalid config attribute name |
||||
# |
||||
# To omit the instance writer method, pass <tt>instance_writer: false</tt>. |
||||
# To omit the instance reader method, pass <tt>instance_reader: false</tt>. |
||||
# |
||||
# class User |
||||
# include ActiveSupport::Configurable |
||||
# config_accessor :allowed_access, instance_reader: false, instance_writer: false |
||||
# end |
||||
# |
||||
# User.allowed_access = false |
||||
# User.allowed_access # => false |
||||
# |
||||
# User.new.allowed_access = true # => NoMethodError |
||||
# User.new.allowed_access # => NoMethodError |
||||
# |
||||
# Or pass <tt>instance_accessor: false</tt>, to omit both instance methods. |
||||
# |
||||
# class User |
||||
# include ActiveSupport::Configurable |
||||
# config_accessor :allowed_access, instance_accessor: false |
||||
# end |
||||
# |
||||
# User.allowed_access = false |
||||
# User.allowed_access # => false |
||||
# |
||||
# User.new.allowed_access = true # => NoMethodError |
||||
# User.new.allowed_access # => NoMethodError |
||||
# |
||||
# Also you can pass a block to set up the attribute with a default value. |
||||
# |
||||
# class User |
||||
# include ActiveSupport::Configurable |
||||
# config_accessor :hair_colors do |
||||
# [:brown, :black, :blonde, :red] |
||||
# end |
||||
# end |
||||
# |
||||
# User.hair_colors # => [:brown, :black, :blonde, :red] |
||||
def config_accessor(*names, instance_reader: true, instance_writer: true, instance_accessor: true) # :doc: |
||||
names.each do |name| |
||||
raise NameError.new("invalid config attribute name") unless /\A[_A-Za-z]\w*\z/.match?(name) |
||||
|
||||
reader, reader_line = "def #{name}; config.#{name}; end", __LINE__ |
||||
writer, writer_line = "def #{name}=(value); config.#{name} = value; end", __LINE__ |
||||
|
||||
singleton_class.class_eval reader, __FILE__, reader_line |
||||
singleton_class.class_eval writer, __FILE__, writer_line |
||||
|
||||
if instance_accessor |
||||
class_eval reader, __FILE__, reader_line if instance_reader |
||||
class_eval writer, __FILE__, writer_line if instance_writer |
||||
end |
||||
send("#{name}=", yield) if block_given? |
||||
end |
||||
end |
||||
private :config_accessor |
||||
end |
||||
|
||||
# Reads and writes attributes from a configuration <tt>OrderedOptions</tt>. |
||||
# |
||||
# require "active_support/configurable" |
||||
# |
||||
# class User |
||||
# include ActiveSupport::Configurable |
||||
# end |
||||
# |
||||
# user = User.new |
||||
# |
||||
# user.config.allowed_access = true |
||||
# user.config.level = 1 |
||||
# |
||||
# user.config.allowed_access # => true |
||||
# user.config.level # => 1 |
||||
def config |
||||
@_config ||= self.class.config.inheritable_copy |
||||
end |
||||
end |
||||
end |
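A compact sketch of +config_accessor+ with a default block, per the documentation above (the Shed class and paint_color attribute are illustrative): instance writes go to the instance's inheritable config copy and leave the class-level value untouched.

    class Shed
      include ActiveSupport::Configurable

      config_accessor :paint_color do
        "red"
      end
    end

    Shed.paint_color   # => "red"
    shed = Shed.new
    shed.paint_color = "blue"
    shed.paint_color   # => "blue"
    Shed.paint_color   # => "red"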
@@ -1,51 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
module ActiveSupport |
||||
# Reads a YAML configuration file, evaluating any ERB, then |
||||
# parsing the resulting YAML. |
||||
# |
||||
# Warns about characters that can confuse YAML, like invisible |
||||
# non-breaking spaces. |
||||
class ConfigurationFile # :nodoc: |
||||
class FormatError < StandardError; end |
||||
|
||||
def initialize(content_path) |
||||
@content_path = content_path.to_s |
||||
@content = read content_path |
||||
end |
||||
|
||||
def self.parse(content_path, **options) |
||||
new(content_path).parse(**options) |
||||
end |
||||
|
||||
def parse(context: nil, **options) |
||||
source = render(context) |
||||
if YAML.respond_to?(:unsafe_load) |
||||
YAML.unsafe_load(source, **options) || {} |
||||
else |
||||
YAML.load(source, **options) || {} |
||||
end |
||||
rescue Psych::SyntaxError => error |
||||
raise "YAML syntax error occurred while parsing #{@content_path}. " \ |
||||
"Please note that YAML must be consistently indented using spaces. Tabs are not allowed. " \ |
||||
"Error: #{error.message}" |
||||
end |
||||
|
||||
private |
||||
def read(content_path) |
||||
require "yaml" |
||||
require "erb" |
||||
|
||||
File.read(content_path).tap do |content| |
||||
if content.include?("\u00A0") |
||||
warn "File contains invisible non-breaking spaces, you may want to remove those" |
||||
end |
||||
end |
||||
end |
||||
|
||||
def render(context) |
||||
erb = ERB.new(@content).tap { |e| e.filename = @content_path } |
||||
context ? erb.result(context) : erb.result |
||||
end |
||||
end |
||||
end |
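A brief usage sketch (the settings.yml path and its contents are hypothetical): ERB is evaluated first, then the result is parsed as YAML.

    # config/settings.yml (hypothetical contents):
    #   production:
    #     host: example.com
    #     pool: <%= ENV.fetch("DB_POOL", 5) %>

    settings = ActiveSupport::ConfigurationFile.parse("config/settings.yml")
    settings.dig("production", "pool") # => 5 when DB_POOL is unset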
@@ -1,5 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
Dir.glob(File.expand_path("core_ext/*.rb", __dir__)).sort.each do |path| |
||||
require path |
||||
end |
@@ -1,9 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
require "active_support/core_ext/array/wrap" |
||||
require "active_support/core_ext/array/access" |
||||
require "active_support/core_ext/array/conversions" |
||||
require "active_support/core_ext/array/extract" |
||||
require "active_support/core_ext/array/extract_options" |
||||
require "active_support/core_ext/array/grouping" |
||||
require "active_support/core_ext/array/inquiry" |
@@ -1,104 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
class Array |
||||
# Returns the tail of the array from +position+. |
||||
# |
||||
# %w( a b c d ).from(0) # => ["a", "b", "c", "d"] |
||||
# %w( a b c d ).from(2) # => ["c", "d"] |
||||
# %w( a b c d ).from(10) # => [] |
||||
# %w().from(0) # => [] |
||||
# %w( a b c d ).from(-2) # => ["c", "d"] |
||||
# %w( a b c ).from(-10) # => [] |
||||
def from(position) |
||||
self[position, length] || [] |
||||
end |
||||
|
||||
# Returns the beginning of the array up to +position+. |
||||
# |
||||
# %w( a b c d ).to(0) # => ["a"] |
||||
# %w( a b c d ).to(2) # => ["a", "b", "c"] |
||||
# %w( a b c d ).to(10) # => ["a", "b", "c", "d"] |
||||
# %w().to(0) # => [] |
||||
# %w( a b c d ).to(-2) # => ["a", "b", "c"] |
||||
# %w( a b c ).to(-10) # => [] |
||||
def to(position) |
||||
if position >= 0 |
||||
take position + 1 |
||||
else |
||||
self[0..position] |
||||
end |
||||
end |
||||
|
||||
# Returns a new array that includes the passed elements. |
||||
# |
||||
# [ 1, 2, 3 ].including(4, 5) # => [ 1, 2, 3, 4, 5 ] |
||||
# [ [ 0, 1 ] ].including([ [ 1, 0 ] ]) # => [ [ 0, 1 ], [ 1, 0 ] ] |
||||
def including(*elements) |
||||
self + elements.flatten(1) |
||||
end |
||||
|
||||
# Returns a copy of the Array excluding the specified elements. |
||||
# |
||||
# ["David", "Rafael", "Aaron", "Todd"].excluding("Aaron", "Todd") # => ["David", "Rafael"] |
||||
# [ [ 0, 1 ], [ 1, 0 ] ].excluding([ [ 1, 0 ] ]) # => [ [ 0, 1 ] ] |
||||
# |
||||
# Note: This is an optimization of <tt>Enumerable#excluding</tt> that uses <tt>Array#-</tt> |
||||
# instead of <tt>Array#reject</tt> for performance reasons. |
||||
def excluding(*elements) |
||||
self - elements.flatten(1) |
||||
end |
||||
|
||||
# Alias for #excluding. |
||||
def without(*elements) |
||||
excluding(*elements) |
||||
end |
||||
|
||||
# Equal to <tt>self[1]</tt>. |
||||
# |
||||
# %w( a b c d e ).second # => "b" |
||||
def second |
||||
self[1] |
||||
end |
||||
|
||||
# Equal to <tt>self[2]</tt>. |
||||
# |
||||
# %w( a b c d e ).third # => "c" |
||||
def third |
||||
self[2] |
||||
end |
||||
|
||||
# Equal to <tt>self[3]</tt>. |
||||
# |
||||
# %w( a b c d e ).fourth # => "d" |
||||
def fourth |
||||
self[3] |
||||
end |
||||
|
||||
# Equal to <tt>self[4]</tt>. |
||||
# |
||||
# %w( a b c d e ).fifth # => "e" |
||||
def fifth |
||||
self[4] |
||||
end |
||||
|
||||
# Equal to <tt>self[41]</tt>. Also known as accessing "the reddit". |
||||
# |
||||
# (1..42).to_a.forty_two # => 42 |
||||
def forty_two |
||||
self[41] |
||||
end |
||||
|
||||
# Equal to <tt>self[-3]</tt>. |
||||
# |
||||
# %w( a b c d e ).third_to_last # => "c" |
||||
def third_to_last |
||||
self[-3] |
||||
end |
||||
|
||||
# Equal to <tt>self[-2]</tt>. |
||||
# |
||||
# %w( a b c d e ).second_to_last # => "d" |
||||
def second_to_last |
||||
self[-2] |
||||
end |
||||
end |
@@ -1,213 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
require "active_support/xml_mini" |
||||
require "active_support/core_ext/hash/keys" |
||||
require "active_support/core_ext/string/inflections" |
||||
require "active_support/core_ext/object/to_param" |
||||
require "active_support/core_ext/object/to_query" |
||||
|
||||
class Array |
||||
# Converts the array to a comma-separated sentence where the last element is |
||||
# joined by the connector word. |
||||
# |
||||
# You can pass the following options to change the default behavior. If you |
||||
# pass an option key that doesn't exist in the list below, it will raise an |
||||
# <tt>ArgumentError</tt>. |
||||
# |
||||
# ==== Options |
||||
# |
||||
# * <tt>:words_connector</tt> - The sign or word used to join the elements |
||||
# in arrays with two or more elements (default: ", "). |
||||
# * <tt>:two_words_connector</tt> - The sign or word used to join the elements |
||||
# in arrays with two elements (default: " and "). |
||||
# * <tt>:last_word_connector</tt> - The sign or word used to join the last element |
||||
# in arrays with three or more elements (default: ", and "). |
||||
# * <tt>:locale</tt> - If +i18n+ is available, you can set a locale and use |
||||
# the connector options defined on the 'support.array' namespace in the |
||||
# corresponding dictionary file. |
||||
# |
||||
# ==== Examples |
||||
# |
||||
# [].to_sentence # => "" |
||||
# ['one'].to_sentence # => "one" |
||||
# ['one', 'two'].to_sentence # => "one and two" |
||||
# ['one', 'two', 'three'].to_sentence # => "one, two, and three" |
||||
# |
||||
# ['one', 'two'].to_sentence(passing: 'invalid option') |
||||
# # => ArgumentError: Unknown key: :passing. Valid keys are: :words_connector, :two_words_connector, :last_word_connector, :locale |
||||
# |
||||
# ['one', 'two'].to_sentence(two_words_connector: '-') |
||||
# # => "one-two" |
||||
# |
||||
# ['one', 'two', 'three'].to_sentence(words_connector: ' or ', last_word_connector: ' or at least ') |
||||
# # => "one or two or at least three" |
||||
# |
||||
# Using <tt>:locale</tt> option: |
||||
# |
||||
# # Given this locale dictionary: |
||||
# # |
||||
# # es: |
||||
# # support: |
||||
# # array: |
||||
# # words_connector: " o " |
||||
# # two_words_connector: " y " |
||||
# # last_word_connector: " o al menos " |
||||
# |
||||
# ['uno', 'dos'].to_sentence(locale: :es) |
||||
# # => "uno y dos" |
||||
# |
||||
# ['uno', 'dos', 'tres'].to_sentence(locale: :es) |
||||
# # => "uno o dos o al menos tres" |
||||
def to_sentence(options = {}) |
||||
options.assert_valid_keys(:words_connector, :two_words_connector, :last_word_connector, :locale) |
||||
|
||||
default_connectors = { |
||||
words_connector: ", ", |
||||
two_words_connector: " and ", |
||||
last_word_connector: ", and " |
||||
} |
||||
if defined?(I18n) |
||||
i18n_connectors = I18n.translate(:'support.array', locale: options[:locale], default: {}) |
||||
default_connectors.merge!(i18n_connectors) |
||||
end |
||||
options = default_connectors.merge!(options) |
||||
|
||||
case length |
||||
when 0 |
||||
+"" |
||||
when 1 |
||||
+"#{self[0]}" |
||||
when 2 |
||||
+"#{self[0]}#{options[:two_words_connector]}#{self[1]}" |
||||
else |
||||
+"#{self[0...-1].join(options[:words_connector])}#{options[:last_word_connector]}#{self[-1]}" |
||||
end |
||||
end |
||||
|
||||
# Extends <tt>Array#to_s</tt> to convert a collection of elements into a |
||||
# comma-separated id list if the <tt>:db</tt> argument is given as the format. |
||||
# |
||||
# Blog.all.to_formatted_s(:db) # => "1,2,3" |
||||
# Blog.none.to_formatted_s(:db) # => "null" |
||||
# [1,2].to_formatted_s # => "[1, 2]" |
||||
def to_formatted_s(format = :default) |
||||
case format |
||||
when :db |
||||
if empty? |
||||
"null" |
||||
else |
||||
collect(&:id).join(",") |
||||
end |
||||
else |
||||
to_default_s |
||||
end |
||||
end |
||||
alias_method :to_default_s, :to_s |
||||
alias_method :to_s, :to_formatted_s |
||||
|
||||
# Returns a string that represents the array in XML by invoking +to_xml+ |
||||
# on each element. Active Record collections delegate their representation |
||||
# in XML to this method. |
||||
# |
||||
# All elements are expected to respond to +to_xml+; if any of them does |
||||
# not, an exception is raised. |
||||
# |
||||
# The root node reflects the class name of the first element in plural |
||||
# if all elements belong to the same type and that's not Hash: |
||||
# |
||||
# customer.projects.to_xml |
||||
# |
||||
# <?xml version="1.0" encoding="UTF-8"?> |
||||
# <projects type="array"> |
||||
# <project> |
||||
# <amount type="decimal">20000.0</amount> |
||||
# <customer-id type="integer">1567</customer-id> |
||||
# <deal-date type="date">2008-04-09</deal-date> |
||||
# ... |
||||
# </project> |
||||
# <project> |
||||
# <amount type="decimal">57230.0</amount> |
||||
# <customer-id type="integer">1567</customer-id> |
||||
# <deal-date type="date">2008-04-15</deal-date> |
||||
# ... |
||||
# </project> |
||||
# </projects> |
||||
# |
||||
# Otherwise the root element is "objects": |
||||
# |
||||
# [{ foo: 1, bar: 2}, { baz: 3}].to_xml |
||||
# |
||||
# <?xml version="1.0" encoding="UTF-8"?> |
||||
# <objects type="array"> |
||||
# <object> |
||||
# <bar type="integer">2</bar> |
||||
# <foo type="integer">1</foo> |
||||
# </object> |
||||
# <object> |
||||
# <baz type="integer">3</baz> |
||||
# </object> |
||||
# </objects> |
||||
# |
||||
# If the collection is empty the root element is "nil-classes" by default: |
||||
# |
||||
# [].to_xml |
||||
# |
||||
# <?xml version="1.0" encoding="UTF-8"?> |
||||
# <nil-classes type="array"/> |
||||
# |
||||
# To ensure a meaningful root element use the <tt>:root</tt> option: |
||||
# |
||||
# customer_with_no_projects.projects.to_xml(root: 'projects') |
||||
# |
||||
# <?xml version="1.0" encoding="UTF-8"?> |
||||
# <projects type="array"/> |
||||
# |
||||
# By default, the name of the node for the children of root is <tt>root.singularize</tt>. |
||||
# You can change it with the <tt>:children</tt> option. |
||||
# |
||||
# The +options+ hash is passed downwards: |
||||
# |
||||
# Message.all.to_xml(skip_types: true) |
||||
# |
||||
# <?xml version="1.0" encoding="UTF-8"?> |
||||
# <messages> |
||||
# <message> |
||||
# <created-at>2008-03-07T09:58:18+01:00</created-at> |
||||
# <id>1</id> |
||||
# <name>1</name> |
||||
# <updated-at>2008-03-07T09:58:18+01:00</updated-at> |
||||
# <user-id>1</user-id> |
||||
# </message> |
||||
# </messages> |
||||
# |
||||
def to_xml(options = {}) |
||||
require "active_support/builder" unless defined?(Builder::XmlMarkup) |
||||
|
||||
options = options.dup |
||||
options[:indent] ||= 2 |
||||
options[:builder] ||= Builder::XmlMarkup.new(indent: options[:indent]) |
||||
options[:root] ||= \ |
||||
if first.class != Hash && all? { |e| e.is_a?(first.class) } |
||||
underscored = ActiveSupport::Inflector.underscore(first.class.name) |
||||
ActiveSupport::Inflector.pluralize(underscored).tr("/", "_") |
||||
else |
||||
"objects" |
||||
end |
||||
|
||||
builder = options[:builder] |
||||
builder.instruct! unless options.delete(:skip_instruct) |
||||
|
||||
root = ActiveSupport::XmlMini.rename_key(options[:root].to_s, options) |
||||
children = options.delete(:children) || root.singularize |
||||
attributes = options[:skip_types] ? {} : { type: "array" } |
||||
|
||||
if empty? |
||||
builder.tag!(root, attributes) |
||||
else |
||||
builder.tag!(root, attributes) do |
||||
each { |value| ActiveSupport::XmlMini.to_tag(children, value, options) } |
||||
yield builder if block_given? |
||||
end |
||||
end |
||||
end |
||||
end |
@@ -1,21 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
class Array |
||||
# Removes and returns the elements for which the block returns a true value. |
||||
# If no block is given, an Enumerator is returned instead. |
||||
# |
||||
# numbers = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] |
||||
# odd_numbers = numbers.extract! { |number| number.odd? } # => [1, 3, 5, 7, 9] |
||||
# numbers # => [0, 2, 4, 6, 8] |
||||
def extract! |
||||
return to_enum(:extract!) { size } unless block_given? |
||||
|
||||
extracted_elements = [] |
||||
|
||||
reject! do |element| |
||||
extracted_elements << element if yield(element) |
||||
end |
||||
|
||||
extracted_elements |
||||
end |
||||
end |
@@ -1,31 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
class Hash |
||||
# By default, only instances of Hash itself are extractable. |
||||
# Subclasses of Hash may implement this method and return |
||||
# true to declare themselves as extractable. If a Hash |
||||
# is extractable, Array#extract_options! pops it from |
||||
# the Array when it is the last element of the Array. |
||||
def extractable_options? |
||||
instance_of?(Hash) |
||||
end |
||||
end |
||||
|
||||
class Array |
||||
# Extracts options from a set of arguments. Removes and returns the last |
||||
# element in the array if it's a hash, otherwise returns a blank hash. |
||||
# |
||||
# def options(*args) |
||||
# args.extract_options! |
||||
# end |
||||
# |
||||
# options(1, 2) # => {} |
||||
# options(1, 2, a: :b) # => {:a=>:b} |
||||
def extract_options! |
||||
if last.is_a?(Hash) && last.extractable_options? |
||||
pop |
||||
else |
||||
{} |
||||
end |
||||
end |
||||
end |
@@ -1,109 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
class Array |
||||
# Splits or iterates over the array in groups of size +number+, |
||||
# padding any remaining slots with +fill_with+ unless it is +false+. |
||||
# |
||||
# %w(1 2 3 4 5 6 7 8 9 10).in_groups_of(3) {|group| p group} |
||||
# ["1", "2", "3"] |
||||
# ["4", "5", "6"] |
||||
# ["7", "8", "9"] |
||||
# ["10", nil, nil] |
||||
# |
||||
# %w(1 2 3 4 5).in_groups_of(2, ' ') {|group| p group} |
||||
# ["1", "2"] |
||||
# ["3", "4"] |
||||
# ["5", " "] |
||||
# |
||||
# %w(1 2 3 4 5).in_groups_of(2, false) {|group| p group} |
||||
# ["1", "2"] |
||||
# ["3", "4"] |
||||
# ["5"] |
||||
def in_groups_of(number, fill_with = nil) |
||||
if number.to_i <= 0 |
||||
raise ArgumentError, |
||||
"Group size must be a positive integer, was #{number.inspect}" |
||||
end |
||||
|
||||
if fill_with == false |
||||
collection = self |
||||
else |
||||
# size % number gives how many extra we have; |
||||
# subtracting from number gives how many to add; |
||||
# modulo number ensures we don't add group of just fill. |
||||
padding = (number - size % number) % number |
||||
collection = dup.concat(Array.new(padding, fill_with)) |
||||
end |
||||
|
||||
if block_given? |
||||
collection.each_slice(number) { |slice| yield(slice) } |
||||
else |
||||
collection.each_slice(number).to_a |
||||
end |
||||
end |
||||
|
||||
# Splits or iterates over the array in +number+ of groups, padding any |
||||
# remaining slots with +fill_with+ unless it is +false+. |
||||
# |
||||
# %w(1 2 3 4 5 6 7 8 9 10).in_groups(3) {|group| p group} |
||||
# ["1", "2", "3", "4"] |
||||
# ["5", "6", "7", nil] |
||||
# ["8", "9", "10", nil] |
||||
# |
||||
# %w(1 2 3 4 5 6 7 8 9 10).in_groups(3, ' ') {|group| p group} |
||||
# ["1", "2", "3", "4"] |
||||
# ["5", "6", "7", " "] |
||||
# ["8", "9", "10", " "] |
||||
# |
||||
# %w(1 2 3 4 5 6 7).in_groups(3, false) {|group| p group} |
||||
# ["1", "2", "3"] |
||||
# ["4", "5"] |
||||
# ["6", "7"] |
||||
def in_groups(number, fill_with = nil) |
||||
# size.div number gives minor group size; |
||||
# size % number gives how many objects need extra accommodation; |
||||
# each group hold either division or division + 1 items. |
||||
division = size.div number |
||||
modulo = size % number |
||||
|
||||
# create a new array avoiding dup |
||||
groups = [] |
||||
start = 0 |
||||
|
||||
number.times do |index| |
||||
length = division + (modulo > 0 && modulo > index ? 1 : 0) |
||||
groups << last_group = slice(start, length) |
||||
last_group << fill_with if fill_with != false && |
||||
modulo > 0 && length == division |
||||
start += length |
||||
end |
||||
|
||||
if block_given? |
||||
groups.each { |g| yield(g) } |
||||
else |
||||
groups |
||||
end |
||||
end |
||||
|
||||
# Divides the array into one or more subarrays based on a delimiting +value+ |
||||
# or the result of an optional block. |
||||
# |
||||
# [1, 2, 3, 4, 5].split(3) # => [[1, 2], [4, 5]] |
||||
# (1..10).to_a.split { |i| i % 3 == 0 } # => [[1, 2], [4, 5], [7, 8], [10]] |
||||
def split(value = nil) |
||||
arr = dup |
||||
result = [] |
||||
if block_given? |
||||
while (idx = arr.index { |i| yield i }) |
||||
result << arr.shift(idx) |
||||
arr.shift |
||||
end |
||||
else |
||||
while (idx = arr.index(value)) |
||||
result << arr.shift(idx) |
||||
arr.shift |
||||
end |
||||
end |
||||
result << arr |
||||
end |
||||
end |
@@ -1,19 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
require "active_support/array_inquirer" |
||||
|
||||
class Array |
||||
# Wraps the array in an +ArrayInquirer+ object, which gives a friendlier way |
||||
# to check its string-like contents. |
||||
# |
||||
# pets = [:cat, :dog].inquiry |
||||
# |
||||
# pets.cat? # => true |
||||
# pets.ferret? # => false |
||||
# |
||||
# pets.any?(:cat, :ferret) # => true |
||||
# pets.any?(:ferret, :alligator) # => false |
||||
def inquiry |
||||
ActiveSupport::ArrayInquirer.new(self) |
||||
end |
||||
end |
@@ -1,48 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
class Array |
||||
# Wraps its argument in an array unless it is already an array (or array-like). |
||||
# |
||||
# Specifically: |
||||
# |
||||
# * If the argument is +nil+ an empty array is returned. |
||||
# * Otherwise, if the argument responds to +to_ary+ it is invoked, and its result returned. |
||||
# * Otherwise, returns an array with the argument as its single element. |
||||
# |
||||
# Array.wrap(nil) # => [] |
||||
# Array.wrap([1, 2, 3]) # => [1, 2, 3] |
||||
# Array.wrap(0) # => [0] |
||||
# |
||||
# This method is similar in purpose to <tt>Kernel#Array</tt>, but there are some differences: |
||||
# |
||||
# * If the argument responds to +to_ary+ the method is invoked. <tt>Kernel#Array</tt> |
||||
# moves on to try +to_a+ if the returned value is +nil+, but <tt>Array.wrap</tt> returns |
||||
# an array with the argument as its single element right away. |
||||
# * If the returned value from +to_ary+ is neither +nil+ nor an +Array+ object, <tt>Kernel#Array</tt> |
||||
# raises an exception, while <tt>Array.wrap</tt> does not; it just returns the value. |
||||
# * It does not call +to_a+ on the argument; if the argument does not respond to +to_ary+ |
||||
# it returns an array with the argument as its single element. |
||||
# |
||||
# The last point is easily explained with some enumerables: |
||||
# |
||||
# Array(foo: :bar) # => [[:foo, :bar]] |
||||
# Array.wrap(foo: :bar) # => [{:foo=>:bar}] |
||||
# |
||||
# There's also a related idiom that uses the splat operator: |
||||
# |
||||
# [*object] |
||||
# |
||||
# which returns <tt>[]</tt> for +nil+, but calls to <tt>Array(object)</tt> otherwise. |
||||
# |
||||
# The differences with <tt>Kernel#Array</tt> explained above |
||||
# apply to the rest of <tt>object</tt>s. |
||||
def self.wrap(object) |
||||
if object.nil? |
||||
[] |
||||
elsif object.respond_to?(:to_ary) |
||||
object.to_ary || [object] |
||||
else |
||||
[object] |
||||
end |
||||
end |
||||
end |
@@ -1,16 +0,0 @@
|
||||
# frozen_string_literal: true |
||||
|
||||
require "benchmark" |
||||
|
||||
class << Benchmark |
||||
# Benchmark realtime in milliseconds. |
||||
# |
||||
# Benchmark.realtime { User.all } |
||||
# # => 8.0e-05 |
||||
# |
||||
# Benchmark.ms { User.all } |
||||
# # => 0.074 |
||||
def ms(&block) |
||||
1000 * realtime(&block) |
||||
end |
||||
end |